From a1aa989ae4ec40d69711acfde613ee0be43c05ae Mon Sep 17 00:00:00 2001 From: Will Daly Date: Tue, 18 Oct 2022 19:20:53 -0700 Subject: [PATCH 01/85] Add --enable-cilium-dataplane to AKS cluster create (#5465) --- src/aks-preview/HISTORY.rst | 5 + src/aks-preview/azext_aks_preview/_consts.py | 3 + src/aks-preview/azext_aks_preview/_help.py | 6 + src/aks-preview/azext_aks_preview/_params.py | 1 + src/aks-preview/azext_aks_preview/custom.py | 1 + .../managed_cluster_decorator.py | 11 + ...s_create_with_enable_cilium_dataplane.yaml | 892 ++++++++++++++++++ .../tests/latest/test_aks_commands.py | 32 + .../latest/test_managed_cluster_decorator.py | 36 + src/aks-preview/linter_exclusions.yml | 3 + src/aks-preview/setup.py | 2 +- 11 files changed, 991 insertions(+), 1 deletion(-) create mode 100644 src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_create_with_enable_cilium_dataplane.yaml diff --git a/src/aks-preview/HISTORY.rst b/src/aks-preview/HISTORY.rst index b24b9ea575d..1a56c1282c5 100644 --- a/src/aks-preview/HISTORY.rst +++ b/src/aks-preview/HISTORY.rst @@ -12,6 +12,11 @@ To release a new version, please select a new version number (usually plus 1 to Pending +++++++ +0.5.109 ++++++++ + +* Add --enable-cilium-dataplane flag for creating a cluster that uses Cilium as the networking dataplane. + 0.5.108 +++++++ diff --git a/src/aks-preview/azext_aks_preview/_consts.py b/src/aks-preview/azext_aks_preview/_consts.py index 11dc0d40b04..58b55b38231 100644 --- a/src/aks-preview/azext_aks_preview/_consts.py +++ b/src/aks-preview/azext_aks_preview/_consts.py @@ -91,6 +91,9 @@ # network plugin mode CONST_NETWORK_PLUGIN_MODE_OVERLAY = "overlay" +# networkprofile eBPF dataplane +CONST_EBPF_DATAPLANE_CILIUM = "cilium" + # disk driver versions CONST_DISK_DRIVER_V1 = "v1" CONST_DISK_DRIVER_V2 = "v2" diff --git a/src/aks-preview/azext_aks_preview/_help.py b/src/aks-preview/azext_aks_preview/_help.py index 779cc61f012..d3bf7cd96fa 100644 --- a/src/aks-preview/azext_aks_preview/_help.py +++ b/src/aks-preview/azext_aks_preview/_help.py @@ -215,6 +215,12 @@ Using together with "azure" network plugin. Specify "azure" for Azure network policy manager and "calico" for calico network policy controller. Defaults to "" (network policy disabled). + - name: --enable-cilium-dataplane + type: bool + short-summary: Use Cilium as the networking dataplane for the Kubernetes cluster. + long-summary: | + Used together with the "azure" network plugin. + Requires either --pod-subnet-id or --network-plugin-mode=overlay. - name: --no-ssh-key -x type: string short-summary: Do not use or create a local SSH key. 
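For reference, the new flag is exercised end-to-end by the CLI test recorded later in this patch; a minimal create command along those lines (the resource group, cluster name, and location below are placeholders, all other flags are taken from the test) looks like:

    az aks create --resource-group <resource-group> --name <cluster-name> --location <location> \
        --network-plugin azure --network-plugin-mode overlay \
        --pod-cidr 10.244.0.0/16 --node-count 1 \
        --enable-cilium-dataplane \
        --aks-custom-headers AKSHTTPCustomFeatures=Microsoft.ContainerService/CiliumDataplanePreview

The --aks-custom-headers value matches the AKSHTTPCustomFeatures header sent in the recorded PUT request, which gates the Cilium dataplane preview on the service side.
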
diff --git a/src/aks-preview/azext_aks_preview/_params.py b/src/aks-preview/azext_aks_preview/_params.py index 807f624fcf9..28cd6af313a 100644 --- a/src/aks-preview/azext_aks_preview/_params.py +++ b/src/aks-preview/azext_aks_preview/_params.py @@ -333,6 +333,7 @@ def load_arguments(self, _): c.argument('dns_zone_resource_id') c.argument('enable_keda', action='store_true', is_preview=True) c.argument('enable_node_restriction', action='store_true', is_preview=True, help="enable node restriction for cluster") + c.argument('enable_cilium_dataplane', action='store_true', is_preview=True) # nodepool c.argument('host_group_id', validator=validate_host_group_id, is_preview=True) c.argument('crg_id', validator=validate_crg_id, is_preview=True) diff --git a/src/aks-preview/azext_aks_preview/custom.py b/src/aks-preview/azext_aks_preview/custom.py index a11caf82d59..35d6e9b25b1 100644 --- a/src/aks-preview/azext_aks_preview/custom.py +++ b/src/aks-preview/azext_aks_preview/custom.py @@ -673,6 +673,7 @@ def aks_create( enable_keda=False, enable_node_restriction=False, enable_vpa=False, + enable_cilium_dataplane=False, # nodepool host_group_id=None, crg_id=None, diff --git a/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py b/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py index 6d214673201..947b94c287f 100644 --- a/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py +++ b/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py @@ -49,6 +49,7 @@ CONST_LOAD_BALANCER_SKU_BASIC, CONST_PRIVATE_DNS_ZONE_NONE, CONST_PRIVATE_DNS_ZONE_SYSTEM, + CONST_EBPF_DATAPLANE_CILIUM, ) from azext_aks_preview._helpers import ( get_cluster_snapshot_by_snapshot_id, @@ -321,6 +322,13 @@ def get_network_plugin_mode(self) -> Union[str, None]: """ return self.raw_param.get('network_plugin_mode') + def get_enable_cilium_dataplane(self) -> bool: + """Get the value of enable_cilium_dataplane + + :return: bool + """ + return bool(self.raw_param.get('enable_cilium_dataplane')) + def get_load_balancer_managed_outbound_ipv6_count(self) -> Union[int, None]: """Obtain the expected count of IPv6 managed outbound IPs. 
@@ -2160,6 +2168,9 @@ def set_up_network_profile(self, mc: ManagedCluster) -> ManagedCluster: network_profile.network_plugin_mode = self.context.get_network_plugin_mode() + if self.context.get_enable_cilium_dataplane(): + network_profile.ebpf_dataplane = CONST_EBPF_DATAPLANE_CILIUM + return mc def set_up_api_server_access_profile(self, mc: ManagedCluster) -> ManagedCluster: diff --git a/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_create_with_enable_cilium_dataplane.yaml b/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_create_with_enable_cilium_dataplane.yaml new file mode 100644 index 00000000000..efe62a55114 --- /dev/null +++ b/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_create_with_enable_cilium_dataplane.yaml @@ -0,0 +1,892 @@ +interactions: +- request: + body: '{"location": "centraluseuap", "identity": {"type": "SystemAssigned"}, "properties": + {"kubernetesVersion": "", "dnsPrefix": "cliakstest-cliteste542dzhka-8ecadf", + "agentPoolProfiles": [{"count": 1, "vmSize": "Standard_DS2_v2", "osDiskSizeGB": + 0, "workloadRuntime": "OCIContainer", "osType": "Linux", "enableAutoScaling": + false, "type": "VirtualMachineScaleSets", "mode": "System", "orchestratorVersion": + "", "upgradeSettings": {}, "enableNodePublicIP": false, "enableCustomCATrust": + false, "scaleSetPriority": "Regular", "scaleSetEvictionPolicy": "Delete", "spotMaxPrice": + -1.0, "nodeTaints": [], "enableEncryptionAtHost": false, "enableUltraSSD": false, + "enableFIPS": false, "name": "nodepool1"}], "linuxProfile": {"adminUsername": + "azureuser", "ssh": {"publicKeys": [{"keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ== + test@example.com\n"}]}}, "addonProfiles": {}, "enableRBAC": true, "enablePodSecurityPolicy": + false, "networkProfile": {"networkPlugin": "azure", "networkPluginMode": "overlay", + "ebpfDataplane": "cilium", "podCidr": "10.244.0.0/16", "outboundType": "loadBalancer", + "loadBalancerSku": "standard"}, "disableLocalAccounts": false, "storageProfile": + {}}}' + headers: + AKSHTTPCustomFeatures: + - Microsoft.ContainerService/CiliumDataplanePreview + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + Content-Length: + - '1854' + Content-Type: + - application/json + ParameterSetName: + - --resource-group --name --location --network-plugin --network-plugin-mode + --ssh-key-value --pod-cidr --node-count --enable-cilium-dataplane --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.10.6 + (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + method: PUT + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000001?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000001\",\n + \ \"location\": \"centraluseuap\",\n \"name\": \"cliakstest000001\",\n \"type\": + \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \"provisioningState\": + \"Creating\",\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"kubernetesVersion\": + \"1.23.12\",\n \"currentKubernetesVersion\": \"1.23.12\",\n \"dnsPrefix\": + \"cliakstest-cliteste542dzhka-8ecadf\",\n \"fqdn\": \"cliakstest-cliteste542dzhka-8ecadf-57167e23.hcp.centraluseuap.azmk8s.io\",\n + \ \"azurePortalFQDN\": \"cliakstest-cliteste542dzhka-8ecadf-57167e23.portal.hcp.centraluseuap.azmk8s.io\",\n + \ \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \"count\": + 1,\n \"vmSize\": \"Standard_DS2_v2\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": + \"Managed\",\n \"kubeletDiskType\": \"OS\",\n \"workloadRuntime\": + \"OCIContainer\",\n \"maxPods\": 250,\n \"type\": \"VirtualMachineScaleSets\",\n + \ \"enableAutoScaling\": false,\n \"provisioningState\": \"Creating\",\n + \ \"powerState\": {\n \"code\": \"Running\"\n },\n \"orchestratorVersion\": + \"1.23.12\",\n \"currentOrchestratorVersion\": \"1.23.12\",\n \"enableNodePublicIP\": + false,\n \"enableCustomCATrust\": false,\n \"mode\": \"System\",\n + \ \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n + \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\": + \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\": {},\n + \ \"enableFIPS\": false\n }\n ],\n \"linuxProfile\": {\n \"adminUsername\": + \"azureuser\",\n \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ== + test@example.com\\n\"\n }\n ]\n }\n },\n \"windowsProfile\": + {\n \"adminUsername\": \"azureuser\",\n \"enableCSIProxy\": true\n },\n + \ \"servicePrincipalProfile\": {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n + \ },\n \"nodeResourceGroup\": \"MC_clitest000001_cliakstest000001_centraluseuap\",\n + \ \"enableRBAC\": true,\n \"enablePodSecurityPolicy\": false,\n \"networkProfile\": + {\n \"networkPlugin\": \"azure\",\n \"networkPluginMode\": \"overlay\",\n + \ \"loadBalancerSku\": \"standard\",\n \"loadBalancerProfile\": {\n \"managedOutboundIPs\": + {\n \"count\": 1\n },\n \"backendPoolType\": \"nodeIPConfiguration\"\n + \ },\n \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\",\n + \ \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\",\n + \ \"outboundType\": 
\"loadBalancer\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\n + \ ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\": + [\n \"IPv4\"\n ],\n \"ebpfDataplane\": \"cilium\"\n },\n \"maxAgentPools\": + 100,\n \"disableLocalAccounts\": false,\n \"securityProfile\": {},\n \"storageProfile\": + {\n \"diskCSIDriver\": {\n \"enabled\": true,\n \"version\": \"v1\"\n + \ },\n \"fileCSIDriver\": {\n \"enabled\": true\n },\n \"snapshotController\": + {\n \"enabled\": true\n }\n },\n \"oidcIssuerProfile\": {\n \"enabled\": + false\n },\n \"workloadAutoScalerProfile\": {}\n },\n \"identity\": + {\n \"type\": \"SystemAssigned\",\n \"principalId\":\"00000000-0000-0000-0000-000000000001\",\n + \ \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\": + {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" + headers: + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/centraluseuap/operations/2b4c07d1-f579-4182-ad2e-c164bc0f59a2?api-version=2016-03-30 + cache-control: + - no-cache + content-length: + - '3961' + content-type: + - application/json + date: + - Tue, 18 Oct 2022 20:42:58 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --network-plugin --network-plugin-mode + --ssh-key-value --pod-cidr --node-count --enable-cilium-dataplane --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.10.6 + (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/centraluseuap/operations/2b4c07d1-f579-4182-ad2e-c164bc0f59a2?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"d1074c2b-79f5-8241-ad2e-c164bc0f59a2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T20:42:58.4899792Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Tue, 18 Oct 2022 20:43:28 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --network-plugin --network-plugin-mode + --ssh-key-value --pod-cidr --node-count --enable-cilium-dataplane --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.10.6 + (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/centraluseuap/operations/2b4c07d1-f579-4182-ad2e-c164bc0f59a2?api-version=2016-03-30 + 
response: + body: + string: "{\n \"name\": \"d1074c2b-79f5-8241-ad2e-c164bc0f59a2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T20:42:58.4899792Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Tue, 18 Oct 2022 20:43:58 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --network-plugin --network-plugin-mode + --ssh-key-value --pod-cidr --node-count --enable-cilium-dataplane --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.10.6 + (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/centraluseuap/operations/2b4c07d1-f579-4182-ad2e-c164bc0f59a2?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"d1074c2b-79f5-8241-ad2e-c164bc0f59a2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T20:42:58.4899792Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Tue, 18 Oct 2022 20:44:29 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --network-plugin --network-plugin-mode + --ssh-key-value --pod-cidr --node-count --enable-cilium-dataplane --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.10.6 + (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/centraluseuap/operations/2b4c07d1-f579-4182-ad2e-c164bc0f59a2?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"d1074c2b-79f5-8241-ad2e-c164bc0f59a2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T20:42:58.4899792Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Tue, 18 Oct 2022 20:44:59 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --network-plugin --network-plugin-mode + --ssh-key-value --pod-cidr --node-count --enable-cilium-dataplane 
--aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.10.6 + (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/centraluseuap/operations/2b4c07d1-f579-4182-ad2e-c164bc0f59a2?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"d1074c2b-79f5-8241-ad2e-c164bc0f59a2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T20:42:58.4899792Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Tue, 18 Oct 2022 20:45:29 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --network-plugin --network-plugin-mode + --ssh-key-value --pod-cidr --node-count --enable-cilium-dataplane --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.10.6 + (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/centraluseuap/operations/2b4c07d1-f579-4182-ad2e-c164bc0f59a2?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"d1074c2b-79f5-8241-ad2e-c164bc0f59a2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T20:42:58.4899792Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Tue, 18 Oct 2022 20:45:58 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --network-plugin --network-plugin-mode + --ssh-key-value --pod-cidr --node-count --enable-cilium-dataplane --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.10.6 + (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/centraluseuap/operations/2b4c07d1-f579-4182-ad2e-c164bc0f59a2?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"d1074c2b-79f5-8241-ad2e-c164bc0f59a2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T20:42:58.4899792Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Tue, 18 Oct 2022 20:46:29 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked 
+ vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --network-plugin --network-plugin-mode + --ssh-key-value --pod-cidr --node-count --enable-cilium-dataplane --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.10.6 + (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/centraluseuap/operations/2b4c07d1-f579-4182-ad2e-c164bc0f59a2?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"d1074c2b-79f5-8241-ad2e-c164bc0f59a2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T20:42:58.4899792Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Tue, 18 Oct 2022 20:46:59 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --network-plugin --network-plugin-mode + --ssh-key-value --pod-cidr --node-count --enable-cilium-dataplane --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.10.6 + (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/centraluseuap/operations/2b4c07d1-f579-4182-ad2e-c164bc0f59a2?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"d1074c2b-79f5-8241-ad2e-c164bc0f59a2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T20:42:58.4899792Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Tue, 18 Oct 2022 20:47:29 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --network-plugin --network-plugin-mode + --ssh-key-value --pod-cidr --node-count --enable-cilium-dataplane --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.10.6 + (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/centraluseuap/operations/2b4c07d1-f579-4182-ad2e-c164bc0f59a2?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": 
\"d1074c2b-79f5-8241-ad2e-c164bc0f59a2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T20:42:58.4899792Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Tue, 18 Oct 2022 20:47:59 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --network-plugin --network-plugin-mode + --ssh-key-value --pod-cidr --node-count --enable-cilium-dataplane --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.10.6 + (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/centraluseuap/operations/2b4c07d1-f579-4182-ad2e-c164bc0f59a2?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"d1074c2b-79f5-8241-ad2e-c164bc0f59a2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T20:42:58.4899792Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Tue, 18 Oct 2022 20:48:30 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --network-plugin --network-plugin-mode + --ssh-key-value --pod-cidr --node-count --enable-cilium-dataplane --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.10.6 + (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/centraluseuap/operations/2b4c07d1-f579-4182-ad2e-c164bc0f59a2?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"d1074c2b-79f5-8241-ad2e-c164bc0f59a2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T20:42:58.4899792Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Tue, 18 Oct 2022 20:49:00 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --network-plugin --network-plugin-mode + --ssh-key-value --pod-cidr --node-count --enable-cilium-dataplane --aks-custom-headers + User-Agent: + - 
AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.10.6 + (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/centraluseuap/operations/2b4c07d1-f579-4182-ad2e-c164bc0f59a2?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"d1074c2b-79f5-8241-ad2e-c164bc0f59a2\",\n \"status\": + \"Succeeded\",\n \"startTime\": \"2022-10-18T20:42:58.4899792Z\",\n \"endTime\": + \"2022-10-18T20:49:03.6167394Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '170' + content-type: + - application/json + date: + - Tue, 18 Oct 2022 20:49:30 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --network-plugin --network-plugin-mode + --ssh-key-value --pod-cidr --node-count --enable-cilium-dataplane --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.10.6 + (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000001?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000001\",\n + \ \"location\": \"centraluseuap\",\n \"name\": \"cliakstest000001\",\n \"type\": + \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \"provisioningState\": + \"Succeeded\",\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"kubernetesVersion\": + \"1.23.12\",\n \"currentKubernetesVersion\": \"1.23.12\",\n \"dnsPrefix\": + \"cliakstest-cliteste542dzhka-8ecadf\",\n \"fqdn\": \"cliakstest-cliteste542dzhka-8ecadf-57167e23.hcp.centraluseuap.azmk8s.io\",\n + \ \"azurePortalFQDN\": \"cliakstest-cliteste542dzhka-8ecadf-57167e23.portal.hcp.centraluseuap.azmk8s.io\",\n + \ \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \"count\": + 1,\n \"vmSize\": \"Standard_DS2_v2\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": + \"Managed\",\n \"kubeletDiskType\": \"OS\",\n \"workloadRuntime\": + \"OCIContainer\",\n \"maxPods\": 250,\n \"type\": \"VirtualMachineScaleSets\",\n + \ \"enableAutoScaling\": false,\n \"provisioningState\": \"Succeeded\",\n + \ \"powerState\": {\n \"code\": \"Running\"\n },\n \"orchestratorVersion\": + \"1.23.12\",\n \"currentOrchestratorVersion\": \"1.23.12\",\n \"enableNodePublicIP\": + false,\n \"enableCustomCATrust\": false,\n \"mode\": \"System\",\n + \ \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n + \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\": + \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\": {},\n + \ \"enableFIPS\": false\n }\n ],\n \"linuxProfile\": {\n \"adminUsername\": + \"azureuser\",\n \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": + \"ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ== + test@example.com\\n\"\n }\n ]\n }\n },\n \"windowsProfile\": + {\n \"adminUsername\": \"azureuser\",\n \"enableCSIProxy\": true\n },\n + \ \"servicePrincipalProfile\": {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n + \ },\n \"nodeResourceGroup\": \"MC_clitest000001_cliakstest000001_centraluseuap\",\n + \ \"enableRBAC\": true,\n \"enablePodSecurityPolicy\": false,\n \"networkProfile\": + {\n \"networkPlugin\": \"azure\",\n \"networkPluginMode\": \"overlay\",\n + \ \"loadBalancerSku\": \"Standard\",\n \"loadBalancerProfile\": {\n \"managedOutboundIPs\": + {\n \"count\": 1\n },\n \"effectiveOutboundIPs\": [\n {\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000001_centraluseuap/providers/Microsoft.Network/publicIPAddresses/cc9e963f-8c7e-4051-99fd-ecba02b4abff\"\n + \ }\n ],\n \"backendPoolType\": \"nodeIPConfiguration\"\n },\n + \ \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\",\n + \ \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\",\n + \ \"outboundType\": \"loadBalancer\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\n + \ ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\": + [\n \"IPv4\"\n ],\n \"ebpfDataplane\": \"cilium\"\n },\n \"maxAgentPools\": + 100,\n \"identityProfile\": {\n \"kubeletidentity\": {\n \"resourceId\": + \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_clitest000001_cliakstest000001_centraluseuap/providers/Microsoft.ManagedIdentity/userAssignedIdentities/cliakstest000001-agentpool\",\n + \ \"clientId\":\"00000000-0000-0000-0000-000000000001\",\n \"objectId\":\"00000000-0000-0000-0000-000000000001\"\n + \ }\n },\n \"disableLocalAccounts\": false,\n \"securityProfile\": + {},\n \"storageProfile\": {\n \"diskCSIDriver\": {\n \"enabled\": + true,\n \"version\": \"v1\"\n },\n \"fileCSIDriver\": {\n \"enabled\": + true\n },\n \"snapshotController\": {\n \"enabled\": true\n }\n + \ },\n \"oidcIssuerProfile\": {\n \"enabled\": false\n },\n \"workloadAutoScalerProfile\": + {}\n },\n \"identity\": {\n \"type\": \"SystemAssigned\",\n \"principalId\":\"00000000-0000-0000-0000-000000000001\",\n + \ \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\": + {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" + headers: + cache-control: + - no-cache + content-length: + - '4626' + content-type: + - application/json + date: + - Tue, 18 Oct 2022 20:49:30 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, 
deflate + CommandName: + - aks delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -g -n --yes --no-wait + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.10.6 + (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000001?api-version=2022-09-02-preview + response: + body: + string: '' + headers: + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/centraluseuap/operations/c39e9646-c372-40d7-8e7b-8b4ff245ef98?api-version=2016-03-30 + cache-control: + - no-cache + content-length: + - '0' + date: + - Tue, 18 Oct 2022 20:49:32 GMT + expires: + - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/centraluseuap/operationresults/c39e9646-c372-40d7-8e7b-8b4ff245ef98?api-version=2016-03-30 + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + status: + code: 202 + message: Accepted +version: 1 diff --git a/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py b/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py index 2282ca996d4..9706c25b871 100644 --- a/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py +++ b/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py @@ -3691,6 +3691,38 @@ def test_aks_create_with_overlay_network_plugin_mode(self, resource_group, resou self.cmd( 'aks delete -g {resource_group} -n {name} --yes --no-wait', checks=[self.is_empty()]) + @AllowLargeResponse() + @AKSCustomResourceGroupPreparer(random_name_length=17, name_prefix='clitest', location='centraluseuap', preserve_default_location=True) + def test_aks_create_with_enable_cilium_dataplane(self, resource_group, resource_group_location): + # reset the count so in replay mode the random names will start with 0 + self.test_resources_count = 0 + # kwargs for string formatting + aks_name = self.create_random_name('cliakstest', 16) + self.kwargs.update({ + 'resource_group': resource_group, + 'name': aks_name, + 'location': resource_group_location, + 'resource_type': 'Microsoft.ContainerService/ManagedClusters', + 'ssh_key_value': self.generate_ssh_keys(), + }) + + # create + create_cmd = 'aks create --resource-group={resource_group} --name={name} --location={location} ' \ + '--network-plugin azure --network-plugin-mode overlay --ssh-key-value={ssh_key_value} ' \ + '--pod-cidr 10.244.0.0/16 --node-count 1 ' \ + '--enable-cilium-dataplane ' \ + '--aks-custom-headers AKSHTTPCustomFeatures=Microsoft.ContainerService/CiliumDataplanePreview' + self.cmd(create_cmd, checks=[ + self.check('provisioningState', 'Succeeded'), + self.check('networkProfile.podCidr', '10.244.0.0/16'), + self.check('networkProfile.networkPluginMode', 'overlay'), + self.check('networkProfile.ebpfDataplane', 'cilium'), + ]) + + # delete + self.cmd( + 'aks delete -g {resource_group} -n {name} --yes --no-wait', checks=[self.is_empty()]) + @AllowLargeResponse() @AKSCustomResourceGroupPreparer(random_name_length=17, name_prefix='clitest', location='centraluseuap', 
preserve_default_location=True) def test_aks_create_dualstack_with_default_network(self, resource_group, resource_group_location): diff --git a/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py b/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py index 72db92127b9..0289f6bd25b 100644 --- a/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py +++ b/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py @@ -598,6 +598,42 @@ def test_mc_get_network_plugin_mode(self): ) self.assertEqual(ctx_3.get_network_plugin_mode(), "") + def test_mc_get_enable_cilium_dataplane(self): + # Default, not set. + ctx_1 = AKSPreviewManagedClusterContext( + self.cmd, + AKSManagedClusterParamDict({}), + self.models, + decorator_mode=DecoratorMode.CREATE, + ) + self.assertEqual(ctx_1.get_enable_cilium_dataplane(), False) + + # Flag set to True. + ctx_2 = AKSPreviewManagedClusterContext( + self.cmd, + AKSManagedClusterParamDict( + { + "enable_cilium_dataplane": True, + } + ), + self.models, + decorator_mode=DecoratorMode.CREATE, + ) + self.assertEqual(ctx_2.get_enable_cilium_dataplane(), True) + + # Flag set to False. + ctx_3 = AKSPreviewManagedClusterContext( + self.cmd, + AKSManagedClusterParamDict( + { + "enable_cilium_dataplane": False, + } + ), + self.models, + decorator_mode=DecoratorMode.CREATE, + ) + self.assertEqual(ctx_1.get_enable_cilium_dataplane(), False) + def test_get_enable_managed_identity(self): # custom value ctx_1 = AKSPreviewManagedClusterContext( diff --git a/src/aks-preview/linter_exclusions.yml b/src/aks-preview/linter_exclusions.yml index 372a78ea978..23d355bb8e4 100644 --- a/src/aks-preview/linter_exclusions.yml +++ b/src/aks-preview/linter_exclusions.yml @@ -36,6 +36,9 @@ aks create: enable_vpa: rule_exclusions: - option_length_too_long + enable_cilium_dataplane: + rule_exclusions: + - option_length_too_long aks delete: parameters: ignore_pod_disruption_budget: diff --git a/src/aks-preview/setup.py b/src/aks-preview/setup.py index 62861891c98..632cd760c82 100644 --- a/src/aks-preview/setup.py +++ b/src/aks-preview/setup.py @@ -9,7 +9,7 @@ from setuptools import setup, find_packages -VERSION = "0.5.108" +VERSION = "0.5.109" CLASSIFIERS = [ "Development Status :: 4 - Beta", From 90b9a74b8e763bf8cbbf4bad1d2ba819d57caf2e Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Wed, 19 Oct 2022 02:26:32 +0000 Subject: [PATCH 02/85] [Release] Update index.json for extension [ aks-preview ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=9668&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/a1aa989ae4ec40d69711acfde613ee0be43c05ae --- src/index.json | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/src/index.json b/src/index.json index 03cfc1ad8cc..ed73644caf4 100644 --- a/src/index.json +++ b/src/index.json @@ -6825,6 +6825,49 @@ "version": "0.5.108" }, "sha256Digest": "e2d2b9d407cfb3ce44c5565ac9ce20ced6edf25075c31d9f8a2658993b158510" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/aks_preview-0.5.109-py2.py3-none-any.whl", + "filename": "aks_preview-0.5.109-py2.py3-none-any.whl", + "metadata": { + "azext.isPreview": true, + "azext.minCliCoreVersion": "2.38.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System 
Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/aks-preview" + } + } + }, + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "aks-preview", + "summary": "Provides a preview for upcoming AKS features", + "version": "0.5.109" + }, + "sha256Digest": "b17d2536b555d6f6f3381a11003d284aa4cd29906975ab3fe47d40dc39b8aaa0" } ], "alertsmanagement": [ From 7bad51b323d74f1a7074f8d7d77fe2cef23febf8 Mon Sep 17 00:00:00 2001 From: FumingZhang <81607949+FumingZhang@users.noreply.github.com> Date: Wed, 19 Oct 2022 15:13:34 +0800 Subject: [PATCH 03/85] update pipeline settings (#5468) --- src/aks-preview/azcli_aks_live_test/scripts/setup_venv.sh | 2 +- .../azcli_aks_live_test/vsts-azcli-aks-live-test.yaml | 2 +- .../azcli_aks_live_test/vsts-azcli-aks-unit-test.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/aks-preview/azcli_aks_live_test/scripts/setup_venv.sh b/src/aks-preview/azcli_aks_live_test/scripts/setup_venv.sh index eb2210a39e1..d01c4ca5aff 100755 --- a/src/aks-preview/azcli_aks_live_test/scripts/setup_venv.sh +++ b/src/aks-preview/azcli_aks_live_test/scripts/setup_venv.sh @@ -145,7 +145,7 @@ if [[ -n ${setup_option} ]]; then echo "Start to setup az-aks-tool!" 
local_setup=${3:-"n"} if [[ ${local_setup} == "y" ]]; then - wheel_file=${4} + wheel_file=${4:-"/az_aks_tool-latest-py3-none-any.whl"} installAZAKSTOOLFromLocal "${wheel_file}" else installAZAKSTOOL diff --git a/src/aks-preview/azcli_aks_live_test/vsts-azcli-aks-live-test.yaml b/src/aks-preview/azcli_aks_live_test/vsts-azcli-aks-live-test.yaml index 8b693854341..e9fa239e865 100644 --- a/src/aks-preview/azcli_aks_live_test/vsts-azcli-aks-live-test.yaml +++ b/src/aks-preview/azcli_aks_live_test/vsts-azcli-aks-live-test.yaml @@ -54,7 +54,7 @@ jobs: condition: succeeded() displayName: "Start Container" - bash: | - docker exec $(ContainerName) /opt/scripts/setup_venv.sh setup-tool y + docker exec $(ContainerName) /opt/scripts/setup_venv.sh setup-tool y y /az_aks_tool-latest-py3-none-any.whl docker exec $(ContainerName) /opt/scripts/setup_venv.sh setup-az n azure-cli/ azure-cli-extensions/ condition: succeeded() displayName: "Set up Virtual Environment" diff --git a/src/aks-preview/azcli_aks_live_test/vsts-azcli-aks-unit-test.yaml b/src/aks-preview/azcli_aks_live_test/vsts-azcli-aks-unit-test.yaml index 9a4f7577748..d957049239d 100644 --- a/src/aks-preview/azcli_aks_live_test/vsts-azcli-aks-unit-test.yaml +++ b/src/aks-preview/azcli_aks_live_test/vsts-azcli-aks-unit-test.yaml @@ -54,7 +54,7 @@ jobs: condition: succeeded() displayName: "Start Container" - bash: | - docker exec $(ContainerName) /opt/scripts/setup_venv.sh setup-tool y + docker exec $(ContainerName) /opt/scripts/setup_venv.sh setup-tool y y /az_aks_tool-latest-py3-none-any.whl docker exec $(ContainerName) /opt/scripts/setup_venv.sh setup-az n azure-cli/ azure-cli-extensions/ condition: succeeded() displayName: "Set up Virtual Environment" From 6ee229c18f800bb0bcfe7fce3a4dc912ef237db7 Mon Sep 17 00:00:00 2001 From: deeksha345 <34255011+deeksha345@users.noreply.github.com> Date: Wed, 19 Oct 2022 20:36:40 -0700 Subject: [PATCH 04/85] K8s extension/release 1.3.6 (#5424) --- src/k8s-extension/HISTORY.rst | 8 + .../azext_k8s_extension/_client_factory.py | 28 +- .../azext_k8s_extension/consts.py | 2 +- .../azext_k8s_extension/custom.py | 2 + .../partner_extensions/ContainerInsights.py | 6 +- .../DataProtectionKubernetes.py | 192 ++++++++++++ .../recordings/test_k8s_extension_types.yaml | 290 ++++++++++++++++++ .../test_k8s_extension_types_scenario.py | 40 +++ src/k8s-extension/setup.py | 2 +- 9 files changed, 556 insertions(+), 14 deletions(-) create mode 100644 src/k8s-extension/azext_k8s_extension/partner_extensions/DataProtectionKubernetes.py create mode 100644 src/k8s-extension/azext_k8s_extension/tests/latest/recordings/test_k8s_extension_types.yaml create mode 100644 src/k8s-extension/azext_k8s_extension/tests/latest/test_k8s_extension_types_scenario.py diff --git a/src/k8s-extension/HISTORY.rst b/src/k8s-extension/HISTORY.rst index a9a7bd52f7f..ed03f785809 100644 --- a/src/k8s-extension/HISTORY.rst +++ b/src/k8s-extension/HISTORY.rst @@ -2,6 +2,14 @@ Release History =============== + +1.3.6 +++++++++++++++++++ +* Update the api version and add tests for extension type calls +* Fix the TypeError: cf_k8s_extension() takes 1 positional argument but 2 were given while running all az k8s-extension extension-types commands +* microsoft.azuremonitor.containers: Update DCR creation to Clusters resource group instead of workspace +* microsoft.dataprotection.kubernetes: Authoring a new k8s partner extension for the BCDR solution of AKS clusters + 1.3.5 ++++++++++++++++++ * Use the api-version 2022-04-02-preview in the CLI command az 
k8s-extension extension-types list diff --git a/src/k8s-extension/azext_k8s_extension/_client_factory.py b/src/k8s-extension/azext_k8s_extension/_client_factory.py index e507a64b1d8..36e450bf244 100644 --- a/src/k8s-extension/azext_k8s_extension/_client_factory.py +++ b/src/k8s-extension/azext_k8s_extension/_client_factory.py @@ -13,24 +13,24 @@ def cf_k8s_extension(cli_ctx, **kwargs): return get_mgmt_service_client(cli_ctx, SourceControlConfigurationClient, **kwargs) -def cf_k8s_extension_operation(cli_ctx, _): +def cf_k8s_extension_operation(cli_ctx, *_): return cf_k8s_extension(cli_ctx).extensions -def cf_k8s_cluster_extension_types_operation(cli_ctx, _): - return cf_k8s_extension(cli_ctx).cluster_extension_types +def cf_k8s_cluster_extension_types_operation(cli_ctx, *_): + return cf_k8s_extension(cli_ctx, api_version=consts.EXTENSION_TYPE_API_VERSION).cluster_extension_types -def cf_k8s_cluster_extension_type_operation(cli_ctx, _): - return cf_k8s_extension(cli_ctx, consts.EXTENSION_TYPE_API_VERSION).cluster_extension_type +def cf_k8s_cluster_extension_type_operation(cli_ctx, *_): + return cf_k8s_extension(cli_ctx, api_version=consts.EXTENSION_TYPE_API_VERSION).cluster_extension_type -def cf_k8s_location_extension_types_operation(cli_ctx, _): - return cf_k8s_extension(cli_ctx, consts.EXTENSION_TYPE_API_VERSION).location_extension_types +def cf_k8s_location_extension_types_operation(cli_ctx, *_): + return cf_k8s_extension(cli_ctx, api_version=consts.EXTENSION_TYPE_API_VERSION).location_extension_types -def cf_k8s_extension_type_versions_operation(cli_ctx, _): - return cf_k8s_extension(cli_ctx, consts.EXTENSION_TYPE_API_VERSION).extension_type_versions +def cf_k8s_extension_type_versions_operation(cli_ctx, *_): + return cf_k8s_extension(cli_ctx, api_version=consts.EXTENSION_TYPE_API_VERSION).extension_type_versions def cf_resource_groups(cli_ctx, subscription_id=None): @@ -51,3 +51,13 @@ def cf_log_analytics(cli_ctx, subscription_id=None): def _resource_providers_client(cli_ctx): from azure.mgmt.resource import ResourceManagementClient return get_mgmt_service_client(cli_ctx, ResourceManagementClient).providers + + +def cf_storage(cli_ctx, subscription_id=None): + from azure.mgmt.storage import StorageManagementClient + return get_mgmt_service_client(cli_ctx, StorageManagementClient, subscription_id=subscription_id) + + +def cf_managed_clusters(cli_ctx, subscription_id=None): + from azure.mgmt.containerservice import ContainerServiceClient + return get_mgmt_service_client(cli_ctx, ContainerServiceClient, subscription_id=subscription_id).managed_clusters diff --git a/src/k8s-extension/azext_k8s_extension/consts.py b/src/k8s-extension/azext_k8s_extension/consts.py index 2044f06bf4d..c69df7a73f0 100644 --- a/src/k8s-extension/azext_k8s_extension/consts.py +++ b/src/k8s-extension/azext_k8s_extension/consts.py @@ -25,4 +25,4 @@ APPLIANCE_API_VERSION = "2021-10-31-preview" HYBRIDCONTAINERSERVICE_API_VERSION = "2022-05-01-preview" -EXTENSION_TYPE_API_VERSION = "2022-04-02-preview" +EXTENSION_TYPE_API_VERSION = "2022-01-15-preview" diff --git a/src/k8s-extension/azext_k8s_extension/custom.py b/src/k8s-extension/azext_k8s_extension/custom.py index 249bfcfd5b2..e8769dbc0b6 100644 --- a/src/k8s-extension/azext_k8s_extension/custom.py +++ b/src/k8s-extension/azext_k8s_extension/custom.py @@ -27,6 +27,7 @@ from .partner_extensions.AzureDefender import AzureDefender from .partner_extensions.OpenServiceMesh import OpenServiceMesh from .partner_extensions.AzureMLKubernetes import AzureMLKubernetes +from 
.partner_extensions.DataProtectionKubernetes import DataProtectionKubernetes from .partner_extensions.Dapr import Dapr from .partner_extensions.DefaultExtension import ( DefaultExtension, @@ -47,6 +48,7 @@ def ExtensionFactory(extension_name): "microsoft.openservicemesh": OpenServiceMesh, "microsoft.azureml.kubernetes": AzureMLKubernetes, "microsoft.dapr": Dapr, + "microsoft.dataprotection.kubernetes": DataProtectionKubernetes, } # Return the extension if we find it in the map, else return the default diff --git a/src/k8s-extension/azext_k8s_extension/partner_extensions/ContainerInsights.py b/src/k8s-extension/azext_k8s_extension/partner_extensions/ContainerInsights.py index 8f1271b2593..fbbc45ac325 100644 --- a/src/k8s-extension/azext_k8s_extension/partner_extensions/ContainerInsights.py +++ b/src/k8s-extension/azext_k8s_extension/partner_extensions/ContainerInsights.py @@ -587,8 +587,8 @@ def _ensure_container_insights_dcr_for_monitoring(cmd, subscription_id, cluster_ raise ex # extract subscription ID and resource group from workspace_resource_id URL - parsed = parse_resource_id(workspace_resource_id) - workspace_subscription_id, workspace_resource_group = parsed["subscription"], parsed["resource_group"] + parsed = parse_resource_id(workspace_resource_id.lower()) + workspace_subscription_id = parsed["subscription"] workspace_region = '' resources = cf_resources(cmd.cli_ctx, workspace_subscription_id) try: @@ -601,7 +601,7 @@ def _ensure_container_insights_dcr_for_monitoring(cmd, subscription_id, cluster_ raise ex dataCollectionRuleName = f"MSCI-{cluster_name}-{cluster_region}" - dcr_resource_id = f"/subscriptions/{workspace_subscription_id}/resourceGroups/{workspace_resource_group}/providers/Microsoft.Insights/dataCollectionRules/{dataCollectionRuleName}" + dcr_resource_id = f"/subscriptions/{subscription_id}/resourceGroups/{cluster_resource_group_name}/providers/Microsoft.Insights/dataCollectionRules/{dataCollectionRuleName}" # first get the association between region display names and region IDs (because for some reason # the "which RPs are available in which regions" check returns region display names) diff --git a/src/k8s-extension/azext_k8s_extension/partner_extensions/DataProtectionKubernetes.py b/src/k8s-extension/azext_k8s_extension/partner_extensions/DataProtectionKubernetes.py new file mode 100644 index 00000000000..3b5b1fe5534 --- /dev/null +++ b/src/k8s-extension/azext_k8s_extension/partner_extensions/DataProtectionKubernetes.py @@ -0,0 +1,192 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# --------------------------------------------------------------------------------------------
+
+# pylint: disable=unused-argument
+from knack.log import get_logger
+from azure.cli.core.commands.client_factory import get_subscription_id
+from azure.cli.core.azclierror import RequiredArgumentMissingError, InvalidArgumentValueError
+
+from .DefaultExtension import DefaultExtension
+from .._client_factory import cf_storage, cf_managed_clusters
+from ..vendored_sdks.models import (Extension, PatchExtension, Scope, ScopeCluster)
+
+logger = get_logger(__name__)
+
+
+class DataProtectionKubernetes(DefaultExtension):
+    def __init__(self):
+        """Constants for configuration settings
+        - Tenant Id (required)
+        - Backup storage location (required)
+        - Resource Limits (optional)
+        """
+        self.TENANT_ID = "credentials.tenantId"
+        self.BACKUP_STORAGE_ACCOUNT_CONTAINER = "configuration.backupStorageLocation.bucket"
+        self.BACKUP_STORAGE_ACCOUNT_NAME = "configuration.backupStorageLocation.config.storageAccount"
+        self.BACKUP_STORAGE_ACCOUNT_RESOURCE_GROUP = "configuration.backupStorageLocation.config.resourceGroup"
+        self.BACKUP_STORAGE_ACCOUNT_SUBSCRIPTION = "configuration.backupStorageLocation.config.subscriptionId"
+        self.RESOURCE_LIMIT_CPU = "resources.limits.cpu"
+        self.RESOURCE_LIMIT_MEMORY = "resources.limits.memory"
+
+        self.blob_container = "blobContainer"
+        self.storage_account = "storageAccount"
+        self.storage_account_resource_group = "storageAccountResourceGroup"
+        self.storage_account_subscription = "storageAccountSubscriptionId"
+        self.cpu_limit = "cpuLimit"
+        self.memory_limit = "memoryLimit"
+
+        self.configuration_mapping = {
+            self.blob_container.lower(): self.BACKUP_STORAGE_ACCOUNT_CONTAINER,
+            self.storage_account.lower(): self.BACKUP_STORAGE_ACCOUNT_NAME,
+            self.storage_account_resource_group.lower(): self.BACKUP_STORAGE_ACCOUNT_RESOURCE_GROUP,
+            self.storage_account_subscription.lower(): self.BACKUP_STORAGE_ACCOUNT_SUBSCRIPTION,
+            self.cpu_limit.lower(): self.RESOURCE_LIMIT_CPU,
+            self.memory_limit.lower(): self.RESOURCE_LIMIT_MEMORY
+        }
+
+        self.bsl_configuration_settings = [
+            self.blob_container,
+            self.storage_account,
+            self.storage_account_resource_group,
+            self.storage_account_subscription
+        ]
+
+    def Create(
+        self,
+        cmd,
+        client,
+        resource_group_name,
+        cluster_name,
+        name,
+        cluster_type,
+        cluster_rp,
+        extension_type,
+        scope,
+        auto_upgrade_minor_version,
+        release_train,
+        version,
+        target_namespace,
+        release_namespace,
+        configuration_settings,
+        configuration_protected_settings,
+        configuration_settings_file,
+        configuration_protected_settings_file
+    ):
+        # Current scope of DataProtection Kubernetes Backup extension is 'cluster' #TODO: add TSGs when they are in place
+        if scope == 'namespace':
+            raise InvalidArgumentValueError(f"Invalid scope '{scope}'. This extension can only be installed at 'cluster' scope.")
+
+        scope_cluster = ScopeCluster(release_namespace=release_namespace)
+        ext_scope = Scope(cluster=scope_cluster, namespace=None)
+
+        if cluster_type.lower() != 'managedclusters':
+            raise InvalidArgumentValueError(f"Invalid cluster type '{cluster_type}'. This extension can only be installed for managed clusters.")
+
+        if release_namespace is not None:
+            logger.warning(f"Ignoring 'release-namespace': {release_namespace}")
+
+        tenant_id = self.__get_tenant_id(cmd.cli_ctx)
+        if not tenant_id:
+            raise SystemExit(logger.error("Unable to fetch TenantId. Please check your subscription or run 'az login' to login to Azure."))
+
+        self.__validate_and_map_config(configuration_settings)
+        self.__validate_backup_storage_account(cmd.cli_ctx, resource_group_name, cluster_name, configuration_settings)
+
+        configuration_settings[self.TENANT_ID] = tenant_id
+
+        if release_train is None:
+            release_train = 'stable'
+
+        create_identity = True
+        extension = Extension(
+            extension_type=extension_type,
+            auto_upgrade_minor_version=True,
+            release_train=release_train,
+            scope=ext_scope,
+            configuration_settings=configuration_settings
+        )
+        return extension, name, create_identity
+
+    def Update(
+        self,
+        cmd,
+        resource_group_name,
+        cluster_name,
+        auto_upgrade_minor_version,
+        release_train,
+        version,
+        configuration_settings,
+        configuration_protected_settings,
+        original_extension,
+        yes=False,
+    ):
+        if configuration_settings is None:
+            configuration_settings = {}
+
+        if len(configuration_settings) > 0:
+            bsl_specified = self.__is_bsl_specified(configuration_settings)
+            self.__validate_and_map_config(configuration_settings, validate_bsl=bsl_specified)
+            if bsl_specified:
+                self.__validate_backup_storage_account(cmd.cli_ctx, resource_group_name, cluster_name, configuration_settings)
+
+        return PatchExtension(
+            auto_upgrade_minor_version=True,
+            release_train=release_train,
+            configuration_settings=configuration_settings,
+        )
+
+    def __get_tenant_id(self, cli_ctx):
+        from azure.cli.core._profile import Profile
+        if not cli_ctx.data.get('tenant_id'):
+            cli_ctx.data['tenant_id'] = Profile(cli_ctx=cli_ctx).get_subscription()['tenantId']
+        return cli_ctx.data['tenant_id']
+
+    def __validate_and_map_config(self, configuration_settings, validate_bsl=True):
+        """Validate and set configuration settings for Data Protection K8sBackup extension"""
+        input_configuration_settings = dict(configuration_settings.items())
+        input_configuration_keys = [key.lower() for key in configuration_settings]
+
+        if validate_bsl:
+            for key in self.bsl_configuration_settings:
+                if key.lower() not in input_configuration_keys:
+                    raise RequiredArgumentMissingError(f"Missing required configuration setting: {key}")
+
+        for key in input_configuration_settings:
+            _key = key.lower()
+            if _key in self.configuration_mapping:
+                configuration_settings[self.configuration_mapping[_key]] = configuration_settings.pop(key)
+            else:
+                configuration_settings.pop(key)
+                logger.warning(f"Ignoring unrecognized configuration setting: {key}")
+
+    def __validate_backup_storage_account(self, cli_ctx, resource_group_name, cluster_name, configuration_settings):
+        """Validations performed on the backup storage account
+        - Existence of the storage account
+        - Cluster and storage account are in the same location
+        """
+        sa_subscription_id = configuration_settings[self.BACKUP_STORAGE_ACCOUNT_SUBSCRIPTION]
+        storage_account_client = cf_storage(cli_ctx, sa_subscription_id).storage_accounts
+
+        storage_account = storage_account_client.get_properties(
+            configuration_settings[self.BACKUP_STORAGE_ACCOUNT_RESOURCE_GROUP],
+            configuration_settings[self.BACKUP_STORAGE_ACCOUNT_NAME])
+
+        cluster_subscription_id = get_subscription_id(cli_ctx)
+        managed_clusters_client = cf_managed_clusters(cli_ctx, cluster_subscription_id)
+        managed_cluster = managed_clusters_client.get(
+            resource_group_name,
+            cluster_name)
+
+        if managed_cluster.location != storage_account.location:
+            error_message = f"The Kubernetes managed cluster '{cluster_name} ({managed_cluster.location})' and the backup storage account '{configuration_settings[self.BACKUP_STORAGE_ACCOUNT_NAME]} ({storage_account.location})' are not in the same location. Please make sure that the cluster and the storage account are in the same location."
+            raise SystemExit(logger.error(error_message))
+
+    def __is_bsl_specified(self, configuration_settings):
+        """Check if the backup storage account is specified in the input"""
+        input_configuration_keys = [key.lower() for key in configuration_settings]
+        for key in self.bsl_configuration_settings:
+            if key.lower() in input_configuration_keys:
+                return True
+        return False
diff --git a/src/k8s-extension/azext_k8s_extension/tests/latest/recordings/test_k8s_extension_types.yaml b/src/k8s-extension/azext_k8s_extension/tests/latest/recordings/test_k8s_extension_types.yaml
new file mode 100644
index 00000000000..a1d59fe92da
--- /dev/null
+++ b/src/k8s-extension/azext_k8s_extension/tests/latest/recordings/test_k8s_extension_types.yaml
@@ -0,0 +1,290 @@
+interactions:
+- request:
+    body: null
+    headers:
+      Accept:
+      - application/json
+      Accept-Encoding:
+      - gzip, deflate
+      CommandName:
+      - k8s-extension extension-types show
+      Connection:
+      - keep-alive
+      ParameterSetName:
+      - -g -c --cluster-type --extension-type
+      User-Agent:
+      - AZURECLI/2.40.0 (PIP) azsdk-python-azure-mgmt-kubernetesconfiguration/2.0.0
+        Python/3.10.0 (Windows-10-10.0.22621-SP0)
+    method: GET
+    uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/cassandradatacentersoperator?api-version=2022-01-15-preview
+  response:
+    body:
+      string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/cassandradatacentersoperator","name":"cassandradatacentersoperator","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":null},"releaseTrains":["stable"],"clusterTypes":["managedclusters","appliances"]}}'
+    headers:
+      api-supported-versions:
+      - 2021-05-01-preview, 2022-01-15-preview
+      cache-control:
+      - no-cache
+      content-length:
+      - '505'
+      content-type:
+      - application/json; charset=utf-8
+      date:
+      - Mon, 17 Oct 2022 19:14:48 GMT
+      expires:
+      - '-1'
+      pragma:
+      - no-cache
+      strict-transport-security:
+      - max-age=31536000; includeSubDomains
+      transfer-encoding:
+      - chunked
+      vary:
+      - Accept-Encoding
+      x-content-type-options:
+      - nosniff
+    status:
+      code: 200
+      message: OK
+- request:
+    body: null
+    headers:
+      Accept:
+      - application/json
+      Accept-Encoding:
+      - gzip, deflate
+      CommandName:
+      - k8s-extension extension-types list
+      Connection:
+      - keep-alive
+      ParameterSetName:
+      - -g -c --cluster-type
+      User-Agent:
+      - AZURECLI/2.40.0 (PIP) azsdk-python-azure-mgmt-kubernetesconfiguration/2.0.0
+        Python/3.10.0 (Windows-10-10.0.22621-SP0)
+    method: GET
+    uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes?api-version=2022-01-15-preview
+  response:
+    body:
+      string: 
'{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/azuremonitor-containers","name":"azuremonitor-containers","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"kube-system"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.policy","name":"microsoft.policy","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"kube-system"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.openservicemesh","name":"microsoft.openservicemesh","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"arc-osm-system"}},"releaseTrains":[],"clusterTypes":["connectedclusters","provisionedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/cassandradatacentersoperator","name":"cassandradatacentersoperator","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":null},"releaseTrains":[],"clusterTypes":["managedclusters","appliances"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.apimanagement.gateway","name":"microsoft.apimanagement.gateway","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"gateway"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.web.appservice","name":"microsoft.web.appservice","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":true,"defaultReleaseNamespace":"appservice"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/ansibletoweroperator","name":"ansibletoweroperator","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster
","clusterScopeSettings":{"allowMultipleInstances":true,"defaultReleaseNamespace":"awx-operator"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.azuredefender.kubernetes","name":"microsoft.azuredefender.kubernetes","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"mdc"}},"releaseTrains":[],"clusterTypes":["connectedclusters","provisionedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.eventgrid","name":"microsoft.eventgrid","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"eventgrid-system"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.azureml.kubernetes","name":"microsoft.azureml.kubernetes","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azure-ml"}},"releaseTrains":[],"clusterTypes":["managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/dapr","name":"dapr","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"dapr-system"}},"releaseTrains":[],"clusterTypes":["managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.azurenw.mobilenetwork","name":"microsoft.azurenw.mobilenetwork","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azurenw-mn"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.arcdataservices","name":"microsoft.arcdataservices","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"arc"}},"releaseTrains":[],"clusterTypes":["connectedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.Kubernete
sConfiguration/extensionTypes/microsoft.scvmm","name":"microsoft.scvmm","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azure-vmmoperator"}},"releaseTrains":[],"clusterTypes":["ConnectedClusters","Appliances"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.avs","name":"microsoft.avs","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azure-avsoperator"}},"releaseTrains":[],"clusterTypes":["ConnectedClusters","Appliances"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.vmware","name":"microsoft.vmware","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azure-vmwareoperator"}},"releaseTrains":[],"clusterTypes":["ConnectedClusters","Appliances"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.azstackhci.operator","name":"microsoft.azstackhci.operator","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azstackhci-operator"}},"releaseTrains":[],"clusterTypes":["Appliances"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.azurenw.networkfunction","name":"microsoft.azurenw.networkfunction","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azure-networkfunction"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.azuremonitor.containers","name":"microsoft.azuremonitor.containers","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"kube-system"}},"releaseTrains":[],"clusterTypes":["ConnectedCluster","ProvisionedClusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.unitycloud.konductor","name":"microsoft.unitycloud.konductor","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","c
lusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"konductor"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.policyinsights","name":"microsoft.policyinsights","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"kube-system"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.cnab","name":"microsoft.cnab","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"cnab-operator"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.azure.hybridnetwork","name":"microsoft.azure.hybridnetwork","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azurehybridnetwork"}},"releaseTrains":[],"clusterTypes":["appliances","provisionedclusters","connectedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.azurebackup.backupagent","name":"microsoft.azurebackup.backupagent","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azurebackup"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters","Appliances","ProvisionedClusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.aksedgeoperator","name":"microsoft.aksedgeoperator","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"aksedge-operator-system"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.flux","name":"microsoft.flux","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"flux-system"}},"releaseTrains":[],"clusterTypes":["connectedclusters","managedclusters","provisionedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resource
Groups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.azurebackup.dataprotectionplugin","name":"microsoft.azurebackup.dataprotectionplugin","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":null},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters","Appliances","ProvisionedClusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.containerregistry.connectedregistry","name":"microsoft.containerregistry.connectedregistry","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"connected-registry"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.dapr","name":"microsoft.dapr","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"dapr-system"}},"releaseTrains":[],"clusterTypes":["connectedCluster","managedClusters","Appliances","ProvisionedClusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.hybridaksoperator","name":"microsoft.hybridaksoperator","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"hybridaks-operator-system"}},"releaseTrains":[],"clusterTypes":["Appliances"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.azurekeyvaultsecretsprovider","name":"microsoft.azurekeyvaultsecretsprovider","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"kube-system"}},"releaseTrains":[],"clusterTypes":["connectedclusters","provisionedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.testbonsaiextension","name":"microsoft.testbonsaiextension","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":true,"defaultReleaseNamespace":"kube-system"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/
cassandradatacentersoperator1","name":"cassandradatacentersoperator1","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":null},"releaseTrains":[],"clusterTypes":["managedclusters","appliances"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.azurevote.previewstandard","name":"microsoft.azurevote.previewstandard","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"vote"}},"releaseTrains":[],"clusterTypes":["managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.servicelinker.connection","name":"microsoft.servicelinker.connection","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":true,"defaultReleaseNamespace":"default"}},"releaseTrains":[],"clusterTypes":["managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/deislabs.akri","name":"deislabs.akri","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"akri"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.arcextensionusage","name":"microsoft.arcextensionusage","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"arc-osm-system"}},"releaseTrains":[],"clusterTypes":["managedclusters","connectedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.azure.mobilenetwork.packetcoremonitor","name":"microsoft.azure.mobilenetwork.packetcoremonitor","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"packet-core-monitor"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters","Appliances"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.extensionsusage","name":"microsoft.extensionsusage","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defa
ultReleaseNamespace":"azure-extensions-usage-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.connectedopenstack","name":"microsoft.connectedopenstack","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"connectedopenstack"}},"releaseTrains":[],"clusterTypes":["Appliances","connectedCluster"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.userrp","name":"microsoft.networkcloud.userrp","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-rp"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedcluster"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.aziot.edge","name":"microsoft.aziot.edge","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"aziotedge-system"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.appliance.management.operator","name":"microsoft.appliance.management.operator","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"kva-management"}},"releaseTrains":[],"clusterTypes":["Appliances"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.userrp.dev","name":"microsoft.networkcloud.userrp.dev","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-rp"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedcluster"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.clustermanager.dev","name":"microsoft.networkcloud.clustermanager.dev","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-cluster-manager-extension"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id
":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.platformcluster.dev","name":"microsoft.networkcloud.platformcluster.dev","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.managednetworkfabric","name":"microsoft.managednetworkfabric","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"managednetworkfabric"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/ + microsoft.azurebackup.mockplugin","name":" microsoft.azurebackup.mockplugin","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":" + namespace","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":null}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters","Appliances","ProvisionedClusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.azurebackup.mockplugin","name":"microsoft.azurebackup.mockplugin","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":" + namespace","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":null}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters","Appliances","ProvisionedClusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/connectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.userrp.staging","name":"microsoft.networkcloud.userrp.staging","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-rp"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedcluster"]}}],"nextLink":"https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes?api-version=2022-01-15-preview&continuationToken=JTVCJTdCJTIydG9rZW4lMjIlM0ElMjIlMkJSSUQlM0F%2Bc1VvbEFONFRzMHkxRHowQUFBQUFBQSUzRCUzRCUyM1JUJTNBMSUyM1RSQyUzQTUwJTIzSVNWJTNBMiUyM0lFTyUzQTY1NTUxJTIzUUNGJTNBOCUyMiUyQyUyMnJhbmdlJTIyJTNBJTdCJTIybWluJTIyJTNBJTIyJTIyJTJDJTIybWF4JTIyJTNBJTIyMDVDMURGRkZGRkZGRkMlMjIlN0QlN0QlNUQ%3D"}' + headers: + 
api-supported-versions: + - 2021-05-01-preview, 2022-01-15-preview + cache-control: + - no-cache + content-length: + - '28395' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 17 Oct 2022 19:14:50 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - k8s-extension extension-types list + Connection: + - keep-alive + ParameterSetName: + - -g -c --cluster-type + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-azure-mgmt-kubernetesconfiguration/2.0.0 + Python/3.10.0 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes?api-version=2022-01-15-preview&continuationToken=JTVCJTdCJTIydG9rZW4lMjIlM0ElMjIlMkJSSUQlM0F%2Bc1VvbEFONFRzMHkxRHowQUFBQUFBQSUzRCUzRCUyM1JUJTNBMSUyM1RSQyUzQTUwJTIzSVNWJTNBMiUyM0lFTyUzQTY1NTUxJTIzUUNGJTNBOCUyMiUyQyUyMnJhbmdlJTIyJTNBJTdCJTIybWluJTIyJTNBJTIyJTIyJTJDJTIybWF4JTIyJTNBJTIyMDVDMURGRkZGRkZGRkMlMjIlN0QlN0QlNUQ%3D + response: + body: + string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.platformcluster.sandbox","name":"microsoft.networkcloud.platformcluster.sandbox","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.userrp.test","name":"microsoft.networkcloud.userrp.test","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-rp"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedcluster"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.platformcluster.test","name":"microsoft.networkcloud.platformcluster.test","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.clustermanager.test","name":"microsoft.networkcloud.clustermanager.test","type":"
Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-cluster-manager-extension"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.clustermanager.sandbox","name":"microsoft.networkcloud.clustermanager.sandbox","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.clustermanager.staging","name":"microsoft.networkcloud.clustermanager.staging","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-cluster-manager-extension"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.platformcluster.staging","name":"microsoft.networkcloud.platformcluster.staging","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.az.edge.mqtt","name":"microsoft.az.edge.mqtt","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azedge-system"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.azedge.mqtt","name":"microsoft.azedge.mqtt","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azedge-system"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.platformcluster.euap","name":"microsoft.networkcloud.platformcluster.euap","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties
":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.platformcluster.prod","name":"microsoft.networkcloud.platformcluster.prod","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.clustermanager.prod","name":"microsoft.networkcloud.clustermanager.prod","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-cluster-manager-extension"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.clustermanager.euap","name":"microsoft.networkcloud.clustermanager.euap","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-cluster-manager-extension"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.userrp.prod","name":"microsoft.networkcloud.userrp.prod","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-rp"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedcluster"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.userrp.euap","name":"microsoft.networkcloud.userrp.euap","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-rp"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedcluster"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/cassandradatacentersoperatorv2","name":"cassandradatacentersoperatorv2","type":"Microsoft.KubernetesConfiguration/extensionTypes","propertie
s":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":null},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters","Appliances","ProvisionedClusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.app.environment","name":"microsoft.app.environment","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"appservice"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/cassandradatacentersoperatorv3","name":"cassandradatacentersoperatorv3","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":null},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.defender.containers","name":"microsoft.defender.containers","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"mdc"}},"releaseTrains":[],"clusterTypes":["Connectedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.contoso.clusters","name":"microsoft.contoso.clusters","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":null},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.contoso.towers","name":"microsoft.contoso.towers","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":true,"defaultReleaseNamespace":"ansible"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.azurebackup.kubernetes.test","name":"microsoft.azurebackup.kubernetes.test","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azbackup"}},"releaseTrains":[],"clusterTypes":["Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.azstor","name":"microsoft.azstor","type":"Mi
crosoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azstor"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters","Appliances"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.platformruntime.dev","name":"microsoft.networkcloud.platformruntime.dev","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.platformruntime.test","name":"microsoft.networkcloud.platformruntime.test","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.platformruntime.staging","name":"microsoft.networkcloud.platformruntime.staging","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.platformruntime.euap","name":"microsoft.networkcloud.platformruntime.euap","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.platformruntime.prod","name":"microsoft.networkcloud.platformruntime.prod","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkcloud.platformruntime.sandbox","name":
"microsoft.networkcloud.platformruntime.sandbox","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.networkfabricserviceextension","name":"microsoft.networkfabricserviceextension","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"managednetworkfabricservices"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest-rg/providers/Microsoft.Kubernetes/ConnectedClusters/kind-clitest-cluster/providers/Microsoft.KubernetesConfiguration/extensionTypes/microsoft.policyinsightshybridakstest","name":"microsoft.policyinsightshybridakstest","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"kube-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","ProvisionedClusters"]}}],"nextLink":null}' + headers: + api-supported-versions: + - 2021-05-01-preview, 2022-01-15-preview + cache-control: + - no-cache + content-length: + - '17847' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 17 Oct 2022 19:14:52 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - k8s-extension extension-types list-by-location + Connection: + - keep-alive + ParameterSetName: + - --location + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-azure-mgmt-kubernetesconfiguration/2.0.0 + Python/3.10.0 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes?api-version=2022-01-15-preview + response: + body: + string: 
'{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/azuremonitor-containers","name":"azuremonitor-containers","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"kube-system"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.policy","name":"microsoft.policy","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"kube-system"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.openservicemesh","name":"microsoft.openservicemesh","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"arc-osm-system"}},"releaseTrains":[],"clusterTypes":["connectedclusters","provisionedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/cassandradatacentersoperator","name":"cassandradatacentersoperator","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":null},"releaseTrains":[],"clusterTypes":["managedclusters","appliances"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.apimanagement.gateway","name":"microsoft.apimanagement.gateway","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"gateway"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.web.appservice","name":"microsoft.web.appservice","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":true,"defaultReleaseNamespace":"appservice"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/ansibletoweroperator","name":"ansibletoweroperator","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":true,"defaultReleaseNamespace":"awx-operator"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.azuredefender.kubernetes","name":"microsoft.azuredefender.kubernetes","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstan
ces":false,"defaultReleaseNamespace":"mdc"}},"releaseTrains":[],"clusterTypes":["connectedclusters","provisionedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.eventgrid","name":"microsoft.eventgrid","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"eventgrid-system"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.azureml.kubernetes","name":"microsoft.azureml.kubernetes","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azure-ml"}},"releaseTrains":[],"clusterTypes":["managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/dapr","name":"dapr","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"dapr-system"}},"releaseTrains":[],"clusterTypes":["managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.azurenw.mobilenetwork","name":"microsoft.azurenw.mobilenetwork","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azurenw-mn"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.arcdataservices","name":"microsoft.arcdataservices","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"arc"}},"releaseTrains":[],"clusterTypes":["connectedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.scvmm","name":"microsoft.scvmm","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azure-vmmoperator"}},"releaseTrains":[],"clusterTypes":["ConnectedClusters","Appliances"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.avs","name":"microsoft.avs","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azure-avsoperator"}},"releaseTrains":[],"clusterTypes":["ConnectedClusters","Appliances"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.vmware","name":"microsoft.vmware","type":"Microsoft.Kube
rnetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azure-vmwareoperator"}},"releaseTrains":[],"clusterTypes":["ConnectedClusters","Appliances"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.azstackhci.operator","name":"microsoft.azstackhci.operator","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azstackhci-operator"}},"releaseTrains":[],"clusterTypes":["Appliances"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.azurenw.networkfunction","name":"microsoft.azurenw.networkfunction","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azure-networkfunction"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.azuremonitor.containers","name":"microsoft.azuremonitor.containers","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"kube-system"}},"releaseTrains":[],"clusterTypes":["ConnectedCluster","ProvisionedClusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.unitycloud.konductor","name":"microsoft.unitycloud.konductor","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"konductor"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.policyinsights","name":"microsoft.policyinsights","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"kube-system"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.cnab","name":"microsoft.cnab","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"cnab-operator"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.azure.hybridnetwork","name":"microsoft.azure.hybridnetwork","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azurehybridnetwork"}},"releaseTrains":[],"clusterTyp
es":["appliances","provisionedclusters","connectedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.azurebackup.backupagent","name":"microsoft.azurebackup.backupagent","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azurebackup"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters","Appliances","ProvisionedClusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.aksedgeoperator","name":"microsoft.aksedgeoperator","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"aksedge-operator-system"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.flux","name":"microsoft.flux","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"flux-system"}},"releaseTrains":[],"clusterTypes":["connectedclusters","managedclusters","provisionedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.azurebackup.dataprotectionplugin","name":"microsoft.azurebackup.dataprotectionplugin","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":null},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters","Appliances","ProvisionedClusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.containerregistry.connectedregistry","name":"microsoft.containerregistry.connectedregistry","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"connected-registry"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.dapr","name":"microsoft.dapr","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"dapr-system"}},"releaseTrains":[],"clusterTypes":["connectedCluster","managedClusters","Appliances","ProvisionedClusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.hybridaksoperator","name":"microsoft.hybridaksoperator","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"hybridaks-operator-system"}},"releaseTrains":[],"clusterTypes":["Appliances"]}},{"id":"/s
ubscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.azurekeyvaultsecretsprovider","name":"microsoft.azurekeyvaultsecretsprovider","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"kube-system"}},"releaseTrains":[],"clusterTypes":["connectedclusters","provisionedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.testbonsaiextension","name":"microsoft.testbonsaiextension","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":true,"defaultReleaseNamespace":"kube-system"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/cassandradatacentersoperator1","name":"cassandradatacentersoperator1","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":null},"releaseTrains":[],"clusterTypes":["managedclusters","appliances"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.azurevote.previewstandard","name":"microsoft.azurevote.previewstandard","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"vote"}},"releaseTrains":[],"clusterTypes":["managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.servicelinker.connection","name":"microsoft.servicelinker.connection","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":true,"defaultReleaseNamespace":"default"}},"releaseTrains":[],"clusterTypes":["managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/deislabs.akri","name":"deislabs.akri","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"akri"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.arcextensionusage","name":"microsoft.arcextensionusage","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"arc-osm-system"}},"releaseTrains":[],"clusterTypes":["managedclusters","connectedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.azure.mobilenetwork.packetcoremonitor","name":"microsoft.azure.mobilenetwork.packetcoremonitor","type":"Microsoft.K
ubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"packet-core-monitor"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters","Appliances"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.extensionsusage","name":"microsoft.extensionsusage","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azure-extensions-usage-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.connectedopenstack","name":"microsoft.connectedopenstack","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"connectedopenstack"}},"releaseTrains":[],"clusterTypes":["Appliances","connectedCluster"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.userrp","name":"microsoft.networkcloud.userrp","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-rp"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedcluster"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.aziot.edge","name":"microsoft.aziot.edge","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"aziotedge-system"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.appliance.management.operator","name":"microsoft.appliance.management.operator","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"kva-management"}},"releaseTrains":[],"clusterTypes":["Appliances"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.userrp.dev","name":"microsoft.networkcloud.userrp.dev","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-rp"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedcluster"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.clustermanager.dev","name":"microsoft.networkcloud.clustermanager.dev","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedSco
pes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-cluster-manager-extension"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.platformcluster.dev","name":"microsoft.networkcloud.platformcluster.dev","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.managednetworkfabric","name":"microsoft.managednetworkfabric","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"managednetworkfabric"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/ + microsoft.azurebackup.mockplugin","name":" microsoft.azurebackup.mockplugin","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":" + namespace","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":null}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters","Appliances","ProvisionedClusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.azurebackup.mockplugin","name":"microsoft.azurebackup.mockplugin","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":" + namespace","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":null}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters","Appliances","ProvisionedClusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.userrp.staging","name":"microsoft.networkcloud.userrp.staging","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-rp"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedcluster"]}}],"nextLink":"https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes?api-version=2022-01-15-preview&continuationToken=JTVCJTdCJTIydG9rZW4lMjIlM0ElMjIlMkJSSUQlM0F%2Bc1VvbEFONFRzMHkxRHowQUFBQUFBQSUzRCUzRCUyM1JUJTNBMSUyM1RSQyUzQTUwJTIzSVNWJTNBMiUyM0lFTyUzQTY1NTUxJTIzUUNGJTNBOCUyMiUyQyUyMnJhbmdlJTIyJTNBJTdCJTIybWluJTIyJTNBJTIyJTIyJTJDJTIybWF4JTIyJTNBJTIyMDVDMURGRkZGRkZGRkMlMjIlN0QlN0QlNUQ%3D"}' + headers: + api-supported-versions: + - 2021-05-01-preview, 2022-01-15-preview + cache-control: + - no-cache + content-length: + - '24621' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 17 Oct 2022 19:14:53 GMT + expires: + - '-1' 
+ pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - k8s-extension extension-types list-by-location + Connection: + - keep-alive + ParameterSetName: + - --location + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-azure-mgmt-kubernetesconfiguration/2.0.0 + Python/3.10.0 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes?api-version=2022-01-15-preview&continuationToken=JTVCJTdCJTIydG9rZW4lMjIlM0ElMjIlMkJSSUQlM0F%2Bc1VvbEFONFRzMHkxRHowQUFBQUFBQSUzRCUzRCUyM1JUJTNBMSUyM1RSQyUzQTUwJTIzSVNWJTNBMiUyM0lFTyUzQTY1NTUxJTIzUUNGJTNBOCUyMiUyQyUyMnJhbmdlJTIyJTNBJTdCJTIybWluJTIyJTNBJTIyJTIyJTJDJTIybWF4JTIyJTNBJTIyMDVDMURGRkZGRkZGRkMlMjIlN0QlN0QlNUQ%3D + response: + body: + string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.platformcluster.sandbox","name":"microsoft.networkcloud.platformcluster.sandbox","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.userrp.test","name":"microsoft.networkcloud.userrp.test","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-rp"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedcluster"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.platformcluster.test","name":"microsoft.networkcloud.platformcluster.test","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.clustermanager.test","name":"microsoft.networkcloud.clustermanager.test","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-cluster-manager-extension"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.clustermanager.sandbox","name":"microsoft.networkcloud.clustermanager.sandbox","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties
":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.clustermanager.staging","name":"microsoft.networkcloud.clustermanager.staging","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-cluster-manager-extension"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.platformcluster.staging","name":"microsoft.networkcloud.platformcluster.staging","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.az.edge.mqtt","name":"microsoft.az.edge.mqtt","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azedge-system"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.azedge.mqtt","name":"microsoft.azedge.mqtt","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azedge-system"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.platformcluster.euap","name":"microsoft.networkcloud.platformcluster.euap","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.platformcluster.prod","name":"microsoft.networkcloud.platformcluster.prod","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.clustermanager.prod","name":"microsoft.networkcloud.clustermanager.prod","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"s
upportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-cluster-manager-extension"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.clustermanager.euap","name":"microsoft.networkcloud.clustermanager.euap","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-cluster-manager-extension"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.userrp.prod","name":"microsoft.networkcloud.userrp.prod","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-rp"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedcluster"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.userrp.euap","name":"microsoft.networkcloud.userrp.euap","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-rp"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedcluster"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/cassandradatacentersoperatorv2","name":"cassandradatacentersoperatorv2","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":null},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters","Appliances","ProvisionedClusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.app.environment","name":"microsoft.app.environment","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"appservice"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/cassandradatacentersoperatorv3","name":"cassandradatacentersoperatorv3","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":null},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.defender.containers","name":"microsoft.defender.containers","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"mdc"}},"releaseTrains":[],"clusterTypes":["Conne
ctedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.contoso.clusters","name":"microsoft.contoso.clusters","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"namespace","clusterScopeSettings":null},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.contoso.towers","name":"microsoft.contoso.towers","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":true,"defaultReleaseNamespace":"ansible"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.azurebackup.kubernetes.test","name":"microsoft.azurebackup.kubernetes.test","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azbackup"}},"releaseTrains":[],"clusterTypes":["Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.azstor","name":"microsoft.azstor","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"azstor"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters","Appliances"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.platformruntime.dev","name":"microsoft.networkcloud.platformruntime.dev","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.platformruntime.test","name":"microsoft.networkcloud.platformruntime.test","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.platformruntime.staging","name":"microsoft.networkcloud.platformruntime.staging","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft
.networkcloud.platformruntime.euap","name":"microsoft.networkcloud.platformruntime.euap","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.platformruntime.prod","name":"microsoft.networkcloud.platformruntime.prod","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkcloud.platformruntime.sandbox","name":"microsoft.networkcloud.platformruntime.sandbox","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"nc-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","Managedclusters"]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.networkfabricserviceextension","name":"microsoft.networkfabricserviceextension","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"managednetworkfabricservices"}},"releaseTrains":[],"clusterTypes":[]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/microsoft.policyinsightshybridakstest","name":"microsoft.policyinsightshybridakstest","type":"Microsoft.KubernetesConfiguration/extensionTypes","properties":{"supportedScopes":{"defaultScope":"cluster","clusterScopeSettings":{"allowMultipleInstances":false,"defaultReleaseNamespace":"kube-system"}},"releaseTrains":[],"clusterTypes":["Connectedclusters","ProvisionedClusters"]}}],"nextLink":null}' + headers: + api-supported-versions: + - 2021-05-01-preview, 2022-01-15-preview + cache-control: + - no-cache + content-length: + - '15553' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 17 Oct 2022 19:14:54 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - k8s-extension extension-types list-versions + Connection: + - keep-alive + ParameterSetName: + - --location --extension-type + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-azure-mgmt-kubernetesconfiguration/2.0.0 + Python/3.10.0 (Windows-10-10.0.22621-SP0) + method: GET + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration/locations/eastus2euap/extensionTypes/cassandradatacentersoperator/versions?api-version=2022-01-15-preview + response: + body: + string: '{"value":[],"nextLink":null}' + headers: + api-supported-versions: + - 2021-05-01-preview, 2022-01-15-preview + cache-control: + - no-cache + content-length: + - '28' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 17 Oct 2022 19:14:56 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +version: 1 diff --git a/src/k8s-extension/azext_k8s_extension/tests/latest/test_k8s_extension_types_scenario.py b/src/k8s-extension/azext_k8s_extension/tests/latest/test_k8s_extension_types_scenario.py new file mode 100644 index 00000000000..390b1aaab4d --- /dev/null +++ b/src/k8s-extension/azext_k8s_extension/tests/latest/test_k8s_extension_types_scenario.py @@ -0,0 +1,40 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +# pylint: disable=line-too-long + +import os +from azure.cli.testsdk import (ScenarioTest, record_only) + + +TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..')) + + +class K8sExtensionTypesScenarioTest(ScenarioTest): + @record_only() + def test_k8s_extension_types(self): + extension_type = 'cassandradatacentersoperator' + self.kwargs.update({ + 'rg': 'clitest-rg', #K8sPartnerExtensionTest', + 'cluster_name': 'kind-clitest-cluster',#'k8s-extension-cluster-32469-arc', + 'cluster_type': 'connectedClusters', + 'extension_type': extension_type, + 'location': 'eastus2euap' + }) + + self.cmd('k8s-extension extension-types show -g {rg} -c {cluster_name} --cluster-type {cluster_type} ' + '--extension-type {extension_type}', checks=[ + self.check('name', '{extension_type}') + ]) + + extensionTypes_list = self.cmd('k8s-extension extension-types list -g {rg} -c {cluster_name} ' + '--cluster-type {cluster_type}').get_output_in_json() + assert len(extensionTypes_list) > 0 + + extensionTypes_locationList = self.cmd('k8s-extension extension-types list-by-location --location ' + '{location}').get_output_in_json() + assert len(extensionTypes_locationList) > 0 + + self.cmd('k8s-extension extension-types list-versions --location {location} --extension-type {extension_type}') diff --git a/src/k8s-extension/setup.py b/src/k8s-extension/setup.py index eb57c01de8a..4975bf63bd3 100644 --- a/src/k8s-extension/setup.py +++ b/src/k8s-extension/setup.py @@ -33,7 +33,7 @@ # TODO: Add any additional SDK dependencies here DEPENDENCIES = [] -VERSION = "1.3.5" +VERSION = "1.3.6" with open("README.rst", "r", encoding="utf-8") as f: README = f.read() From 39426b189a26f5f6df1eee0e6ab480e31df13e0b Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Thu, 20 Oct 2022 03:42:39 +0000 Subject: [PATCH 05/85] [Release] Update index.json for extension [ k8s-extension ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=10042&view=results Last commit: 
https://github.com/Azure/azure-cli-extensions/commit/6ee229c18f800bb0bcfe7fce3a4dc912ef237db7 --- src/index.json | 42 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/src/index.json b/src/index.json index ed73644caf4..bccc9d62707 100644 --- a/src/index.json +++ b/src/index.json @@ -27878,6 +27878,48 @@ "version": "1.3.5" }, "sha256Digest": "14e1f4d18aad44ec13b823431e7b916751345e45e1dd2d88c78f059794c953f9" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/k8s_extension-1.3.6-py3-none-any.whl", + "filename": "k8s_extension-1.3.6-py3-none-any.whl", + "metadata": { + "azext.minCliCoreVersion": "2.24.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/k8s-extension" + } + } + }, + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "k8s-extension", + "summary": "Microsoft Azure Command-Line Tools K8s-extension Extension", + "version": "1.3.6" + }, + "sha256Digest": "0f576d0c27d25d092dfc6cc1aa2c5400e7249d732ae102fdee3feb92a9e80b6c" } ], "k8sconfiguration": [ From 74c32fe50dffc53efe99c1e08862c6398a469327 Mon Sep 17 00:00:00 2001 From: ccc Date: Thu, 20 Oct 2022 14:17:52 +0800 Subject: [PATCH 06/85] [AKS] Add NSG Control flags for `az aks create` / `az aks nodepool add` / `az aks nodepool update` (#5467) * Add NSG Control flags * Fix * Add aks command test * Fix aks command test * Fix ut * Fix UT * Fix live test * Fix typo --- src/aks-preview/HISTORY.rst | 8 + src/aks-preview/azext_aks_preview/_help.py | 18 + src/aks-preview/azext_aks_preview/_params.py | 8 + .../azext_aks_preview/_validators.py | 35 +- .../azext_aks_preview/agentpool_decorator.py | 75 + src/aks-preview/azext_aks_preview/custom.py | 6 + .../managed_cluster_decorator.py | 6 +- .../test_aks_create_with_nsg_control.yaml | 1169 +++++++++++ ..._aks_nodepool_create_with_nsg_control.yaml | 1757 +++++++++++++++++ ..._aks_nodepool_update_with_nsg_control.yaml | 1565 +++++++++++++++ .../tests/latest/test_aks_commands.py | 172 ++ .../tests/latest/test_validators.py | 60 + src/aks-preview/linter_exclusions.yml | 3 + src/aks-preview/setup.py | 2 +- 14 files changed, 4879 insertions(+), 5 deletions(-) create mode 100644 src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_create_with_nsg_control.yaml create mode 100644 src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_nodepool_create_with_nsg_control.yaml create mode 100644 src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_nodepool_update_with_nsg_control.yaml diff --git a/src/aks-preview/HISTORY.rst b/src/aks-preview/HISTORY.rst index 1a56c1282c5..090ed5b1e25 100644 --- a/src/aks-preview/HISTORY.rst +++ b/src/aks-preview/HISTORY.rst @@ -12,6 +12,14 @@ To release a new version, please select a new version number (usually plus 1 to Pending +++++++ +0.5.110 ++++++++ + +* Add 
`--nodepool-asg-ids` and `--nodepool-allowed-host-ports` flags for enabling NSGControl. Related commands: + * `az aks create` + * `az aks nodepool add` + * `az aks nodepool update` + 0.5.109 +++++++ diff --git a/src/aks-preview/azext_aks_preview/_help.py b/src/aks-preview/azext_aks_preview/_help.py index d3bf7cd96fa..c95e7be4b13 100644 --- a/src/aks-preview/azext_aks_preview/_help.py +++ b/src/aks-preview/azext_aks_preview/_help.py @@ -485,6 +485,12 @@ - name: --enable-vpa type: bool short-summary: Enable vertical pod autoscaler for cluster. + - name: --nodepool-allowed-host-ports + type: string + short-summary: Expose host ports on the node pool. When specified, format should be a comma-separated list of ranges with protocol, eg. 80/TCP,443/TCP,4000-5000/TCP. + - name: --nodepool-asg-ids + type: string + short-summary: The IDs of the application security groups to which the node pool's network interface should belong. When specified, format should be a comma-separated list of IDs. examples: - name: Create a Kubernetes cluster with an existing SSH public key. text: az aks create -g MyResourceGroup -n MyManagedCluster --ssh-key-value /path/to/publickey @@ -1278,6 +1284,12 @@ - name: --disable-windows-outbound-nat type: bool short-summary: Disable Windows OutboundNAT on Windows agent node pool. + - name: --allowed-host-ports + type: string + short-summary: Expose host ports on the node pool. When specified, format should be a comma-separated list of ranges with protocol, eg. 80/TCP,443/TCP,4000-5000/TCP. + - name: --asg-ids + type: string + short-summary: The IDs of the application security groups to which the node pool's network interface should belong. When specified, format should be a comma-separated list of IDs. examples: - name: Create a nodepool in an existing AKS cluster with ephemeral os enabled. text: az aks nodepool add -g MyResourceGroup -n nodepool1 --cluster-name MyManagedCluster --node-osdisk-type Ephemeral --node-osdisk-size 48 @@ -1369,6 +1381,12 @@ - name: --aks-custom-headers type: string short-summary: Send custom headers. When specified, format should be Key1=Value1,Key2=Value2 + - name: --allowed-host-ports + type: string + short-summary: Expose host ports on the node pool. When specified, format should be a comma-separated list of ranges with protocol, eg. 80/TCP,443/TCP,4000-5000/TCP. + - name: --asg-ids + type: string + short-summary: The IDs of the application security groups to which the node pool's network interface should belong. When specified, format should be a comma-separated list of IDs. examples: - name: Reconcile the nodepool back to its current state. text: az aks nodepool update -g MyResourceGroup -n nodepool1 --cluster-name MyManagedCluster diff --git a/src/aks-preview/azext_aks_preview/_params.py b/src/aks-preview/azext_aks_preview/_params.py index 28cd6af313a..6848b044604 100644 --- a/src/aks-preview/azext_aks_preview/_params.py +++ b/src/aks-preview/azext_aks_preview/_params.py @@ -128,6 +128,8 @@ validate_ksm_labels, validate_ksm_annotations, validate_disable_windows_outbound_nat, + validate_allowed_host_ports, + validate_application_security_groups, ) # candidates for enumeration @@ -344,6 +346,8 @@ def load_arguments(self, _): # no validation for aks create because it already only supports Linux. 
c.argument('enable_custom_ca_trust', action='store_true') c.argument('enable_vpa', action='store_true', is_preview=True, help="enable vertical pod autoscaler for cluster") + c.argument('nodepool_allowed_host_ports', validator=validate_allowed_host_ports, is_preview=True, help="allowed host ports for agentpool") + c.argument('nodepool_asg_ids', validator=validate_application_security_groups, is_preview=True, help="application security groups for agentpool") with self.argument_context('aks update') as c: # managed cluster paramerters @@ -507,6 +511,8 @@ def load_arguments(self, _): c.argument('gpu_instance_profile', arg_type=get_enum_type(gpu_instance_profiles)) c.argument('enable_custom_ca_trust', action='store_true', validator=validate_enable_custom_ca_trust) c.argument('disable_windows_outbound_nat', action='store_true', validator=validate_disable_windows_outbound_nat) + c.argument('allowed_host_ports', validator=validate_allowed_host_ports, is_preview=True) + c.argument('asg_ids', validator=validate_application_security_groups, is_preview=True) with self.argument_context('aks nodepool update') as c: c.argument('enable_cluster_autoscaler', options_list=[ @@ -526,6 +532,8 @@ def load_arguments(self, _): # extensions c.argument('enable_custom_ca_trust', action='store_true', validator=validate_enable_custom_ca_trust) c.argument('disable_custom_ca_trust', options_list=['--disable-custom-ca-trust', '--dcat'], action='store_true') + c.argument('allowed_host_ports', validator=validate_allowed_host_ports, is_preview=True) + c.argument('asg_ids', validator=validate_application_security_groups, is_preview=True) with self.argument_context('aks nodepool upgrade') as c: c.argument('max_surge', validator=validate_max_surge) diff --git a/src/aks-preview/azext_aks_preview/_validators.py b/src/aks-preview/azext_aks_preview/_validators.py index 6e69efe0be0..0ee667e8bb9 100644 --- a/src/aks-preview/azext_aks_preview/_validators.py +++ b/src/aks-preview/azext_aks_preview/_validators.py @@ -316,7 +316,8 @@ def validate_load_balancer_idle_timeout(namespace): def validate_load_balancer_backend_pool_type(namespace): """validate load balancer backend pool type""" if namespace.load_balancer_backend_pool_type is not None: - if namespace.load_balancer_backend_pool_type not in [CONST_LOAD_BALANCER_BACKEND_POOL_TYPE_NODE_IP, CONST_LOAD_BALANCER_BACKEND_POOL_TYPE_NODE_IPCONFIGURATION]: + if namespace.load_balancer_backend_pool_type not in [CONST_LOAD_BALANCER_BACKEND_POOL_TYPE_NODE_IP, + CONST_LOAD_BALANCER_BACKEND_POOL_TYPE_NODE_IPCONFIGURATION]: raise InvalidArgumentValueError( f"Invalid Load Balancer Backend Pool Type {namespace.load_balancer_backend_pool_type}, supported values are nodeIP and nodeIPConfiguration") @@ -739,3 +740,35 @@ def validate_ksm_annotations(namespace): if namespace.ksm_metric_annotations_allow_list is None: return validate_ksm_parameter(namespace.ksm_metric_annotations_allow_list) + + +def validate_allowed_host_ports(namespace): + if hasattr(namespace, "nodepool_allowed_host_ports"): + host_ports = namespace.nodepool_allowed_host_ports + else: + host_ports = namespace.allowed_host_ports + if not host_ports: + return + + regex = re.compile(r'^((\d+)|(\d+-\d+))/(tcp|udp)$') + for port_range in host_ports.split(","): + found = regex.findall(port_range) + if found: + continue + raise InvalidArgumentValueError( + "--allowed-host-ports must be a comma-separated list of port ranges in the format of /" + ) + + +def validate_application_security_groups(namespace): + if hasattr((namespace), 
"nodepool_asg_ids"): + asg_ids = namespace.nodepool_asg_ids + else: + asg_ids = namespace.asg_ids + if not asg_ids: + return + + from msrestazure.tools import is_valid_resource_id + for asg in asg_ids.split(","): + if not is_valid_resource_id(asg): + raise InvalidArgumentValueError(asg + " is not a valid Azure resource ID.") diff --git a/src/aks-preview/azext_aks_preview/agentpool_decorator.py b/src/aks-preview/azext_aks_preview/agentpool_decorator.py index 07056d013f0..ff82f67a4c4 100644 --- a/src/aks-preview/azext_aks_preview/agentpool_decorator.py +++ b/src/aks-preview/azext_aks_preview/agentpool_decorator.py @@ -34,6 +34,7 @@ # type variables AgentPool = TypeVar("AgentPool") AgentPoolsOperations = TypeVar("AgentPoolsOperations") +PortRange = TypeVar("PortRange") # pylint: disable=too-few-public-methods @@ -260,6 +261,49 @@ def get_disable_windows_outbound_nat(self) -> bool: """ return self._get_disable_windows_outbound_nat() + def get_asg_ids(self) -> Union[List[str], None]: + if self.agentpool_decorator_mode == AgentPoolDecoratorMode.MANAGED_CLUSTER: + asg_ids = self.raw_param.get('nodepool_asg_ids') + else: + asg_ids = self.raw_param.get('asg_ids') + + if asg_ids is None: + return None + if asg_ids == '': + return [] + + return asg_ids.split(',') + + def get_allowed_host_ports(self) -> Union[List[PortRange], None]: + if self.agentpool_decorator_mode == AgentPoolDecoratorMode.MANAGED_CLUSTER: + ports = self.raw_param.get('nodepool_allowed_host_ports') + else: + ports = self.raw_param.get('allowed_host_ports') + + if ports is None: + return None + if ports == '': + return [] + + ports = ports.split(',') + port_ranges = [] + import re + regex = re.compile(r'^((\d+)|((\d+)-(\d+)))/(tcp|udp)$') + for port in ports: + r = regex.findall(port) + if r[0][1] != '': + # single port + port_start, port_end = int(r[0][1]), int(r[0][1]) + else: + # port range + port_start, port_end = int(r[0][3]), int(r[0][4]) + port_ranges.append(self.models.PortRange( + port_start=port_start, + port_end=port_end, + protocol=r[0][5].upper(), + )) + return port_ranges + class AKSPreviewAgentPoolAddDecorator(AKSAgentPoolAddDecorator): def __init__( @@ -353,6 +397,18 @@ def set_up_agentpool_windows_profile(self, agentpool: AgentPool) -> AgentPool: return agentpool + def set_up_agentpool_network_profile(self, agentpool: AgentPool) -> AgentPool: + self._ensure_agentpool(agentpool) + + asg_ids = self.context.get_asg_ids() + allowed_host_ports = self.context.get_allowed_host_ports() + if asg_ids and allowed_host_ports: + agentpool.network_profile = self.models.AgentPoolNetworkProfile( + application_security_groups=asg_ids, + allowed_host_ports=allowed_host_ports, + ) + return agentpool + def construct_agentpool_profile_preview(self) -> AgentPool: """The overall controller used to construct the preview AgentPool profile. 
@@ -374,6 +430,8 @@ def construct_agentpool_profile_preview(self) -> AgentPool: agentpool = self.set_up_custom_ca_trust(agentpool) # set up agentpool windows profile agentpool = self.set_up_agentpool_windows_profile(agentpool) + # set up agentpool network profile + agentpool = self.set_up_agentpool_network_profile(agentpool) # DO NOT MOVE: keep this at the bottom, restore defaults agentpool = self._restore_defaults_in_agentpool(agentpool) @@ -427,6 +485,19 @@ def update_custom_ca_trust(self, agentpool: AgentPool) -> AgentPool: agentpool.enable_custom_ca_trust = False return agentpool + def update_network_profile(self, agentpool: AgentPool) -> AgentPool: + self._ensure_agentpool(agentpool) + + asg_ids = self.context.get_asg_ids() + allowed_host_ports = self.context.get_allowed_host_ports() + if asg_ids or allowed_host_ports: + agentpool.network_profile = self.models.AgentPoolNetworkProfile() + if asg_ids is not None: + agentpool.network_profile.application_security_groups = asg_ids + if allowed_host_ports is not None: + agentpool.network_profile.allowed_host_ports = allowed_host_ports + return agentpool + def update_agentpool_profile_preview(self, agentpools: List[AgentPool] = None) -> AgentPool: """The overall controller used to update the preview AgentPool profile. @@ -440,4 +511,8 @@ def update_agentpool_profile_preview(self, agentpools: List[AgentPool] = None) - # update custom ca trust agentpool = self.update_custom_ca_trust(agentpool) + + # update network profile + agentpool = self.update_network_profile(agentpool) + return agentpool diff --git a/src/aks-preview/azext_aks_preview/custom.py b/src/aks-preview/azext_aks_preview/custom.py index 35d6e9b25b1..2868a7e8a62 100644 --- a/src/aks-preview/azext_aks_preview/custom.py +++ b/src/aks-preview/azext_aks_preview/custom.py @@ -627,6 +627,8 @@ def aks_create( node_count=3, nodepool_tags=None, nodepool_labels=None, + nodepool_allowed_host_ports=None, + nodepool_asg_ids=None, node_osdisk_type=None, node_osdisk_size=0, vm_set_type=None, @@ -1172,6 +1174,8 @@ def aks_agentpool_add( gpu_instance_profile=None, enable_custom_ca_trust=False, disable_windows_outbound_nat=False, + allowed_host_ports=None, + asg_ids=None, ): # DO NOT MOVE: get all the original parameters and save them as a dictionary raw_parameters = locals() @@ -1219,6 +1223,8 @@ def aks_agentpool_update( # extensions enable_custom_ca_trust=False, disable_custom_ca_trust=False, + allowed_host_ports=None, + asg_ids=None, ): # DO NOT MOVE: get all the original parameters and save them as a dictionary raw_parameters = locals() diff --git a/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py b/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py index 947b94c287f..7cd115ed38d 100644 --- a/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py +++ b/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py @@ -1948,9 +1948,9 @@ def _get_enable_azure_monitor_metrics(self, enable_validation: bool = False) -> "Cannot specify --enable-azuremonitormetrics and --enable-azuremonitormetrics at the same time." 
) if not check_is_msi_cluster(self.mc): - raise RequiredArgumentMissingError( - "--enable-azuremonitormetrics can only be specified for clusters with managed identity enabled" - ) + raise RequiredArgumentMissingError( + "--enable-azuremonitormetrics can only be specified for clusters with managed identity enabled" + ) return enable_azure_monitor_metrics def get_enable_azure_monitor_metrics(self) -> bool: diff --git a/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_create_with_nsg_control.yaml b/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_create_with_nsg_control.yaml new file mode 100644 index 00000000000..7f25b3bc287 --- /dev/null +++ b/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_create_with_nsg_control.yaml @@ -0,0 +1,1169 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001?api-version=2021-04-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001","name":"clitest000001","type":"Microsoft.Resources/resourceGroups","location":"eastus","tags":{"product":"azurecli","cause":"automation","date":"2022-10-19T10:06:17Z"},"properties":{"provisioningState":"Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '304' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 10:06:24 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"location": "eastus"}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + Content-Length: + - '22' + Content-Type: + - application/json + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.26.0 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg1?api-version=2021-08-01 + response: + body: + string: "{\r\n \"name\": \"asg1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg1\"\ + ,\r\n \"etag\": \"W/\\\"7c4f9beb-cb39-420f-9395-7231bd803fe5\\\"\",\r\n \ + \ \"type\": \"Microsoft.Network/applicationSecurityGroups\",\r\n \"location\"\ + : \"eastus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\ + \r\n }\r\n}" + headers: + azure-asyncnotification: + - Enabled + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/eastus/operations/d60280bc-8476-476b-824c-3ceab7219a2d?api-version=2021-08-01 + cache-control: + - no-cache + content-length: + - '376' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 10:06:29 GMT + expires: + 
- '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-arm-service-request-id: + - ef2acfa4-7040-4c04-a7d9-f5e0c137c5c5 + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.26.0 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/eastus/operations/d60280bc-8476-476b-824c-3ceab7219a2d?api-version=2021-08-01 + response: + body: + string: "{\r\n \"status\": \"Succeeded\"\r\n}" + headers: + cache-control: + - no-cache + content-length: + - '29' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 10:06:31 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-arm-service-request-id: + - 62eca40a-c3e7-4b53-af73-a3520fd8b056 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.26.0 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg1?api-version=2021-08-01 + response: + body: + string: "{\r\n \"name\": \"asg1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg1\"\ + ,\r\n \"etag\": \"W/\\\"06e1c6d8-9765-4c3e-8611-2d3959bc1ae9\\\"\",\r\n \ + \ \"type\": \"Microsoft.Network/applicationSecurityGroups\",\r\n \"location\"\ + : \"eastus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\ + \r\n }\r\n}" + headers: + cache-control: + - no-cache + content-length: + - '377' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 10:06:31 GMT + etag: + - W/"06e1c6d8-9765-4c3e-8611-2d3959bc1ae9" + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-arm-service-request-id: + - e737fe65-1c06-4796-89d6-7a284800995f + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001?api-version=2021-04-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001","name":"clitest000001","type":"Microsoft.Resources/resourceGroups","location":"eastus","tags":{"product":"azurecli","cause":"automation","date":"2022-10-19T10:06:17Z"},"properties":{"provisioningState":"Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '304' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 10:06:32 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"location": "eastus"}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + Content-Length: + - '22' + Content-Type: + - application/json + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.26.0 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg2?api-version=2021-08-01 + response: + body: + string: "{\r\n \"name\": \"asg2\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg2\"\ + ,\r\n \"etag\": \"W/\\\"d323f5dd-ce97-4d6a-ac57-7986b53dff14\\\"\",\r\n \ + \ \"type\": \"Microsoft.Network/applicationSecurityGroups\",\r\n \"location\"\ + : \"eastus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\ + \r\n }\r\n}" + headers: + azure-asyncnotification: + - Enabled + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/eastus/operations/69f7e21f-8d67-4e6c-a29b-078dd0e36640?api-version=2021-08-01 + cache-control: + - no-cache + content-length: + - '376' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 10:06:37 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-arm-service-request-id: + - 5fda085b-ad96-4e9a-a4b8-0249c479c30d + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.26.0 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/eastus/operations/69f7e21f-8d67-4e6c-a29b-078dd0e36640?api-version=2021-08-01 + response: + body: + string: "{\r\n \"status\": \"Succeeded\"\r\n}" + headers: + cache-control: + - no-cache + content-length: + - '29' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 10:06:38 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + 
- nosniff + x-ms-arm-service-request-id: + - d1028a19-4789-4c85-999d-8d68fe9d9065 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.26.0 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg2?api-version=2021-08-01 + response: + body: + string: "{\r\n \"name\": \"asg2\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg2\"\ + ,\r\n \"etag\": \"W/\\\"3be88134-0822-4050-8a28-de3be339b174\\\"\",\r\n \ + \ \"type\": \"Microsoft.Network/applicationSecurityGroups\",\r\n \"location\"\ + : \"eastus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\ + \r\n }\r\n}" + headers: + cache-control: + - no-cache + content-length: + - '377' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 10:06:38 GMT + etag: + - W/"3be88134-0822-4050-8a28-de3be339b174" + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-arm-service-request-id: + - e1155034-9e8f-44a8-9435-32fad637f7f8 + status: + code: 200 + message: OK +- request: + body: '{"location": "eastus", "identity": {"type": "SystemAssigned"}, "properties": + {"kubernetesVersion": "", "dnsPrefix": "cliakstest-clitesttd3xtmkoj-8ecadf", + "agentPoolProfiles": [{"count": 1, "vmSize": "standard_d2s_v3", "osDiskSizeGB": + 0, "workloadRuntime": "OCIContainer", "osType": "Linux", "enableAutoScaling": + false, "type": "VirtualMachineScaleSets", "mode": "System", "orchestratorVersion": + "", "upgradeSettings": {}, "enableNodePublicIP": false, "enableCustomCATrust": + false, "scaleSetPriority": "Regular", "scaleSetEvictionPolicy": "Delete", "spotMaxPrice": + -1.0, "nodeTaints": [], "enableEncryptionAtHost": false, "enableUltraSSD": false, + "enableFIPS": false, "networkProfile": {"allowedHostPorts": [{"portStart": 53, + "portEnd": 53, "protocol": "UDP"}, {"portStart": 80, "portEnd": 80, "protocol": + "TCP"}, {"portStart": 443, "portEnd": 443, "protocol": "TCP"}, {"portStart": + 4000, "portEnd": 5000, "protocol": "TCP"}, {"portStart": 4000, "portEnd": 6000, + "protocol": "UDP"}], "applicationSecurityGroups": ["/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg1", + "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg2"]}, + "name": "nodepool1"}], "linuxProfile": {"adminUsername": "azureuser", "ssh": + {"publicKeys": [{"keyData": "ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ== + test@example.com\n"}]}}, "addonProfiles": {}, "enableRBAC": true, "enablePodSecurityPolicy": + false, "networkProfile": {"networkPlugin": "kubenet", "podCidr": "10.244.0.0/16", + "serviceCidr": "10.0.0.0/16", "dnsServiceIP": "10.0.0.10", "dockerBridgeCidr": + "172.17.0.1/16", "outboundType": "loadBalancer", "loadBalancerSku": "standard"}, + "disableLocalAccounts": false, "storageProfile": {}}}' + headers: + AKSHTTPCustomFeatures: + - Microsoft.ContainerService/NodePublicIPNSGControlPreview + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + Content-Length: + - '2520' + Content-Type: + - application/json + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + --nodepool-asg-ids --nodepool-allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002\"\ + ,\n \"location\": \"eastus\",\n \"name\": \"cliakstest000002\",\n \"type\"\ + : \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \ + \ \"provisioningState\": \"Creating\",\n \"powerState\": {\n \"code\"\ + : \"Running\"\n },\n \"kubernetesVersion\": \"1.23.12\",\n \"currentKubernetesVersion\"\ + : \"1.23.12\",\n \"dnsPrefix\": \"cliakstest-clitesttd3xtmkoj-8ecadf\",\n\ + \ \"fqdn\": \"cliakstest-clitesttd3xtmkoj-8ecadf-400a076b.hcp.eastus.azmk8s.io\"\ + ,\n \"azurePortalFQDN\": \"cliakstest-clitesttd3xtmkoj-8ecadf-400a076b.portal.hcp.eastus.azmk8s.io\"\ + ,\n \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \ + \ \"count\": 1,\n \"vmSize\": \"standard_d2s_v3\",\n \"osDiskSizeGB\"\ + : 128,\n \"osDiskType\": \"Managed\",\n \"kubeletDiskType\": \"OS\"\ + ,\n \"workloadRuntime\": \"OCIContainer\",\n \"maxPods\": 110,\n \ + \ \"type\": \"VirtualMachineScaleSets\",\n \"enableAutoScaling\": false,\n\ + \ \"provisioningState\": \"Creating\",\n \"powerState\": {\n \ + \ \"code\": \"Running\"\n },\n \"orchestratorVersion\": \"1.23.12\"\ + ,\n \"currentOrchestratorVersion\": \"1.23.12\",\n \"enableNodePublicIP\"\ + : false,\n \"enableCustomCATrust\": false,\n \"mode\": \"System\"\ + ,\n \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n\ + \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\"\ + : 
\"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\":\ + \ {},\n \"enableFIPS\": false,\n \"networkProfile\": {\n \"allowedHostPorts\"\ + : [\n {\n \"portStart\": 53,\n \"portEnd\": 53,\n \ + \ \"protocol\": \"UDP\"\n },\n {\n \"portStart\": 80,\n\ + \ \"portEnd\": 80,\n \"protocol\": \"TCP\"\n },\n \ + \ {\n \"portStart\": 443,\n \"portEnd\": 443,\n \"\ + protocol\": \"TCP\"\n },\n {\n \"portStart\": 4000,\n \ + \ \"portEnd\": 5000,\n \"protocol\": \"TCP\"\n },\n \ + \ {\n \"portStart\": 4000,\n \"portEnd\": 6000,\n \ + \ \"protocol\": \"UDP\"\n }\n ],\n \"applicationSecurityGroups\"\ + : [\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg1\"\ + ,\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg2\"\ + \n ]\n }\n }\n ],\n \"linuxProfile\": {\n \"adminUsername\"\ + : \"azureuser\",\n \"ssh\": {\n \"publicKeys\": [\n {\n \ + \ \"keyData\": \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ==\ + \ test@example.com\\n\"\n }\n ]\n }\n },\n \"servicePrincipalProfile\"\ + : {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n },\n \ + \ \"nodeResourceGroup\": \"MC_clitest000001_cliakstest000002_eastus\",\n \ + \ \"enableRBAC\": true,\n \"enablePodSecurityPolicy\": false,\n \"networkProfile\"\ + : {\n \"networkPlugin\": \"kubenet\",\n \"loadBalancerSku\": \"standard\"\ + ,\n \"loadBalancerProfile\": {\n \"managedOutboundIPs\": {\n \"\ + count\": 1\n },\n \"backendPoolType\": \"nodeIPConfiguration\"\n \ + \ },\n \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\"\ + ,\n \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\"\ + ,\n \"outboundType\": \"loadBalancer\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\ + \n ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\"\ + : [\n \"IPv4\"\n ]\n },\n \"maxAgentPools\": 100,\n \"disableLocalAccounts\"\ + : false,\n \"securityProfile\": {},\n \"storageProfile\": {\n \"diskCSIDriver\"\ + : {\n \"enabled\": true,\n \"version\": \"v1\"\n },\n \"fileCSIDriver\"\ + : {\n \"enabled\": true\n },\n \"snapshotController\": {\n \"\ + enabled\": true\n }\n },\n \"oidcIssuerProfile\": {\n \"enabled\"\ + : false\n },\n \"workloadAutoScalerProfile\": {}\n },\n \"identity\"\ + : {\n \"type\": \"SystemAssigned\",\n \"principalId\":\"00000000-0000-0000-0000-000000000001\"\ + ,\n \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\"\ + : {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" + headers: + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/72455bf9-dbfe-4721-95a4-0ce796dec5f7?api-version=2017-08-31 + cache-control: + - 
no-cache + content-length: + - '4666' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 10:06:50 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + --nodepool-asg-ids --nodepool-allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/72455bf9-dbfe-4721-95a4-0ce796dec5f7?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"f95b4572-fedb-2147-95a4-0ce796dec5f7\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T10:06:50.0921744Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 10:06:51 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + --nodepool-asg-ids --nodepool-allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/72455bf9-dbfe-4721-95a4-0ce796dec5f7?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"f95b4572-fedb-2147-95a4-0ce796dec5f7\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T10:06:50.0921744Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 10:07:21 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + --nodepool-asg-ids --nodepool-allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/72455bf9-dbfe-4721-95a4-0ce796dec5f7?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"f95b4572-fedb-2147-95a4-0ce796dec5f7\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T10:06:50.0921744Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 10:07:51 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + --nodepool-asg-ids --nodepool-allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/72455bf9-dbfe-4721-95a4-0ce796dec5f7?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"f95b4572-fedb-2147-95a4-0ce796dec5f7\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T10:06:50.0921744Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 10:08:23 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + --nodepool-asg-ids --nodepool-allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/72455bf9-dbfe-4721-95a4-0ce796dec5f7?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"f95b4572-fedb-2147-95a4-0ce796dec5f7\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T10:06:50.0921744Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 10:08:54 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location 
--ssh-key-value --node-count --node-vm-size + --nodepool-asg-ids --nodepool-allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/72455bf9-dbfe-4721-95a4-0ce796dec5f7?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"f95b4572-fedb-2147-95a4-0ce796dec5f7\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T10:06:50.0921744Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 10:09:24 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + --nodepool-asg-ids --nodepool-allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/72455bf9-dbfe-4721-95a4-0ce796dec5f7?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"f95b4572-fedb-2147-95a4-0ce796dec5f7\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T10:06:50.0921744Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 10:09:54 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + --nodepool-asg-ids --nodepool-allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/72455bf9-dbfe-4721-95a4-0ce796dec5f7?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"f95b4572-fedb-2147-95a4-0ce796dec5f7\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T10:06:50.0921744Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 10:10:25 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + 
x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + --nodepool-asg-ids --nodepool-allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/72455bf9-dbfe-4721-95a4-0ce796dec5f7?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"f95b4572-fedb-2147-95a4-0ce796dec5f7\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T10:06:50.0921744Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 10:10:56 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + --nodepool-asg-ids --nodepool-allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/72455bf9-dbfe-4721-95a4-0ce796dec5f7?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"f95b4572-fedb-2147-95a4-0ce796dec5f7\",\n \"status\"\ + : \"Succeeded\",\n \"startTime\": \"2022-10-19T10:06:50.0921744Z\",\n \"\ + endTime\": \"2022-10-19T10:11:21.253437Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '169' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 10:11:26 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + --nodepool-asg-ids --nodepool-allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002\"\ + 
,\n \"location\": \"eastus\",\n \"name\": \"cliakstest000002\",\n \"type\"\ + : \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \ + \ \"provisioningState\": \"Succeeded\",\n \"powerState\": {\n \"code\"\ + : \"Running\"\n },\n \"kubernetesVersion\": \"1.23.12\",\n \"currentKubernetesVersion\"\ + : \"1.23.12\",\n \"dnsPrefix\": \"cliakstest-clitesttd3xtmkoj-8ecadf\",\n\ + \ \"fqdn\": \"cliakstest-clitesttd3xtmkoj-8ecadf-400a076b.hcp.eastus.azmk8s.io\"\ + ,\n \"azurePortalFQDN\": \"cliakstest-clitesttd3xtmkoj-8ecadf-400a076b.portal.hcp.eastus.azmk8s.io\"\ + ,\n \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \ + \ \"count\": 1,\n \"vmSize\": \"standard_d2s_v3\",\n \"osDiskSizeGB\"\ + : 128,\n \"osDiskType\": \"Managed\",\n \"kubeletDiskType\": \"OS\"\ + ,\n \"workloadRuntime\": \"OCIContainer\",\n \"maxPods\": 110,\n \ + \ \"type\": \"VirtualMachineScaleSets\",\n \"enableAutoScaling\": false,\n\ + \ \"provisioningState\": \"Succeeded\",\n \"powerState\": {\n \ + \ \"code\": \"Running\"\n },\n \"orchestratorVersion\": \"1.23.12\"\ + ,\n \"currentOrchestratorVersion\": \"1.23.12\",\n \"enableNodePublicIP\"\ + : false,\n \"enableCustomCATrust\": false,\n \"mode\": \"System\"\ + ,\n \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n\ + \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\"\ + : \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\":\ + \ {},\n \"enableFIPS\": false,\n \"networkProfile\": {\n \"allowedHostPorts\"\ + : [\n {\n \"portStart\": 53,\n \"portEnd\": 53,\n \ + \ \"protocol\": \"UDP\"\n },\n {\n \"portStart\": 80,\n\ + \ \"portEnd\": 80,\n \"protocol\": \"TCP\"\n },\n \ + \ {\n \"portStart\": 443,\n \"portEnd\": 443,\n \"\ + protocol\": \"TCP\"\n },\n {\n \"portStart\": 4000,\n \ + \ \"portEnd\": 5000,\n \"protocol\": \"TCP\"\n },\n \ + \ {\n \"portStart\": 4000,\n \"portEnd\": 6000,\n \ + \ \"protocol\": \"UDP\"\n }\n ],\n \"applicationSecurityGroups\"\ + : [\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg1\"\ + ,\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg2\"\ + \n ]\n }\n }\n ],\n \"linuxProfile\": {\n \"adminUsername\"\ + : \"azureuser\",\n \"ssh\": {\n \"publicKeys\": [\n {\n \ + \ \"keyData\": \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ==\ + \ test@example.com\\n\"\n }\n ]\n }\n },\n \"servicePrincipalProfile\"\ + : {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n },\n \ + \ \"nodeResourceGroup\": \"MC_clitest000001_cliakstest000002_eastus\",\n \ + \ \"enableRBAC\": true,\n \"enablePodSecurityPolicy\": false,\n \"networkProfile\"\ + : {\n \"networkPlugin\": \"kubenet\",\n \"loadBalancerSku\": \"Standard\"\ + ,\n 
\"loadBalancerProfile\": {\n \"managedOutboundIPs\": {\n \"\ + count\": 1\n },\n \"effectiveOutboundIPs\": [\n {\n \"\ + id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_eastus/providers/Microsoft.Network/publicIPAddresses/fbc84c81-190a-437b-8b4c-1063316a7b1d\"\ + \n }\n ],\n \"backendPoolType\": \"nodeIPConfiguration\"\n \ + \ },\n \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\"\ + ,\n \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\"\ + ,\n \"outboundType\": \"loadBalancer\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\ + \n ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\"\ + : [\n \"IPv4\"\n ]\n },\n \"maxAgentPools\": 100,\n \"identityProfile\"\ + : {\n \"kubeletidentity\": {\n \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_clitest000001_cliakstest000002_eastus/providers/Microsoft.ManagedIdentity/userAssignedIdentities/cliakstest000002-agentpool\"\ + ,\n \"clientId\":\"00000000-0000-0000-0000-000000000001\",\n \"objectId\"\ + :\"00000000-0000-0000-0000-000000000001\"\n }\n },\n \"disableLocalAccounts\"\ + : false,\n \"securityProfile\": {},\n \"storageProfile\": {\n \"diskCSIDriver\"\ + : {\n \"enabled\": true,\n \"version\": \"v1\"\n },\n \"fileCSIDriver\"\ + : {\n \"enabled\": true\n },\n \"snapshotController\": {\n \"\ + enabled\": true\n }\n },\n \"oidcIssuerProfile\": {\n \"enabled\"\ + : false\n },\n \"workloadAutoScalerProfile\": {}\n },\n \"identity\"\ + : {\n \"type\": \"SystemAssigned\",\n \"principalId\":\"00000000-0000-0000-0000-000000000001\"\ + ,\n \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\"\ + : {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" + headers: + cache-control: + - no-cache + content-length: + - '5317' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 10:11:26 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - aks delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --resource-group --name --yes --no-wait + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview + response: + body: + string: '' + headers: + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/dcc7f0e2-8cae-445b-a10b-88924dbcf61f?api-version=2017-08-31 + cache-control: + - no-cache + content-length: + - '0' + date: + - Wed, 19 Oct 2022 10:11:31 GMT + expires: + - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operationresults/dcc7f0e2-8cae-445b-a10b-88924dbcf61f?api-version=2017-08-31 + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + 
x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + status: + code: 202 + message: Accepted +version: 1 diff --git a/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_nodepool_create_with_nsg_control.yaml b/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_nodepool_create_with_nsg_control.yaml new file mode 100644 index 00000000000..81db1b99298 --- /dev/null +++ b/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_nodepool_create_with_nsg_control.yaml @@ -0,0 +1,1757 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001?api-version=2021-04-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001","name":"clitest000001","type":"Microsoft.Resources/resourceGroups","location":"eastus","tags":{"product":"azurecli","cause":"automation","date":"2022-10-19T12:16:57Z"},"properties":{"provisioningState":"Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '304' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 12:17:06 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"location": "eastus"}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + Content-Length: + - '22' + Content-Type: + - application/json + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.26.0 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg1?api-version=2021-08-01 + response: + body: + string: "{\r\n \"name\": \"asg1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg1\"\ + ,\r\n \"etag\": \"W/\\\"af2df9f9-599a-40a3-9a16-0b8d390d575b\\\"\",\r\n \ + \ \"type\": \"Microsoft.Network/applicationSecurityGroups\",\r\n \"location\"\ + : \"eastus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\ + \r\n }\r\n}" + headers: + azure-asyncnotification: + - Enabled + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/eastus/operations/6d6efd52-5b25-44d9-a998-98a01a10346c?api-version=2021-08-01 + cache-control: + - no-cache + content-length: + - '376' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 12:17:11 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-arm-service-request-id: 
+ - aa4928ab-4822-40da-b206-5a30624d2df8 + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.26.0 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/eastus/operations/6d6efd52-5b25-44d9-a998-98a01a10346c?api-version=2021-08-01 + response: + body: + string: "{\r\n \"status\": \"Succeeded\"\r\n}" + headers: + cache-control: + - no-cache + content-length: + - '29' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 12:17:12 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-arm-service-request-id: + - 573fd67f-5a5d-4f78-9768-84b8123b375d + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.26.0 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg1?api-version=2021-08-01 + response: + body: + string: "{\r\n \"name\": \"asg1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg1\"\ + ,\r\n \"etag\": \"W/\\\"bcd9c4d2-fb5a-4971-b8ef-09ec92f240c3\\\"\",\r\n \ + \ \"type\": \"Microsoft.Network/applicationSecurityGroups\",\r\n \"location\"\ + : \"eastus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\ + \r\n }\r\n}" + headers: + cache-control: + - no-cache + content-length: + - '377' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 12:17:12 GMT + etag: + - W/"bcd9c4d2-fb5a-4971-b8ef-09ec92f240c3" + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-arm-service-request-id: + - cda210b4-f49d-462d-bceb-e1519eb2226d + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001?api-version=2021-04-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001","name":"clitest000001","type":"Microsoft.Resources/resourceGroups","location":"eastus","tags":{"product":"azurecli","cause":"automation","date":"2022-10-19T12:16:57Z"},"properties":{"provisioningState":"Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '304' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 12:17:14 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"location": "eastus"}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + Content-Length: + - '22' + Content-Type: + - application/json + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.26.0 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg2?api-version=2021-08-01 + response: + body: + string: "{\r\n \"name\": \"asg2\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg2\"\ + ,\r\n \"etag\": \"W/\\\"926a4911-f9a7-494a-8dcf-3dbc6c930215\\\"\",\r\n \ + \ \"type\": \"Microsoft.Network/applicationSecurityGroups\",\r\n \"location\"\ + : \"eastus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\ + \r\n }\r\n}" + headers: + azure-asyncnotification: + - Enabled + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/eastus/operations/7bff3284-12ef-44cf-a7ff-5eaa2a8aa9ce?api-version=2021-08-01 + cache-control: + - no-cache + content-length: + - '376' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 12:17:19 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-arm-service-request-id: + - 36a5480f-c878-43dd-8a08-e8783e80015a + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.26.0 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/eastus/operations/7bff3284-12ef-44cf-a7ff-5eaa2a8aa9ce?api-version=2021-08-01 + response: + body: + string: "{\r\n \"status\": \"Succeeded\"\r\n}" + headers: + cache-control: + - no-cache + content-length: + - '29' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 12:17:19 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - 
chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-arm-service-request-id: + - c56bda5b-4c5a-498e-882f-36f53e4c4877 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.26.0 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg2?api-version=2021-08-01 + response: + body: + string: "{\r\n \"name\": \"asg2\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg2\"\ + ,\r\n \"etag\": \"W/\\\"118fd2f5-999f-43d7-8d89-7c997b4cdbab\\\"\",\r\n \ + \ \"type\": \"Microsoft.Network/applicationSecurityGroups\",\r\n \"location\"\ + : \"eastus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\ + \r\n }\r\n}" + headers: + cache-control: + - no-cache + content-length: + - '377' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 12:17:20 GMT + etag: + - W/"118fd2f5-999f-43d7-8d89-7c997b4cdbab" + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-arm-service-request-id: + - 58383151-9df7-4b12-9a62-3d18986def03 + status: + code: 200 + message: OK +- request: + body: '{"location": "eastus", "identity": {"type": "SystemAssigned"}, "properties": + {"kubernetesVersion": "", "dnsPrefix": "cliakstest-clitest7j5lv7oui-8ecadf", + "agentPoolProfiles": [{"count": 1, "vmSize": "standard_d2s_v3", "osDiskSizeGB": + 0, "workloadRuntime": "OCIContainer", "osType": "Linux", "enableAutoScaling": + false, "type": "VirtualMachineScaleSets", "mode": "System", "orchestratorVersion": + "", "upgradeSettings": {}, "enableNodePublicIP": false, "enableCustomCATrust": + false, "scaleSetPriority": "Regular", "scaleSetEvictionPolicy": "Delete", "spotMaxPrice": + -1.0, "nodeTaints": [], "enableEncryptionAtHost": false, "enableUltraSSD": false, + "enableFIPS": false, "networkProfile": {"allowedHostPorts": [], "applicationSecurityGroups": + []}, "name": "nodepool1"}], "linuxProfile": {"adminUsername": "azureuser", "ssh": + {"publicKeys": [{"keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ== + test@example.com\n"}]}}, "addonProfiles": {}, "enableRBAC": true, "enablePodSecurityPolicy": + false, "networkProfile": {"networkPlugin": "kubenet", "podCidr": 
"10.244.0.0/16", + "serviceCidr": "10.0.0.0/16", "dnsServiceIP": "10.0.0.10", "dockerBridgeCidr": + "172.17.0.1/16", "outboundType": "loadBalancer", "loadBalancerSku": "standard"}, + "disableLocalAccounts": false, "storageProfile": {}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + Content-Length: + - '1963' + Content-Type: + - application/json + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002\"\ + ,\n \"location\": \"eastus\",\n \"name\": \"cliakstest000002\",\n \"type\"\ + : \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \ + \ \"provisioningState\": \"Creating\",\n \"powerState\": {\n \"code\"\ + : \"Running\"\n },\n \"kubernetesVersion\": \"1.23.12\",\n \"currentKubernetesVersion\"\ + : \"1.23.12\",\n \"dnsPrefix\": \"cliakstest-clitest7j5lv7oui-8ecadf\",\n\ + \ \"fqdn\": \"cliakstest-clitest7j5lv7oui-8ecadf-4b5e2987.hcp.eastus.azmk8s.io\"\ + ,\n \"azurePortalFQDN\": \"cliakstest-clitest7j5lv7oui-8ecadf-4b5e2987.portal.hcp.eastus.azmk8s.io\"\ + ,\n \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \ + \ \"count\": 1,\n \"vmSize\": \"standard_d2s_v3\",\n \"osDiskSizeGB\"\ + : 128,\n \"osDiskType\": \"Managed\",\n \"kubeletDiskType\": \"OS\"\ + ,\n \"workloadRuntime\": \"OCIContainer\",\n \"maxPods\": 110,\n \ + \ \"type\": \"VirtualMachineScaleSets\",\n \"enableAutoScaling\": false,\n\ + \ \"provisioningState\": \"Creating\",\n \"powerState\": {\n \ + \ \"code\": \"Running\"\n },\n \"orchestratorVersion\": \"1.23.12\"\ + ,\n \"currentOrchestratorVersion\": \"1.23.12\",\n \"enableNodePublicIP\"\ + : false,\n \"enableCustomCATrust\": false,\n \"mode\": \"System\"\ + ,\n \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n\ + \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\"\ + : \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\":\ + \ {},\n \"enableFIPS\": false,\n \"networkProfile\": {}\n }\n \ + \ ],\n \"linuxProfile\": {\n \"adminUsername\": \"azureuser\",\n \ + \ \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": \"ssh-rsa\ + \ AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ==\ + \ test@example.com\\n\"\n }\n ]\n }\n },\n \"servicePrincipalProfile\"\ + : {\n 
\"clientId\":\"00000000-0000-0000-0000-000000000001\"\n },\n \ + \ \"nodeResourceGroup\": \"MC_clitest000001_cliakstest000002_eastus\",\n \ + \ \"enableRBAC\": true,\n \"enablePodSecurityPolicy\": false,\n \"networkProfile\"\ + : {\n \"networkPlugin\": \"kubenet\",\n \"loadBalancerSku\": \"standard\"\ + ,\n \"loadBalancerProfile\": {\n \"managedOutboundIPs\": {\n \"\ + count\": 1\n },\n \"backendPoolType\": \"nodeIPConfiguration\"\n \ + \ },\n \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\"\ + ,\n \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\"\ + ,\n \"outboundType\": \"loadBalancer\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\ + \n ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\"\ + : [\n \"IPv4\"\n ]\n },\n \"maxAgentPools\": 100,\n \"disableLocalAccounts\"\ + : false,\n \"securityProfile\": {},\n \"storageProfile\": {\n \"diskCSIDriver\"\ + : {\n \"enabled\": true,\n \"version\": \"v1\"\n },\n \"fileCSIDriver\"\ + : {\n \"enabled\": true\n },\n \"snapshotController\": {\n \"\ + enabled\": true\n }\n },\n \"oidcIssuerProfile\": {\n \"enabled\"\ + : false\n },\n \"workloadAutoScalerProfile\": {}\n },\n \"identity\"\ + : {\n \"type\": \"SystemAssigned\",\n \"principalId\":\"00000000-0000-0000-0000-000000000001\"\ + ,\n \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\"\ + : {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" + headers: + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/a9ab0b55-db86-4839-9b3e-628058cb583c?api-version=2017-08-31 + cache-control: + - no-cache + content-length: + - '3805' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:17:31 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/a9ab0b55-db86-4839-9b3e-628058cb583c?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"550baba9-86db-3948-9b3e-628058cb583c\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T12:17:29.9051374Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:17:31 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + User-Agent: + - 
AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/a9ab0b55-db86-4839-9b3e-628058cb583c?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"550baba9-86db-3948-9b3e-628058cb583c\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T12:17:29.9051374Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:18:01 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/a9ab0b55-db86-4839-9b3e-628058cb583c?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"550baba9-86db-3948-9b3e-628058cb583c\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T12:17:29.9051374Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:18:31 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/a9ab0b55-db86-4839-9b3e-628058cb583c?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"550baba9-86db-3948-9b3e-628058cb583c\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T12:17:29.9051374Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:19:02 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value 
--node-count --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/a9ab0b55-db86-4839-9b3e-628058cb583c?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"550baba9-86db-3948-9b3e-628058cb583c\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T12:17:29.9051374Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:19:32 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/a9ab0b55-db86-4839-9b3e-628058cb583c?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"550baba9-86db-3948-9b3e-628058cb583c\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T12:17:29.9051374Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:20:03 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/a9ab0b55-db86-4839-9b3e-628058cb583c?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"550baba9-86db-3948-9b3e-628058cb583c\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T12:17:29.9051374Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:20:33 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - 
--resource-group --name --location --ssh-key-value --node-count --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/a9ab0b55-db86-4839-9b3e-628058cb583c?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"550baba9-86db-3948-9b3e-628058cb583c\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T12:17:29.9051374Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:21:04 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/a9ab0b55-db86-4839-9b3e-628058cb583c?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"550baba9-86db-3948-9b3e-628058cb583c\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T12:17:29.9051374Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:21:34 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/a9ab0b55-db86-4839-9b3e-628058cb583c?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"550baba9-86db-3948-9b3e-628058cb583c\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T12:17:29.9051374Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:22:06 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + 
Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/a9ab0b55-db86-4839-9b3e-628058cb583c?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"550baba9-86db-3948-9b3e-628058cb583c\",\n \"status\"\ + : \"Succeeded\",\n \"startTime\": \"2022-10-19T12:17:29.9051374Z\",\n \"\ + endTime\": \"2022-10-19T12:22:14.2127065Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '170' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:22:36 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --node-count --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002\"\ + ,\n \"location\": \"eastus\",\n \"name\": \"cliakstest000002\",\n \"type\"\ + : \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \ + \ \"provisioningState\": \"Succeeded\",\n \"powerState\": {\n \"code\"\ + : \"Running\"\n },\n \"kubernetesVersion\": \"1.23.12\",\n \"currentKubernetesVersion\"\ + : \"1.23.12\",\n \"dnsPrefix\": \"cliakstest-clitest7j5lv7oui-8ecadf\",\n\ + \ \"fqdn\": \"cliakstest-clitest7j5lv7oui-8ecadf-4b5e2987.hcp.eastus.azmk8s.io\"\ + ,\n \"azurePortalFQDN\": \"cliakstest-clitest7j5lv7oui-8ecadf-4b5e2987.portal.hcp.eastus.azmk8s.io\"\ + ,\n \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \ + \ \"count\": 1,\n \"vmSize\": \"standard_d2s_v3\",\n \"osDiskSizeGB\"\ + : 128,\n \"osDiskType\": \"Managed\",\n \"kubeletDiskType\": \"OS\"\ + ,\n \"workloadRuntime\": \"OCIContainer\",\n \"maxPods\": 110,\n \ + \ \"type\": \"VirtualMachineScaleSets\",\n \"enableAutoScaling\": false,\n\ + \ \"provisioningState\": \"Succeeded\",\n \"powerState\": {\n \ + \ \"code\": \"Running\"\n },\n \"orchestratorVersion\": \"1.23.12\"\ + ,\n \"currentOrchestratorVersion\": \"1.23.12\",\n \"enableNodePublicIP\"\ + : false,\n \"enableCustomCATrust\": false,\n \"mode\": \"System\"\ + ,\n \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n\ + \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\"\ + : \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\":\ + \ {},\n \"enableFIPS\": false,\n \"networkProfile\": {}\n }\n \ + \ ],\n \"linuxProfile\": {\n \"adminUsername\": \"azureuser\",\n \ + \ \"ssh\": {\n \"publicKeys\": 
[\n {\n \"keyData\": \"ssh-rsa\ + \ AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ==\ + \ test@example.com\\n\"\n }\n ]\n }\n },\n \"servicePrincipalProfile\"\ + : {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n },\n \ + \ \"nodeResourceGroup\": \"MC_clitest000001_cliakstest000002_eastus\",\n \ + \ \"enableRBAC\": true,\n \"enablePodSecurityPolicy\": false,\n \"networkProfile\"\ + : {\n \"networkPlugin\": \"kubenet\",\n \"loadBalancerSku\": \"Standard\"\ + ,\n \"loadBalancerProfile\": {\n \"managedOutboundIPs\": {\n \"\ + count\": 1\n },\n \"effectiveOutboundIPs\": [\n {\n \"\ + id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_eastus/providers/Microsoft.Network/publicIPAddresses/e38915f3-6158-45b8-a2f2-e9fb2c26af7c\"\ + \n }\n ],\n \"backendPoolType\": \"nodeIPConfiguration\"\n \ + \ },\n \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\"\ + ,\n \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\"\ + ,\n \"outboundType\": \"loadBalancer\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\ + \n ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\"\ + : [\n \"IPv4\"\n ]\n },\n \"maxAgentPools\": 100,\n \"identityProfile\"\ + : {\n \"kubeletidentity\": {\n \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_clitest000001_cliakstest000002_eastus/providers/Microsoft.ManagedIdentity/userAssignedIdentities/cliakstest000002-agentpool\"\ + ,\n \"clientId\":\"00000000-0000-0000-0000-000000000001\",\n \"objectId\"\ + :\"00000000-0000-0000-0000-000000000001\"\n }\n },\n \"disableLocalAccounts\"\ + : false,\n \"securityProfile\": {},\n \"storageProfile\": {\n \"diskCSIDriver\"\ + : {\n \"enabled\": true,\n \"version\": \"v1\"\n },\n \"fileCSIDriver\"\ + : {\n \"enabled\": true\n },\n \"snapshotController\": {\n \"\ + enabled\": true\n }\n },\n \"oidcIssuerProfile\": {\n \"enabled\"\ + : false\n },\n \"workloadAutoScalerProfile\": {}\n },\n \"identity\"\ + : {\n \"type\": \"SystemAssigned\",\n \"principalId\":\"00000000-0000-0000-0000-000000000001\"\ + ,\n \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\"\ + : {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" + headers: + cache-control: + - no-cache + content-length: + - '4456' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:22:37 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - aks nodepool add + Connection: + - keep-alive + ParameterSetName: + - --resource-group 
--cluster-name --name --node-vm-size --node-count --asg-ids + --allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002/agentPools?api-version=2022-09-02-preview + response: + body: + string: "{\n \"value\": [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002/agentPools/nodepool1\"\ + ,\n \"name\": \"nodepool1\",\n \"type\": \"Microsoft.ContainerService/managedClusters/agentPools\"\ + ,\n \"properties\": {\n \"count\": 1,\n \"vmSize\": \"standard_d2s_v3\"\ + ,\n \"osDiskSizeGB\": 128,\n \"osDiskType\": \"Managed\",\n \"\ + kubeletDiskType\": \"OS\",\n \"workloadRuntime\": \"OCIContainer\",\n\ + \ \"maxPods\": 110,\n \"type\": \"VirtualMachineScaleSets\",\n \ + \ \"enableAutoScaling\": false,\n \"provisioningState\": \"Succeeded\"\ + ,\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"orchestratorVersion\"\ + : \"1.23.12\",\n \"currentOrchestratorVersion\": \"1.23.12\",\n \"\ + enableNodePublicIP\": false,\n \"enableCustomCATrust\": false,\n \"\ + mode\": \"System\",\n \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\"\ + : false,\n \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"\ + nodeImageVersion\": \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"\ + upgradeSettings\": {},\n \"enableFIPS\": false,\n \"networkProfile\"\ + : {}\n }\n }\n ]\n }" + headers: + cache-control: + - no-cache + content-length: + - '1140' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:22:40 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"properties": {"count": 1, "vmSize": "standard_d2s_v3", "osDiskSizeGB": + 0, "workloadRuntime": "OCIContainer", "osType": "Linux", "enableAutoScaling": + false, "scaleDownMode": "Delete", "type": "VirtualMachineScaleSets", "mode": + "User", "upgradeSettings": {}, "enableNodePublicIP": false, "enableCustomCATrust": + false, "scaleSetPriority": "Regular", "scaleSetEvictionPolicy": "Delete", "spotMaxPrice": + -1.0, "nodeTaints": [], "enableEncryptionAtHost": false, "enableUltraSSD": false, + "enableFIPS": false, "networkProfile": {"allowedHostPorts": [{"portStart": 53, + "portEnd": 53, "protocol": "UDP"}, {"portStart": 80, "portEnd": 80, "protocol": + "TCP"}, {"portStart": 443, "portEnd": 443, "protocol": "TCP"}, {"portStart": + 4000, "portEnd": 5000, "protocol": "TCP"}, {"portStart": 4000, "portEnd": 6000, + "protocol": "UDP"}], "applicationSecurityGroups": ["/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg1", + "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg2"]}}}' + headers: + AKSHTTPCustomFeatures: + - Microsoft.ContainerService/NodePublicIPNSGControlPreview + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - aks nodepool add + Connection: + - keep-alive + Content-Length: 
+ - '1138' + Content-Type: + - application/json + ParameterSetName: + - --resource-group --cluster-name --name --node-vm-size --node-count --asg-ids + --allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002/agentPools/n000003?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002/agentPools/n000003\"\ + ,\n \"name\": \"n000003\",\n \"type\": \"Microsoft.ContainerService/managedClusters/agentPools\"\ + ,\n \"properties\": {\n \"count\": 1,\n \"vmSize\": \"standard_d2s_v3\"\ + ,\n \"osDiskSizeGB\": 128,\n \"osDiskType\": \"Managed\",\n \"kubeletDiskType\"\ + : \"OS\",\n \"workloadRuntime\": \"OCIContainer\",\n \"maxPods\": 110,\n\ + \ \"type\": \"VirtualMachineScaleSets\",\n \"enableAutoScaling\": false,\n\ + \ \"scaleDownMode\": \"Delete\",\n \"provisioningState\": \"Creating\"\ + ,\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"orchestratorVersion\"\ + : \"1.23.12\",\n \"currentOrchestratorVersion\": \"1.23.12\",\n \"enableNodePublicIP\"\ + : false,\n \"enableCustomCATrust\": false,\n \"mode\": \"User\",\n \"\ + enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n \"osType\"\ + : \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\": \"AKSUbuntu-1804gen2containerd-2022.10.03\"\ + ,\n \"upgradeSettings\": {},\n \"enableFIPS\": false,\n \"networkProfile\"\ + : {\n \"allowedHostPorts\": [\n {\n \"portStart\": 53,\n \ + \ \"portEnd\": 53,\n \"protocol\": \"UDP\"\n },\n {\n \"\ + portStart\": 80,\n \"portEnd\": 80,\n \"protocol\": \"TCP\"\n \ + \ },\n {\n \"portStart\": 443,\n \"portEnd\": 443,\n \ + \ \"protocol\": \"TCP\"\n },\n {\n \"portStart\": 4000,\n \ + \ \"portEnd\": 5000,\n \"protocol\": \"TCP\"\n },\n {\n \ + \ \"portStart\": 4000,\n \"portEnd\": 6000,\n \"protocol\": \"\ + UDP\"\n }\n ],\n \"applicationSecurityGroups\": [\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg1\"\ + ,\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg2\"\ + \n ]\n }\n }\n }" + headers: + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/864969d5-2429-4f1d-8007-976c1a691c0f?api-version=2017-08-31 + cache-control: + - no-cache + content-length: + - '1871' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:22:43 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks nodepool add + Connection: + - keep-alive + ParameterSetName: + - --resource-group --cluster-name --name --node-vm-size --node-count --asg-ids + --allowed-host-ports --aks-custom-headers + User-Agent: 
+ - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/864969d5-2429-4f1d-8007-976c1a691c0f?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"d5694986-2924-1d4f-8007-976c1a691c0f\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T12:22:43.9989407Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:22:43 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks nodepool add + Connection: + - keep-alive + ParameterSetName: + - --resource-group --cluster-name --name --node-vm-size --node-count --asg-ids + --allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/864969d5-2429-4f1d-8007-976c1a691c0f?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"d5694986-2924-1d4f-8007-976c1a691c0f\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T12:22:43.9989407Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:23:15 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks nodepool add + Connection: + - keep-alive + ParameterSetName: + - --resource-group --cluster-name --name --node-vm-size --node-count --asg-ids + --allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/864969d5-2429-4f1d-8007-976c1a691c0f?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"d5694986-2924-1d4f-8007-976c1a691c0f\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T12:22:43.9989407Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:23:45 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks nodepool 
add + Connection: + - keep-alive + ParameterSetName: + - --resource-group --cluster-name --name --node-vm-size --node-count --asg-ids + --allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/864969d5-2429-4f1d-8007-976c1a691c0f?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"d5694986-2924-1d4f-8007-976c1a691c0f\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T12:22:43.9989407Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:24:15 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks nodepool add + Connection: + - keep-alive + ParameterSetName: + - --resource-group --cluster-name --name --node-vm-size --node-count --asg-ids + --allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/864969d5-2429-4f1d-8007-976c1a691c0f?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"d5694986-2924-1d4f-8007-976c1a691c0f\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T12:22:43.9989407Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:24:45 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks nodepool add + Connection: + - keep-alive + ParameterSetName: + - --resource-group --cluster-name --name --node-vm-size --node-count --asg-ids + --allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/864969d5-2429-4f1d-8007-976c1a691c0f?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"d5694986-2924-1d4f-8007-976c1a691c0f\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-19T12:22:43.9989407Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:25:16 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + 
x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks nodepool add + Connection: + - keep-alive + ParameterSetName: + - --resource-group --cluster-name --name --node-vm-size --node-count --asg-ids + --allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/864969d5-2429-4f1d-8007-976c1a691c0f?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"d5694986-2924-1d4f-8007-976c1a691c0f\",\n \"status\"\ + : \"Succeeded\",\n \"startTime\": \"2022-10-19T12:22:43.9989407Z\",\n \"\ + endTime\": \"2022-10-19T12:25:47.3875817Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '170' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:25:46 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks nodepool add + Connection: + - keep-alive + ParameterSetName: + - --resource-group --cluster-name --name --node-vm-size --node-count --asg-ids + --allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002/agentPools/n000003?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002/agentPools/n000003\"\ + ,\n \"name\": \"n000003\",\n \"type\": \"Microsoft.ContainerService/managedClusters/agentPools\"\ + ,\n \"properties\": {\n \"count\": 1,\n \"vmSize\": \"standard_d2s_v3\"\ + ,\n \"osDiskSizeGB\": 128,\n \"osDiskType\": \"Managed\",\n \"kubeletDiskType\"\ + : \"OS\",\n \"workloadRuntime\": \"OCIContainer\",\n \"maxPods\": 110,\n\ + \ \"type\": \"VirtualMachineScaleSets\",\n \"enableAutoScaling\": false,\n\ + \ \"scaleDownMode\": \"Delete\",\n \"provisioningState\": \"Succeeded\"\ + ,\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"orchestratorVersion\"\ + : \"1.23.12\",\n \"currentOrchestratorVersion\": \"1.23.12\",\n \"enableNodePublicIP\"\ + : false,\n \"enableCustomCATrust\": false,\n \"mode\": \"User\",\n \"\ + enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n \"osType\"\ + : \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\": \"AKSUbuntu-1804gen2containerd-2022.10.03\"\ + ,\n \"upgradeSettings\": {},\n \"enableFIPS\": false,\n \"networkProfile\"\ + : {\n \"allowedHostPorts\": [\n {\n \"portStart\": 53,\n \ + \ \"portEnd\": 53,\n \"protocol\": \"UDP\"\n },\n {\n \"\ + portStart\": 80,\n \"portEnd\": 80,\n \"protocol\": \"TCP\"\n \ + \ },\n {\n \"portStart\": 443,\n \"portEnd\": 443,\n \ + \ \"protocol\": \"TCP\"\n },\n {\n 
\"portStart\": 4000,\n \ + \ \"portEnd\": 5000,\n \"protocol\": \"TCP\"\n },\n {\n \ + \ \"portStart\": 4000,\n \"portEnd\": 6000,\n \"protocol\": \"\ + UDP\"\n }\n ],\n \"applicationSecurityGroups\": [\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg1\"\ + ,\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg2\"\ + \n ]\n }\n }\n }" + headers: + cache-control: + - no-cache + content-length: + - '1872' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 12:25:48 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - aks delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --resource-group --name --yes --no-wait + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview + response: + body: + string: '' + headers: + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/785f916d-510d-4576-ad3a-7cfe3026547c?api-version=2017-08-31 + cache-control: + - no-cache + content-length: + - '0' + date: + - Wed, 19 Oct 2022 12:25:52 GMT + expires: + - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operationresults/785f916d-510d-4576-ad3a-7cfe3026547c?api-version=2017-08-31 + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + status: + code: 202 + message: Accepted +version: 1 diff --git a/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_nodepool_update_with_nsg_control.yaml b/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_nodepool_update_with_nsg_control.yaml new file mode 100644 index 00000000000..eb953e3b387 --- /dev/null +++ b/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_nodepool_update_with_nsg_control.yaml @@ -0,0 +1,1565 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001?api-version=2021-04-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001","name":"clitest000001","type":"Microsoft.Resources/resourceGroups","location":"eastus","tags":{"product":"azurecli","cause":"automation","date":"2022-10-20T03:16:12Z"},"properties":{"provisioningState":"Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '304' + content-type: + - application/json; charset=utf-8 + date: + - Thu, 20 Oct 2022 03:16:22 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"location": "eastus"}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + Content-Length: + - '22' + Content-Type: + - application/json + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.26.0 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg1?api-version=2021-08-01 + response: + body: + string: "{\r\n \"name\": \"asg1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg1\"\ + ,\r\n \"etag\": \"W/\\\"9ab10c48-864e-4655-9182-7c3f906b6f10\\\"\",\r\n \ + \ \"type\": \"Microsoft.Network/applicationSecurityGroups\",\r\n \"location\"\ + : \"eastus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\ + \r\n }\r\n}" + headers: + azure-asyncnotification: + - Enabled + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/eastus/operations/583357d9-ccfd-48fd-a485-a72b9f2e16c5?api-version=2021-08-01 + cache-control: + - no-cache + content-length: + - '376' + content-type: + - application/json; charset=utf-8 + date: + - Thu, 20 Oct 2022 03:16:28 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-arm-service-request-id: + - 20fa877e-9521-4cc0-95fa-c6a7324aa62a + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.26.0 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/eastus/operations/583357d9-ccfd-48fd-a485-a72b9f2e16c5?api-version=2021-08-01 + response: + body: + string: "{\r\n \"status\": \"Succeeded\"\r\n}" + headers: + cache-control: + - no-cache + content-length: + - '29' + content-type: + - application/json; charset=utf-8 + date: + - Thu, 20 Oct 2022 03:16:29 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - 
chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-arm-service-request-id: + - 3aa4dbbf-9297-46c4-a197-44261fa1e4a0 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.26.0 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg1?api-version=2021-08-01 + response: + body: + string: "{\r\n \"name\": \"asg1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg1\"\ + ,\r\n \"etag\": \"W/\\\"1b114020-029a-45cb-ba95-c63d3462f521\\\"\",\r\n \ + \ \"type\": \"Microsoft.Network/applicationSecurityGroups\",\r\n \"location\"\ + : \"eastus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\ + \r\n }\r\n}" + headers: + cache-control: + - no-cache + content-length: + - '377' + content-type: + - application/json; charset=utf-8 + date: + - Thu, 20 Oct 2022 03:16:30 GMT + etag: + - W/"1b114020-029a-45cb-ba95-c63d3462f521" + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-arm-service-request-id: + - 8dab332a-70c0-4738-aae6-fb75be59ace9 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001?api-version=2021-04-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001","name":"clitest000001","type":"Microsoft.Resources/resourceGroups","location":"eastus","tags":{"product":"azurecli","cause":"automation","date":"2022-10-20T03:16:12Z"},"properties":{"provisioningState":"Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '304' + content-type: + - application/json; charset=utf-8 + date: + - Thu, 20 Oct 2022 03:16:31 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"location": "eastus"}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + Content-Length: + - '22' + Content-Type: + - application/json + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.26.0 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: PUT + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg2?api-version=2021-08-01 + response: + body: + string: "{\r\n \"name\": \"asg2\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg2\"\ + ,\r\n \"etag\": \"W/\\\"4007c132-f1c6-4e50-b846-435f2b3a4f4e\\\"\",\r\n \ + \ \"type\": \"Microsoft.Network/applicationSecurityGroups\",\r\n \"location\"\ + : \"eastus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\ + \r\n }\r\n}" + headers: + azure-asyncnotification: + - Enabled + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/eastus/operations/d75372f8-4e15-4ab9-b061-ab669e10fb2c?api-version=2021-08-01 + cache-control: + - no-cache + content-length: + - '376' + content-type: + - application/json; charset=utf-8 + date: + - Thu, 20 Oct 2022 03:16:38 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-arm-service-request-id: + - d1d447fd-4522-4a4d-95f8-a100b7e3e3fd + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.26.0 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/eastus/operations/d75372f8-4e15-4ab9-b061-ab669e10fb2c?api-version=2021-08-01 + response: + body: + string: "{\r\n \"status\": \"Succeeded\"\r\n}" + headers: + cache-control: + - no-cache + content-length: + - '29' + content-type: + - application/json; charset=utf-8 + date: + - Thu, 20 Oct 2022 03:16:39 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-arm-service-request-id: + - 8b9ecc0e-03c4-4e6a-b54b-47efc0c30209 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - network asg create + Connection: + - keep-alive + ParameterSetName: + - --name --resource-group -o + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.26.0 Python/3.8.14 (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg2?api-version=2021-08-01 + response: + body: + string: "{\r\n \"name\": \"asg2\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg2\"\ + ,\r\n \"etag\": \"W/\\\"1d65bae6-00ce-4aca-8848-9ff1a71c611f\\\"\",\r\n \ + \ \"type\": \"Microsoft.Network/applicationSecurityGroups\",\r\n \"location\"\ + : \"eastus\",\r\n \"properties\": 
{\r\n \"provisioningState\": \"Succeeded\"\ + \r\n }\r\n}" + headers: + cache-control: + - no-cache + content-length: + - '377' + content-type: + - application/json; charset=utf-8 + date: + - Thu, 20 Oct 2022 03:16:39 GMT + etag: + - W/"1d65bae6-00ce-4aca-8848-9ff1a71c611f" + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-arm-service-request-id: + - b13f7bf9-4b47-4864-af0f-e1ea30f240d1 + status: + code: 200 + message: OK +- request: + body: '{"location": "eastus", "identity": {"type": "SystemAssigned"}, "properties": + {"kubernetesVersion": "", "dnsPrefix": "cliakstest-clitestwx34v4547-8ecadf", + "agentPoolProfiles": [{"count": 1, "vmSize": "standard_d2s_v3", "osDiskSizeGB": + 0, "workloadRuntime": "OCIContainer", "osType": "Linux", "enableAutoScaling": + false, "type": "VirtualMachineScaleSets", "mode": "System", "orchestratorVersion": + "", "upgradeSettings": {}, "enableNodePublicIP": false, "enableCustomCATrust": + false, "scaleSetPriority": "Regular", "scaleSetEvictionPolicy": "Delete", "spotMaxPrice": + -1.0, "nodeTaints": [], "enableEncryptionAtHost": false, "enableUltraSSD": false, + "enableFIPS": false, "name": "n000003"}], "linuxProfile": {"adminUsername": + "azureuser", "ssh": {"publicKeys": [{"keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ== + test@example.com\n"}]}}, "addonProfiles": {}, "enableRBAC": true, "enablePodSecurityPolicy": + false, "networkProfile": {"networkPlugin": "kubenet", "podCidr": "10.244.0.0/16", + "serviceCidr": "10.0.0.0/16", "dnsServiceIP": "10.0.0.10", "dockerBridgeCidr": + "172.17.0.1/16", "outboundType": "loadBalancer", "loadBalancerSku": "standard"}, + "disableLocalAccounts": false, "storageProfile": {}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + Content-Length: + - '1884' + Content-Type: + - application/json + ParameterSetName: + - --resource-group --name --location --ssh-key-value --nodepool-name --node-count + --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002\"\ + ,\n \"location\": \"eastus\",\n \"name\": \"cliakstest000002\",\n \"type\"\ + 
: \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \ + \ \"provisioningState\": \"Creating\",\n \"powerState\": {\n \"code\"\ + : \"Running\"\n },\n \"kubernetesVersion\": \"1.23.12\",\n \"currentKubernetesVersion\"\ + : \"1.23.12\",\n \"dnsPrefix\": \"cliakstest-clitestwx34v4547-8ecadf\",\n\ + \ \"fqdn\": \"cliakstest-clitestwx34v4547-8ecadf-0bdec964.hcp.eastus.azmk8s.io\"\ + ,\n \"azurePortalFQDN\": \"cliakstest-clitestwx34v4547-8ecadf-0bdec964.portal.hcp.eastus.azmk8s.io\"\ + ,\n \"agentPoolProfiles\": [\n {\n \"name\": \"n000003\",\n \"\ + count\": 1,\n \"vmSize\": \"standard_d2s_v3\",\n \"osDiskSizeGB\"\ + : 128,\n \"osDiskType\": \"Managed\",\n \"kubeletDiskType\": \"OS\"\ + ,\n \"workloadRuntime\": \"OCIContainer\",\n \"maxPods\": 110,\n \ + \ \"type\": \"VirtualMachineScaleSets\",\n \"enableAutoScaling\": false,\n\ + \ \"provisioningState\": \"Creating\",\n \"powerState\": {\n \ + \ \"code\": \"Running\"\n },\n \"orchestratorVersion\": \"1.23.12\"\ + ,\n \"currentOrchestratorVersion\": \"1.23.12\",\n \"enableNodePublicIP\"\ + : false,\n \"enableCustomCATrust\": false,\n \"mode\": \"System\"\ + ,\n \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n\ + \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\"\ + : \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\":\ + \ {},\n \"enableFIPS\": false\n }\n ],\n \"linuxProfile\": {\n\ + \ \"adminUsername\": \"azureuser\",\n \"ssh\": {\n \"publicKeys\"\ + : [\n {\n \"keyData\": \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ==\ + \ test@example.com\\n\"\n }\n ]\n }\n },\n \"servicePrincipalProfile\"\ + : {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n },\n \ + \ \"nodeResourceGroup\": \"MC_clitest000001_cliakstest000002_eastus\",\n \ + \ \"enableRBAC\": true,\n \"enablePodSecurityPolicy\": false,\n \"networkProfile\"\ + : {\n \"networkPlugin\": \"kubenet\",\n \"loadBalancerSku\": \"standard\"\ + ,\n \"loadBalancerProfile\": {\n \"managedOutboundIPs\": {\n \"\ + count\": 1\n },\n \"backendPoolType\": \"nodeIPConfiguration\"\n \ + \ },\n \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\"\ + ,\n \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\"\ + ,\n \"outboundType\": \"loadBalancer\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\ + \n ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\"\ + : [\n \"IPv4\"\n ]\n },\n \"maxAgentPools\": 100,\n \"disableLocalAccounts\"\ + : false,\n \"securityProfile\": {},\n \"storageProfile\": {\n \"diskCSIDriver\"\ + : {\n \"enabled\": true,\n \"version\": \"v1\"\n },\n \"fileCSIDriver\"\ + : {\n \"enabled\": true\n },\n \"snapshotController\": {\n \"\ + enabled\": true\n }\n },\n \"oidcIssuerProfile\": {\n \"enabled\"\ + : false\n },\n \"workloadAutoScalerProfile\": {}\n },\n \"identity\"\ + : {\n \"type\": 
\"SystemAssigned\",\n \"principalId\":\"00000000-0000-0000-0000-000000000001\"\ + ,\n \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\"\ + : {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" + headers: + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/369c58ce-d2c9-48a0-9b5d-221a59b13652?api-version=2017-08-31 + cache-control: + - no-cache + content-length: + - '3776' + content-type: + - application/json + date: + - Thu, 20 Oct 2022 03:16:47 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1198' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --nodepool-name --node-count + --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/369c58ce-d2c9-48a0-9b5d-221a59b13652?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"ce589c36-c9d2-a048-9b5d-221a59b13652\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-20T03:16:46.7819872Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Thu, 20 Oct 2022 03:16:48 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --nodepool-name --node-count + --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/369c58ce-d2c9-48a0-9b5d-221a59b13652?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"ce589c36-c9d2-a048-9b5d-221a59b13652\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-20T03:16:46.7819872Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Thu, 20 Oct 2022 03:17:18 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --nodepool-name 
--node-count + --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/369c58ce-d2c9-48a0-9b5d-221a59b13652?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"ce589c36-c9d2-a048-9b5d-221a59b13652\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-20T03:16:46.7819872Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Thu, 20 Oct 2022 03:17:48 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --nodepool-name --node-count + --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/369c58ce-d2c9-48a0-9b5d-221a59b13652?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"ce589c36-c9d2-a048-9b5d-221a59b13652\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-20T03:16:46.7819872Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Thu, 20 Oct 2022 03:18:19 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --nodepool-name --node-count + --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/369c58ce-d2c9-48a0-9b5d-221a59b13652?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"ce589c36-c9d2-a048-9b5d-221a59b13652\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-20T03:16:46.7819872Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Thu, 20 Oct 2022 03:18:49 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - 
keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --nodepool-name --node-count + --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/369c58ce-d2c9-48a0-9b5d-221a59b13652?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"ce589c36-c9d2-a048-9b5d-221a59b13652\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-20T03:16:46.7819872Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Thu, 20 Oct 2022 03:19:20 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --nodepool-name --node-count + --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/369c58ce-d2c9-48a0-9b5d-221a59b13652?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"ce589c36-c9d2-a048-9b5d-221a59b13652\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-20T03:16:46.7819872Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Thu, 20 Oct 2022 03:19:50 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --nodepool-name --node-count + --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/369c58ce-d2c9-48a0-9b5d-221a59b13652?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"ce589c36-c9d2-a048-9b5d-221a59b13652\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-20T03:16:46.7819872Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Thu, 20 Oct 2022 03:20:21 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + 
Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --nodepool-name --node-count + --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/369c58ce-d2c9-48a0-9b5d-221a59b13652?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"ce589c36-c9d2-a048-9b5d-221a59b13652\",\n \"status\"\ + : \"Succeeded\",\n \"startTime\": \"2022-10-20T03:16:46.7819872Z\",\n \"\ + endTime\": \"2022-10-20T03:20:45.1009927Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '170' + content-type: + - application/json + date: + - Thu, 20 Oct 2022 03:20:55 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --location --ssh-key-value --nodepool-name --node-count + --node-vm-size + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002\"\ + ,\n \"location\": \"eastus\",\n \"name\": \"cliakstest000002\",\n \"type\"\ + : \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \ + \ \"provisioningState\": \"Succeeded\",\n \"powerState\": {\n \"code\"\ + : \"Running\"\n },\n \"kubernetesVersion\": \"1.23.12\",\n \"currentKubernetesVersion\"\ + : \"1.23.12\",\n \"dnsPrefix\": \"cliakstest-clitestwx34v4547-8ecadf\",\n\ + \ \"fqdn\": \"cliakstest-clitestwx34v4547-8ecadf-0bdec964.hcp.eastus.azmk8s.io\"\ + ,\n \"azurePortalFQDN\": \"cliakstest-clitestwx34v4547-8ecadf-0bdec964.portal.hcp.eastus.azmk8s.io\"\ + ,\n \"agentPoolProfiles\": [\n {\n \"name\": \"n000003\",\n \"\ + count\": 1,\n \"vmSize\": \"standard_d2s_v3\",\n \"osDiskSizeGB\"\ + : 128,\n \"osDiskType\": \"Managed\",\n \"kubeletDiskType\": \"OS\"\ + ,\n \"workloadRuntime\": \"OCIContainer\",\n \"maxPods\": 110,\n \ + \ \"type\": \"VirtualMachineScaleSets\",\n \"enableAutoScaling\": false,\n\ + \ \"provisioningState\": \"Succeeded\",\n \"powerState\": {\n \ + \ \"code\": \"Running\"\n },\n \"orchestratorVersion\": \"1.23.12\"\ + ,\n \"currentOrchestratorVersion\": \"1.23.12\",\n \"enableNodePublicIP\"\ + : false,\n \"enableCustomCATrust\": false,\n \"mode\": \"System\"\ + ,\n \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n\ + \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\"\ + : \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\":\ + \ {},\n \"enableFIPS\": false\n }\n ],\n 
\"linuxProfile\": {\n\ + \ \"adminUsername\": \"azureuser\",\n \"ssh\": {\n \"publicKeys\"\ + : [\n {\n \"keyData\": \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ==\ + \ test@example.com\\n\"\n }\n ]\n }\n },\n \"servicePrincipalProfile\"\ + : {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n },\n \ + \ \"nodeResourceGroup\": \"MC_clitest000001_cliakstest000002_eastus\",\n \ + \ \"enableRBAC\": true,\n \"enablePodSecurityPolicy\": false,\n \"networkProfile\"\ + : {\n \"networkPlugin\": \"kubenet\",\n \"loadBalancerSku\": \"Standard\"\ + ,\n \"loadBalancerProfile\": {\n \"managedOutboundIPs\": {\n \"\ + count\": 1\n },\n \"effectiveOutboundIPs\": [\n {\n \"\ + id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_eastus/providers/Microsoft.Network/publicIPAddresses/8b0f444b-6758-42ed-8a77-8f336e2aba5d\"\ + \n }\n ],\n \"backendPoolType\": \"nodeIPConfiguration\"\n \ + \ },\n \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\"\ + ,\n \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\"\ + ,\n \"outboundType\": \"loadBalancer\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\ + \n ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\"\ + : [\n \"IPv4\"\n ]\n },\n \"maxAgentPools\": 100,\n \"identityProfile\"\ + : {\n \"kubeletidentity\": {\n \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_clitest000001_cliakstest000002_eastus/providers/Microsoft.ManagedIdentity/userAssignedIdentities/cliakstest000002-agentpool\"\ + ,\n \"clientId\":\"00000000-0000-0000-0000-000000000001\",\n \"objectId\"\ + :\"00000000-0000-0000-0000-000000000001\"\n }\n },\n \"disableLocalAccounts\"\ + : false,\n \"securityProfile\": {},\n \"storageProfile\": {\n \"diskCSIDriver\"\ + : {\n \"enabled\": true,\n \"version\": \"v1\"\n },\n \"fileCSIDriver\"\ + : {\n \"enabled\": true\n },\n \"snapshotController\": {\n \"\ + enabled\": true\n }\n },\n \"oidcIssuerProfile\": {\n \"enabled\"\ + : false\n },\n \"workloadAutoScalerProfile\": {}\n },\n \"identity\"\ + : {\n \"type\": \"SystemAssigned\",\n \"principalId\":\"00000000-0000-0000-0000-000000000001\"\ + ,\n \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\"\ + : {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" + headers: + cache-control: + - no-cache + content-length: + - '4427' + content-type: + - application/json + date: + - Thu, 20 Oct 2022 03:20:56 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - aks 
nodepool update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --cluster-name --name --asg-ids --allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002/agentPools/n000003?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002/agentPools/n000003\"\ + ,\n \"name\": \"n000003\",\n \"type\": \"Microsoft.ContainerService/managedClusters/agentPools\"\ + ,\n \"properties\": {\n \"count\": 1,\n \"vmSize\": \"standard_d2s_v3\"\ + ,\n \"osDiskSizeGB\": 128,\n \"osDiskType\": \"Managed\",\n \"kubeletDiskType\"\ + : \"OS\",\n \"workloadRuntime\": \"OCIContainer\",\n \"maxPods\": 110,\n\ + \ \"type\": \"VirtualMachineScaleSets\",\n \"enableAutoScaling\": false,\n\ + \ \"provisioningState\": \"Succeeded\",\n \"powerState\": {\n \"code\"\ + : \"Running\"\n },\n \"orchestratorVersion\": \"1.23.12\",\n \"currentOrchestratorVersion\"\ + : \"1.23.12\",\n \"enableNodePublicIP\": false,\n \"enableCustomCATrust\"\ + : false,\n \"mode\": \"System\",\n \"enableEncryptionAtHost\": false,\n\ + \ \"enableUltraSSD\": false,\n \"osType\": \"Linux\",\n \"osSKU\": \"\ + Ubuntu\",\n \"nodeImageVersion\": \"AKSUbuntu-1804gen2containerd-2022.10.03\"\ + ,\n \"upgradeSettings\": {},\n \"enableFIPS\": false\n }\n }" + headers: + cache-control: + - no-cache + content-length: + - '1022' + content-type: + - application/json + date: + - Thu, 20 Oct 2022 03:20:59 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"properties": {"count": 1, "vmSize": "standard_d2s_v3", "osDiskSizeGB": + 128, "osDiskType": "Managed", "kubeletDiskType": "OS", "workloadRuntime": "OCIContainer", + "maxPods": 110, "osType": "Linux", "osSKU": "Ubuntu", "enableAutoScaling": false, + "type": "VirtualMachineScaleSets", "mode": "System", "orchestratorVersion": + "1.23.12", "upgradeSettings": {}, "powerState": {"code": "Running"}, "enableNodePublicIP": + false, "enableCustomCATrust": false, "enableEncryptionAtHost": false, "enableUltraSSD": + false, "enableFIPS": false, "networkProfile": {"allowedHostPorts": [{"portStart": + 53, "portEnd": 53, "protocol": "UDP"}, {"portStart": 80, "portEnd": 80, "protocol": + "TCP"}, {"portStart": 443, "portEnd": 443, "protocol": "TCP"}, {"portStart": + 4000, "portEnd": 5000, "protocol": "TCP"}, {"portStart": 4000, "portEnd": 6000, + "protocol": "UDP"}], "applicationSecurityGroups": ["/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg1", + "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg2"]}}}' + headers: + AKSHTTPCustomFeatures: + - Microsoft.ContainerService/NodePublicIPNSGControlPreview + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - aks nodepool update + Connection: + - 
keep-alive + Content-Length: + - '1162' + Content-Type: + - application/json + ParameterSetName: + - --resource-group --cluster-name --name --asg-ids --allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002/agentPools/n000003?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002/agentPools/n000003\"\ + ,\n \"name\": \"n000003\",\n \"type\": \"Microsoft.ContainerService/managedClusters/agentPools\"\ + ,\n \"properties\": {\n \"count\": 1,\n \"vmSize\": \"standard_d2s_v3\"\ + ,\n \"osDiskSizeGB\": 128,\n \"osDiskType\": \"Managed\",\n \"kubeletDiskType\"\ + : \"OS\",\n \"workloadRuntime\": \"OCIContainer\",\n \"maxPods\": 110,\n\ + \ \"type\": \"VirtualMachineScaleSets\",\n \"enableAutoScaling\": false,\n\ + \ \"provisioningState\": \"Updating\",\n \"powerState\": {\n \"code\"\ + : \"Running\"\n },\n \"orchestratorVersion\": \"1.23.12\",\n \"currentOrchestratorVersion\"\ + : \"1.23.12\",\n \"enableNodePublicIP\": false,\n \"enableCustomCATrust\"\ + : false,\n \"mode\": \"System\",\n \"enableEncryptionAtHost\": false,\n\ + \ \"enableUltraSSD\": false,\n \"osType\": \"Linux\",\n \"osSKU\": \"\ + Ubuntu\",\n \"nodeImageVersion\": \"AKSUbuntu-1804gen2containerd-2022.10.03\"\ + ,\n \"upgradeSettings\": {},\n \"enableFIPS\": false,\n \"networkProfile\"\ + : {\n \"allowedHostPorts\": [\n {\n \"portStart\": 53,\n \ + \ \"portEnd\": 53,\n \"protocol\": \"UDP\"\n },\n {\n \"\ + portStart\": 80,\n \"portEnd\": 80,\n \"protocol\": \"TCP\"\n \ + \ },\n {\n \"portStart\": 443,\n \"portEnd\": 443,\n \ + \ \"protocol\": \"TCP\"\n },\n {\n \"portStart\": 4000,\n \ + \ \"portEnd\": 5000,\n \"protocol\": \"TCP\"\n },\n {\n \ + \ \"portStart\": 4000,\n \"portEnd\": 6000,\n \"protocol\": \"\ + UDP\"\n }\n ],\n \"applicationSecurityGroups\": [\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg1\"\ + ,\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg2\"\ + \n ]\n }\n }\n }" + headers: + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/c6398ddf-c42a-44d9-aea5-1cc25efa8364?api-version=2017-08-31 + cache-control: + - no-cache + content-length: + - '1843' + content-type: + - application/json + date: + - Thu, 20 Oct 2022 03:21:02 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks nodepool update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --cluster-name --name --asg-ids --allowed-host-ports --aks-custom-headers + User-Agent: + 
- AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/c6398ddf-c42a-44d9-aea5-1cc25efa8364?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"df8d39c6-2ac4-d944-aea5-1cc25efa8364\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-20T03:21:02.7976294Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Thu, 20 Oct 2022 03:21:02 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks nodepool update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --cluster-name --name --asg-ids --allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/c6398ddf-c42a-44d9-aea5-1cc25efa8364?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"df8d39c6-2ac4-d944-aea5-1cc25efa8364\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-20T03:21:02.7976294Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Thu, 20 Oct 2022 03:21:33 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks nodepool update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --cluster-name --name --asg-ids --allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/c6398ddf-c42a-44d9-aea5-1cc25efa8364?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"df8d39c6-2ac4-d944-aea5-1cc25efa8364\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-20T03:21:02.7976294Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Thu, 20 Oct 2022 03:22:04 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks nodepool update + Connection: + - keep-alive + ParameterSetName: + 
- --resource-group --cluster-name --name --asg-ids --allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/c6398ddf-c42a-44d9-aea5-1cc25efa8364?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"df8d39c6-2ac4-d944-aea5-1cc25efa8364\",\n \"status\"\ + : \"InProgress\",\n \"startTime\": \"2022-10-20T03:21:02.7976294Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Thu, 20 Oct 2022 03:22:34 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks nodepool update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --cluster-name --name --asg-ids --allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/c6398ddf-c42a-44d9-aea5-1cc25efa8364?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"df8d39c6-2ac4-d944-aea5-1cc25efa8364\",\n \"status\"\ + : \"Succeeded\",\n \"startTime\": \"2022-10-20T03:21:02.7976294Z\",\n \"\ + endTime\": \"2022-10-20T03:22:48.0298517Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '170' + content-type: + - application/json + date: + - Thu, 20 Oct 2022 03:23:05 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks nodepool update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --cluster-name --name --asg-ids --allowed-host-ports --aks-custom-headers + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002/agentPools/n000003?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002/agentPools/n000003\"\ + ,\n \"name\": \"n000003\",\n \"type\": \"Microsoft.ContainerService/managedClusters/agentPools\"\ + ,\n \"properties\": {\n \"count\": 1,\n \"vmSize\": \"standard_d2s_v3\"\ + ,\n \"osDiskSizeGB\": 128,\n \"osDiskType\": \"Managed\",\n \"kubeletDiskType\"\ + : \"OS\",\n \"workloadRuntime\": \"OCIContainer\",\n \"maxPods\": 110,\n\ + \ \"type\": 
\"VirtualMachineScaleSets\",\n \"enableAutoScaling\": false,\n\ + \ \"provisioningState\": \"Succeeded\",\n \"powerState\": {\n \"code\"\ + : \"Running\"\n },\n \"orchestratorVersion\": \"1.23.12\",\n \"currentOrchestratorVersion\"\ + : \"1.23.12\",\n \"enableNodePublicIP\": false,\n \"enableCustomCATrust\"\ + : false,\n \"mode\": \"System\",\n \"enableEncryptionAtHost\": false,\n\ + \ \"enableUltraSSD\": false,\n \"osType\": \"Linux\",\n \"osSKU\": \"\ + Ubuntu\",\n \"nodeImageVersion\": \"AKSUbuntu-1804gen2containerd-2022.10.03\"\ + ,\n \"upgradeSettings\": {},\n \"enableFIPS\": false,\n \"networkProfile\"\ + : {\n \"allowedHostPorts\": [\n {\n \"portStart\": 53,\n \ + \ \"portEnd\": 53,\n \"protocol\": \"UDP\"\n },\n {\n \"\ + portStart\": 80,\n \"portEnd\": 80,\n \"protocol\": \"TCP\"\n \ + \ },\n {\n \"portStart\": 443,\n \"portEnd\": 443,\n \ + \ \"protocol\": \"TCP\"\n },\n {\n \"portStart\": 4000,\n \ + \ \"portEnd\": 5000,\n \"protocol\": \"TCP\"\n },\n {\n \ + \ \"portStart\": 4000,\n \"portEnd\": 6000,\n \"protocol\": \"\ + UDP\"\n }\n ],\n \"applicationSecurityGroups\": [\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg1\"\ + ,\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Network/applicationSecurityGroups/asg2\"\ + \n ]\n }\n }\n }" + headers: + cache-control: + - no-cache + content-length: + - '1844' + content-type: + - application/json + date: + - Thu, 20 Oct 2022 03:23:06 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - aks delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --resource-group --name --yes --no-wait + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.14 + (macOS-12.6-arm64-arm-64bit) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview + response: + body: + string: '' + headers: + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operations/797709ee-f807-4373-a210-902ee579dc11?api-version=2017-08-31 + cache-control: + - no-cache + content-length: + - '0' + date: + - Thu, 20 Oct 2022 03:23:10 GMT + expires: + - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/eastus/operationresults/797709ee-f807-4373-a210-902ee579dc11?api-version=2017-08-31 + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + status: + code: 202 + message: Accepted +version: 1 diff --git a/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py b/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py index 9706c25b871..e8f5e58c17e 100644 --- 
a/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py +++ b/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py @@ -5899,3 +5899,175 @@ def test_aks_create_with_kube_proxy_config(self, resource_group, resource_group_ # delete self.cmd( 'aks delete -g {resource_group} -n {name} --yes --no-wait', checks=[self.is_empty()]) + + @AllowLargeResponse() + @AKSCustomResourceGroupPreparer(random_name_length=17, name_prefix='clitest', location='eastus') + def test_aks_create_with_nsg_control(self, resource_group, resource_group_location): + aks_name = self.create_random_name('cliakstest', 16) + nodepool_name = self.create_random_name('n', 6) + + self.kwargs.update({ + 'resource_group': resource_group, + 'name': aks_name, + 'location': resource_group_location, + 'ssh_key_value': self.generate_ssh_keys(), + 'node_vm_size': 'standard_d2s_v3', + 'asg1': 'asg1', + 'asg2': 'asg2', + }) + + create_asg1 = 'network asg create --name {asg1} --resource-group {resource_group} -o json' + create_asg2 = 'network asg create --name {asg2} --resource-group {resource_group} -o json' + asg1 = self.cmd(create_asg1, checks=[self.check('provisioningState', 'Succeeded')]).get_output_in_json() + asg2 = self.cmd(create_asg2, checks=[self.check('provisioningState', 'Succeeded')]).get_output_in_json() + + self.kwargs.update({ + 'asg_ids': ','.join([asg1['id'], asg2['id']]), + 'allowed_host_ports': ','.join(['53/udp', '80/tcp', '443/tcp', '4000-5000/tcp', '4000-6000/udp']), + }) + self.cmd( + 'aks create ' + '--resource-group={resource_group} ' + '--name={name} ' + '--location={location} ' + '--ssh-key-value={ssh_key_value} ' + '--node-count=1 ' + '--node-vm-size={node_vm_size} ' + '--nodepool-asg-ids={asg_ids} ' + '--nodepool-allowed-host-ports={allowed_host_ports} ' + '--aks-custom-headers AKSHTTPCustomFeatures=Microsoft.ContainerService/NodePublicIPNSGControlPreview', + checks=[ + self.check('provisioningState', 'Succeeded'), + self.check('agentPoolProfiles[0].networkProfile.applicationSecurityGroups', self.kwargs['asg_ids'].split(',')), + self.check('agentPoolProfiles[0].networkProfile.allowedHostPorts[] | length(@)', len(self.kwargs['allowed_host_ports'].split(','))), + ], + ) + + # delete + cmd = 'aks delete --resource-group={resource_group} --name={name} --yes --no-wait' + self.cmd(cmd, checks=[ + self.is_empty(), + ]) + + @AllowLargeResponse() + @AKSCustomResourceGroupPreparer(random_name_length=17, name_prefix='clitest', location='eastus') + def test_aks_nodepool_create_with_nsg_control(self, resource_group, resource_group_location): + aks_name = self.create_random_name('cliakstest', 16) + nodepool_name = self.create_random_name('n', 6) + + self.kwargs.update({ + 'resource_group': resource_group, + 'name': aks_name, + 'location': resource_group_location, + 'ssh_key_value': self.generate_ssh_keys(), + 'node_pool_name': nodepool_name, + 'node_vm_size': 'standard_d2s_v3', + 'asg1': 'asg1', + 'asg2': 'asg2', + }) + create_asg1 = 'network asg create --name {asg1} --resource-group {resource_group} -o json' + create_asg2 = 'network asg create --name {asg2} --resource-group {resource_group} -o json' + asg1 = self.cmd(create_asg1, checks=[self.check('provisioningState', 'Succeeded')]).get_output_in_json() + asg2 = self.cmd(create_asg2, checks=[self.check('provisioningState', 'Succeeded')]).get_output_in_json() + + self.kwargs.update({ + 'asg_ids': ','.join([asg1['id'], asg2['id']]), + 'allowed_host_ports': ','.join(['53/udp', '80/tcp', '443/tcp', '4000-5000/tcp', '4000-6000/udp']), + }) + + 
self.cmd( + 'aks create ' + '--resource-group={resource_group} ' + '--name={name} ' + '--location={location} ' + '--ssh-key-value={ssh_key_value} ' + '--node-count=1 ' + '--node-vm-size={node_vm_size} ', + checks=[ + self.check('provisioningState', 'Succeeded'), + ], + ) + + self.cmd( + 'aks nodepool add ' + '--resource-group={resource_group} ' + '--cluster-name={name} ' + '--name={node_pool_name} ' + '--node-vm-size={node_vm_size} ' + '--node-count=1 ' + '--asg-ids={asg_ids} ' + '--allowed-host-ports={allowed_host_ports} ' + '--aks-custom-headers AKSHTTPCustomFeatures=Microsoft.ContainerService/NodePublicIPNSGControlPreview', + checks=[ + self.check('provisioningState', 'Succeeded'), + self.check('networkProfile.applicationSecurityGroups', self.kwargs['asg_ids'].split(',')), + self.check('networkProfile.allowedHostPorts[] | length(@)', len(self.kwargs['allowed_host_ports'].split(','))), + ], + ) + + # delete + cmd = 'aks delete --resource-group={resource_group} --name={name} --yes --no-wait' + self.cmd(cmd, checks=[ + self.is_empty(), + ]) + + @AllowLargeResponse() + @AKSCustomResourceGroupPreparer(random_name_length=17, name_prefix='clitest', location='eastus') + def test_aks_nodepool_update_with_nsg_control(self, resource_group, resource_group_location): + aks_name = self.create_random_name('cliakstest', 16) + nodepool_name = self.create_random_name('n', 6) + + self.kwargs.update({ + 'resource_group': resource_group, + 'name': aks_name, + 'location': resource_group_location, + 'ssh_key_value': self.generate_ssh_keys(), + 'node_pool_name': nodepool_name, + 'node_vm_size': 'standard_d2s_v3', + 'asg1': 'asg1', + 'asg2': 'asg2', + }) + create_asg1 = 'network asg create --name {asg1} --resource-group {resource_group} -o json' + create_asg2 = 'network asg create --name {asg2} --resource-group {resource_group} -o json' + asg1 = self.cmd(create_asg1, checks=[self.check('provisioningState', 'Succeeded')]).get_output_in_json() + asg2 = self.cmd(create_asg2, checks=[self.check('provisioningState', 'Succeeded')]).get_output_in_json() + + self.kwargs.update({ + 'asg_ids': ','.join([asg1['id'], asg2['id']]), + 'allowed_host_ports': ','.join(['53/udp', '80/tcp', '443/tcp', '4000-5000/tcp', '4000-6000/udp']), + }) + + self.cmd( + 'aks create ' + '--resource-group={resource_group} ' + '--name={name} ' + '--location={location} ' + '--ssh-key-value={ssh_key_value} ' + '--nodepool-name={node_pool_name} ' + '--node-count=1 ' + '--node-vm-size={node_vm_size} ', + checks=[ + self.check('provisioningState', 'Succeeded'), + ], + ) + + self.cmd( + 'aks nodepool update ' + '--resource-group={resource_group} ' + '--cluster-name={name} ' + '--name={node_pool_name} ' + '--asg-ids={asg_ids} ' + '--allowed-host-ports={allowed_host_ports} ' + '--aks-custom-headers AKSHTTPCustomFeatures=Microsoft.ContainerService/NodePublicIPNSGControlPreview', + checks=[ + self.check('provisioningState', 'Succeeded'), + self.check('networkProfile.applicationSecurityGroups', self.kwargs['asg_ids'].split(',')), + self.check('networkProfile.allowedHostPorts[] | length(@)', len(self.kwargs['allowed_host_ports'].split(','))), + ], + ) + + # delete + cmd = 'aks delete --resource-group={resource_group} --name={name} --yes --no-wait' + self.cmd(cmd, checks=[ + self.is_empty(), + ]) diff --git a/src/aks-preview/azext_aks_preview/tests/latest/test_validators.py b/src/aks-preview/azext_aks_preview/tests/latest/test_validators.py index 793d0edd76b..8baf3c3ff4e 100644 --- a/src/aks-preview/azext_aks_preview/tests/latest/test_validators.py +++ 
b/src/aks-preview/azext_aks_preview/tests/latest/test_validators.py @@ -519,5 +519,65 @@ def test_valid_agent_pool_name(self): ) +class TestValidateAllowedHostPorts(unittest.TestCase): + def test_invalid_allowed_host_ports(self): + namespace = SimpleNamespace( + **{ + "allowed_host_ports": "80,443,8080", + } + ) + with self.assertRaises(InvalidArgumentValueError): + validators.validate_allowed_host_ports( + namespace + ) + + def test_valid_allowed_host_ports(self): + namespace = SimpleNamespace( + **{ + "allowed_host_ports": "80/tcp,443/tcp,8080-8090/tcp,53/udp", + } + ) + validators.validate_allowed_host_ports( + namespace + ) + + +class TestValidateApplicationSecurityGroups(unittest.TestCase): + def test_invalid_application_security_groups(self): + namespace = SimpleNamespace( + **{ + "asg_ids": "invalid", + } + ) + with self.assertRaises(InvalidArgumentValueError): + validators.validate_application_security_groups( + namespace + ) + + def test_empty_application_security_groups(self): + namespace = SimpleNamespace( + **{ + "asg_ids": "", + } + ) + validators.validate_application_security_groups( + namespace + ) + + def test_multiple_application_security_groups(self): + asg_ids = ','.join([ + "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg1/providers/Microsoft.Network/applicationSecurityGroups/asg1", + "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg2/providers/Microsoft.Network/applicationSecurityGroups/asg2", + ]) + namespace = SimpleNamespace( + **{ + "asg_ids": asg_ids, + } + ) + validators.validate_application_security_groups( + namespace + ) + + if __name__ == "__main__": unittest.main() diff --git a/src/aks-preview/linter_exclusions.yml b/src/aks-preview/linter_exclusions.yml index 23d355bb8e4..941f8a15914 100644 --- a/src/aks-preview/linter_exclusions.yml +++ b/src/aks-preview/linter_exclusions.yml @@ -39,6 +39,9 @@ aks create: enable_cilium_dataplane: rule_exclusions: - option_length_too_long + nodepool_allowed_host_ports: + rule_exclusions: + - option_length_too_long aks delete: parameters: ignore_pod_disruption_budget: diff --git a/src/aks-preview/setup.py b/src/aks-preview/setup.py index 632cd760c82..043401d1547 100644 --- a/src/aks-preview/setup.py +++ b/src/aks-preview/setup.py @@ -9,7 +9,7 @@ from setuptools import setup, find_packages -VERSION = "0.5.109" +VERSION = "0.5.110" CLASSIFIERS = [ "Development Status :: 4 - Beta", From 55d78ec87101cdb7ffd2139f5a04ed0e251dad9d Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Thu, 20 Oct 2022 06:27:31 +0000 Subject: [PATCH 07/85] [Release] Update index.json for extension [ aks-preview ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=10100&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/74c32fe50dffc53efe99c1e08862c6398a469327 --- src/index.json | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/src/index.json b/src/index.json index bccc9d62707..a07b0e1ab39 100644 --- a/src/index.json +++ b/src/index.json @@ -6868,6 +6868,49 @@ "version": "0.5.109" }, "sha256Digest": "b17d2536b555d6f6f3381a11003d284aa4cd29906975ab3fe47d40dc39b8aaa0" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/aks_preview-0.5.110-py2.py3-none-any.whl", + "filename": "aks_preview-0.5.110-py2.py3-none-any.whl", + "metadata": { + "azext.isPreview": true, + "azext.minCliCoreVersion": "2.38.0", + "classifiers": [ + 
"Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/aks-preview" + } + } + }, + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "aks-preview", + "summary": "Provides a preview for upcoming AKS features", + "version": "0.5.110" + }, + "sha256Digest": "eb0922cd4f404db8fc064c315ee86bdb750b9389e0a2513d04fa535855b410bb" } ], "alertsmanagement": [ From 0a308b9a3ab581f2e4a2d8b70f233adee11f1afe Mon Sep 17 00:00:00 2001 From: akashkeshari Date: Thu, 20 Oct 2022 12:34:58 +0530 Subject: [PATCH 08/85] Adding Software Assurance support on connected clusters for AKS HCI (#5452) --- .github/CODEOWNERS | 2 +- src/connectedk8s/HISTORY.rst | 7 + .../azext_connectedk8s/_client_factory.py | 8 +- .../azext_connectedk8s/_constants.py | 4 +- .../azext_connectedk8s/_params.py | 11 +- .../azext_connectedk8s/_troubleshootutils.py | 38 +- src/connectedk8s/azext_connectedk8s/_utils.py | 17 +- .../azext_connectedk8s/azext_metadata.json | 2 +- .../azext_connectedk8s/commands.py | 8 +- src/connectedk8s/azext_connectedk8s/custom.py | 209 +- .../latest/recordings/test_connectedk8s.yaml | 2890 ++++++++++++----- .../latest/recordings/test_forcedelete.yaml | 1251 ++++--- .../latest/test_connectedk8s_scenario.py | 19 +- .../preview_2022_05_01/_patch.py | 31 - .../preview_2022_05_01/models/__init__.py | 63 - .../preview_2022_05_01/models/_models.py | 671 ---- .../_connected_cluster_operations.py | 911 ------ .../operations/_operations.py | 144 - .../__init__.py | 16 +- .../_configuration.py | 46 +- .../_connected_kubernetes_client.py | 31 +- .../preview_2022_10_01/_patch.py | 20 + .../preview_2022_10_01/_serialization.py | 1970 +++++++++++ .../_vendor.py | 6 +- .../_version.py | 0 .../preview_2022_10_01/models/__init__.py | 67 + .../_connected_kubernetes_client_enums.py | 47 +- .../models/_models_py3.py | 570 ++-- .../preview_2022_10_01/models/_patch.py | 20 + .../operations/__init__.py | 10 +- .../_connected_cluster_operations.py | 1141 +++++++ .../operations/_operations.py | 147 + .../preview_2022_10_01/operations/_patch.py | 20 + src/connectedk8s/setup.py | 4 +- 34 files changed, 6561 insertions(+), 3840 deletions(-) delete mode 100644 src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/_patch.py delete mode 100644 src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/models/__init__.py delete mode 100644 src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/models/_models.py delete mode 100644 src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/operations/_connected_cluster_operations.py delete mode 100644 src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/operations/_operations.py rename src/connectedk8s/azext_connectedk8s/vendored_sdks/{preview_2022_05_01 => preview_2022_10_01}/__init__.py (63%) rename 
src/connectedk8s/azext_connectedk8s/vendored_sdks/{preview_2022_05_01 => preview_2022_10_01}/_configuration.py (60%) rename src/connectedk8s/azext_connectedk8s/vendored_sdks/{preview_2022_05_01 => preview_2022_10_01}/_connected_kubernetes_client.py (81%) create mode 100644 src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/_patch.py create mode 100644 src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/_serialization.py rename src/connectedk8s/azext_connectedk8s/vendored_sdks/{preview_2022_05_01 => preview_2022_10_01}/_vendor.py (89%) rename src/connectedk8s/azext_connectedk8s/vendored_sdks/{preview_2022_05_01 => preview_2022_10_01}/_version.py (100%) create mode 100644 src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/models/__init__.py rename src/connectedk8s/azext_connectedk8s/vendored_sdks/{preview_2022_05_01 => preview_2022_10_01}/models/_connected_kubernetes_client_enums.py (61%) rename src/connectedk8s/azext_connectedk8s/vendored_sdks/{preview_2022_05_01 => preview_2022_10_01}/models/_models_py3.py (53%) create mode 100644 src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/models/_patch.py rename src/connectedk8s/azext_connectedk8s/vendored_sdks/{preview_2022_05_01 => preview_2022_10_01}/operations/__init__.py (66%) create mode 100644 src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/operations/_connected_cluster_operations.py create mode 100644 src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/operations/_operations.py create mode 100644 src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/operations/_patch.py diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 96fdec88f20..8c8e419cd18 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -82,7 +82,7 @@ /src/ip-group/ @necusjz @kairu-ms @jsntcy -/src/connectedk8s/ @akashkeshari @alphaWizard +/src/connectedk8s/ @akashkeshari @sirireddy12 /src/storagesync/ @jsntcy diff --git a/src/connectedk8s/HISTORY.rst b/src/connectedk8s/HISTORY.rst index 55a95ef31ec..6bb955a3914 100644 --- a/src/connectedk8s/HISTORY.rst +++ b/src/connectedk8s/HISTORY.rst @@ -2,6 +2,13 @@ Release History =============== +1.3.5 +++++++ + +* Added software assurance related changes for AKS HCI +* Added parameter for overriding container log path +* Updated kubernetes package dependency to 24.2.0 + 1.3.4 ++++++ diff --git a/src/connectedk8s/azext_connectedk8s/_client_factory.py b/src/connectedk8s/azext_connectedk8s/_client_factory.py index 9a86b9d5ad9..8dade95060b 100644 --- a/src/connectedk8s/azext_connectedk8s/_client_factory.py +++ b/src/connectedk8s/azext_connectedk8s/_client_factory.py @@ -19,13 +19,13 @@ def cf_connected_cluster(cli_ctx, _): return cf_connectedk8s(cli_ctx).connected_cluster -def cf_connectedk8s_prev_2022_05_01(cli_ctx, *_): - from azext_connectedk8s.vendored_sdks.preview_2022_05_01 import ConnectedKubernetesClient +def cf_connectedk8s_prev_2022_10_01(cli_ctx, *_): + from azext_connectedk8s.vendored_sdks.preview_2022_10_01 import ConnectedKubernetesClient return get_mgmt_service_client(cli_ctx, ConnectedKubernetesClient) -def cf_connected_cluster_prev_2022_05_01(cli_ctx, _): - return cf_connectedk8s_prev_2022_05_01(cli_ctx).connected_cluster +def cf_connected_cluster_prev_2022_10_01(cli_ctx, _): + return cf_connectedk8s_prev_2022_10_01(cli_ctx).connected_cluster def cf_connectedmachine(cli_ctx, subscription_id): diff --git a/src/connectedk8s/azext_connectedk8s/_constants.py 
b/src/connectedk8s/azext_connectedk8s/_constants.py index 1c1ef969320..75fe471ed81 100644 --- a/src/connectedk8s/azext_connectedk8s/_constants.py +++ b/src/connectedk8s/azext_connectedk8s/_constants.py @@ -6,8 +6,9 @@ # pylint: disable=line-too-long -Distribution_Enum_Values = ["auto", "generic", "openshift", "rancher_rke", "kind", "k3s", "minikube", "gke", "eks", "aks", "aks_management", "aks_workload", "capz", "aks_engine", "tkg"] +Distribution_Enum_Values = ["auto", "generic", "openshift", "rancher_rke", "kind", "k3s", "minikube", "gke", "eks", "aks", "aks_management", "aks_workload", "capz", "aks_engine", "tkg", "canonical", "karbon"] Infrastructure_Enum_Values = ["auto", "generic", "azure", "aws", "gcp", "azure_stack_hci", "azure_stack_hub", "azure_stack_edge", "vsphere", "windows_server"] +AHB_Enum_Values = ["True", "False", "NotApplicable"] Feature_Values = ["cluster-connect", "azure-rbac", "custom-locations"] CRD_FOR_FORCE_DELETE = ["arccertificates.clusterconfig.azure.com", "azureclusteridentityrequests.clusterconfig.azure.com", "azureextensionidentities.clusterconfig.azure.com", "connectedclusters.arc.azure.com", "customlocationsettings.clusterconfig.azure.com", "extensionconfigs.clusterconfig.azure.com", "gitconfigs.clusterconfig.azure.com"] Custom_Locations_Provider_Namespace = 'Microsoft.ExtendedLocation' @@ -57,7 +58,6 @@ Get_HelmRegistery_Path_Fault_Type = 'helm-registry-path-fetch-error' Pull_HelmChart_Fault_Type = 'helm-chart-pull-error' Export_HelmChart_Fault_Type = 'helm-chart-export-error' -Get_Kubernetes_Version_Fault_Type = 'kubernetes-get-version-error' Get_Kubernetes_Distro_Fault_Type = 'kubernetes-get-distribution-error' Get_Kubernetes_Namespace_Fault_Type = 'kubernetes-get-namespace-error' Update_Agent_Success = 'Agents for Connected Cluster {} have been updated successfully' diff --git a/src/connectedk8s/azext_connectedk8s/_params.py b/src/connectedk8s/azext_connectedk8s/_params.py index 4aeaa5a2c87..c1dd552fa30 100644 --- a/src/connectedk8s/azext_connectedk8s/_params.py +++ b/src/connectedk8s/azext_connectedk8s/_params.py @@ -9,7 +9,7 @@ from argcomplete.completers import FilesCompleter from azure.cli.core.commands.parameters import get_location_type, get_enum_type, file_type, tags_type, get_three_state_flag from azure.cli.core.commands.validators import get_default_location_from_resource_group -from azext_connectedk8s._constants import Distribution_Enum_Values, Infrastructure_Enum_Values, Feature_Values +from azext_connectedk8s._constants import Distribution_Enum_Values, Infrastructure_Enum_Values, Feature_Values, AHB_Enum_Values from knack.arguments import (CLIArgumentType, CaseInsensitiveList) from._validators import validate_private_link_properties @@ -35,7 +35,9 @@ def load_arguments(self, _): c.argument('no_proxy', options_list=['--proxy-skip-range'], arg_group='Proxy', help='List of URLs/CIDRs for which proxy should not to be used.') c.argument('proxy_cert', options_list=['--proxy-cert', '--custom-ca-cert'], arg_group='Proxy', type=file_type, completer=FilesCompleter(), help='Path to the certificate file for proxy or custom Certificate Authority') c.argument('distribution', options_list=['--distribution'], help='The Kubernetes distribution which will be running on this connected cluster.', arg_type=get_enum_type(Distribution_Enum_Values)) + c.argument('distribution_version', help='The Kubernetes distribution version of the connected cluster.') c.argument('infrastructure', options_list=['--infrastructure'], help='The infrastructure on which the Kubernetes 
cluster represented by this connected cluster will be running on.', arg_type=get_enum_type(Infrastructure_Enum_Values)) + c.argument('azure_hybrid_benefit', help='Flag to enable/disable Azure Hybrid Benefit feature.', arg_type=get_enum_type(AHB_Enum_Values)) c.argument('disable_auto_upgrade', options_list=['--disable-auto-upgrade'], action='store_true', help='Flag to disable auto upgrade of arc agents.') c.argument('cl_oid', options_list=['--custom-locations-oid'], help="OID of 'custom-locations' app") c.argument('enable_private_link', arg_type=get_three_state_flag(), arg_group='PrivateLink', help='Flag to enable/disable private link support on a connected cluster resource. Allowed values: false, true.', is_preview=True, validator=validate_private_link_properties) @@ -43,6 +45,8 @@ def load_arguments(self, _): c.argument('onboarding_timeout', options_list=['--onboarding-timeout'], arg_group='Timeout', help='Time required (in seconds) for the arc-agent pods to be installed on the kubernetes cluster. Override this value if the hardware/network constraints on your cluster requires more time for installing the arc-agent pods.') c.argument('no_wait', options_list=['--no-wait'], arg_group='Timeout', help="Do not wait for the long-running operation to finish.") c.argument('correlation_id', options_list=['--correlation-id'], help='A guid that is used to internally track the source of cluster onboarding. Please do not modify it unless advised', validator=override_client_request_id_header) + c.argument('container_log_path', help='Override the default container log path to enable fluent-bit logging') + c.argument('yes', options_list=['--yes', '-y'], help='Do not prompt for confirmation.', action='store_true') with self.argument_context('connectedk8s update') as c: c.argument('tags', tags_type) @@ -52,9 +56,14 @@ def load_arguments(self, _): c.argument('https_proxy', options_list=['--proxy-https'], arg_group='Proxy', help='Https proxy URL to be used.') c.argument('http_proxy', options_list=['--proxy-http'], arg_group='Proxy', help='Http proxy URL to be used.') c.argument('no_proxy', options_list=['--proxy-skip-range'], arg_group='Proxy', help='List of URLs/CIDRs for which proxy should not to be used.') + c.argument('distribution', help='The Kubernetes distribution which will be running on this connected cluster.', arg_type=get_enum_type(Distribution_Enum_Values)) + c.argument('distribution_version', help='The Kubernetes distribution version of the connected cluster.') + c.argument('azure_hybrid_benefit', help='Flag to enable/disable Azure Hybrid Benefit feature.', arg_type=get_enum_type(AHB_Enum_Values)) c.argument('proxy_cert', options_list=['--proxy-cert', '--custom-ca-cert'], arg_group='Proxy', type=file_type, completer=FilesCompleter(), help='Path to the certificate file for proxy or custom Certificate Authority') c.argument('disable_proxy', options_list=['--disable-proxy'], arg_group='Proxy', action='store_true', help='Disables proxy settings for agents') c.argument('auto_upgrade', options_list=['--auto-upgrade'], help='Flag to enable/disable auto upgrade of arc agents. 
By default, auto upgrade of agents is enabled.', arg_type=get_enum_type(["true", "false"])) + c.argument('container_log_path', help='Override the default container log path to enable fluent-bit logging') + c.argument('yes', options_list=['--yes', '-y'], help='Do not prompt for confirmation.', action='store_true') with self.argument_context('connectedk8s upgrade') as c: c.argument('cluster_name', options_list=['--name', '-n'], id_part='name', help='The name of the connected cluster.') diff --git a/src/connectedk8s/azext_connectedk8s/_troubleshootutils.py b/src/connectedk8s/azext_connectedk8s/_troubleshootutils.py index 7ff227c187e..24b18d383dd 100644 --- a/src/connectedk8s/azext_connectedk8s/_troubleshootutils.py +++ b/src/connectedk8s/azext_connectedk8s/_troubleshootutils.py @@ -65,7 +65,7 @@ def create_folder_diagnosticlogs(time_stamp): return "", False -def fetch_kubectl_cluster_info(filepath_with_timestamp, storage_space_available, kubectl_client_location): +def fetch_kubectl_cluster_info(filepath_with_timestamp, storage_space_available, kubectl_client_location, kube_config, kube_context): global diagnoser_output try: @@ -73,6 +73,10 @@ def fetch_kubectl_cluster_info(filepath_with_timestamp, storage_space_available, if storage_space_available: # CMD command to get events using kubectl and converting it to json format kubect_cluster_info_command = [kubectl_client_location, "cluster-info"] + if kube_config: + kubect_cluster_info_command.extend(["--kubeconfig", kube_config]) + if kube_context: + kubect_cluster_info_command.extend(["--context", kube_context]) # Using Popen to execute the command and fetching the output response_cluster_info = Popen(kubect_cluster_info_command, stdout=PIPE, stderr=PIPE) output_cluster_info, error_cluster_info = response_cluster_info.communicate() @@ -210,7 +214,7 @@ def retrieve_arc_agents_logs(corev1_api_instance, filepath_with_timestamp, stora return consts.Diagnostic_Check_Failed, storage_space_available -def retrieve_arc_agents_event_logs(filepath_with_timestamp, storage_space_available, kubectl_client_location): +def retrieve_arc_agents_event_logs(filepath_with_timestamp, storage_space_available, kubectl_client_location, kube_config, kube_context): global diagnoser_output try: @@ -218,6 +222,10 @@ def retrieve_arc_agents_event_logs(filepath_with_timestamp, storage_space_availa if storage_space_available: # CMD command to get events using kubectl and converting it to json format command = [kubectl_client_location, "get", "events", "-n", "azure-arc", "--output", "json"] + if kube_config: + command.extend(["--kubeconfig", kube_config]) + if kube_context: + command.extend(["--context", kube_context]) # Using Popen to execute the command and fetching the output response_kubectl_get_events = Popen(command, stdout=PIPE, stderr=PIPE) output_kubectl_get_events, error_kubectl_get_events = response_kubectl_get_events.communicate() @@ -455,7 +463,7 @@ def check_agent_version(connected_cluster, azure_arc_agent_version): return consts.Diagnostic_Check_Incomplete -def check_diagnoser_container(corev1_api_instance, batchv1_api_instance, filepath_with_timestamp, storage_space_available, absolute_path, probable_sufficient_resource_for_agents, helm_client_location, kubectl_client_location, release_namespace, probable_pod_security_policy_presence): +def check_diagnoser_container(corev1_api_instance, batchv1_api_instance, filepath_with_timestamp, storage_space_available, absolute_path, probable_sufficient_resource_for_agents, helm_client_location, kubectl_client_location, 
release_namespace, probable_pod_security_policy_presence, kube_config, kube_context): global diagnoser_output try: @@ -469,7 +477,7 @@ def check_diagnoser_container(corev1_api_instance, batchv1_api_instance, filepat dns_check = "Starting" outbound_connectivity_check = "Starting" # Executing the Diagnoser job and fetching diagnoser logs obtained - diagnoser_container_log = executing_diagnoser_job(corev1_api_instance, batchv1_api_instance, filepath_with_timestamp, storage_space_available, absolute_path, helm_client_location, kubectl_client_location, release_namespace, probable_pod_security_policy_presence) + diagnoser_container_log = executing_diagnoser_job(corev1_api_instance, batchv1_api_instance, filepath_with_timestamp, storage_space_available, absolute_path, helm_client_location, kubectl_client_location, release_namespace, probable_pod_security_policy_presence, kube_config, kube_context) # If diagnoser_container_log is not empty then only we will check for the results if(diagnoser_container_log is not None and diagnoser_container_log != ""): diagnoser_container_log_list = diagnoser_container_log.split("\n") @@ -508,12 +516,16 @@ def check_diagnoser_container(corev1_api_instance, batchv1_api_instance, filepat return consts.Diagnostic_Check_Incomplete, storage_space_available -def executing_diagnoser_job(corev1_api_instance, batchv1_api_instance, filepath_with_timestamp, storage_space_available, absolute_path, helm_client_location, kubectl_client_location, release_namespace, probable_pod_security_policy_presence): +def executing_diagnoser_job(corev1_api_instance, batchv1_api_instance, filepath_with_timestamp, storage_space_available, absolute_path, helm_client_location, kubectl_client_location, release_namespace, probable_pod_security_policy_presence, kube_config, kube_context): global diagnoser_output job_name = "azure-arc-diagnoser-job" # CMD command to get helm values in azure arc and converting it to json format command = [helm_client_location, "get", "values", "azure-arc", "--namespace", release_namespace, "-o", "json"] + if kube_config: + command.extend(["--kubeconfig", kube_config]) + if kube_context: + command.extend(["--kube-context", kube_context]) # Using Popen to execute the helm get values command and fetching the output response_helm_values_get = Popen(command, stdout=PIPE, stderr=PIPE) output_helm_values_get, error_helm_get_values = response_helm_values_get.communicate() @@ -565,6 +577,10 @@ def executing_diagnoser_job(corev1_api_instance, batchv1_api_instance, filepath_ # Setting the log output as Empty diagnoser_container_log = "" cmd_delete_job = [kubectl_client_location, "delete", "-f", ""] + if kube_config: + cmd_delete_job.extend(["--kubeconfig", kube_config]) + if kube_context: + cmd_delete_job.extend(["--context", kube_context]) cmd_delete_job[3] = str(yaml_file_path) # Editing the yaml file based on the release namespace new_yaml = [] @@ -592,7 +608,7 @@ def executing_diagnoser_job(corev1_api_instance, batchv1_api_instance, filepath_ # To handle the user keyboard Interrupt try: # Executing the diagnoser_job.yaml - config.load_kube_config() + config.load_kube_config(kube_config, kube_context) k8s_client = client.ApiClient() # Attempting deletion of diagnoser resources to handle the scenario if any stale resources are present response_kubectl_delete_job = Popen(cmd_delete_job, stdout=PIPE, stderr=PIPE) @@ -669,6 +685,10 @@ def executing_diagnoser_job(corev1_api_instance, batchv1_api_instance, filepath_ # Creating folder with name 'describe_non_ready_agent' in the 
given path unfinished_diagnoser_job_path = os.path.join(filepath_with_timestamp, consts.Events_of_Incomplete_Diagnoser_Job) cmd_get_diagnoser_job_events = [kubectl_client_location, "get", "events", "--field-selector", "", "-n", "azure-arc", "--output", "json"] + if kube_config: + cmd_get_diagnoser_job_events.extend(["--kubeconfig", kube_config]) + if kube_context: + cmd_get_diagnoser_job_events.extend(["--context", kube_context]) # To describe the diagnoser pod which did not reach completed stage arc_agents_pod_list = corev1_api_instance.list_namespaced_pod(namespace="azure-arc") for each_pod in arc_agents_pod_list.items: @@ -834,7 +854,7 @@ def check_msi_certificate_presence(corev1_api_instance): return consts.Diagnostic_Check_Incomplete -def check_probable_cluster_security_policy(corev1_api_instance, helm_client_location, release_namespace): +def check_probable_cluster_security_policy(corev1_api_instance, helm_client_location, release_namespace, kube_config, kube_context): global diagnoser_output try: @@ -843,6 +863,10 @@ def check_probable_cluster_security_policy(corev1_api_instance, helm_client_loca cluster_connect_feature = False # CMD command to get helm values in azure arc and converting it to json format command = [helm_client_location, "get", "values", "azure-arc", "--namespace", release_namespace, "-o", "json"] + if kube_config: + command.extend(["--kubeconfig", kube_config]) + if kube_context: + command.extend(["--kube-context", kube_context]) # Using Popen to execute the helm get values command and fetching the output response_helm_values_get = Popen(command, stdout=PIPE, stderr=PIPE) output_helm_values_get, error_helm_get_values = response_helm_values_get.communicate() diff --git a/src/connectedk8s/azext_connectedk8s/_utils.py b/src/connectedk8s/azext_connectedk8s/_utils.py index 2e6a36ca22b..2a988f348cf 100644 --- a/src/connectedk8s/azext_connectedk8s/_utils.py +++ b/src/connectedk8s/azext_connectedk8s/_utils.py @@ -250,8 +250,8 @@ def get_values_file(): return values_file_provided, values_file -def ensure_namespace_cleanup(configuration): - api_instance = kube_client.CoreV1Api(kube_client.ApiClient(configuration)) +def ensure_namespace_cleanup(): + api_instance = kube_client.CoreV1Api() timeout = time.time() + 180 while True: if time.time() > timeout: @@ -269,7 +269,7 @@ def ensure_namespace_cleanup(configuration): raise_error=False) -def delete_arc_agents(release_namespace, kube_config, kube_context, configuration, helm_client_location, no_hooks=False): +def delete_arc_agents(release_namespace, kube_config, kube_context, helm_client_location, no_hooks=False): if(no_hooks): cmd_helm_delete = [helm_client_location, "delete", "azure-arc", "--namespace", release_namespace, "--no-hooks"] else: @@ -288,13 +288,14 @@ def delete_arc_agents(release_namespace, kube_config, kube_context, configuratio raise CLIInternalError("Error occured while cleaning up arc agents. 
" + "Helm release deletion failed: " + error_helm_delete.decode("ascii") + " Please run 'helm delete azure-arc' to ensure that the release is deleted.") - ensure_namespace_cleanup(configuration) + ensure_namespace_cleanup() def helm_install_release(chart_path, subscription_id, kubernetes_distro, kubernetes_infra, resource_group_name, cluster_name, location, onboarding_tenant_id, http_proxy, https_proxy, no_proxy, proxy_cert, private_key_pem, kube_config, kube_context, no_wait, values_file_provided, values_file, cloud_name, disable_auto_upgrade, - enable_custom_locations, custom_locations_oid, helm_client_location, enable_private_link, onboarding_timeout="600"): + enable_custom_locations, custom_locations_oid, helm_client_location, enable_private_link, onboarding_timeout="600", + container_log_path=None): cmd_helm_install = [helm_client_location, "upgrade", "--install", "azure-arc", chart_path, "--set", "global.subscriptionId={}".format(subscription_id), "--set", "global.kubernetesDistro={}".format(kubernetes_distro), @@ -331,6 +332,8 @@ def helm_install_release(chart_path, subscription_id, kubernetes_distro, kuberne cmd_helm_install.extend(["--set", "global.isCustomCert={}".format(True)]) if https_proxy or http_proxy or no_proxy: cmd_helm_install.extend(["--set", "global.isProxyEnabled={}".format(True)]) + if container_log_path is not None: + cmd_helm_install.extend(["--set", "systemDefaultValues.fluent-bit.containerLogPath={}".format(container_log_path)]) if kube_config: cmd_helm_install.extend(["--kubeconfig", kube_config]) if kube_context: @@ -423,9 +426,9 @@ def check_provider_registrations(cli_ctx): logger.warning("Couldn't check the required provider's registration status. Error: {}".format(str(ex))) -def can_create_clusterrolebindings(configuration): +def can_create_clusterrolebindings(): try: - api_instance = kube_client.AuthorizationV1Api(kube_client.ApiClient(configuration)) + api_instance = kube_client.AuthorizationV1Api() access_review = kube_client.V1SelfSubjectAccessReview(spec={ "resourceAttributes": { "verb": "create", diff --git a/src/connectedk8s/azext_connectedk8s/azext_metadata.json b/src/connectedk8s/azext_connectedk8s/azext_metadata.json index 901cd11d20e..1b43c706474 100644 --- a/src/connectedk8s/azext_connectedk8s/azext_metadata.json +++ b/src/connectedk8s/azext_connectedk8s/azext_metadata.json @@ -1,4 +1,4 @@ { "name": "connectedk8s", - "azext.minCliCoreVersion": "2.30.0" + "azext.minCliCoreVersion": "2.38.0" } \ No newline at end of file diff --git a/src/connectedk8s/azext_connectedk8s/commands.py b/src/connectedk8s/azext_connectedk8s/commands.py index 7e75262d78c..2b45419900d 100644 --- a/src/connectedk8s/azext_connectedk8s/commands.py +++ b/src/connectedk8s/azext_connectedk8s/commands.py @@ -5,7 +5,7 @@ # pylint: disable=line-too-long from azure.cli.core.commands import CliCommandType -from azext_connectedk8s._client_factory import (cf_connectedk8s, cf_connected_cluster, cf_connectedk8s_prev_2022_05_01, cf_connected_cluster_prev_2022_05_01) +from azext_connectedk8s._client_factory import (cf_connectedk8s, cf_connected_cluster, cf_connectedk8s_prev_2022_10_01, cf_connected_cluster_prev_2022_10_01) from ._format import connectedk8s_show_table_format from ._format import connectedk8s_list_table_format @@ -17,8 +17,8 @@ def load_command_table(self, _): client_factory=cf_connectedk8s ) connectedk8s_sdk_prev = CliCommandType( - operations_tmpl='azext_connectedk8s.vendored_sdks.preview_2022_05_01.operations#ConnectedClusterOperations.{}', - 
client_factory=cf_connectedk8s_prev_2022_05_01 + operations_tmpl='azext_connectedk8s.vendored_sdks.preview_2022_10_01.operations#ConnectedClusterOperations.{}', + client_factory=cf_connectedk8s_prev_2022_10_01 ) with self.command_group('connectedk8s', connectedk8s_sdk, client_factory=cf_connected_cluster) as g: g.custom_command('connect', 'create_connectedk8s', supports_no_wait=True) @@ -32,6 +32,6 @@ def load_command_table(self, _): g.custom_command('proxy', 'client_side_proxy_wrapper') g.custom_command('troubleshoot', 'troubleshoot', is_preview=True) - with self.command_group('connectedk8s', connectedk8s_sdk_prev, client_factory=cf_connected_cluster_prev_2022_05_01) as g: + with self.command_group('connectedk8s', connectedk8s_sdk_prev, client_factory=cf_connected_cluster_prev_2022_10_01) as g: pass # use this block for using preview sdk client for a command diff --git a/src/connectedk8s/azext_connectedk8s/custom.py b/src/connectedk8s/azext_connectedk8s/custom.py index d7f1a133ae3..6286f1015f3 100644 --- a/src/connectedk8s/azext_connectedk8s/custom.py +++ b/src/connectedk8s/azext_connectedk8s/custom.py @@ -42,7 +42,7 @@ from azext_connectedk8s._client_factory import _resource_client_factory from azext_connectedk8s._client_factory import _resource_providers_client from azext_connectedk8s._client_factory import get_graph_client_service_principals -from azext_connectedk8s._client_factory import cf_connected_cluster_prev_2022_05_01 +from azext_connectedk8s._client_factory import cf_connected_cluster_prev_2022_10_01 from azext_connectedk8s._client_factory import cf_connectedmachine import azext_connectedk8s._constants as consts import azext_connectedk8s._utils as utils @@ -50,8 +50,8 @@ import azext_connectedk8s._troubleshootutils as troubleshootutils from glob import glob from .vendored_sdks.models import ConnectedCluster, ConnectedClusterIdentity, ConnectedClusterPatch, ListClusterUserCredentialProperties -from .vendored_sdks.preview_2022_05_01.models import ConnectedCluster as ConnectedClusterPreview -from .vendored_sdks.preview_2022_05_01.models import ConnectedClusterPatch as ConnectedClusterPatchPreview +from .vendored_sdks.preview_2022_10_01.models import ConnectedCluster as ConnectedClusterPreview +from .vendored_sdks.preview_2022_10_01.models import ConnectedClusterPatch as ConnectedClusterPatchPreview from threading import Timer, Thread import sys import hashlib @@ -66,9 +66,20 @@ def create_connectedk8s(cmd, client, resource_group_name, cluster_name, correlation_id=None, https_proxy="", http_proxy="", no_proxy="", proxy_cert="", location=None, kube_config=None, kube_context=None, no_wait=False, tags=None, distribution='auto', infrastructure='auto', - disable_auto_upgrade=False, cl_oid=None, onboarding_timeout="600", enable_private_link=None, private_link_scope_resource_id=None): + disable_auto_upgrade=False, cl_oid=None, onboarding_timeout="600", enable_private_link=None, private_link_scope_resource_id=None, + distribution_version=None, azure_hybrid_benefit=None, yes=False, container_log_path=None): logger.warning("This operation might take a while...\n") + # Prompt for confirmation for few parameters + if enable_private_link is True: + confirmation_message = "The Cluster Connect and Custom Location features are not supported by Private Link at this time. Enabling Private Link will disable these features. Are you sure you want to continue?" 
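
The connect path above now funnels its prompts (Private Link, Azure Hybrid Benefit) through utils.user_confirmation(message, yes) instead of the inline SKIP_PROMPT/prompt_y_n check that this patch removes. The helper itself is not part of this hunk; the following is only a rough sketch of the behaviour the call sites imply, assuming knack's prompt_y_n and azure-cli's ManualInterrupt error are available. The name user_confirmation matches the call sites, but the body is illustrative.

# --- illustrative sketch, not part of the patch ---
from knack.prompting import prompt_y_n, NoTTYException
from azure.cli.core.azclierror import ManualInterrupt


def user_confirmation(message, yes=False):
    """Prompt with `message` unless --yes/-y was passed; abort on 'no'."""
    if yes:
        return
    try:
        if not prompt_y_n(message):
            raise ManualInterrupt('Operation cancelled.')
    except NoTTYException:
        # Non-interactive sessions must pass --yes explicitly.
        raise ManualInterrupt('Unable to prompt for confirmation as no tty is available. '
                              'Please run the command with the --yes flag.')
# --- end sketch ---
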
+ utils.user_confirmation(confirmation_message, yes) + if cl_oid: + logger.warning("Private Link is being enabled, and Custom Location is not supported by Private Link at this time, so the '--custom-locations-oid' parameter will be ignored.") + if azure_hybrid_benefit == "True": + confirmation_message = "I confirm I have an eligible Windows Server license with Azure Hybrid Benefit to apply this benefit to AKS on HCI or Windows Server. Visit https://aka.ms/ahb-aks for details" + utils.user_confirmation(confirmation_message, yes) + # Setting subscription id and tenant Id subscription_id = get_subscription_id(cmd.cli_ctx) account = Profile().get_subscription(subscription_id) @@ -100,17 +111,9 @@ def create_connectedk8s(cmd, client, resource_group_name, cluster_name, correlat proxy_cert = proxy_cert.replace('\\', r'\\\\') - # Prompt if private link is getting enabled - if enable_private_link is True: - if os.getenv('SKIP_PROMPT') != "true": - if not prompt_y_n("The Cluster Connect and Custom Location features are not supported by Private Link at this time. Enabling Private Link will disable these features. Are you sure you want to continue?"): - return - if cl_oid: - logger.warning("Private Link is being enabled, and Custom Location is not supported by Private Link at this time, so the '--custom-locations-oid' parameter will be ignored.") - - # Set preview client if private link properties are provided. - if enable_private_link is not None: - client = cf_connected_cluster_prev_2022_05_01(cmd.cli_ctx, None) + # Set preview client if latest preview properties are provided. + if enable_private_link is not None or distribution_version is not None or azure_hybrid_benefit is not None: + client = cf_connected_cluster_prev_2022_10_01(cmd.cli_ctx, None) # Checking whether optional extra values file has been provided. values_file_provided, values_file = utils.get_values_file() @@ -124,15 +127,14 @@ def create_connectedk8s(cmd, client, resource_group_name, cluster_name, correlat # Loading the kubeconfig file in kubernetes client configuration load_kube_config(kube_config, kube_context) - configuration = kube_client.Configuration() # Checking the connection to kubernetes cluster. # This check was added to avoid large timeouts when connecting to AAD Enabled AKS clusters # if the user had not logged in. - check_kube_connection(configuration) + kubernetes_version = check_kube_connection() utils.try_list_node_fix() - api_instance = kube_client.CoreV1Api(kube_client.ApiClient(configuration)) + api_instance = kube_client.CoreV1Api() node_api_response = utils.validate_node_api_response(api_instance, None) required_node_exists = check_linux_amd64_node(node_api_response) @@ -142,15 +144,13 @@ def create_connectedk8s(cmd, client, resource_group_name, cluster_name, correlat summary="Couldn't find any node on the kubernetes cluster with the architecture type 'amd64' and OS 'linux'") logger.warning("Please ensure that this Kubernetes cluster have any nodes with OS 'linux' and architecture 'amd64', for scheduling the Arc-Agents onto and connecting to Azure. 
Learn more at {}".format("https://aka.ms/ArcK8sSupportedOSArchitecture")) - crb_permission = utils.can_create_clusterrolebindings(configuration) + crb_permission = utils.can_create_clusterrolebindings() if not crb_permission: telemetry.set_exception(exception="Your credentials doesn't have permission to create clusterrolebindings on this kubernetes cluster.", fault_type=consts.Cannot_Create_ClusterRoleBindings_Fault_Type, summary="Your credentials doesn't have permission to create clusterrolebindings on this kubernetes cluster.") raise ValidationError("Your credentials doesn't have permission to create clusterrolebindings on this kubernetes cluster. Please check your permissions.") # Get kubernetes cluster info - kubernetes_version = get_server_version(configuration) - if distribution == 'auto': kubernetes_distro = get_kubernetes_distro(node_api_response) # (cluster heuristics) else: @@ -200,7 +200,7 @@ def create_connectedk8s(cmd, client, resource_group_name, cluster_name, correlat if release_namespace: # Loading config map - api_instance = kube_client.CoreV1Api(kube_client.ApiClient(configuration)) + api_instance = kube_client.CoreV1Api() try: configmap = api_instance.read_namespaced_config_map('azure-clusterconfig', 'azure-arc') except Exception as e: # pylint: disable=broad-except @@ -218,7 +218,7 @@ def create_connectedk8s(cmd, client, resource_group_name, cluster_name, correlat configmap_cluster_name).agent_public_key_certificate except Exception as e: # pylint: disable=broad-except utils.arm_exception_handler(e, consts.Get_ConnectedCluster_Fault_Type, 'Failed to check if connected cluster resource already exists.') - cc = generate_request_payload(configuration, location, public_key, tags, kubernetes_distro, kubernetes_infra, enable_private_link, private_link_scope_resource_id) + cc = generate_request_payload(location, public_key, tags, kubernetes_distro, kubernetes_infra, enable_private_link, private_link_scope_resource_id, distribution_version, azure_hybrid_benefit) cc_response = create_cc_resource(client, resource_group_name, cluster_name, cc, no_wait).result() # Disabling cluster-connect if private link is getting enabled if enable_private_link is True: @@ -232,7 +232,7 @@ def create_connectedk8s(cmd, client, resource_group_name, cluster_name, correlat " '{}' with resource name '{}'.".format(configmap_rg_name, configmap_cluster_name)) else: # Cleanup agents and continue with put - utils.delete_arc_agents(release_namespace, kube_config, kube_context, configuration, helm_client_location) + utils.delete_arc_agents(release_namespace, kube_config, kube_context, helm_client_location) else: if connected_cluster_exists(client, resource_group_name, cluster_name): telemetry.set_exception(exception='The connected cluster resource already exists', fault_type=consts.Resource_Already_Exists_Fault_Type, @@ -319,7 +319,7 @@ def create_connectedk8s(cmd, client, resource_group_name, cluster_name, correlat raise CLIInternalError("Failed to export private key." 
+ str(e)) # Generate request payload - cc = generate_request_payload(configuration, location, public_key, tags, kubernetes_distro, kubernetes_infra, enable_private_link, private_link_scope_resource_id) + cc = generate_request_payload(location, public_key, tags, kubernetes_distro, kubernetes_infra, enable_private_link, private_link_scope_resource_id, distribution_version, azure_hybrid_benefit) # Create connected cluster resource put_cc_response = create_cc_resource(client, resource_group_name, cluster_name, cc, no_wait).result() @@ -331,7 +331,7 @@ def create_connectedk8s(cmd, client, resource_group_name, cluster_name, correlat utils.helm_install_release(chart_path, subscription_id, kubernetes_distro, kubernetes_infra, resource_group_name, cluster_name, location, onboarding_tenant_id, http_proxy, https_proxy, no_proxy, proxy_cert, private_key_pem, kube_config, kube_context, no_wait, values_file_provided, values_file, azure_cloud, disable_auto_upgrade, enable_custom_locations, - custom_locations_oid, helm_client_location, enable_private_link, onboarding_timeout) + custom_locations_oid, helm_client_location, enable_private_link, onboarding_timeout, container_log_path) return put_cc_response @@ -393,10 +393,11 @@ def escape_proxy_settings(proxy_setting): return proxy_setting -def check_kube_connection(configuration): - api_instance = kube_client.NetworkingV1Api(kube_client.ApiClient(configuration)) +def check_kube_connection(): + api_instance = kube_client.VersionApi() try: - api_instance.get_api_resources() + api_response = api_instance.get_code() + return api_response.git_version except Exception as e: # pylint: disable=broad-except logger.warning("Unable to verify connectivity to the Kubernetes cluster.") utils.kubernetes_exception_handler(e, consts.Kubernetes_Connectivity_FaultType, 'Unable to verify connectivity to the Kubernetes cluster') @@ -522,17 +523,6 @@ def get_private_key(key_pair): return PEM.encode(privKey_DER, "RSA PRIVATE KEY") -def get_server_version(configuration): - api_instance = kube_client.VersionApi(kube_client.ApiClient(configuration)) - try: - api_response = api_instance.get_code() - return api_response.git_version - except Exception as e: # pylint: disable=broad-except - logger.warning("Unable to fetch kubernetes version.") - utils.kubernetes_exception_handler(e, consts.Get_Kubernetes_Version_Fault_Type, 'Unable to fetch kubernetes version', - raise_error=False) - - def get_kubernetes_distro(api_response): # Heuristic if api_response is None: return "generic" @@ -605,7 +595,7 @@ def check_linux_amd64_node(api_response): return False -def generate_request_payload(configuration, location, public_key, tags, kubernetes_distro, kubernetes_infra, enable_private_link, private_link_scope_resource_id): +def generate_request_payload(location, public_key, tags, kubernetes_distro, kubernetes_infra, enable_private_link, private_link_scope_resource_id, distribution_version, azure_hybrid_benefit): # Create connected cluster resource object identity = ConnectedClusterIdentity( type="SystemAssigned" @@ -621,8 +611,10 @@ def generate_request_payload(configuration, location, public_key, tags, kubernet infrastructure=kubernetes_infra ) - if enable_private_link is not None: - private_link_state = "Enabled" if enable_private_link is True else "Disabled" + if enable_private_link is not None or distribution_version is not None or azure_hybrid_benefit is not None: + private_link_state = None + if enable_private_link is not None: + private_link_state = "Enabled" if enable_private_link is True 
else "Disabled" cc = ConnectedClusterPreview( location=location, identity=identity, @@ -631,14 +623,19 @@ def generate_request_payload(configuration, location, public_key, tags, kubernet distribution=kubernetes_distro, infrastructure=kubernetes_infra, private_link_scope_resource_id=private_link_scope_resource_id, - private_link_state=private_link_state + private_link_state=private_link_state, + azure_hybrid_benefit=azure_hybrid_benefit, + distribution_version=distribution_version ) return cc -def generate_patch_payload(tags): - cc = ConnectedClusterPatch( - tags=tags +def generate_patch_payload(tags, distribution, distribution_version, azure_hybrid_benefit): + cc = ConnectedClusterPatchPreview( + tags=tags, + distribution=distribution, + distribution_version=distribution_version, + azure_hybrid_benefit=azure_hybrid_benefit ) return cc @@ -706,13 +703,13 @@ def get_server_address(kube_config, kube_context): def get_connectedk8s(cmd, client, resource_group_name, cluster_name): # Override preview client to show private link properties to customers - client = cf_connected_cluster_prev_2022_05_01(cmd.cli_ctx, None) + client = cf_connected_cluster_prev_2022_10_01(cmd.cli_ctx, None) return client.get(resource_group_name, cluster_name) def list_connectedk8s(cmd, client, resource_group_name=None): # Override preview client to show private link properties to customers - client = cf_connected_cluster_prev_2022_05_01(cmd.cli_ctx, None) + client = cf_connected_cluster_prev_2022_10_01(cmd.cli_ctx, None) if not resource_group_name: return client.list_by_subscription() return client.list_by_resource_group(resource_group_name) @@ -740,12 +737,11 @@ def delete_connectedk8s(cmd, client, resource_group_name, cluster_name, # Loading the kubeconfig file in kubernetes client configuration load_kube_config(kube_config, kube_context) - configuration = kube_client.Configuration() # Checking the connection to kubernetes cluster. # This check was added to avoid large timeouts when connecting to AAD Enabled # AKS clusters if the user had not logged in. 
- check_kube_connection(configuration) + check_kube_connection() # Install helm client helm_client_location = install_helm_client() @@ -765,6 +761,10 @@ def delete_connectedk8s(cmd, client, resource_group_name, cluster_name, timeout_for_crd_deletion = "20s" for crds in consts.CRD_FOR_FORCE_DELETE: cmd_helm_delete = [kubectl_client_location, "delete", "crds", crds, "--ignore-not-found", "--wait", "--timeout", "{}".format(timeout_for_crd_deletion)] + if kube_config: + cmd_helm_delete.extend(["--kubeconfig", kube_config]) + if kube_context: + cmd_helm_delete.extend(["--context", kube_context]) response_helm_delete = Popen(cmd_helm_delete, stdout=PIPE, stderr=PIPE) _, error_helm_delete = response_helm_delete.communicate() @@ -780,6 +780,10 @@ def delete_connectedk8s(cmd, client, resource_group_name, cluster_name, for crds in consts.CRD_FOR_FORCE_DELETE: cmd = [kubectl_client_location, "get", "crd", crds, "-ojson"] + if kube_config: + cmd.extend(["--kubeconfig", kube_config]) + if kube_context: + cmd.extend(["--context", kube_context]) cmd_output = Popen(cmd, stdout=PIPE, stderr=PIPE) _, error_helm_delete = cmd_output.communicate() @@ -789,11 +793,15 @@ def delete_connectedk8s(cmd, client, resource_group_name, cluster_name, if(status == "Terminating"): patch_cmd = [kubectl_client_location, "patch", "crd", crds, "--type=merge", "--patch-file", yaml_file_path] + if kube_config: + patch_cmd.extend(["--kubeconfig", kube_config]) + if kube_context: + patch_cmd.extend(["--context", kube_context]) output_patch_cmd = Popen(patch_cmd, stdout=PIPE, stderr=PIPE) _, error_helm_delete = output_patch_cmd.communicate() if(release_namespace): - utils.delete_arc_agents(release_namespace, kube_config, kube_context, configuration, helm_client_location, True) + utils.delete_arc_agents(release_namespace, kube_config, kube_context, helm_client_location, True) return @@ -802,7 +810,7 @@ def delete_connectedk8s(cmd, client, resource_group_name, cluster_name, return # Loading config map - api_instance = kube_client.CoreV1Api(kube_client.ApiClient(configuration)) + api_instance = kube_client.CoreV1Api() try: configmap = api_instance.read_namespaced_config_map('azure-clusterconfig', 'azure-arc') except Exception as e: # pylint: disable=broad-except @@ -833,7 +841,7 @@ def delete_connectedk8s(cmd, client, resource_group_name, cluster_name, "and resource name '{}'.".format(configmap.data["AZURE_RESOURCE_NAME"])) # Deleting the azure-arc agents - utils.delete_arc_agents(release_namespace, kube_config, kube_context, configuration, helm_client_location) + utils.delete_arc_agents(release_namespace, kube_config, kube_context, helm_client_location) def get_release_namespace(kube_config, kube_context, helm_client_location): @@ -886,8 +894,8 @@ def delete_cc_resource(client, resource_group_name, cluster_name, no_wait): utils.arm_exception_handler(e, consts.Delete_ConnectedCluster_Fault_Type, 'Unable to delete connected cluster resource') -def update_connected_cluster_internal(client, resource_group_name, cluster_name, tags=None): - cc = generate_patch_payload(tags) +def update_connected_cluster_internal(client, resource_group_name, cluster_name, tags=None, distribution=None, distribution_version=None, azure_hybrid_benefit=None): + cc = generate_patch_payload(tags, distribution, distribution_version, azure_hybrid_benefit) return patch_cc_resource(client, resource_group_name, cluster_name, cc) @@ -899,7 +907,13 @@ def update_connected_cluster_internal(client, resource_group_name, cluster_name, def update_connected_cluster(cmd, 
client, resource_group_name, cluster_name, https_proxy="", http_proxy="", no_proxy="", proxy_cert="", - disable_proxy=False, kube_config=None, kube_context=None, auto_upgrade=None, tags=None): + disable_proxy=False, kube_config=None, kube_context=None, auto_upgrade=None, tags=None, + distribution=None, distribution_version=None, azure_hybrid_benefit=None, yes=False, container_log_path=None): + + # Prompt for confirmation for few parameters + if azure_hybrid_benefit == "True": + confirmation_message = "I confirm I have an eligible Windows Server license with Azure Hybrid Benefit to apply this benefit to AKS on HCI or Windows Server. Visit https://aka.ms/ahb-aks for details" + utils.user_confirmation(confirmation_message, yes) # Send cloud information to telemetry send_cloud_telemetry(cmd) @@ -924,16 +938,16 @@ def update_connected_cluster(cmd, client, resource_group_name, cluster_name, htt proxy_cert = proxy_cert.replace('\\', r'\\\\') - # Set preview client if cluster is private link enabled. - connected_cluster = get_connectedk8s(cmd, client, resource_group_name, cluster_name) - if connected_cluster.private_link_state.lower() == "enabled": - client = cf_connected_cluster_prev_2022_05_01(cmd.cli_ctx, None) + # Set preview client as most of the patchable fields are available in preview api-version + client = cf_connected_cluster_prev_2022_10_01(cmd.cli_ctx, None) # Patching the connected cluster ARM resource - patch_cc_response = update_connected_cluster_internal(client, resource_group_name, cluster_name, tags) + arm_properties_unset = (tags is None and distribution is None and distribution_version is None and azure_hybrid_benefit is None) + if not arm_properties_unset: + patch_cc_response = update_connected_cluster_internal(client, resource_group_name, cluster_name, tags, distribution, distribution_version, azure_hybrid_benefit) proxy_params_unset = (https_proxy == "" and http_proxy == "" and no_proxy == "" and proxy_cert == "" and not disable_proxy) - if proxy_params_unset and not auto_upgrade and tags is None: + if proxy_params_unset and not auto_upgrade and arm_properties_unset and not container_log_path: raise RequiredArgumentMissingError(consts.No_Param_Error) if (https_proxy or http_proxy or no_proxy) and disable_proxy: @@ -950,25 +964,22 @@ def update_connected_cluster(cmd, client, resource_group_name, cluster_name, htt # Loading the kubeconfig file in kubernetes client configuration load_kube_config(kube_config, kube_context) - configuration = kube_client.Configuration() # Checking the connection to kubernetes cluster. # This check was added to avoid large timeouts when connecting to AAD Enabled AKS clusters # if the user had not logged in. 
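
update_connected_cluster now PATCHes the ARM resource only when at least one ARM-side property (tags, distribution, distribution version, Azure Hybrid Benefit) was supplied, and it still rejects an invocation that provides neither an ARM property, a proxy setting, auto-upgrade, nor a container log path. A condensed sketch of that gating follows; the function name and the ValueError stand in for the real RequiredArgumentMissingError(consts.No_Param_Error) flow.

# --- illustrative sketch, not part of the patch ---
def should_patch_and_validate(tags=None, distribution=None, distribution_version=None,
                              azure_hybrid_benefit=None, https_proxy="", http_proxy="",
                              no_proxy="", proxy_cert="", disable_proxy=False,
                              auto_upgrade=None, container_log_path=None):
    arm_properties_unset = (tags is None and distribution is None and
                            distribution_version is None and azure_hybrid_benefit is None)
    proxy_params_unset = (https_proxy == "" and http_proxy == "" and no_proxy == "" and
                          proxy_cert == "" and not disable_proxy)
    if proxy_params_unset and not auto_upgrade and arm_properties_unset and not container_log_path:
        # Stand-in for RequiredArgumentMissingError(consts.No_Param_Error).
        raise ValueError("At least one property must be specified for update.")
    # PATCH the connected cluster ARM resource only when an ARM property changed.
    return not arm_properties_unset
# --- end sketch ---
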
- check_kube_connection(configuration) + kubernetes_version = check_kube_connection() utils.try_list_node_fix() - # Get kubernetes cluster info for telemetry - kubernetes_version = get_server_version(configuration) - # Install helm client helm_client_location = install_helm_client() - release_namespace = validate_release_namespace(client, cluster_name, resource_group_name, configuration, kube_config, kube_context, helm_client_location) + release_namespace = validate_release_namespace(client, cluster_name, resource_group_name, kube_config, kube_context, helm_client_location) # Fetch Connected Cluster for agent version - api_instance = kube_client.CoreV1Api(kube_client.ApiClient(configuration)) + connected_cluster = get_connectedk8s(cmd, client, resource_group_name, cluster_name) + api_instance = kube_client.CoreV1Api() node_api_response = None if hasattr(connected_cluster, 'distribution') and (connected_cluster.distribution is not None): @@ -1050,6 +1061,8 @@ def update_connected_cluster(cmd, client, resource_group_name, cluster_name, htt if proxy_cert: cmd_helm_upgrade.extend(["--set-file", "global.proxyCert={}".format(proxy_cert)]) cmd_helm_upgrade.extend(["--set", "global.isCustomCert={}".format(True)]) + if container_log_path is not None: + cmd_helm_upgrade.extend(["--set", "systemDefaultValues.fluent-bit.containerLogPath={}".format(container_log_path)]) if kube_config: cmd_helm_upgrade.extend(["--kubeconfig", kube_config]) if kube_context: @@ -1072,7 +1085,8 @@ def update_connected_cluster(cmd, client, resource_group_name, cluster_name, htt os.remove(user_values_location) except OSError: pass - return patch_cc_response + if not arm_properties_unset: + return patch_cc_response def upgrade_agents(cmd, client, resource_group_name, cluster_name, kube_config=None, kube_context=None, arc_agent_version=None, upgrade_timeout="600"): @@ -1095,20 +1109,16 @@ def upgrade_agents(cmd, client, resource_group_name, cluster_name, kube_config=N # Loading the kubeconfig file in kubernetes client configuration load_kube_config(kube_config, kube_context) - configuration = kube_client.Configuration() # Checking the connection to kubernetes cluster. # This check was added to avoid large timeouts when connecting to AAD Enabled AKS clusters # if the user had not logged in. 
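
Both helm_install_release and the upgrade path shown above translate --container-log-path into the chart value systemDefaultValues.fluent-bit.containerLogPath and forward --kubeconfig/--kube-context to helm when supplied. A minimal sketch of that command assembly; the chart value name comes from the hunk, while build_helm_upgrade_cmd and the base arguments are illustrative.

# --- illustrative sketch, not part of the patch ---
def build_helm_upgrade_cmd(helm_client_location, chart_path, release_namespace,
                           container_log_path=None, kube_config=None, kube_context=None):
    cmd = [helm_client_location, "upgrade", "--install", "azure-arc", chart_path,
           "--namespace", release_namespace]
    if container_log_path is not None:
        # Overrides the default path that fluent-bit tails for container logs.
        cmd.extend(["--set",
                    "systemDefaultValues.fluent-bit.containerLogPath={}".format(container_log_path)])
    if kube_config:
        cmd.extend(["--kubeconfig", kube_config])
    if kube_context:
        cmd.extend(["--kube-context", kube_context])
    return cmd
# --- end sketch ---
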
- check_kube_connection(configuration) + kubernetes_version = check_kube_connection() utils.try_list_node_fix() - api_instance = kube_client.CoreV1Api(kube_client.ApiClient(configuration)) + api_instance = kube_client.CoreV1Api() node_api_response = None - # Get kubernetes cluster info for telemetry - kubernetes_version = get_server_version(configuration) - # Install helm client helm_client_location = install_helm_client() @@ -1116,7 +1126,7 @@ def upgrade_agents(cmd, client, resource_group_name, cluster_name, kube_config=N release_namespace = get_release_namespace(kube_config, kube_context, helm_client_location) if release_namespace: # Loading config map - api_instance = kube_client.CoreV1Api(kube_client.ApiClient(configuration)) + api_instance = kube_client.CoreV1Api() try: configmap = api_instance.read_namespaced_config_map('azure-clusterconfig', 'azure-arc') except Exception as e: # pylint: disable=broad-except @@ -1267,12 +1277,12 @@ def upgrade_agents(cmd, client, resource_group_name, cluster_name, kube_config=N return str.format(consts.Upgrade_Agent_Success, connected_cluster.name) -def validate_release_namespace(client, cluster_name, resource_group_name, configuration, kube_config, kube_context, helm_client_location): +def validate_release_namespace(client, cluster_name, resource_group_name, kube_config, kube_context, helm_client_location): # Check Release Existance release_namespace = get_release_namespace(kube_config, kube_context, helm_client_location) if release_namespace: # Loading config map - api_instance = kube_client.CoreV1Api(kube_client.ApiClient(configuration)) + api_instance = kube_client.CoreV1Api() try: configmap = api_instance.read_namespaced_config_map('azure-clusterconfig', 'azure-arc') except Exception as e: # pylint: disable=broad-except @@ -1380,24 +1390,20 @@ def enable_features(cmd, client, resource_group_name, cluster_name, features, ku # Loading the kubeconfig file in kubernetes client configuration load_kube_config(kube_config, kube_context) - configuration = kube_client.Configuration() # Checking the connection to kubernetes cluster. # This check was added to avoid large timeouts when connecting to AAD Enabled AKS clusters # if the user had not logged in. - check_kube_connection(configuration) + kubernetes_version = check_kube_connection() utils.try_list_node_fix() - api_instance = kube_client.CoreV1Api(kube_client.ApiClient(configuration)) + api_instance = kube_client.CoreV1Api() node_api_response = None - # Get kubernetes cluster info for telemetry - kubernetes_version = get_server_version(configuration) - # Install helm client helm_client_location = install_helm_client() - release_namespace = validate_release_namespace(client, cluster_name, resource_group_name, configuration, kube_config, kube_context, helm_client_location) + release_namespace = validate_release_namespace(client, cluster_name, resource_group_name, kube_config, kube_context, helm_client_location) # Fetch Connected Cluster for agent version connected_cluster = get_connectedk8s(cmd, client, resource_group_name, cluster_name) @@ -1503,24 +1509,20 @@ def disable_features(cmd, client, resource_group_name, cluster_name, features, k # Loading the kubeconfig file in kubernetes client configuration load_kube_config(kube_config, kube_context) - configuration = kube_client.Configuration() # Checking the connection to kubernetes cluster. # This check was added to avoid large timeouts when connecting to AAD Enabled AKS clusters # if the user had not logged in. 
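
The recurring refactor in this patch drops the explicit kube_client.Configuration() plumbing: after config.load_kube_config(kube_config, kube_context), API objects such as CoreV1Api() pick up the default client configuration, and check_kube_connection() now doubles as the version probe that the removed get_server_version used to provide. A small sketch of that pattern, assuming the kubernetes Python package; connect_and_probe is an illustrative name.

# --- illustrative sketch, not part of the patch ---
from kubernetes import client as kube_client, config


def connect_and_probe(kube_config=None, kube_context=None):
    # Load the kubeconfig once; subsequent API objects use the default configuration,
    # so no explicit kube_client.Configuration() needs to be passed around.
    config.load_kube_config(config_file=kube_config, context=kube_context)
    version_api = kube_client.VersionApi()
    # get_code() returns the server version info; git_version serves as the
    # "kubernetes_version" value reported in telemetry.
    return version_api.get_code().git_version
# --- end sketch ---
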
- check_kube_connection(configuration) + kubernetes_version = check_kube_connection() utils.try_list_node_fix() - api_instance = kube_client.CoreV1Api(kube_client.ApiClient(configuration)) + api_instance = kube_client.CoreV1Api() node_api_response = None - # Get kubernetes cluster info for telemetry - kubernetes_version = get_server_version(configuration) - # Install helm client helm_client_location = install_helm_client() - release_namespace = validate_release_namespace(client, cluster_name, resource_group_name, configuration, kube_config, kube_context, helm_client_location) + release_namespace = validate_release_namespace(client, cluster_name, resource_group_name, kube_config, kube_context, helm_client_location) # Fetch Connected Cluster for agent version connected_cluster = get_connectedk8s(cmd, client, resource_group_name, cluster_name) @@ -2226,19 +2228,18 @@ def troubleshoot(cmd, client, resource_group_name, cluster_name, kube_config=Non # Loading the kubeconfig file in kubernetes client configuration load_kube_config(kube_config, kube_context) - configuration = kube_client.Configuration() # Install helm client helm_client_location = install_helm_client() # Install kubectl client kubectl_client_location = install_kubectl_client() - release_namespace = validate_release_namespace(client, cluster_name, resource_group_name, configuration, kube_config, kube_context, helm_client_location) + release_namespace = validate_release_namespace(client, cluster_name, resource_group_name, kube_config, kube_context, helm_client_location) # Checking the connection to kubernetes cluster. # This check was added to avoid large timeouts when connecting to AAD Enabled AKS clusters # if the user had not logged in. - check_kube_connection(configuration) + check_kube_connection() utils.try_list_node_fix() # Fetch Connected Cluster for agent version @@ -2263,11 +2264,11 @@ def troubleshoot(cmd, client, resource_group_name, cluster_name, kube_config=Non storage_space_available = False # To store the cluster-info of the cluster in current-context - diagnostic_checks[consts.Fetch_Kubectl_Cluster_Info], storage_space_available = troubleshootutils.fetch_kubectl_cluster_info(filepath_with_timestamp, storage_space_available, kubectl_client_location) + diagnostic_checks[consts.Fetch_Kubectl_Cluster_Info], storage_space_available = troubleshootutils.fetch_kubectl_cluster_info(filepath_with_timestamp, storage_space_available, kubectl_client_location, kube_config, kube_context) # To store the connected cluster resource logs in the diagnostic folder diagnostic_checks[consts.Fetch_Connected_Cluster_Resource], storage_space_available = troubleshootutils.fetch_connected_cluster_resource(filepath_with_timestamp, connected_cluster, storage_space_available) - corev1_api_instance = kube_client.CoreV1Api(kube_client.ApiClient(configuration)) + corev1_api_instance = kube_client.CoreV1Api() # Check if agents have been added to the cluster arc_agents_pod_list = corev1_api_instance.list_namespaced_pod(namespace="azure-arc") @@ -2279,10 +2280,10 @@ def troubleshoot(cmd, client, resource_group_name, cluster_name, kube_config=Non diagnostic_checks[consts.Retrieve_Arc_Agents_Logs], storage_space_available = troubleshootutils.retrieve_arc_agents_logs(corev1_api_instance, filepath_with_timestamp, storage_space_available) # For storing all arc agents events logs - diagnostic_checks[consts.Retrieve_Arc_Agents_Event_Logs], storage_space_available = troubleshootutils.retrieve_arc_agents_event_logs(filepath_with_timestamp, 
storage_space_available, kubectl_client_location) + diagnostic_checks[consts.Retrieve_Arc_Agents_Event_Logs], storage_space_available = troubleshootutils.retrieve_arc_agents_event_logs(filepath_with_timestamp, storage_space_available, kubectl_client_location, kube_config, kube_context) # For storing all the deployments logs using the AppsV1Api - appv1_api_instance = kube_client.AppsV1Api(kube_client.ApiClient(configuration)) + appv1_api_instance = kube_client.AppsV1Api() diagnostic_checks[consts.Retrieve_Deployments_Logs], storage_space_available = troubleshootutils.retrieve_deployments_logs(appv1_api_instance, filepath_with_timestamp, storage_space_available) # Check for the azure arc agent states @@ -2298,7 +2299,7 @@ def troubleshoot(cmd, client, resource_group_name, cluster_name, kube_config=Non # If msi certificate present then only we will do Kube aad proxy checks if diagnostic_checks[consts.MSI_Cert_Check] == consts.Diagnostic_Check_Passed: - diagnostic_checks[consts.KAP_Security_Policy_Check] = troubleshootutils.check_probable_cluster_security_policy(corev1_api_instance, helm_client_location, release_namespace) + diagnostic_checks[consts.KAP_Security_Policy_Check] = troubleshootutils.check_probable_cluster_security_policy(corev1_api_instance, helm_client_location, release_namespace, kube_config, kube_context) # If no security policy is present in cluster then we can check for the Kube aad proxy certificate if diagnostic_checks[consts.KAP_Security_Policy_Check] == consts.Diagnostic_Check_Passed: @@ -2332,9 +2333,9 @@ def troubleshoot(cmd, client, resource_group_name, cluster_name, kube_config=Non else: logger.warning("Error: Azure Arc agents are not present on the cluster. Please verify whether Arc onboarding of the Kubernetes cluster has been attempted.\n") - batchv1_api_instance = kube_client.BatchV1Api(kube_client.ApiClient(configuration)) + batchv1_api_instance = kube_client.BatchV1Api() # Performing diagnoser container check - diagnostic_checks[consts.Diagnoser_Check], storage_space_available = troubleshootutils.check_diagnoser_container(corev1_api_instance, batchv1_api_instance, filepath_with_timestamp, storage_space_available, absolute_path, probable_sufficient_resource_for_agents, helm_client_location, kubectl_client_location, release_namespace, diagnostic_checks[consts.KAP_Security_Policy_Check]) + diagnostic_checks[consts.Diagnoser_Check], storage_space_available = troubleshootutils.check_diagnoser_container(corev1_api_instance, batchv1_api_instance, filepath_with_timestamp, storage_space_available, absolute_path, probable_sufficient_resource_for_agents, helm_client_location, kubectl_client_location, release_namespace, diagnostic_checks[consts.KAP_Security_Policy_Check], kube_config, kube_context) # Adding cli output to the logs diagnostic_checks[consts.Storing_Diagnoser_Results_Logs] = troubleshootutils.fetching_cli_output_logs(filepath_with_timestamp, storage_space_available, 1) diff --git a/src/connectedk8s/azext_connectedk8s/tests/latest/recordings/test_connectedk8s.yaml b/src/connectedk8s/azext_connectedk8s/tests/latest/recordings/test_connectedk8s.yaml index 4a8e4d9744e..d9996898e52 100644 --- a/src/connectedk8s/azext_connectedk8s/tests/latest/recordings/test_connectedk8s.yaml +++ b/src/connectedk8s/azext_connectedk8s/tests/latest/recordings/test_connectedk8s.yaml @@ -13,7 +13,8 @@ interactions: ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) + - 
AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar?api-version=2021-04-01 response: @@ -27,7 +28,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 17:45:29 GMT + - Tue, 18 Oct 2022 19:26:59 GMT expires: - '-1' pragma: @@ -44,7 +45,7 @@ interactions: - request: body: '{"location": "westeurope", "identity": {"type": "SystemAssigned"}, "properties": {"kubernetesVersion": "", "dnsPrefix": "cli-test-a-akkeshar-1bfbb5", "agentPoolProfiles": - [{"count": 1, "vmSize": "Standard_B2s", "osType": "Linux", "type": "VirtualMachineScaleSets", + [{"count": 1, "vmSize": "Standard_B4ms", "osType": "Linux", "type": "VirtualMachineScaleSets", "mode": "System", "enableNodePublicIP": false, "scaleSetPriority": "Regular", "scaleSetEvictionPolicy": "Delete", "spotMaxPrice": -1.0, "enableEncryptionAtHost": false, "enableUltraSSD": false, "enableFIPS": false, "name": "nodepool1"}], @@ -65,14 +66,14 @@ interactions: Connection: - keep-alive Content-Length: - - '1518' + - '1519' Content-Type: - application/json ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.ContainerService/managedClusters/cli-test-aks-000001?api-version=2021-08-01 response: @@ -81,18 +82,18 @@ interactions: \ \"location\": \"westeurope\",\n \"name\": \"cli-test-aks-000001\",\n \"type\": \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \"provisioningState\": \"Creating\",\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"kubernetesVersion\": - \"1.22.6\",\n \"dnsPrefix\": \"cli-test-a-akkeshar-1bfbb5\",\n \"fqdn\": - \"cli-test-a-akkeshar-1bfbb5-7c7ab37f.hcp.westeurope.azmk8s.io\",\n \"azurePortalFQDN\": - \"cli-test-a-akkeshar-1bfbb5-7c7ab37f.portal.hcp.westeurope.azmk8s.io\",\n + \"1.23.12\",\n \"dnsPrefix\": \"cli-test-a-akkeshar-1bfbb5\",\n \"fqdn\": + \"cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io\",\n \"azurePortalFQDN\": + \"cli-test-a-akkeshar-1bfbb5-d5fa5d83.portal.hcp.westeurope.azmk8s.io\",\n \ \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \"count\": - 1,\n \"vmSize\": \"Standard_B2s\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": + 1,\n \"vmSize\": \"Standard_B4ms\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": \"Managed\",\n \"kubeletDiskType\": \"OS\",\n \"maxPods\": 110,\n \ \"type\": \"VirtualMachineScaleSets\",\n \"provisioningState\": \"Creating\",\n \ \"powerState\": {\n \"code\": \"Running\"\n },\n \"orchestratorVersion\": - \"1.22.6\",\n \"enableNodePublicIP\": false,\n \"mode\": \"System\",\n + \"1.23.12\",\n \"enableNodePublicIP\": false,\n \"mode\": \"System\",\n \ \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\": - \"AKSUbuntu-1804gen2containerd-2022.05.16\",\n \"enableFIPS\": false\n + \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"enableFIPS\": false\n \ }\n ],\n \"linuxProfile\": {\n \"adminUsername\": \"azureuser\",\n \ \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": \"ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABgQDknmXRBGa/GuPCkpyydwCNedhfwINfrO674LWcBih2UjjJc5yULl9cD9LsYMWOzHVqM7H7RFxaONyq46h9vgxB/1XAeJUGc2jS8GS+vsS83bXX6vVrwa8wVeD380SJcF87oH3xf7/v2hlKv3drXi7xPE2JBjTHIOJJ6OxX+bAFXBqd1dPvnX1X7kEyX6vvjvuQrp7rFDbLq/eRpmng7kykodASQkUFZlt5+gH/U/z/a/DRoTocgzNqGl9RmesNtslQJs17Vn/JIJMM55qcRCEKoJ3Fq/Osnx3tHNA3G/vTs/+sVgh0tZmM6oIMRfTKzJskSZkMZOd8KtK/7ROCZO72izRmzwTFwFvRe/I7iHQ4PrjeKAqKDvgHJ/0LlaHmIYysZI21OTo6HcoX4HmA4RsIybNAM5SWeMMGiGe94/LYPk9sgB3o8aMv/nI/hr6vA28c2nso7itOuNcH1GZalAnbCObNv7QqVZ23FPlCjV9GXWCDCnQeCoIispJCrf68N5s= @@ -105,20 +106,20 @@ interactions: \"10.0.0.0/16\",\n \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\",\n \"outboundType\": \"loadBalancer\"\n },\n \"maxAgentPools\": 100,\n \"disableLocalAccounts\": false,\n \"securityProfile\": {}\n },\n - \ \"identity\": {\n \"type\": \"SystemAssigned\",\n \"principalId\": \"f8ee3c52-6a28-4d30-966b-51ab5e28f9c7\",\n + \ \"identity\": {\n \"type\": \"SystemAssigned\",\n \"principalId\": \"c598b0ac-38cf-4a4e-a2ad-c3fc7525a18c\",\n \ \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\": {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" headers: azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/38488217-069d-4745-ad8d-18497861a715?api-version=2017-08-31 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/41fb0cf9-113f-44eb-b89f-98ad54b0afea?api-version=2017-08-31 cache-control: - no-cache content-length: - - '2888' + - '2891' content-type: - application/json date: - - Tue, 07 Jun 2022 17:45:45 GMT + - Tue, 18 Oct 2022 19:27:12 GMT expires: - '-1' pragma: @@ -148,14 +149,14 @@ interactions: ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/38488217-069d-4745-ad8d-18497861a715?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/41fb0cf9-113f-44eb-b89f-98ad54b0afea?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"17824838-9d06-4547-ad8d-18497861a715\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-06-07T17:45:45.2466666Z\"\n }" + string: "{\n \"name\": \"f90cfb41-3f11-eb44-b89f-98ad54b0afea\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:27:11.6905205Z\"\n }" headers: cache-control: - no-cache @@ -164,7 +165,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 17:46:16 GMT + - Tue, 18 Oct 2022 19:27:42 GMT expires: - '-1' pragma: @@ -196,14 +197,14 @@ interactions: ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/38488217-069d-4745-ad8d-18497861a715?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/41fb0cf9-113f-44eb-b89f-98ad54b0afea?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"17824838-9d06-4547-ad8d-18497861a715\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-06-07T17:45:45.2466666Z\"\n }" + string: "{\n \"name\": \"f90cfb41-3f11-eb44-b89f-98ad54b0afea\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:27:11.6905205Z\"\n }" headers: cache-control: - no-cache @@ -212,7 +213,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 17:46:46 GMT + - Tue, 18 Oct 2022 19:28:13 GMT expires: - '-1' pragma: @@ -244,14 +245,14 @@ interactions: ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/38488217-069d-4745-ad8d-18497861a715?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/41fb0cf9-113f-44eb-b89f-98ad54b0afea?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"17824838-9d06-4547-ad8d-18497861a715\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-06-07T17:45:45.2466666Z\"\n }" + string: "{\n \"name\": \"f90cfb41-3f11-eb44-b89f-98ad54b0afea\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:27:11.6905205Z\"\n }" headers: cache-control: - no-cache @@ -260,7 +261,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 17:47:17 GMT + - Tue, 18 Oct 2022 19:28:43 GMT expires: - '-1' pragma: @@ -292,14 +293,14 @@ interactions: ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/38488217-069d-4745-ad8d-18497861a715?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/41fb0cf9-113f-44eb-b89f-98ad54b0afea?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"17824838-9d06-4547-ad8d-18497861a715\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-06-07T17:45:45.2466666Z\"\n }" + string: "{\n \"name\": \"f90cfb41-3f11-eb44-b89f-98ad54b0afea\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:27:11.6905205Z\"\n }" headers: cache-control: - no-cache @@ -308,7 +309,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 17:47:47 GMT + - Tue, 18 Oct 2022 19:29:13 GMT expires: - '-1' pragma: @@ 
-340,14 +341,14 @@ interactions: ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/38488217-069d-4745-ad8d-18497861a715?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/41fb0cf9-113f-44eb-b89f-98ad54b0afea?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"17824838-9d06-4547-ad8d-18497861a715\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-06-07T17:45:45.2466666Z\"\n }" + string: "{\n \"name\": \"f90cfb41-3f11-eb44-b89f-98ad54b0afea\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:27:11.6905205Z\"\n }" headers: cache-control: - no-cache @@ -356,7 +357,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 17:48:17 GMT + - Tue, 18 Oct 2022 19:29:44 GMT expires: - '-1' pragma: @@ -388,14 +389,14 @@ interactions: ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/38488217-069d-4745-ad8d-18497861a715?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/41fb0cf9-113f-44eb-b89f-98ad54b0afea?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"17824838-9d06-4547-ad8d-18497861a715\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-06-07T17:45:45.2466666Z\"\n }" + string: "{\n \"name\": \"f90cfb41-3f11-eb44-b89f-98ad54b0afea\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:27:11.6905205Z\"\n }" headers: cache-control: - no-cache @@ -404,7 +405,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 17:48:48 GMT + - Tue, 18 Oct 2022 19:30:14 GMT expires: - '-1' pragma: @@ -436,14 +437,14 @@ interactions: ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/38488217-069d-4745-ad8d-18497861a715?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/41fb0cf9-113f-44eb-b89f-98ad54b0afea?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"17824838-9d06-4547-ad8d-18497861a715\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-06-07T17:45:45.2466666Z\"\n }" + string: "{\n 
\"name\": \"f90cfb41-3f11-eb44-b89f-98ad54b0afea\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:27:11.6905205Z\"\n }" headers: cache-control: - no-cache @@ -452,7 +453,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 17:49:18 GMT + - Tue, 18 Oct 2022 19:30:44 GMT expires: - '-1' pragma: @@ -484,14 +485,14 @@ interactions: ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/38488217-069d-4745-ad8d-18497861a715?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/41fb0cf9-113f-44eb-b89f-98ad54b0afea?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"17824838-9d06-4547-ad8d-18497861a715\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-06-07T17:45:45.2466666Z\"\n }" + string: "{\n \"name\": \"f90cfb41-3f11-eb44-b89f-98ad54b0afea\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:27:11.6905205Z\"\n }" headers: cache-control: - no-cache @@ -500,7 +501,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 17:49:49 GMT + - Tue, 18 Oct 2022 19:31:15 GMT expires: - '-1' pragma: @@ -532,15 +533,111 @@ interactions: ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/38488217-069d-4745-ad8d-18497861a715?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/41fb0cf9-113f-44eb-b89f-98ad54b0afea?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"17824838-9d06-4547-ad8d-18497861a715\",\n \"status\": - \"Succeeded\",\n \"startTime\": \"2022-06-07T17:45:45.2466666Z\",\n \"endTime\": - \"2022-06-07T17:49:58.3252923Z\"\n }" + string: "{\n \"name\": \"f90cfb41-3f11-eb44-b89f-98ad54b0afea\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:27:11.6905205Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Tue, 18 Oct 2022 19:31:45 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - -g -n -s -l -c --generate-ssh-keys + User-Agent: + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) + method: 
GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/41fb0cf9-113f-44eb-b89f-98ad54b0afea?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"f90cfb41-3f11-eb44-b89f-98ad54b0afea\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:27:11.6905205Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Tue, 18 Oct 2022 19:32:16 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - -g -n -s -l -c --generate-ssh-keys + User-Agent: + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/41fb0cf9-113f-44eb-b89f-98ad54b0afea?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"f90cfb41-3f11-eb44-b89f-98ad54b0afea\",\n \"status\": + \"Succeeded\",\n \"startTime\": \"2022-10-18T19:27:11.6905205Z\",\n \"endTime\": + \"2022-10-18T19:32:24.0859857Z\"\n }" headers: cache-control: - no-cache @@ -549,7 +646,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 17:50:20 GMT + - Tue, 18 Oct 2022 19:32:46 GMT expires: - '-1' pragma: @@ -581,8 +678,8 @@ interactions: ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.ContainerService/managedClusters/cli-test-aks-000001?api-version=2021-08-01 response: @@ -591,18 +688,18 @@ interactions: \ \"location\": \"westeurope\",\n \"name\": \"cli-test-aks-000001\",\n \"type\": \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \"provisioningState\": \"Succeeded\",\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"kubernetesVersion\": - \"1.22.6\",\n \"dnsPrefix\": \"cli-test-a-akkeshar-1bfbb5\",\n \"fqdn\": - \"cli-test-a-akkeshar-1bfbb5-7c7ab37f.hcp.westeurope.azmk8s.io\",\n \"azurePortalFQDN\": - \"cli-test-a-akkeshar-1bfbb5-7c7ab37f.portal.hcp.westeurope.azmk8s.io\",\n + \"1.23.12\",\n \"dnsPrefix\": \"cli-test-a-akkeshar-1bfbb5\",\n \"fqdn\": + \"cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io\",\n \"azurePortalFQDN\": + \"cli-test-a-akkeshar-1bfbb5-d5fa5d83.portal.hcp.westeurope.azmk8s.io\",\n \ \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \"count\": - 1,\n \"vmSize\": \"Standard_B2s\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": + 1,\n \"vmSize\": \"Standard_B4ms\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": \"Managed\",\n \"kubeletDiskType\": \"OS\",\n \"maxPods\": 110,\n \ \"type\": \"VirtualMachineScaleSets\",\n \"provisioningState\": \"Succeeded\",\n \ \"powerState\": {\n 
\"code\": \"Running\"\n },\n \"orchestratorVersion\": - \"1.22.6\",\n \"enableNodePublicIP\": false,\n \"mode\": \"System\",\n + \"1.23.12\",\n \"enableNodePublicIP\": false,\n \"mode\": \"System\",\n \ \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\": - \"AKSUbuntu-1804gen2containerd-2022.05.16\",\n \"enableFIPS\": false\n + \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"enableFIPS\": false\n \ }\n ],\n \"linuxProfile\": {\n \"adminUsername\": \"azureuser\",\n \ \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDknmXRBGa/GuPCkpyydwCNedhfwINfrO674LWcBih2UjjJc5yULl9cD9LsYMWOzHVqM7H7RFxaONyq46h9vgxB/1XAeJUGc2jS8GS+vsS83bXX6vVrwa8wVeD380SJcF87oH3xf7/v2hlKv3drXi7xPE2JBjTHIOJJ6OxX+bAFXBqd1dPvnX1X7kEyX6vvjvuQrp7rFDbLq/eRpmng7kykodASQkUFZlt5+gH/U/z/a/DRoTocgzNqGl9RmesNtslQJs17Vn/JIJMM55qcRCEKoJ3Fq/Osnx3tHNA3G/vTs/+sVgh0tZmM6oIMRfTKzJskSZkMZOd8KtK/7ROCZO72izRmzwTFwFvRe/I7iHQ4PrjeKAqKDvgHJ/0LlaHmIYysZI21OTo6HcoX4HmA4RsIybNAM5SWeMMGiGe94/LYPk9sgB3o8aMv/nI/hr6vA28c2nso7itOuNcH1GZalAnbCObNv7QqVZ23FPlCjV9GXWCDCnQeCoIispJCrf68N5s= @@ -611,27 +708,27 @@ interactions: \ \"enableRBAC\": true,\n \"enablePodSecurityPolicy\": false,\n \"networkProfile\": {\n \"networkPlugin\": \"kubenet\",\n \"loadBalancerSku\": \"Standard\",\n \ \"loadBalancerProfile\": {\n \"managedOutboundIPs\": {\n \"count\": - 1\n },\n \"effectiveOutboundIPs\": [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_akkeshar_cli-test-aks-000001_westeurope/providers/Microsoft.Network/publicIPAddresses/d48755d8-258e-408d-a294-a3d463e6903f\"\n + 1\n },\n \"effectiveOutboundIPs\": [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_akkeshar_cli-test-aks-000001_westeurope/providers/Microsoft.Network/publicIPAddresses/d479fbc5-5fc0-442f-9bc0-849a7c79817d\"\n \ }\n ]\n },\n \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\",\n \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\",\n \"outboundType\": \"loadBalancer\"\n },\n \"maxAgentPools\": 100,\n \"identityProfile\": {\n \"kubeletidentity\": {\n \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_akkeshar_cli-test-aks-000001_westeurope/providers/Microsoft.ManagedIdentity/userAssignedIdentities/cli-test-aks-000001-agentpool\",\n - \ \"clientId\": \"3926865b-4b7f-4402-912d-70b38f92b999\",\n \"objectId\": - \"5837020f-4cc8-4b33-a42a-47eef1a590ce\"\n }\n },\n \"disableLocalAccounts\": + \ \"clientId\": \"d6c82141-9899-4ce3-943d-1343c5c4d69f\",\n \"objectId\": + \"9ac9860e-edb2-46ba-8807-94eeb72d5331\"\n }\n },\n \"disableLocalAccounts\": false,\n \"securityProfile\": {}\n },\n \"identity\": {\n \"type\": - \"SystemAssigned\",\n \"principalId\": \"f8ee3c52-6a28-4d30-966b-51ab5e28f9c7\",\n + \"SystemAssigned\",\n \"principalId\": \"c598b0ac-38cf-4a4e-a2ad-c3fc7525a18c\",\n \ \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\": {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" headers: cache-control: - no-cache content-length: - - '3563' + - '3566' content-type: - application/json date: - - Tue, 07 Jun 2022 17:50:21 GMT + - Tue, 18 Oct 2022 19:32:47 GMT expires: - '-1' pragma: @@ -665,14 +762,14 @@ interactions: ParameterSetName: - -g -n -f User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) 
azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: POST uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.ContainerService/managedClusters/cli-test-aks-000001/listClusterUserCredential?api-version=2021-08-01 response: body: string: "{\n \"kubeconfigs\": [\n {\n \"name\": \"clusterUser\",\n \"value\": - \"apiVersion: v1
clusters:
- cluster:
    certificate-authority-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUU2RENDQXRDZ0F3SUJBZ0lRS3BQVlZSWWVvQ243d3U3eTBHeE9mVEFOQmdrcWhraUc5dzBCQVFzRkFEQU4KTVFzd0NRWURWUVFERXdKallUQWdGdzB5TWpBMk1EY3hOek0yTVRaYUdBOHlNRFV5TURZd056RTNORFl4TmxvdwpEVEVMTUFrR0ExVUVBeE1DWTJFd2dnSWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUNEd0F3Z2dJS0FvSUNBUUNqCmR5NXVGZTJLckNkMlY4VEp6YlJGYmlKRnJPWFdEWjQxVUFwcnlTS2hFWG56S1FHS3lrMVRPVHdJZGVYTmNESWcKWExWL0lhcmJVNjdGV0RMNWZBRE9ZWi9pUnlISUlwZklZTmIzZXlXNVpSVFRQaUZyampHSlBOakpaSCs2YzV2bgpYL1ZpS3ErSS9CbE5FZE9Gb1BDZVRsZURkRXVXN24vTzkrT1VtL3o5OXBMQXg4czEzb0VuanFCbmU0WnlmZU15CnB1Q3NUWDlyZzBMVFRQSXRhSE50RXFnM1R4ZmsyN2VKdnNjYlVJY09QSjM3T0dSeXFVcGNmcWF4MDdOM3FjaEcKcUI1SWo1YzhUNll0RDE5eS9WUGI5TlNjK2Zvb2tpek1NWEQ2dmtRN1Npd0NMQzlqTjV5UVVPMkJDOUJaeWFxOApuVlBoVE9xaVNUUS9Qc3ZHWGdZU0lVT2MyNjJjUEIrK2ZGcHlMS0E0ZTJQSzRCUksvOEgwMW1lckpueGFrb290ClN0T0lGRDNNZkVtMHNFbHlDQUEwakh5VVhKK1d1NFViQU41MW1haFFYMzlGSTJ5TWRlVDBFbnF0Wk5QaVJYRmQKaXB1SDNhNGhMUGVtQTRUalRuaHN0MnRSRkIrZHNjazRpT2ZKTmRSUXZ0VkxTaVUxOXBlTktNS20zSVRZQTRXMQpVWkdSZWdPTE9zUEk1K2VmNmc2ckUrMXlyaEgxZGxmc1FHTkxyV0EyNTNoZzV2YXNJUy8yc0FkVVBJeCtOQnEyCnN3Z3pBM2JZMjkzQ3pST0YyaExxU3g0cWhJWW5ubDdEUWxMYUlzRWtEd1lxczVtRTJGcDZrRmFBZ0w2UWpYYmsKditoai9QWE4yV3dJcjNJMmtkV00rQjRaeGZmQTE1c1V1Wkwzc0g5N0Z3SURBUUFCbzBJd1FEQU9CZ05WSFE4QgpBZjhFQkFNQ0FxUXdEd1lEVlIwVEFRSC9CQVV3QXdFQi96QWRCZ05WSFE0RUZnUVVKK3hNRWxYQmN3eDhsQUdaCkxMd0pYUkhSL1Y0d0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dJQkFERms2ejF5cCtpeEkwZUx4N2h6RmFCNXB5QTIKcGNaVEZkTVRwVDRtbnNHV0k1aGtEYUNKSDloY0NlQVhVRlMrWU9ZUkR2STY3V25qeTN3NTVHWTdJZlhHVitjMgpUcGRjMm1JYmdnaDlhNkpoeGQwWkhLeUpBQWlrekZ4UHpnVHFJUW16b3ltNDJHeDdvTWp4a3lmYkZYekdSYjRuCmJjcjFHakhNbllpT1RCWW1SekZURGVjS0t1ZFpHZDEwOXE4Slk1VTBhbUsvVVAxd1d4YTk3a2I0RGRaRFZCVjcKUHlhOFBNaE9ISmVuUGt5YUFTeTdMQ2UwTHM0Y1Fna29wR0VyYW01SityNlE0RzE5Z2pvQzFucjZuMzlOVjJhUAozZ3Y1cjFOZXJuZEJZWVI3bHNsa1lkR0FkVWN1bHBGUnNna0xFdjNEQ2Z5RmJsTXFZUzF6dC9VSW83UE9TOHF2Cnk0dXFMd1pyVGpZUy9GdTJHM2N2MjMxeFhRV21SeHQzS0svZmdSaHdUMkVNYUlEc25MVVVXTjZVNEdNK04waUsKREM5ZkQzUEN1UXp2K09KcVRZank0MnJsdTRlSVRrRys0YW1YOTJMSERYVHZ1Wi9TMHZEMUdzUXdwdlZmaWQreQpET3M2aG5tbHVFME5XQzQrdU1IYkNBNkMwanRHczg0ZmNUenV4ZmFMZ0hseFBQbkM0aTN5MVd0QTgzcjhCQmdRCnF6eldqcnZRc3p6dkI4anVWZGNHTytMTEtRVmlDUDNkYmpuWUVXUFA3Z3ViSE81WnhNanMzVkM3a0pxZHVMTnQKU1lNMWRWTklkQ3huZUtwMkJ3U2t2MjVudVhZeEphbmNGUDE3d2xRMGxLaW9UTXdZRmd5YjdNS2VYMjN1T3UwdgpGUDg4MithclBNTE9tOHNYCi0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K
    server: https://cli-test-a-akkeshar-1bfbb5-7c7ab37f.hcp.westeurope.azmk8s.io:443
  name: cli-test-aks-oknty5zeyc6
contexts:
- context:
    cluster: cli-test-aks-oknty5zeyc6
    user: clusterUser_akkeshar_cli-test-aks-oknty5zeyc6
  name: cli-test-aks-oknty5zeyc6
current-context: cli-test-aks-oknty5zeyc6
kind: Config
preferences: {}
users:
- name: clusterUser_akkeshar_cli-test-aks-oknty5zeyc6
  user:
    client-certificate-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUZIakNDQXdhZ0F3SUJBZ0lSQUlQZEdiM2ppNHpGZGw2WmVBc1pnbm93RFFZSktvWklodmNOQVFFTEJRQXcKRFRFTE1Ba0dBMVVFQXhNQ1kyRXdIaGNOTWpJd05qQTNNVGN6TmpFMldoY05NalF3TmpBM01UYzBOakUyV2pBdwpNUmN3RlFZRFZRUUtFdzV6ZVhOMFpXMDZiV0Z6ZEdWeWN6RVZNQk1HQTFVRUF4TU1iV0Z6ZEdWeVkyeHBaVzUwCk1JSUNJakFOQmdrcWhraUc5dzBCQVFFRkFBT0NBZzhBTUlJQ0NnS0NBZ0VBeVJ4TUlaQW5sdlRLWGVzaWlEMUQKSTdVMHpHSkU0bDRnQjlLSXY2dUgwMDl5OXZqc1luZUd6TnlHRHJVa2hXOFQrVEJJc2FDYWlHQUQ5OURqb2JubgpOWUl1SlF5d0h5bUxYVlh6ZnppSzkyUXpyK1ZrNk0vZWUzeGZKNzZZRWN6a21CSlR2RUdHUUU4UTh2RXcwQnlSCnZRVjVwaGZOYS9tUElCNXI0Y2ltYnFOem9BUG1KQVhrUTJVWEJoQ2pvbnBmMzhzV0ZQMG9sZmpFTnpiK0hHUVgKVGZRYWxHMVdpeU1kc3VQaVpJVnhaditBWDg1eXduckV0MFkreml4WmRjMytzVm4zQ0c0bEVLek9yYnZidmVMMgpEU2FxUXQrd2oySnp5MC9kalRhSGlBT0tqVHVZQndNODlTYW1FWmlGS2wyaDVwTEhtdkcyN0NCU0M1SVNsd3dhCnF4dks5djlud2NEMjQvRDNVTkdlbGMvTVNkcHN1akd4a3diMzlYSnVTaFp1eDZ1Z2JJSFZHKzBNTEFGbndNUmgKcWgzQitTdUF0RFlMaGVvdGhWdVBxZ0x5NUVEdW4yTS80c3I5d1dqeXhlVUVNUkhnSm1oQm9LbFNSWURSeWwrdgp0bWNpekFqMVgrSEVITFlyRldrZDBJMlhsRGVQZGs4dWJPVVdHaGllWjRPMTFlZm5RNlBhdkd2TkJ0L1Q4aHM1ClJ1bFhiaGxOLzNsNGYyTWk0bm4zSHAwajc2UjVhVm1HamRXalpYSDBIVnNKQTFOM0QzUDFFS01GdTVYRmIrRjIKT0ppK2l4VzAyNUtIa2pjNzZVeXo2TnBnUXFkQlFsRmpwRkZFQzUwV2tIK2pDTVV6YU1GWXE4dXV0ZjB6bHJUdQp3aUNKYXpIT1FLS2JMT25SSE1VU3JSVUNBd0VBQWFOV01GUXdEZ1lEVlIwUEFRSC9CQVFEQWdXZ01CTUdBMVVkCkpRUU1NQW9HQ0NzR0FRVUZCd01DTUF3R0ExVWRFd0VCL3dRQ01BQXdId1lEVlIwakJCZ3dGb0FVSit4TUVsWEIKY3d4OGxBR1pMTHdKWFJIUi9WNHdEUVlKS29aSWh2Y05BUUVMQlFBRGdnSUJBQVBFNGpqL2p4bkQ0WXY0NTVzYQpFVTdtSXZManYzSzg4dDJIY1ZGWkErUk5KV1dPaEkvMU9TcGVZSzF6M3BDVCtoSFRORnRScWhZNTRyMTJ3M0JiCmRuTGNUSXc1ZTd0Wi9zMEcyVmtZK3ZiUUxRL1lHYTlHQVdQMHdLQ2IwNEIzS3dybjV1OFNRRWVqRFlDL29kSW4KV0FYU1NNc3ZJTXNZd0gxdFhrdWpuM3FVUTQ4dFdBRkJjdHd0Qm1BOXd2M1NGSWRLT0JhdnlBa1l0ZzRUSWVKUApUN2oydUk5S2FTb01PTWFzNUdQT3ROQnVGeUhPRkptVWJIYUdBQ1NmU3h2Zm9UdUNhMEVpWHhKKzFIUFBVbDhCCnVpZ2xVRTFyVEdpS0VDU2ZMTS9aaENPQ2ZaaVZXR0l4RkFxcWcvNFRhU0ZwdEl3Z2dxbUFjOE5GWEdOYjdUWE0KVko3TUN1VDAyc1VHa3UrMmpFcHFtSHFvREdXSWtVa0xOOVdJWTVBcHNNYU9VYVZJMWlLenBvZFNUT1VDWGdDUQp6aDlYeVArR0pJWGxjdnVsaWppdjZOYnA5QTh3Z0xacHFoU2RqVyt3bzhOcFhSSUpxQ3NBaW9JUGJUTE10UTVMCjBKWmJYd1dqRmdway9URG40eE52TFJtSGY5UjU0UmliT2lkT2pMTFk5cVVQcW1mY2Q3d1RqQUlJTDdtcWp2MjYKaXBFcTNQOUhWK2k5MGhOaE1yK1BzSjV2UTh1cC9KQWxMOGljbkQ3V3RTc2UyR2dER2hYdDRDb1hOQzFTdUw2ZQpIdzRvUTZUWi9xYlNFT2J1MjVYZi9GYjA4ZEdTTHJPbWp0UUtva3d3SkdsUmNRVENOYUNCUC9PSnFNb3VDRFJQCk1wQWp5WFkvYmJNZUcxdm1TejdlNDFzagotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg==
    client-key-data: LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlKS1FJQkFBS0NBZ0VBeVJ4TUlaQW5sdlRLWGVzaWlEMURJN1UwekdKRTRsNGdCOUtJdjZ1SDAwOXk5dmpzClluZUd6TnlHRHJVa2hXOFQrVEJJc2FDYWlHQUQ5OURqb2Jubk5ZSXVKUXl3SHltTFhWWHpmemlLOTJRenIrVmsKNk0vZWUzeGZKNzZZRWN6a21CSlR2RUdHUUU4UTh2RXcwQnlSdlFWNXBoZk5hL21QSUI1cjRjaW1icU56b0FQbQpKQVhrUTJVWEJoQ2pvbnBmMzhzV0ZQMG9sZmpFTnpiK0hHUVhUZlFhbEcxV2l5TWRzdVBpWklWeFp2K0FYODV5CnduckV0MFkreml4WmRjMytzVm4zQ0c0bEVLek9yYnZidmVMMkRTYXFRdCt3ajJKenkwL2RqVGFIaUFPS2pUdVkKQndNODlTYW1FWmlGS2wyaDVwTEhtdkcyN0NCU0M1SVNsd3dhcXh2Szl2OW53Y0QyNC9EM1VOR2VsYy9NU2Rwcwp1akd4a3diMzlYSnVTaFp1eDZ1Z2JJSFZHKzBNTEFGbndNUmhxaDNCK1N1QXREWUxoZW90aFZ1UHFnTHk1RUR1Cm4yTS80c3I5d1dqeXhlVUVNUkhnSm1oQm9LbFNSWURSeWwrdnRtY2l6QWoxWCtIRUhMWXJGV2tkMEkyWGxEZVAKZGs4dWJPVVdHaGllWjRPMTFlZm5RNlBhdkd2TkJ0L1Q4aHM1UnVsWGJobE4vM2w0ZjJNaTRubjNIcDBqNzZSNQphVm1HamRXalpYSDBIVnNKQTFOM0QzUDFFS01GdTVYRmIrRjJPSmkraXhXMDI1S0hramM3NlV5ejZOcGdRcWRCClFsRmpwRkZFQzUwV2tIK2pDTVV6YU1GWXE4dXV0ZjB6bHJUdXdpQ0phekhPUUtLYkxPblJITVVTclJVQ0F3RUEKQVFLQ0FnRUF3N0hCY3pkdkRybU00ZThZWHpJZHp2VlpOemk0aHYzSWRWUXN0VFlZcVVxNk9CRHBGTGZ5ZUE1dQpkSWxYekcvV0k3a1VRNlJnT3l3VE1HWVgyeHVuSEs1Y2NManorSjdZZWk1VkR3cmFUdmsyUW9jWnRtSHZ3SUc1CnBsY1VhdDNSNHhpU1dySDREVVBpM2tSTWwzWDFhdFdWRmtZN0lrSnBQM3ZheG80VlJidlBtM212eklYZkpLV04KemQwTEhlT3V2M3BETnBGUm1QM0ZFV1YxeC9LdHFyRkoyL3k1SmhNNXB3V1JHdmV1N2ViWjZHQmYwMWJiYk92eAoyWFhXWjRmWHVvUXNtZ3QwRGRIYUc3eWRpeHo3Q0xrREhLZkdpdi9tWllTRnRnL0Z1bGlYemU2YmROZDF6YWVhCkdhMHNoNWt0SGN6ZVpxdW1zQWFUVDZjSVorcmpWcnVEdzl3MTFLS254VXVWUXlLL1BlWUtqWG40UEJVcWo5YkQKaUFoVC9jYUp3Mm5hQ0MzWXluVlZYVTgxM2JXc1owZWJXMmFPUm50V0VZcnVIdzQzdG5WaTIzYUx6N20wcUFXRQp2N3Y4TXpmQm9lZ3hYcTJnWk1yeFMvMnM3a2IwRk83ZDg4NkVkcW5iSUN6c01yekpNUC9UZXdnZTcwY2tpWjRUCnJuVzI3dDY4WlJMeVpTMTg4N2lFU3BrTEdVTEVXcXpZZmg0UGU1U0lEN0xENFcxZTYwK0dFSGtiTUpOZHhuR3YKbFBTY2Y2TitIUHlhc2ZMdmlYbUs0eFR0SStpZ0E1L0VBaVJyZmpOL0ROeEJPS1hPS1c1QnVoWThVZDNWaEdPMQpxYzBNL3VlQVZTSEFydnp1U3B4dlc2dHl4TXRQWTQrbkhVTWE3bVNSSnFZUEtuckdudEVDZ2dFQkFOcWJ5Mlh3ClBqRFVwNTdSd3AyRHdlZTVGYmNCU0s1a3AxRmwrb2VkamRvempWQ3ArY1VuNUNQbEcrVzNKSE9Oa1ZMVjNVK1MKbkc0VEFUa2NWc3lpMXV2Z3dMT0R0Z0hNd0hxcTU4UllVdysxRnRvUnhWRFBFVHJBNmJidnJiaWVsSjJ6TWZaNQpxaVh0U3daajlTVTdLbCsyWVF5WnJMcUtrampDUWdWbGVQUkxhNzlSMk94UllQbzgyNDRXOGM2WDhDTitNNTVNCnhFWDIvaFBRSWUvNk1iVEJ0QVVJUjZtcHhVSEtBTEZETDMwOFcxRENoWGhkTytwUmtpOEFsek5IOHJidy9QYUgKNjhkTzNWWHN3OElZa0VxaTdXRWFuM0JwTWl0RlpBdnl2SzBsaGlUdlc4YkRlOThTKzNhRk9ZM3ZFaHQ3V3M2cQpXOXdwOHp3Ri94OE9kRXNDZ2dFQkFPdUNVU2hKOU12bG9zenhJa1JZV3p0RC9NVmh3Szl5ZVhrVjFLZExjN29TCjBmZ1BnVjJaR05YQ0djVzA5VnJTMjBzbEFPUXhXalFNc3Q1QTRUM1RPZ2RJQmdCajNwV0pxK0lXZkhHcmpYMEYKNkRGR0Z4cWhhK3FCUmdUbjVJaDhDcEtHV0J0NVpKWmUyUG4xNndnV3J2RSs2ZytiVnZDZDBFb3FKQzQ5dHU1dAoyZ2dlUFdOTzF3T1NQb0NXSGcvWTVCNmR1WCt2NVJvM20rT2xOSC9HTXlIVXRHaVBTOXZsM1ZkM1NoenZGZW5CClNTNlBDUUU1STlTOCtZRG92emorM3dFaHpMWUd3aGxkN1B5ZE5wRGoxTXRkQlViMXprNE9yMks3SEtVckNZd3gKNVBPMU0rMVlzNWt6Qk9ocFdWckFHd2RmVzZOeTB5bVV6NndoMnN0b3lCOENnZ0VBV2dybHduMHNBSXoyNWRIYwphV2pGeTU1T3RXeFA5WlQ3M2VrMVFmckJYT2p6QWhDaEVuWVY0RHNVUEpBczhYT1VNaHhvNWNlcnF5TWswOU02ClpWdUZUdTdxTUliRnh3UUJIQVdkSlNDRVJlbi9HWjlNT0xheGtCTzVzR0lVbWdDbmtqVW5GVENUeHI5d3NaY20KZUVsZ2NMLzhSV1plNFE4R24xNVVOMGpPZzBFMWhqNCtMVWh2dTJLcHhHTHJBbjlPcUMvcEx3RExyMDNHNVg1TApIRDlmamVaak1kZHhQUTd5Y3VDelhGQWlGV0Z3ODFqTUJRSFNGZmUzVUtqYm5vRzZGV1E1clluYXNlTm9BMlpjClUyd3VzcTlkT3l2dUZDMGdQdlo3K1Bmb2d5RDdtZWNGVkY5Y2ZFdWpTL1FmK3JoSUt3MnFING5JQUcwL0JMQXIKbnkwaTlRS0NBUUVBaUZJTGhpV2c5REU5RmlIeDh0dXVHMVVVU0hCMzUxWjg1OUR5SWRBejhaNFpPZHhIeHBUcQo0amlUVUhTWm5QUDJpYjJta1cvYnhjc2Y4alJkN0xxS21WSmdYdk53RU9ENnhFaTl5ZEpDZVA3bFV6ZytBV1UwCm9TNHBDSWZ1S2U2YTlBdWRqbk
dxL3J0eUh1WHhJT0o0YXg0K3Y3L3NDQ1ZoUHJ5NzZxUkFjaXpXQ3VMdUo4Q28KQnVJdEdhTkxUV0wvVUpWOW5XS0VXN09hb1orc0R0WDhIUFlSWTRDbXFXZVp3cHBESlJKamZjWXJCUzg0U0NQNgpFSmMrV1JiZ01TNmVpUG8yazVxLzJZMGJzM0dROXpkWm9rbDYxNE5LNHJYdllWZnJrZE9pbUNyOVd2QWxXWnFCCmd3a2NLeWRuMkhiZ2Jsa05JMFJBNUkzRjA5NVkxbXNWZ3dLQ0FRQkloZ2ZZd1hDZ0pDYmJvR0I4VlhyemllRVoKd3hqSkpmNGNUYWlWall3OXdBVHMwQTJOZGl4bXhHUGR0T3B5RW9SY1ZNclN5WTBLNjEza1NNSGdvcHdpek9oLwpQU3pTSHk2WmFsbHRkMXZiTDkxRlJ0T2pIRHQ1ekZKRHFrSk8reEpQMnYwbnVGS1dqNExtSTJ0bXdSVEdPQndGCm9ZU2FWaGl4TWJiZC9zL0VDenJPS0dhT2xPd295TldRM3gyWTMwamJ0S3hwNEUrdm1rdnk1OG1IRUs5dXVXWloKQWQ4Ukp4SlNTNHpJazRVOUxJaXVtS0lMK0RaMnFTc2l2TVhtZndUSm1rMmlCRFBSRjA1WXJYQ0RweUErWWVuRApCQWVwenY3TEpWdng4OHg0RnhNSWZwcVFaNzZ6c0Y4Z2ZTcFE4TWJxYWh6dUJtTXgwc25NbDlYWnhteTEKLS0tLS1FTkQgUlNBIFBSSVZBVEUgS0VZLS0tLS0K
    token: 3a77e924126420f6fe3bd34f5a252da0a1c474997f6ec972b957172bcf255d8ff0e6e5aaf54d0b9acb8e96b8606bdce2a993f05c318b75ece492701dce393f00
\"\n + \"apiVersion: v1
clusters:
- cluster:
    certificate-authority-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUU2RENDQXRDZ0F3SUJBZ0lRUHhkMzdvK3NYK2R4Z20weERWQkZ5ekFOQmdrcWhraUc5dzBCQVFzRkFEQU4KTVFzd0NRWURWUVFERXdKallUQWdGdzB5TWpFd01UZ3hPVEU0TVROYUdBOHlNRFV5TVRBeE9ERTVNamd4TTFvdwpEVEVMTUFrR0ExVUVBeE1DWTJFd2dnSWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUNEd0F3Z2dJS0FvSUNBUURICmdnalE0RnRjSEJRUEthWUtSYmlSWWthQWd5bEF6bzFML0oxa21ZRXdYcUFKYXp6UWhDT3puajVPZW9ZSCtTbnEKQjMxcU9NVHpzVWgzbWwvYzlpUXBKUnlkOFg3SE9RR01xRVRzL1ZpOFJvaG5SOVJpUC9MSWRuNnRQaDFFMmJLSgo3bmpoR3RVUm9zR250a0hzUTRjOERERWhvU2gwSU5vWXJxN3BUQVRuR0lRSTgvYjJ4S3lTK2hiL2tSZXZwSHRkClFCZ1hMWngxbDU4dlFVNUZqWjNhVDBRTlNDem1sWGIzeHZhU1BNN01CUkJIT1Z6WHVLd1RCNUFqNDY3UUltb00KSlA5MjhwVmQ4NnhURTdyUnBjbmZkY3RzZkwwR3JoRUVKSXZhYkZ5cTZxeG1OMnJFUG5teFRDWlpNRWhYWjUrNgplVlg2UGwvL2RkckpjOFlRTUwyYjVSU01EeXBTR2wwMWxHMEpCbUV3K2hHaXcrQjBzMndtT3hKd28xTmdhMDZxCisveFFoM0xTS0hna0hlc0hZc2tlQnppVnd0c0U1SzFHSzBVR3dpSGVpSk95UTc5S3E4TjRjQ1kvQk5vcXNiRHUKcG1tQmE0QjY4RThaNVNGNnBsWkVPcFdneUl6NTJaL3RIczFwREhUc0k0QngwdFVRZDdQaGFBb0t3VWYxNGhPSApTOWpaUnhsb3RRM0dTdGwyU3Iya21XSzZFeXdJcUNjenJUZEoySW00Skw4dEJUVG9qaWt1cFNoV3E1N1kxcG81CkNHeHJ2N05BZTNqUWdCbkpncEdyNmtvWjdmUngzZkh4cGh6V2dwV2ltL3JIYVBsQUgyVXNKeGZ5OC9CM1V5dWsKRER1TFRFZ1FmOTYxZGtNT0JabUo5dG9yQU5SU1M5dlBkVFhhSm5xMEtRSURBUUFCbzBJd1FEQU9CZ05WSFE4QgpBZjhFQkFNQ0FxUXdEd1lEVlIwVEFRSC9CQVV3QXdFQi96QWRCZ05WSFE0RUZnUVVVV2tscmQrZmVSRVpJOU5NCkFMVWVJaGNDOVpVd0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dJQkFMaXdUbDhtM0hiYWJNb0ZtT0dKSjZkM2hzc04KUVhobGtIdzlQbDcyUW56WkFXei9SWTNNU2VhZkd4R05DVERsZGt3SVk5bkwwUng2Nnh1Wk5LV1ZrcHJIRXRhTgpUeUp4bHNmQUtVNlQrbzNNeEpHU01oUkZRV0RRSTQrWTEyQ1lPUWo3SFpQTTVyYmNySmxkT2pwZG0zZHE3ZVJmCjg5Nm1PdE1kWVRtK1d2SUFwVm5uNmlkOVQrRlB1aW4xNkE1YWdMbVpoTVRxZkZvc0ppcHErWTlEa3Y4N1cwZmoKTXhRK0hiSGJqR09zOGFCQmNXcTdibGhYL0oxRXpyd2ZlWlpwVEJTUmZJMGEzZUt0QXlPYTVzWnV3NFpCdW5mcwp0ZzRrZG5GK2J5RVl1cjVHSFJpMUhLTGhnUnc5K1o1ZUExU3Y1amgzZEk4YmpPZ1hwcW9jWm1zYWNDVU4rc3duCitHUnNYcUxhMzBUOXZuNUVmREhWa2M5Wis0b1czN3NyQVNwYjBoeUFZUTFTZ2NDeDYyTlBkK256aUJ5cEduUjcKcGxVSWlabU1rUFl1blhidzlmMHU1cm9ielNwMmZJa2JzVVlNOFF4WGZsR2QvUVNUZ09MQmVqNmZnUUFobCtIbwpVTkt3cHA4dkVndGZLZ3VST3BSclBZNXlZOCtmb2lEd3AwN2crQmgwUlZIaHJla0hENmEvTU44LzhXUktqb1pKCjlHb0MzaFlqL0FBT2NzWXk2OXpqUGhkSWlKc2NEOXQ2VUNsYWU3WFcvOE91elJnZ3dqaE5QRTNWb1dtQ3lqSngKdllWZFU4S3BPdERJd2JMZytwRDg2d2hxMk5zODdPWXROWGVTOE5hTG1iUHY5cGM5alBUeGhwVERqU0ViTFRYYgpHQms5V21lL3RRZXEwTDR1Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K
    server: https://cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io:443
  name: cli-test-aks-np65nogvncb
contexts:
- context:
    cluster: cli-test-aks-np65nogvncb
    user: clusterUser_akkeshar_cli-test-aks-np65nogvncb
  name: cli-test-aks-np65nogvncb
current-context: cli-test-aks-np65nogvncb
kind: Config
preferences: {}
users:
- name: clusterUser_akkeshar_cli-test-aks-np65nogvncb
  user:
    client-certificate-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUZIVENDQXdXZ0F3SUJBZ0lRZDZhWUtoM3pWaStEQkEwOUlIeGRmREFOQmdrcWhraUc5dzBCQVFzRkFEQU4KTVFzd0NRWURWUVFERXdKallUQWVGdzB5TWpFd01UZ3hPVEU0TVRSYUZ3MHlOREV3TVRneE9USTRNVFJhTURBeApGekFWQmdOVkJBb1REbk41YzNSbGJUcHRZWE4wWlhKek1SVXdFd1lEVlFRREV3eHRZWE4wWlhKamJHbGxiblF3CmdnSWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUNEd0F3Z2dJS0FvSUNBUUMveERaVU0rczJKVEE2Q2xoc1U1RHAKYUdOY2Z0cElGNlFUdDJVK2xneldMZ25TWlhDQTc2R1FaN0pjb0NrM1hIUjBnMGMzQVdMcXJtS0N6dDljbU40eApZMGRpR2dxYW4rNnd6ZTlqT2FXeHY2SzYwZUE0NnFlV1NtT3p6MWtEZ1prOTFIdXZIVkJqSFQ1N2pXYTE1SDlMCjd2ZlpxbDlheWJwdUdYRjJPaGo3SHU1UDF1a0VyRXEwbW9Za3U0Z29lT0xGb2UxUzJ1RzE3ZXdqVUhuS2RWYTQKQ0F2T2NQb0lTWEdwdGROTmI3UU8vU0xaYnRnYi9aaXhMZUZkZUxoUXZ5UlNHYXl2QlF4eDJIVE1RWEVoZDFjVQpkbTc2QTdocWcwY2U3cHdlSlV1VEk1R0IvL3RDalBxM1NpY2tVUU1HL3FhMHVJRDIwSFNsK2sxY3hvcDlBRmhRCkNoanZ6T2F2NDc1b0JLUmJNdS9QdnQvZGtPUUoxWk9UaDQwK2wyQmpxYWRTbW5CamdLOVYyN0sxSUNKVE9CZTYKOVVwWmpiYTFRamJMdm9jUkFZeDNBazhsN25ab1RBK2VCSG9abUlCN0grQXFaTlg0NVh2RVFVbXlmTldOcER0SQo3V3R0Z0lYRUxSNFRkTnNUVDFERTYxTW1iQ1h3WSsrazdVcG1tcWZDbXd3Q0pnUG9ia1BjQThISnFBN0tDMFZMCkk0RDNaQUUrcG5qSU54VGJQaXM0TzVpWFE2eEYySU9lZmpsRWhBU09Jdnl3T0Rna3V2NWRncjYyVjJHN2JMZ3UKa2kwcjc1b2RjdU9ad1QyMVcyMWZ5YVBlTXpPamV2Nm9FMmhmOGk1VXJ1Q2RNc3A4dXBXNEtzTDRRVlRzWk8rawpZMGtsMmFjU0MxSWtVbFExQXhuaEJRSURBUUFCbzFZd1ZEQU9CZ05WSFE4QkFmOEVCQU1DQmFBd0V3WURWUjBsCkJBd3dDZ1lJS3dZQkJRVUhBd0l3REFZRFZSMFRBUUgvQkFJd0FEQWZCZ05WSFNNRUdEQVdnQlJSYVNXdDM1OTUKRVJrajAwd0F0UjRpRndMMWxUQU5CZ2txaGtpRzl3MEJBUXNGQUFPQ0FnRUFrbndlaXNBY2hQU0FCL2JRTEFxSgpBTUg1NjEva1hXYUFxT3ZQUGZYQUxBcUUxRTl5a2FTWjh1NDVIRUxaT0xZS2NKZVl4NzVQei84eklIK3ZNQytYCkZmS2lNWjBDeXV2cFJhY0dWM0NuMkk1S0hXZE1uUXlJcWtNWEszaTB4dndrN2h1cE9XNVY0dzNyWnFjZlA2MTcKU01kVFRrL2ovRzJrNk9OV01SVkg4bGJpMmxSWU8yTFNNanh4TFIwSHFLb0xnK01qdDU4aFg5K0h3YlBHRDRMTQo4Zy8rZ243V29ZS0lHbi9VQitJWDgvais2L1BiYXpzdjd3ZnQzbmhZRWo1SytzdWUzSG1nTXJWTStnekpKKy9XClExTEhHcGNRL3J3cFgrTnFoRjVjT2VzbjVqWDBwNUNZNFFMK0p4Mzc0V2RhMnRnQWphSmV4cFlIQXlUOUo5eEEKNlFuM3JBb1VRYmJnemVLdVk1ZUdOYkNWZXBCR2FlODdWcm8yMy9oNVh4a2lvaEFpd0Nsd29wdzZtdGE4VTM1dgpsdDRZTFluU2tISm4vdkdZVTlsZE0wWnlUZWdFTnlDQWt4Y3RVMktvWkJpbXFoK3NsNHQ0a0V3c3VhWEt4YXAzCkhFekxkL01mSFpNNVNWNXcvOFVYNzZrNWxreGwwOGxaYWhUUzJaSHYwVGVHT1JucUpHVDYxRTBrektYbUl0U2IKYVZBcmVOejZWZDd3TjVXL0VIQ0o1YXlGL1RtMUEvU0VlZHZ0UXkvaU9sUlg1dmsyeURTTmErNXhTYkpkZWx4SwpsMGZFUEVCMGl4WjRORGx2UXFEMG5oeDhEZm9YY28vczVLS2xmR3BycG10ZUZQcG8yZ3VjSGNHazA0L1I0L3NqCndIQzY1VkJSTHdJejhtTUJkZ2pVdjdFPQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg==
    client-key-data: LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlKS0FJQkFBS0NBZ0VBdjhRMlZEUHJOaVV3T2dwWWJGT1E2V2hqWEg3YVNCZWtFN2RsUHBZTTFpNEowbVZ3CmdPK2hrR2V5WEtBcE4xeDBkSU5ITndGaTZxNWlnczdmWEpqZU1XTkhZaG9LbXAvdXNNM3ZZem1sc2IraXV0SGcKT09xbmxrcGpzODlaQTRHWlBkUjdyeDFRWXgwK2U0MW10ZVIvUys3MzJhcGZXc202YmhseGRqb1kreDd1VDlicApCS3hLdEpxR0pMdUlLSGppeGFIdFV0cmh0ZTNzSTFCNXluVld1QWdMem5ENkNFbHhxYlhUVFcrMER2MGkyVzdZCkcvMllzUzNoWFhpNFVMOGtVaG1zcndVTWNkaDB6RUZ4SVhkWEZIWnUrZ080YW9OSEh1NmNIaVZMa3lPUmdmLzcKUW96NnQwb25KRkVEQnY2bXRMaUE5dEIwcGZwTlhNYUtmUUJZVUFvWTc4em1yK08rYUFTa1d6THZ6NzdmM1pEawpDZFdUazRlTlBwZGdZNm1uVXBwd1k0Q3ZWZHV5dFNBaVV6Z1h1dlZLV1kyMnRVSTJ5NzZIRVFHTWR3SlBKZTUyCmFFd1BuZ1I2R1ppQWV4L2dLbVRWK09WN3hFRkpzbnpWamFRN1NPMXJiWUNGeEMwZUUzVGJFMDlReE90VEptd2wKOEdQdnBPMUtacHFud3BzTUFpWUQ2RzVEM0FQQnlhZ095Z3RGU3lPQTkyUUJQcVo0eURjVTJ6NHJPRHVZbDBPcwpSZGlEbm40NVJJUUVqaUw4c0RnNEpMcitYWUsrdGxkaHUyeTRMcEl0SysrYUhYTGptY0U5dFZ0dFg4bWozak16Cm8zcitxQk5vWC9JdVZLN2duVExLZkxxVnVDckMrRUZVN0dUdnBHTkpKZG1uRWd0U0pGSlVOUU1aNFFVQ0F3RUEKQVFLQ0FnQmJjS0kvVUdwdDdSbXI4MWJxZ0ladTVibkxWeW1haXJ0UU0zanhlUGN5QWMzcEp6bVlJYXUyZ2R2SQowdzRVT2QvRjF2dFVQVEwvbUp1Y3FDeFZxUWI2SWM3ZDluRHIvaXNBMzVHd2pNSjNtVnR0SGJwL0tWVTBFdENyCjRza2RKT1QvV0p5SmliN1ZKQkt4TTJHd2hmNDFKUVBHSkxsSFB6M0lWaFdHUEhuQTlLVEtLVDMvWG5CWTRmM28KOGdzZzBWUEF6VSt1M0hRWUhoVWxXYkJaVzg0My9vNHlxQ2h4WHlQMXVwL0JJdXlHNFNtRzZKWGY4dkttT2F4bQp0N09idlNrQ2hQK1VqU0c3LzJ6aUlLcGpQTmFuQ1RvdHVTeHNqYUNTSFZFUUJ0U2NtcTg5ZitROWZaL25XbEtECnZLVm01bkp6Wmk4SWdhbTlvemJVY0UrRS8yOGNGOFNLUW1vdm1oSTNNRnBoWnc2TFgzcTJEYWhLLyt5alhaWVgKMDh4NWZzT1VNdXB2SDQ2OE40L1k4b2MvRk11MjcrZFlLSFV3SHJ6eG8wMkZqTFRqMVMzaHJnS2lhR0s1K2hIWAoyUGUzSUwwb08wWEFPdUhMbzNkR0N1SHI3TXNycXY5YW9WTGFMMndjd2ZXZkNwRloxUlR1ZnloTjRPK3JpVnJlCmh5czN3dmc1Y1dtSmFQYVJrRS91eDd3bkN1SjAzWDRXSmVMRXk3Skh2bHh6VFBheVpnWkkyZm5BVjQwOXpRZXQKNU0weGlWSXRtRFFRcUEyR1BWOG1hSlpuSjZuQVJiT3RpeEtRK1c5dkVHM01SUmhEaXVycmg5L1FjSm91WEJobgpwK0UxTDBKWVROUWN3bldabWwrendjNWNVK1lrYnBFTWwrby9NVFZwdmxjak9LSERBUUtDQVFFQTZOQVBiYUtkCmx0aVdDdEVXNWVkRUN0cTEyWkxIZVhncnQ2SzhmdDhvNVpXSEVlSEhGdFI5SFU2R0dRQ3g1MUdXVmc1eTF5WU0KdEQ3OWNZZlNKMEpNRm5mRkRrb3BvL29EbGxhekRHeCtlcW0yK0l2dmlObFlZN2J3bVUydGc5bnNEL0hyTmtnVAozTHM1cXhGTDZRa25YVzRncFJIN3ZYMVZNTlQ0MUlKQm9BVHVKQ2RFaGpzZVlKVmpEN1JrMDdrZCt5Um1ZUWlmCkxHeXZaaktlL2M1c0dBZGxSZ1k1N29jWWV5d2lDQmZ5RTFYR1pJbVBobDRlMVZCVlhaeHE2cHZUenBMajc3aUYKYmgxVEJUU1J0eUVvZlNsRDRKbHhkR01ZcnFWMXJGaExhY2FhNWQxWjV0VkJ1ejAxRVJBVkgyUnZzaG9BOFJ3bwpodHRMTXJ1ZUwyQUpKUUtDQVFFQTB0MmNOUFZ4elBUaUY5UHd4NmJUK1F1VTNQOXVvS2tpQitlVWk0U2NSZmd0CjBTWStuWHRsTTRsU2NtMjRlMjV4RXFndXZTUHlJcjRNbHprMGd6U0ZIVmI0VUZuOFJ0SFdCMWkyL0drTlJYcUkKNmxDbzUzQ0M3SUlNdXNpM2ZRR285Nm9PeXl2QWxNRU9xS2dhQVpCb0FnQno3VEpyNGYxYnpZTFpKNGo2MTk0aAprNEh5dFphR0NnMTkyTVJ4VVBhWHI5VkZNL3dPMmZOTDBHZ3hseGx5c1ZsRS9uN2kzSVd0NDJzR0dtR2pSQkVYCk1Pamk3aWdxQW9qZm9FcUV2V25WdzJQY0d0RE13anhhQ1RheWlTVWlqQU1rdGpqdituNnEwWEpUQTJ5MkRVQ2QKQVVjclpDeC9pNXhCc1lWMVUzTXZKZ1hRY09JbDNUcFJYaHcwbHltaVlRS0NBUUVBM3hDakI2L2NnSERLNmxoRwpNVWV4eVJpZlRuVTFUT3hZTHQzczd6N1FoNUVLVnBiV1U4aUtqQzcxaXpWQVA5dkxTRzhWbDVFc2NQTTlrSzdMCks3UjVCTG9EQTlzSkpNVGk5UVBqRUE5UDdDVjZXNUJ3bE5WeXZDL0hQWXZRcWRsK05oVGNBaWx6Wm5XNUhuQTMKSkhRUjFKSTFFeFEwK08wQ1dBUm0yZTVvQkhOVTNTMEdtZnBBODllT2o3aVBOdEZTcVpQSnlFVWpoY1FwRnFsLwo3cnora3ROYk5JbTRyaE9nNER0NzYvZE5pVHNSR0xrNDNFeUlER2lwTWtVeHA5SmdVYmdxVDBDa0Q4cG5Ca0RGCk1RckRLWkVGS1VpOERRRDU0RFJHbURnTklhOGFNUTR5R2xyUVhSK2JIZHNqOGUwZ25Dd1FKUFNPM0E5M2NJZVYKVzkwUFJRS0NBUUFVeVB0b3pQald0eHpDTklYN2FCcGs1aHRhWFdOT090Mjc0YnZtdEwraXRmL3VLZExxT3d1Uwp0T3FIbFFFbVNaQ2p3aWc1UkozTmh1WSswY2kzVmluNDVtVzdwZU1VS0FMczNuWExFNS9YUldqUmovbldObVR4Cll6QldDUXJ0SzRpNFFPOE1YOW
V2MGdmYjRCWVFLV2ZYS3F4Q3FVR3hHUVEva2JKT0QxYUtpTDdPaDNTaEthRTAKeEhldk1rM3BLU1Z5azVjcXRLSFNJZ1ZMR2JvcHByeURkc1V5UU5lajNSbHkzVTNRcytjZ0FuOUN0dklIZjdZRQpZZkdKSDA5T1RHeGh3aW9rQ29qa2JJYURlZnpIUC8zVHgwdUxHcm1zKzJkakZkM1E3SnBWWEJVZDF5SVQwYjhRCngrcEIwV0hyUStOVjRDVFh1N0wzeDVQVnJ2b1FIRzZCQW9JQkFFREsvTGlPRjIvRjNzVktORExWYTNOZ0k1bUYKSXR2bHJJd3NXb01KUFR4dnJoWkk2L1ZGcWY1MnVxMml0cWdYakZvRThEdXYyVUtqNHR4MkZXdXB1VWZiSVFJQQpZdFhNYmZLK1g4UkxnM1h4RzZlbVoyK1FYYUFrakE5Z2cwNnFoY1lmbVdEQVhWejNmVGFNdWJSbzd0UmZKeDFkCllsMG9NRW5GSDFUUGpWWmlvSjNwYjlGOWoyK2d4a0c3VStBS2NYazM1cm52WkxvbjdPMUs3dGZPTWxKZDMzR1kKb3Y5VFE1dG00VmJQd2hPeVNRYk5pQ29aTFZWVW1Zei8wbGRaUW9Mc0tkTUVlL3Zzd3lhZjJ0TlVYMDR4SFJONwpVTnQxSkcxVWdwL1o2RWs2SE5CV3k3M0VvV29PbnFIckJCWVRpUDNaWEpYOWpDNFkrYmkxMDAyNzErND0KLS0tLS1FTkQgUlNBIFBSSVZBVEUgS0VZLS0tLS0K
    token: 15055f411a6cfefc7fd74e97c1a4e373e7b00361d177d76c666b086352ec265e4b3d48eb5f93f9ef8393d521099b7b441c89fd5cabf5477695f0726ff9d16ead
\"\n \ }\n ]\n }" headers: cache-control: @@ -682,7 +779,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 17:50:22 GMT + - Tue, 18 Oct 2022 19:32:48 GMT expires: - '-1' pragma: @@ -716,7 +813,8 @@ interactions: ParameterSetName: - -g -n -l --tags --kube-config User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Kubernetes?api-version=2021-04-01 response: @@ -725,21 +823,21 @@ interactions: Europe","East US","West Central US","South Central US","Southeast Asia","UK South","East US 2","West US 2","Australia East","North Europe","France Central","Central US","West US","North Central US","Korea Central","Japan East","West US 3","East - Asia","East US 2 EUAP","Canada East","Canada Central"],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"SystemAssignedResourceIdentity, - SupportsTags, SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/operationStatuses","locations":["East + Asia","Canada Central","East US 2 EUAP","Canada East"],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"SystemAssignedResourceIdentity, + SupportsTags, SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/operationStatuses","locations":["East US 2 EUAP","West Europe","East US","West Central US","South Central US","Southeast Asia","UK South","East US 2","West US 2","Australia East","North Europe","France Central","Central US","West US","North Central US","Korea Central","Japan - East","East Asia","West US 3","Canada East","Canada Central"],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"registeredSubscriptions","locations":[],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"Operations","locations":[],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview","2019-11-01-preview","2019-09-01-privatepreview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + East","East Asia","West US 3","Canada East","Canada 
Central"],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"registeredSubscriptions","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"Operations","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview","2019-11-01-preview","2019-09-01-privatepreview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache content-length: - - '2311' + - '2416' content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 17:50:25 GMT + - Tue, 18 Oct 2022 19:32:51 GMT expires: - '-1' pragma: @@ -767,7 +865,8 @@ interactions: ParameterSetName: - -g -n -l --tags --kube-config User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration?api-version=2021-04-01 response: @@ -777,38 +876,62 @@ interactions: US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France Central","Central US","North Central US","West US","Korea Central","East Asia","Japan East","Canada East","Canada Central","Norway East","Germany West Central","Sweden - Central","Switzerland North","Australia Southeast","Central India","East US - 2 EUAP","Central US EUAP"],"apiVersions":["2022-03-01","2021-03-01","2020-10-01-preview","2020-07-01-preview","2019-11-01-preview"],"defaultApiVersion":"2022-03-01","capabilities":"SupportsExtension"},{"resourceType":"extensions","locations":["East + Central","Switzerland North","Australia Southeast","Central India","South + India","Japan West","Uk West","France South","Korea South","South Africa North","East + US 2 EUAP","Central US EUAP"],"apiVersions":["2022-07-01","2022-03-01","2021-03-01","2020-10-01-preview","2020-07-01-preview","2019-11-01-preview"],"defaultApiVersion":"2022-03-01","capabilities":"SupportsExtension"},{"resourceType":"extensions","locations":["East US","West Europe","West Central US","West US 2","West US 3","South Central US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France Central","Central US","North Central US","West US","Korea Central","East Asia","Japan East","Canada East","Canada Central","Norway East","Germany West Central","Sweden - Central","Switzerland North","Australia Southeast","Central India","East US - 2 EUAP","Central US EUAP"],"apiVersions":["2022-04-02-preview","2022-03-01","2021-09-01","2021-05-01-preview","2020-07-01-preview"],"defaultApiVersion":"2022-03-01","capabilities":"SystemAssignedResourceIdentity, + Central","Switzerland North","Australia Southeast","Central India","South + India","Japan West","Uk West","France South","Korea South","South Africa North","East + US 2 EUAP","Central US EUAP"],"apiVersions":["2022-07-01","2022-04-02-preview","2022-03-01","2021-09-01","2021-05-01-preview","2020-07-01-preview"],"defaultApiVersion":"2022-07-01","capabilities":"SystemAssignedResourceIdentity, SupportsExtension"},{"resourceType":"fluxConfigurations","locations":["East US","West Europe","West 
Central US","West US 2","West US 3","South Central US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France Central","Central US","North Central US","West US","Korea Central","East Asia","Japan East","Canada East","Canada Central","Norway East","Germany West Central","Sweden - Central","Switzerland North","Australia Southeast","Central India","East US - 2 EUAP","Central US EUAP"],"apiVersions":["2022-03-01","2022-01-01-preview","2021-11-01-preview","2021-06-01-preview"],"defaultApiVersion":"2022-03-01","capabilities":"SupportsExtension"},{"resourceType":"operations","locations":[],"apiVersions":["2022-03-01","2022-01-01-preview","2021-12-01-preview","2021-11-01-preview","2021-09-01","2021-06-01-preview","2021-05-01-preview","2021-03-01","2020-10-01-preview","2020-07-01-preview","2019-11-01-preview"],"capabilities":"None"},{"resourceType":"namespaces","locations":["East + Central","Switzerland North","Australia Southeast","Central India","South + India","Japan West","Uk West","Korea South","France South","South Africa North","East + US 2 EUAP","Central US EUAP"],"apiVersions":["2022-07-01","2022-03-01","2022-01-01-preview","2021-11-01-preview","2021-06-01-preview"],"defaultApiVersion":"2022-07-01","capabilities":"SupportsExtension"},{"resourceType":"operations","locations":[],"apiVersions":["2022-03-01","2022-01-01-preview","2021-12-01-preview","2021-11-01-preview","2021-09-01","2021-06-01-preview","2021-05-01-preview","2021-03-01","2020-10-01-preview","2020-07-01-preview","2019-11-01-preview"],"capabilities":"None"},{"resourceType":"privateLinkScopes","locations":["East + US","West Europe","West Central US","West US 2","West US 3","South Central + US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France + Central","Central US","North Central US","West US","Korea Central","East Asia","Japan + East","Canada East","Canada Central","Norway East","Germany West Central","Sweden + Central","Switzerland North","Australia Southeast","Central India","South + India","Japan West","Uk West","Korea South","France South","South Africa North","East + US 2 EUAP","Central US EUAP"],"apiVersions":["2022-04-02-preview"],"capabilities":"SupportsTags, + SupportsLocation"},{"resourceType":"privateLinkScopes/privateEndpointConnections","locations":["East + US","West Europe","West Central US","West US 2","West US 3","South Central + US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France + Central","Central US","North Central US","West US","Korea Central","East Asia","Japan + East","Canada East","Canada Central","Norway East","Germany West Central","Sweden + Central","Switzerland North","Australia Southeast","Central India","South + India","Japan West","Uk West","France South","Korea South","South Africa North","East + US 2 EUAP","Central US EUAP"],"apiVersions":["2022-04-02-preview"],"capabilities":"None"},{"resourceType":"privateLinkScopes/privateEndpointConnectionProxies","locations":["East + US","West Europe","West Central US","West US 2","West US 3","South Central + US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France + Central","Central US","North Central US","West US","Korea Central","East Asia","Japan + East","Canada East","Canada Central","Norway East","Germany West Central","Sweden + Central","Switzerland North","Australia Southeast","Central India","South + India","Japan West","Uk West","South Africa North","Korea South","France South","East + US 2 EUAP","Central US 
EUAP"],"apiVersions":["2022-04-02-preview"],"capabilities":"None"},{"resourceType":"namespaces","locations":["East US 2 EUAP","West US 2","East US","West Europe","West Central US","West US 3","South Central US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France Central","Central US","North Central US","West US","Korea Central","East - Asia","Japan East"],"apiVersions":["2021-12-01-preview"],"defaultApiVersion":"2021-12-01-preview","capabilities":"SupportsExtension"},{"resourceType":"privateLinkScopes","locations":["East - US 2 EUAP"],"apiVersions":["2022-04-02-preview"],"capabilities":"SupportsTags, - SupportsLocation"},{"resourceType":"privateLinkScopes/privateEndpointConnections","locations":["East - US 2 EUAP"],"apiVersions":["2022-04-02-preview"],"capabilities":"None"},{"resourceType":"privateLinkScopes/privateEndpointConnectionProxies","locations":["East - US 2 EUAP"],"apiVersions":["2022-04-02-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + Asia","Japan East","Canada Central","Canada East","Norway East","Germany West + Central","Switzerland North","Sweden Central","Central India","South India","Australia + Southeast","Japan West","Uk West","France South","Korea South","South Africa + North"],"apiVersions":["2021-12-01-preview"],"defaultApiVersion":"2021-12-01-preview","capabilities":"SupportsExtension"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache content-length: - - '4029' + - '6074' content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 17:50:25 GMT + - Tue, 18 Oct 2022 19:32:51 GMT expires: - '-1' pragma: @@ -830,29 +953,30 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://cli-test-a-akkeshar-1bfbb5-7c7ab37f.hcp.westeurope.azmk8s.io/apis/networking.k8s.io/v1/ + uri: https://cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io/version/ response: body: - string: '{"kind":"APIResourceList","apiVersion":"v1","groupVersion":"networking.k8s.io/v1","resources":[{"name":"ingressclasses","singularName":"","namespaced":false,"kind":"IngressClass","verbs":["create","delete","deletecollection","get","list","patch","update","watch"],"storageVersionHash":"l/iqIbDgFyQ="},{"name":"ingresses","singularName":"","namespaced":true,"kind":"Ingress","verbs":["create","delete","deletecollection","get","list","patch","update","watch"],"shortNames":["ing"],"storageVersionHash":"39NQlfNR+bo="},{"name":"ingresses/status","singularName":"","namespaced":true,"kind":"Ingress","verbs":["get","patch","update"]},{"name":"networkpolicies","singularName":"","namespaced":true,"kind":"NetworkPolicy","verbs":["create","delete","deletecollection","get","list","patch","update","watch"],"shortNames":["netpol"],"storageVersionHash":"YpfwF18m1G8="}]} - - ' + string: "{\n \"major\": \"1\",\n \"minor\": \"23\",\n \"gitVersion\": \"v1.23.12\",\n + \ \"gitCommit\": \"c6939792865ef0f70f92006081690d77411c8ed5\",\n \"gitTreeState\": + \"clean\",\n \"buildDate\": \"2022-09-21T21:46:35Z\",\n \"goVersion\": \"go1.17.13\",\n + \ \"compiler\": \"gc\",\n \"platform\": \"linux/amd64\"\n}" headers: audit-id: - - e41ddc88-0413-4b98-af8d-9d4dc373e490 + - aa6e077b-d620-47eb-9b7b-45edcbca6516 cache-control: - no-cache, private content-length: - - '864' + - '265' content-type: - application/json date: - - Tue, 07 Jun 2022 17:50:27 GMT + - Tue, 
18 Oct 2022 19:32:53 GMT x-kubernetes-pf-flowschema-uid: - - 3b9ec461-e128-4d99-abcc-65ad56c2f58e + - 4338b148-01d8-4f61-8b84-7cdc7ec27482 x-kubernetes-pf-prioritylevel-uid: - - 93094910-4d9a-46c6-b59b-961811b7418b + - aa4e26b3-f7bb-4992-a47d-272b134f6779 status: code: 200 message: OK @@ -864,35 +988,35 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://cli-test-a-akkeshar-1bfbb5-7c7ab37f.hcp.westeurope.azmk8s.io/api/v1/nodes + uri: https://cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io/api/v1/nodes response: body: - string: '{"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"1421"},"items":[{"metadata":{"name":"aks-nodepool1-33510725-vmss000000","uid":"13b68016-9d4d-49e7-869e-b3a3926b41b2","resourceVersion":"1208","creationTimestamp":"2022-06-07T17:48:53Z","labels":{"agentpool":"nodepool1","beta.kubernetes.io/arch":"amd64","beta.kubernetes.io/instance-type":"Standard_B2s","beta.kubernetes.io/os":"linux","failure-domain.beta.kubernetes.io/region":"westeurope","failure-domain.beta.kubernetes.io/zone":"0","kubernetes.azure.com/agentpool":"nodepool1","kubernetes.azure.com/cluster":"MC_akkeshar_cli-test-aks-000001_westeurope","kubernetes.azure.com/kubelet-identity-client-id":"3926865b-4b7f-4402-912d-70b38f92b999","kubernetes.azure.com/mode":"system","kubernetes.azure.com/node-image-version":"AKSUbuntu-1804gen2containerd-2022.05.16","kubernetes.azure.com/os-sku":"Ubuntu","kubernetes.azure.com/role":"agent","kubernetes.azure.com/storageprofile":"managed","kubernetes.azure.com/storagetier":"Premium_LRS","kubernetes.io/arch":"amd64","kubernetes.io/hostname":"aks-nodepool1-33510725-vmss000000","kubernetes.io/os":"linux","kubernetes.io/role":"agent","node-role.kubernetes.io/agent":"","node.kubernetes.io/instance-type":"Standard_B2s","storageprofile":"managed","storagetier":"Premium_LRS","topology.disk.csi.azure.com/zone":"","topology.kubernetes.io/region":"westeurope","topology.kubernetes.io/zone":"0"},"annotations":{"csi.volume.kubernetes.io/nodeid":"{\"disk.csi.azure.com\":\"aks-nodepool1-33510725-vmss000000\",\"file.csi.azure.com\":\"aks-nodepool1-33510725-vmss000000\"}","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"cloud-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:48:53Z","fieldsType":"FieldsV1","fieldsV1":{"f:spec":{"f:podCIDR":{},"f:podCIDRs":{".":{},"v:\"10.244.0.0/24\"":{}}}}},{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:48:53Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:volumes.kubernetes.io/controller-managed-attach-detach":{}},"f:labels":{".":{},"f:agentpool":{},"f:beta.kubernetes.io/arch":{},"f:beta.kubernetes.io/os":{},"f:kubernetes.azure.com/agentpool":{},"f:kubernetes.azure.com/cluster":{},"f:kubernetes.azure.com/kubelet-identity-client-id":{},"f:kubernetes.azure.com/mode":{},"f:kubernetes.azure.com/node-image-version":{},"f:kubernetes.azure.com/os-sku":{},"f:kubernetes.azure.com/role":{},"f:kubernetes.azure.com/storageprofile":{},"f:kubernetes.azure.com/storagetier":{},"f:kubernetes.io/arch":{},"f:kubernetes.io/hostname":{},"f:kubernetes.io/os":{},"f:storageprofile":{},"f:storagetier":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:48:54Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations
":{"f:node.alpha.kubernetes.io/ttl":{}}}}},{"manager":"cloud-node-manager","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:49:07Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{"f:beta.kubernetes.io/instance-type":{},"f:failure-domain.beta.kubernetes.io/region":{},"f:failure-domain.beta.kubernetes.io/zone":{},"f:node.kubernetes.io/instance-type":{},"f:topology.kubernetes.io/region":{},"f:topology.kubernetes.io/zone":{}}},"f:spec":{"f:providerID":{}}}},{"manager":"cloud-node-manager","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:49:07Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{"k:{\"type\":\"NetworkUnavailable\"}":{".":{},"f:type":{}}}}},"subresource":"status"},{"manager":"kubectl-label","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:49:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{"f:kubernetes.io/role":{},"f:node-role.kubernetes.io/agent":{}}}}},{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:49:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:csi.volume.kubernetes.io/nodeid":{}},"f:labels":{"f:topology.disk.csi.azure.com/zone":{}}},"f:status":{"f:allocatable":{"f:ephemeral-storage":{}},"f:capacity":{"f:ephemeral-storage":{}},"f:conditions":{"k:{\"type\":\"DiskPressure\"}":{"f:lastHeartbeatTime":{}},"k:{\"type\":\"MemoryPressure\"}":{"f:lastHeartbeatTime":{}},"k:{\"type\":\"PIDPressure\"}":{"f:lastHeartbeatTime":{}},"k:{\"type\":\"Ready\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{}}}}},"subresource":"status"},{"manager":"cloud-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:49:52Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{"k:{\"type\":\"NetworkUnavailable\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{}}}}},"subresource":"status"}]},"spec":{"podCIDR":"10.244.0.0/24","podCIDRs":["10.244.0.0/24"],"providerID":"azure:///subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mc_akkeshar_cli-test-aks-000001_westeurope/providers/Microsoft.Compute/virtualMachineScaleSets/aks-nodepool1-33510725-vmss/virtualMachines/0"},"status":{"capacity":{"cpu":"2","ephemeral-storage":"129900528Ki","hugepages-1Gi":"0","hugepages-2Mi":"0","memory":"4025880Ki","pods":"110"},"allocatable":{"cpu":"1900m","ephemeral-storage":"119716326407","hugepages-1Gi":"0","hugepages-2Mi":"0","memory":"2209304Ki","pods":"110"},"conditions":[{"type":"NetworkUnavailable","status":"False","lastHeartbeatTime":"2022-06-07T17:49:52Z","lastTransitionTime":"2022-06-07T17:49:52Z","reason":"RouteCreated","message":"RouteController - created a route"},{"type":"MemoryPressure","status":"False","lastHeartbeatTime":"2022-06-07T17:48:54Z","lastTransitionTime":"2022-06-07T17:48:52Z","reason":"KubeletHasSufficientMemory","message":"kubelet - has sufficient memory available"},{"type":"DiskPressure","status":"False","lastHeartbeatTime":"2022-06-07T17:48:54Z","lastTransitionTime":"2022-06-07T17:48:52Z","reason":"KubeletHasNoDiskPressure","message":"kubelet - has no disk pressure"},{"type":"PIDPressure","status":"False","lastHeartbeatTime":"2022-06-07T17:48:54Z","lastTransitionTime":"2022-06-07T17:48:52Z","reason":"KubeletHasSufficientPID","message":"kubelet - has sufficient PID 
available"},{"type":"Ready","status":"True","lastHeartbeatTime":"2022-06-07T17:48:54Z","lastTransitionTime":"2022-06-07T17:48:54Z","reason":"KubeletReady","message":"kubelet - is posting ready status. AppArmor enabled"}],"addresses":[{"type":"InternalIP","address":"10.224.0.4"},{"type":"Hostname","address":"aks-nodepool1-33510725-vmss000000"}],"daemonEndpoints":{"kubeletEndpoint":{"Port":10250}},"nodeInfo":{"machineID":"9bbe79ac4ad54a968725aabda4e46788","systemUUID":"d3e47a54-7866-49dd-868b-253629a9ba3d","bootID":"548b49eb-6966-44f6-977f-b8c46df72397","kernelVersion":"5.4.0-1078-azure","osImage":"Ubuntu - 18.04.6 LTS","containerRuntimeVersion":"containerd://1.5.11+azure-1","kubeletVersion":"v1.22.6","kubeProxyVersion":"v1.22.6","operatingSystem":"linux","architecture":"amd64"},"images":[{"names":["mcr.microsoft.com/azuremonitor/containerinsights/ciprod:ciprod03172022"],"sizeBytes":315764812},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:0.49.3"],"sizeBytes":287741913},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:1.0.4"],"sizeBytes":287652512},{"names":["mcr.microsoft.com/oss/calico/cni:v3.21.4"],"sizeBytes":236345866},{"names":["mcr.microsoft.com/oss/kubernetes/dashboard:v2.4.0"],"sizeBytes":224434239},{"names":["mcr.microsoft.com/oss/calico/node:v3.21.4"],"sizeBytes":216363503},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:0.19.0"],"sizeBytes":166352383},{"names":["mcr.microsoft.com/oss/cilium/cilium:1.10.5.7"],"sizeBytes":165359164},{"names":["mcr.microsoft.com/oss/cilium/cilium:1.10.5.6"],"sizeBytes":164390850},{"names":["mcr.microsoft.com/oss/cilium/cilium:1.10.3.5"],"sizeBytes":161199925},{"names":["mcr.microsoft.com/oss/cilium/cilium:1.10.3.4"],"sizeBytes":158659329},{"names":["mcr.microsoft.com/aks/hcp/hcp-tunnel-front:master.211104.1"],"sizeBytes":149514464},{"names":["mcr.microsoft.com/aks/hcp/hcp-tunnel-front:master.211013.1"],"sizeBytes":149493900},{"names":["mcr.microsoft.com/oss/tigera/operator:v1.24.2"],"sizeBytes":128711964},{"names":["mcr.microsoft.com/oss/calico/typha:v3.21.4"],"sizeBytes":128235133},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.23.5-hotfix.20220331.2"],"sizeBytes":127091344},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.23.3-hotfix.20220401.2"],"sizeBytes":127087159},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.23.5-hotfix.20220331.1"],"sizeBytes":126890132},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.23.3-hotfix.20220401.1"],"sizeBytes":126885957},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy@sha256:03292b1004372db01558a1430619572b046ad639a06e12412a9a62c568daa89d","mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.22.6-hotfix.20220330.3"],"sizeBytes":114386479},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.22.4-hotfix.20220330.3"],"sizeBytes":114374705},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.21.9-hotfix.20220420.1"],"sizeBytes":114204240},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.22.6-hotfix.20220330.2"],"sizeBytes":114189342},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.21.7-hotfix.20220420.1"],"sizeBytes":114188361},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.22.4-hotfix.20220330.2"],"sizeBytes":114177579},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.21.9-hotfix.20220330.2"],"sizeBytes":114099742},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.21.7-hotfix.20220330.2"],"sizeBytes":114082872},{"names":["mcr.mic
rosoft.com/oss/kubernetes/kube-proxy:v1.24.0.1"],"sizeBytes":112109269},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.8.0.2"],"sizeBytes":107814602},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:1.2.0"],"sizeBytes":104400458},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:1.1.3"],"sizeBytes":103821981},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:1.0.5"],"sizeBytes":103502273},{"names":["mcr.microsoft.com/oss/calico/node:v3.8.9.5"],"sizeBytes":101794833},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.2.0.6"],"sizeBytes":100397012},{"names":["mcr.microsoft.com/oss/cilium/operator:1.10.3"],"sizeBytes":98395697},{"names":["mcr.microsoft.com/aks/hcp/tunnel-openvpn:master.210623.2"],"sizeBytes":96125176},{"names":["mcr.microsoft.com/oss/kubernetes/exechealthz:1.2_v0.0.5"],"sizeBytes":94348102},{"names":["mcr.microsoft.com/aks/acc/sgx-attestation:2.0"],"sizeBytes":91841669},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.2.0"],"sizeBytes":89103171},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.18.0"],"sizeBytes":85633800},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.17.0"],"sizeBytes":85541532},{"names":["mcr.microsoft.com/aks/command/runtime:master.220211.1"],"sizeBytes":82792811},{"names":["mcr.microsoft.com/azure-application-gateway/kubernetes-ingress:1.5.1"],"sizeBytes":76884289},{"names":["mcr.microsoft.com/oss/kubernetes/external-dns:v0.10.1"],"sizeBytes":76044900},{"names":["mcr.microsoft.com/azure-application-gateway/kubernetes-ingress:1.4.0"],"sizeBytes":73895290},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.17.0"],"sizeBytes":73276358},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.16.0"],"sizeBytes":73275071},{"names":["mcr.microsoft.com/oss/azure/aad-pod-identity/nmi:v1.7.5.8"],"sizeBytes":68080333},{"names":["mcr.microsoft.com/oss/azure/aad-pod-identity/nmi:v1.7.5.7"],"sizeBytes":67882077},{"names":["mcr.microsoft.com/oss/nvidia/k8s-device-plugin:v0.9.0"],"sizeBytes":67291599}]}}]} + string: 
'{"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"1565"},"items":[{"metadata":{"name":"aks-nodepool1-31947974-vmss000000","uid":"3d2ca61c-34c5-4768-83f9-aa192d32cf2f","resourceVersion":"1138","creationTimestamp":"2022-10-18T19:30:05Z","labels":{"agentpool":"nodepool1","beta.kubernetes.io/arch":"amd64","beta.kubernetes.io/instance-type":"Standard_B4ms","beta.kubernetes.io/os":"linux","failure-domain.beta.kubernetes.io/region":"westeurope","failure-domain.beta.kubernetes.io/zone":"0","kubernetes.azure.com/agentpool":"nodepool1","kubernetes.azure.com/cluster":"MC_akkeshar_cli-test-aks-000001_westeurope","kubernetes.azure.com/kubelet-identity-client-id":"d6c82141-9899-4ce3-943d-1343c5c4d69f","kubernetes.azure.com/mode":"system","kubernetes.azure.com/node-image-version":"AKSUbuntu-1804gen2containerd-2022.10.03","kubernetes.azure.com/os-sku":"Ubuntu","kubernetes.azure.com/role":"agent","kubernetes.azure.com/storageprofile":"managed","kubernetes.azure.com/storagetier":"Premium_LRS","kubernetes.io/arch":"amd64","kubernetes.io/hostname":"aks-nodepool1-31947974-vmss000000","kubernetes.io/os":"linux","kubernetes.io/role":"agent","node-role.kubernetes.io/agent":"","node.kubernetes.io/instance-type":"Standard_B4ms","storageprofile":"managed","storagetier":"Premium_LRS","topology.disk.csi.azure.com/zone":"","topology.kubernetes.io/region":"westeurope","topology.kubernetes.io/zone":"0"},"annotations":{"csi.volume.kubernetes.io/nodeid":"{\"disk.csi.azure.com\":\"aks-nodepool1-31947974-vmss000000\",\"file.csi.azure.com\":\"aks-nodepool1-31947974-vmss000000\"}","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"cloud-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:30:05Z","fieldsType":"FieldsV1","fieldsV1":{"f:spec":{"f:podCIDR":{},"f:podCIDRs":{".":{},"v:\"10.244.0.0/24\"":{}}}}},{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:30:05Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:volumes.kubernetes.io/controller-managed-attach-detach":{}},"f:labels":{".":{},"f:agentpool":{},"f:beta.kubernetes.io/arch":{},"f:beta.kubernetes.io/os":{},"f:kubernetes.azure.com/agentpool":{},"f:kubernetes.azure.com/kubelet-identity-client-id":{},"f:kubernetes.azure.com/mode":{},"f:kubernetes.azure.com/node-image-version":{},"f:kubernetes.io/arch":{},"f:kubernetes.io/hostname":{},"f:kubernetes.io/os":{}}}}},{"manager":"cloud-node-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:30:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{"f:beta.kubernetes.io/instance-type":{},"f:failure-domain.beta.kubernetes.io/region":{},"f:failure-domain.beta.kubernetes.io/zone":{},"f:node.kubernetes.io/instance-type":{},"f:topology.kubernetes.io/region":{},"f:topology.kubernetes.io/zone":{}}},"f:spec":{"f:providerID":{}}}},{"manager":"cloud-node-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:30:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{"k:{\"type\":\"NetworkUnavailable\"}":{".":{},"f:type":{}}}}},"subresource":"status"},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:30:16Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl":{}}}}},{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:30:17Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:an
notations":{"f:csi.volume.kubernetes.io/nodeid":{}},"f:labels":{"f:topology.disk.csi.azure.com/zone":{}}},"f:status":{"f:allocatable":{"f:ephemeral-storage":{}},"f:capacity":{"f:ephemeral-storage":{}},"f:conditions":{"k:{\"type\":\"DiskPressure\"}":{"f:lastHeartbeatTime":{}},"k:{\"type\":\"MemoryPressure\"}":{"f:lastHeartbeatTime":{}},"k:{\"type\":\"PIDPressure\"}":{"f:lastHeartbeatTime":{}},"k:{\"type\":\"Ready\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{}}},"f:images":{}}},"subresource":"status"},{"manager":"cloud-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:30:55Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{"k:{\"type\":\"NetworkUnavailable\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{}}}}},"subresource":"status"},{"manager":"kubectl-label","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:31:02Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{"f:kubernetes.io/role":{},"f:node-role.kubernetes.io/agent":{}}}}}]},"spec":{"podCIDR":"10.244.0.0/24","podCIDRs":["10.244.0.0/24"],"providerID":"azure:///subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mc_akkeshar_cli-test-aks-000001_westeurope/providers/Microsoft.Compute/virtualMachineScaleSets/aks-nodepool1-31947974-vmss/virtualMachines/0"},"status":{"capacity":{"cpu":"4","ephemeral-storage":"129886128Ki","hugepages-1Gi":"0","hugepages-2Mi":"0","memory":"16393220Ki","pods":"110"},"allocatable":{"cpu":"3860m","ephemeral-storage":"119703055367","hugepages-1Gi":"0","hugepages-2Mi":"0","memory":"12899332Ki","pods":"110"},"conditions":[{"type":"NetworkUnavailable","status":"False","lastHeartbeatTime":"2022-10-18T19:30:55Z","lastTransitionTime":"2022-10-18T19:30:55Z","reason":"RouteCreated","message":"RouteController + created a route"},{"type":"MemoryPressure","status":"False","lastHeartbeatTime":"2022-10-18T19:30:16Z","lastTransitionTime":"2022-10-18T19:30:05Z","reason":"KubeletHasSufficientMemory","message":"kubelet + has sufficient memory available"},{"type":"DiskPressure","status":"False","lastHeartbeatTime":"2022-10-18T19:30:16Z","lastTransitionTime":"2022-10-18T19:30:05Z","reason":"KubeletHasNoDiskPressure","message":"kubelet + has no disk pressure"},{"type":"PIDPressure","status":"False","lastHeartbeatTime":"2022-10-18T19:30:16Z","lastTransitionTime":"2022-10-18T19:30:05Z","reason":"KubeletHasSufficientPID","message":"kubelet + has sufficient PID available"},{"type":"Ready","status":"True","lastHeartbeatTime":"2022-10-18T19:30:16Z","lastTransitionTime":"2022-10-18T19:30:16Z","reason":"KubeletReady","message":"kubelet + is posting ready status. 
AppArmor enabled"}],"addresses":[{"type":"InternalIP","address":"10.224.0.4"},{"type":"Hostname","address":"aks-nodepool1-31947974-vmss000000"}],"daemonEndpoints":{"kubeletEndpoint":{"Port":10250}},"nodeInfo":{"machineID":"5418e6487e514bd4997f80c25ac364d2","systemUUID":"82707798-c896-4e3c-b347-9d883b5897c4","bootID":"2bed120d-372f-4a0c-9cf3-0b0fb7627eff","kernelVersion":"5.4.0-1091-azure","osImage":"Ubuntu + 18.04.6 LTS","containerRuntimeVersion":"containerd://1.5.11+azure-2","kubeletVersion":"v1.23.12","kubeProxyVersion":"v1.23.12","operatingSystem":"linux","architecture":"amd64"},"images":[{"names":["mcr.microsoft.com/azuremonitor/containerinsights/ciprod:ciprod08102022"],"sizeBytes":397844357},{"names":["mcr.microsoft.com/azuremonitor/containerinsights/ciprod:ciprod06272022-hotfix"],"sizeBytes":357023149},{"names":["mcr.microsoft.com/azuremonitor/containerinsights/ciprod/prometheus-collector/images:5.2.0-main-09-29-2022-ca064de1"],"sizeBytes":315250960},{"names":["mcr.microsoft.com/azuremonitor/containerinsights/ciprod/prometheus-collector/images:5.1.0-main-09-23-2022-df3e2703"],"sizeBytes":315037321},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:0.49.3"],"sizeBytes":287741913},{"names":["mcr.microsoft.com/oss/calico/cni:v3.23.1"],"sizeBytes":263014840},{"names":["mcr.microsoft.com/oss/calico/cni:v3.21.4"],"sizeBytes":236345866},{"names":["mcr.microsoft.com/oss/calico/cni:v3.21.6"],"sizeBytes":227829276},{"names":["mcr.microsoft.com/oss/calico/node:v3.23.1"],"sizeBytes":221560540},{"names":["mcr.microsoft.com/oss/calico/node:v3.21.4"],"sizeBytes":216363503},{"names":["mcr.microsoft.com/oss/calico/node:v3.21.6"],"sizeBytes":215379163},{"names":["mcr.microsoft.com/oss/tigera/operator:v1.23.8"],"sizeBytes":184105789},{"names":["mcr.microsoft.com/oss/cilium/cilium:1.12.2"],"sizeBytes":166611722},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:0.19.0"],"sizeBytes":166352383},{"names":["mcr.microsoft.com/aks/hcp/hcp-tunnel-front:master.220527.2"],"sizeBytes":146994488},{"names":null,"sizeBytes":138243950},{"names":["mcr.microsoft.com/oss/calico/kube-controllers:v3.23.1"],"sizeBytes":136078571},{"names":["mcr.microsoft.com/oss/calico/typha:v3.23.1"],"sizeBytes":131467121},{"names":null,"sizeBytes":129890505},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.23.12-hotfix.20220922.1"],"sizeBytes":128992809},{"names":["mcr.microsoft.com/oss/tigera/operator:v1.24.2"],"sizeBytes":128711964},{"names":["mcr.microsoft.com/oss/calico/typha:v3.21.4"],"sizeBytes":128235133},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.2.2.5"],"sizeBytes":123925992},{"names":null,"sizeBytes":123549904},{"names":["mcr.microsoft.com/oss/calico/kube-controllers:v3.21.6"],"sizeBytes":123549280},{"names":["mcr.microsoft.com/oss/calico/typha:v3.21.6"],"sizeBytes":119713369},{"names":null,"sizeBytes":115909379},{"names":null,"sizeBytes":115897326},{"names":null,"sizeBytes":115677896},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:v1.2.1"],"sizeBytes":107169290},{"names":["mcr.microsoft.com/oss/calico/node:v3.8.9.5"],"sizeBytes":101794833},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.22.0.3"],"sizeBytes":99538753},{"names":["mcr.microsoft.com/aks/acc/sgx-attestation:3.1"],"sizeBytes":98058501},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.23.0"],"sizeBytes":95915873},{"names":["mcr.microsoft.com/oss/kubernetes/exechealthz:1.2_v0.0.5"],"sizeBytes":94348102},{"names":["mcr.micro
soft.com/aks/hcp/tunnel-openvpn:master.220527.2"],"sizeBytes":92531564},{"names":["mcr.microsoft.com/containernetworking/azure-npm:v1.4.32"],"sizeBytes":90048618},{"names":["mcr.microsoft.com/containernetworking/azure-npm:v1.4.29"],"sizeBytes":89255513},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.2.2"],"sizeBytes":88551490},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.22.0"],"sizeBytes":83173887},{"names":["mcr.microsoft.com/aks/command/runtime:master.220211.1"],"sizeBytes":82792811},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.21.0"],"sizeBytes":75345915},{"names":["mcr.microsoft.com/oss/nvidia/k8s-device-plugin:v0.9.0"],"sizeBytes":67291599},{"names":["mcr.microsoft.com/containernetworking/cni-dropgz:v0.0.2"],"sizeBytes":67202663},{"names":["mcr.microsoft.com/oss/kubernetes-csi/secrets-store/driver:v1.2.2.3"],"sizeBytes":64781810},{"names":["mcr.microsoft.com/oss/calico/cni:v3.8.9.3"],"sizeBytes":63581323},{"names":null,"sizeBytes":63271342},{"names":["mcr.microsoft.com/oss/kubernetes-csi/secrets-store/driver:v1.2.2.2"],"sizeBytes":56424516},{"names":["mcr.microsoft.com/oss/calico/kube-controllers:v3.21.4"],"sizeBytes":54638514},{"names":["mcr.microsoft.com/oss/azure/aad-pod-identity/nmi:v1.8.12.1"],"sizeBytes":46617098}]}}]} ' headers: audit-id: - - 43cf4d76-2351-4f12-91a5-05035849e6e8 + - 45e1835a-a77a-477a-a943-bdd2e4992240 cache-control: - no-cache, private content-type: - application/json date: - - Tue, 07 Jun 2022 17:50:27 GMT + - Tue, 18 Oct 2022 19:32:53 GMT transfer-encoding: - chunked x-kubernetes-pf-flowschema-uid: - - 3b9ec461-e128-4d99-abcc-65ad56c2f58e + - 4338b148-01d8-4f61-8b84-7cdc7ec27482 x-kubernetes-pf-prioritylevel-uid: - - 93094910-4d9a-46c6-b59b-961811b7418b + - aa4e26b3-f7bb-4992-a47d-272b134f6779 status: code: 200 message: OK @@ -905,17 +1029,17 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: POST - uri: https://cli-test-a-akkeshar-1bfbb5-7c7ab37f.hcp.westeurope.azmk8s.io/apis/authorization.k8s.io/v1/selfsubjectaccessreviews + uri: https://cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io/apis/authorization.k8s.io/v1/selfsubjectaccessreviews response: body: - string: '{"kind":"SelfSubjectAccessReview","apiVersion":"authorization.k8s.io/v1","metadata":{"creationTimestamp":null,"managedFields":[{"manager":"OpenAPI-Generator","operation":"Update","apiVersion":"authorization.k8s.io/v1","time":"2022-06-07T17:50:28Z","fieldsType":"FieldsV1","fieldsV1":{"f:spec":{"f:resourceAttributes":{".":{},"f:group":{},"f:resource":{},"f:verb":{}}}}}]},"spec":{"resourceAttributes":{"verb":"create","group":"rbac.authorization.k8s.io","resource":"clusterrolebindings"}},"status":{"allowed":true}} + string: '{"kind":"SelfSubjectAccessReview","apiVersion":"authorization.k8s.io/v1","metadata":{"creationTimestamp":null,"managedFields":[{"manager":"OpenAPI-Generator","operation":"Update","apiVersion":"authorization.k8s.io/v1","time":"2022-10-18T19:32:54Z","fieldsType":"FieldsV1","fieldsV1":{"f:spec":{"f:resourceAttributes":{".":{},"f:group":{},"f:resource":{},"f:verb":{}}}}}]},"spec":{"resourceAttributes":{"verb":"create","group":"rbac.authorization.k8s.io","resource":"clusterrolebindings"}},"status":{"allowed":true}} ' headers: audit-id: - - fe975b82-474a-4a60-bd56-d79a9c1ade4d + - ae1e1f79-7172-449e-a3be-a4cc979f6c1a cache-control: - no-cache, private content-length: @@ -923,49 +1047,14 @@ interactions: content-type: - 
application/json date: - - Tue, 07 Jun 2022 17:50:28 GMT + - Tue, 18 Oct 2022 19:32:54 GMT x-kubernetes-pf-flowschema-uid: - - 3b9ec461-e128-4d99-abcc-65ad56c2f58e + - 4338b148-01d8-4f61-8b84-7cdc7ec27482 x-kubernetes-pf-prioritylevel-uid: - - 93094910-4d9a-46c6-b59b-961811b7418b + - aa4e26b3-f7bb-4992-a47d-272b134f6779 status: code: 201 message: Created -- request: - body: null - headers: - Accept: - - application/json - Content-Type: - - application/json - User-Agent: - - OpenAPI-Generator/11.0.0/python - method: GET - uri: https://cli-test-a-akkeshar-1bfbb5-7c7ab37f.hcp.westeurope.azmk8s.io/version/ - response: - body: - string: "{\n \"major\": \"1\",\n \"minor\": \"22\",\n \"gitVersion\": \"v1.22.6\",\n - \ \"gitCommit\": \"07959215dd83b4ae6317b33c824f845abd578642\",\n \"gitTreeState\": - \"clean\",\n \"buildDate\": \"2022-03-30T18:28:25Z\",\n \"goVersion\": \"go1.16.12\",\n - \ \"compiler\": \"gc\",\n \"platform\": \"linux/amd64\"\n}" - headers: - audit-id: - - 946c3d18-0d69-42b8-a858-b58808359bf1 - cache-control: - - no-cache, private - content-length: - - '264' - content-type: - - application/json - date: - - Tue, 07 Jun 2022 17:50:29 GMT - x-kubernetes-pf-flowschema-uid: - - 3b9ec461-e128-4d99-abcc-65ad56c2f58e - x-kubernetes-pf-prioritylevel-uid: - - 93094910-4d9a-46c6-b59b-961811b7418b - status: - code: 200 - message: OK - request: body: null headers: @@ -980,7 +1069,8 @@ interactions: ParameterSetName: - -g -n -l --tags --kube-config User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Kubernetes?api-version=2021-04-01 response: @@ -989,21 +1079,21 @@ interactions: Europe","East US","West Central US","South Central US","Southeast Asia","UK South","East US 2","West US 2","Australia East","North Europe","France Central","Central US","West US","North Central US","Korea Central","Japan East","West US 3","East - Asia","East US 2 EUAP","Canada East","Canada Central"],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"SystemAssignedResourceIdentity, - SupportsTags, SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/operationStatuses","locations":["East + Asia","Canada Central","East US 2 EUAP","Canada East"],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"SystemAssignedResourceIdentity, + SupportsTags, SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/operationStatuses","locations":["East US 2 EUAP","West Europe","East US","West Central US","South Central US","Southeast Asia","UK South","East US 2","West US 2","Australia East","North Europe","France Central","Central US","West US","North Central US","Korea Central","Japan - East","East Asia","West US 3","Canada East","Canada 
Central"],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"registeredSubscriptions","locations":[],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"Operations","locations":[],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview","2019-11-01-preview","2019-09-01-privatepreview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + East","East Asia","West US 3","Canada East","Canada Central"],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"registeredSubscriptions","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"Operations","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview","2019-11-01-preview","2019-09-01-privatepreview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache content-length: - - '2311' + - '2416' content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 17:50:29 GMT + - Tue, 18 Oct 2022 19:32:54 GMT expires: - '-1' pragma: @@ -1031,7 +1121,8 @@ interactions: ParameterSetName: - -g -n -l --tags --kube-config User-Agent: - - AZURECLI/2.37.0 azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002?api-version=2021-10-01 response: @@ -1047,7 +1138,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 17:50:31 GMT + - Tue, 18 Oct 2022 19:32:55 GMT expires: - '-1' pragma: @@ -1069,29 +1160,29 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://cli-test-a-akkeshar-1bfbb5-7c7ab37f.hcp.westeurope.azmk8s.io/api/v1/namespaces + uri: https://cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io/api/v1/namespaces response: body: - string: 
'{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"1438"},"items":[{"metadata":{"name":"default","uid":"ab6f9022-2c50-4409-83b7-c4f517badbdb","resourceVersion":"203","creationTimestamp":"2022-06-07T17:47:10Z","labels":{"kubernetes.io/metadata.name":"default"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:47:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"kube-node-lease","uid":"89a221c2-aeb8-4ba7-8e43-e950daed55e4","resourceVersion":"41","creationTimestamp":"2022-06-07T17:47:08Z","labels":{"kubernetes.io/metadata.name":"kube-node-lease"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:47:08Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"kube-public","uid":"c2eaa55a-f05a-41db-98de-54c2abe09937","resourceVersion":"37","creationTimestamp":"2022-06-07T17:47:08Z","labels":{"kubernetes.io/metadata.name":"kube-public"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:47:08Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"kube-system","uid":"33b24bf2-7255-490c-9ebd-a35f688c29e1","resourceVersion":"491","creationTimestamp":"2022-06-07T17:47:08Z","labels":{"addonmanager.kubernetes.io/mode":"Reconcile","control-plane":"true","kubernetes.io/cluster-service":"true","kubernetes.io/metadata.name":"kube-system"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"v1\",\"kind\":\"Namespace\",\"metadata\":{\"annotations\":{},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"Reconcile\",\"control-plane\":\"true\",\"kubernetes.io/cluster-service\":\"true\"},\"name\":\"kube-system\"}}\n"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:47:08Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:47:26Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{}},"f:labels":{"f:addonmanager.kubernetes.io/mode":{},"f:control-plane":{},"f:kubernetes.io/cluster-service":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}}]} + string: 
'{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"1577"},"items":[{"metadata":{"name":"default","uid":"a88babaf-8a46-4d13-a7ea-0b13c4f959c5","resourceVersion":"205","creationTimestamp":"2022-10-18T19:29:04Z","labels":{"kubernetes.io/metadata.name":"default"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:29:04Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"kube-node-lease","uid":"6c4617c4-8f9f-45ef-82a7-bf190d3dcb13","resourceVersion":"39","creationTimestamp":"2022-10-18T19:29:01Z","labels":{"kubernetes.io/metadata.name":"kube-node-lease"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:29:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"kube-public","uid":"1b6b5b56-14cf-4fbe-abe7-89fd6cf48873","resourceVersion":"22","creationTimestamp":"2022-10-18T19:29:01Z","labels":{"kubernetes.io/metadata.name":"kube-public"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:29:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"kube-system","uid":"2acc8b84-760b-48a4-a41e-e94aa3ad57c1","resourceVersion":"567","creationTimestamp":"2022-10-18T19:29:01Z","labels":{"addonmanager.kubernetes.io/mode":"Reconcile","control-plane":"true","kubernetes.io/cluster-service":"true","kubernetes.io/metadata.name":"kube-system"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"v1\",\"kind\":\"Namespace\",\"metadata\":{\"annotations\":{},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"Reconcile\",\"control-plane\":\"true\",\"kubernetes.io/cluster-service\":\"true\"},\"name\":\"kube-system\"}}\n"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:29:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:29:22Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{}},"f:labels":{"f:addonmanager.kubernetes.io/mode":{},"f:control-plane":{},"f:kubernetes.io/cluster-service":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}}]} ' headers: audit-id: - - ef57d2f6-4c90-4cb2-9d93-0ded014fe212 + - e1e40709-5b00-4faa-9737-f115b6e120ff cache-control: - no-cache, private content-type: - application/json date: - - Tue, 07 Jun 2022 17:50:32 GMT + - Tue, 18 Oct 2022 19:32:57 GMT transfer-encoding: - chunked x-kubernetes-pf-flowschema-uid: - - 3b9ec461-e128-4d99-abcc-65ad56c2f58e + - 4338b148-01d8-4f61-8b84-7cdc7ec27482 x-kubernetes-pf-prioritylevel-uid: - - 93094910-4d9a-46c6-b59b-961811b7418b + - aa4e26b3-f7bb-4992-a47d-272b134f6779 status: code: 200 message: OK @@ -1109,7 +1200,8 @@ interactions: ParameterSetName: - -g -n -l --tags --kube-config User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 
(Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar?api-version=2021-04-01 response: @@ -1123,7 +1215,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 17:50:33 GMT + - Tue, 18 Oct 2022 19:32:57 GMT expires: - '-1' pragma: @@ -1153,12 +1245,12 @@ interactions: ParameterSetName: - -g -n -l --tags --kube-config User-Agent: - - python/3.7.7 (Windows-10-10.0.22000-SP0) AZURECLI/2.37.0 + - python/3.7.7 (Windows-10-10.0.22621-SP0) AZURECLI/2.41.0 (MSI) method: POST uri: https://eastus.dp.kubernetesconfiguration.azure.com/azure-arc-k8sagents/GetLatestHelmPackagePath?api-version=2019-11-01-preview&releaseTrain=stable response: body: - string: '{"repositoryPath":"mcr.microsoft.com/azurearck8s/batch1/stable/azure-arc-k8sagents:1.6.16"}' + string: '{"repositoryPath":"mcr.microsoft.com/azurearck8s/batch1/stable/azure-arc-k8sagents:1.8.14"}' headers: api-supported-versions: - 2019-11-01-Preview @@ -1169,7 +1261,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 17:50:38 GMT + - Tue, 18 Oct 2022 19:32:58 GMT strict-transport-security: - max-age=15724800; includeSubDomains x-content-type-options: @@ -1179,7 +1271,7 @@ interactions: message: OK - request: body: '{"tags": {"foo": "doo"}, "location": "eastus", "identity": {"type": "SystemAssigned"}, - "properties": {"agentPublicKeyCertificate": "MIICCgKCAgEAzbx/TbbVmP6r2evvWYQdv4q97wt11GmOhtcpV1pjVQRIDbgWnGtkmTdMylPUe5f9b0g8UEr+mz5xX2tFDu1QoaITDn8dOhTdV+0rx/NAri3SDPRW75sJfsy7fZPI7CXK0KxULltdXMQ6ZA9bcruT54iXKmE2AMn+3un7cHw+MlYWi/ruvQs+Crj5DljYKHfqsjnkqObdZBUmfTj2l/KpZY1aSZFJJ9VWYTojqorR6KrAtjnQFdsXP6Ft4JtKz6U2PoGiDZAGUgTlev3mrSpellKxkQLRu1N9Dz3jIOGtL7dAnyG+wJZ2h56Q6D7R04JXJUkaQVZXyM8UcmN0pA4NU823UcRJROpgnEnhFyKwNzMXejGix3411obLAlKe2yvpbkLLcbwOiZs/iwy8OoU9AdruAB/MZOaKbMM5+7Pad4yPjHrkXKLWzm2JfHQBy0uk+kpYp53mYOx2y+CscpTlKYX34KWkY0rYSjFsEAnO01MhQUM/BRfPZCUUaUV8M9b4P9Ab4We1zbUFo2VaqpEBPFgH2c/Z6OYXDiYrQCWv/9GrxeVqlzF4ZFOh7rMFxILQ9QTFKV8pA654b4ridy85O+8od7t1/VEo3jSjYBpe+KMo6MhtXcux0JfJfbbkx3ZyXgz+Bdmpav8ZsdH1EYph5pJ3KWJGSHi89bUZjZP+5F0CAwEAAQ==", + "properties": {"agentPublicKeyCertificate": "MIICCgKCAgEApejXj38tfgl/M+Z8QNeMb0mWXHi+MxxHgO6yYea9Ma6FgU+a+FtuAWmpcZz8wqOOKjljSYySAmq7SuPfhnz9mD8YAJonieO+e1nkGDwOJvuqRVmZPNnoazfWaL6CEZdA4JUjclBrYpcczr3iUvsNiVhbIYkFapzAfKUWCh4M4YrrB+S4CoCVfIRwfFxs5zGXXgbiKSVg5ZOA9m9nrSUviEBDjYdyI7aHG9onBNLmVH2xoFaveh5JNpLxFlhR2IvOR3GfkkkNNxIZjhdM6ys4jyoL69bJzLq8GKTkc7r55q+a+KyUL7QjYkMzxfTE3U/+ScEZK/5UK2WCvIkainKj4tICAxoVXOqBzJEJUW6BVA9PXTecFFFrpnvB38uMvIt8qJ2205LpBfeFEQWsxcHEj5sNFUC2iqC4aMxiUTWw83tGzXCOCa8KGpVWxByH6Exe6F2/co+rU4RONnJRYlMMvnwuKG/FSKNEaENT/vIy1oxSTnFEGX5SDvUJAy8dOtPFA/f2e8mw9A24AWh2ZopOc55FjKS86r38aYsFY2jwTnOjxJFDJmMOYVSXURW6C5BhQp/9t+XuxBYNqmXoY06IWCS4ZphCYQdnwgwHS6qFwSQBLizY5xYj7GBYIC/2gNWjl3rwsqMw115jy/gW3E5e65/z6OI8TQb9p7moRBxECNUCAwEAAQ==", "distribution": "aks", "infrastructure": "azure"}}' headers: Accept: @@ -1197,106 +1289,65 @@ interactions: ParameterSetName: - -g -n -l --tags --kube-config User-Agent: - - AZURECLI/2.37.0 azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: PUT uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002?api-version=2021-10-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","name":"cc-000002","type":"microsoft.kubernetes/connectedclusters","location":"eastus","tags":{"foo":"doo"},"systemData":{"createdBy":"akkeshar@microsoft.com","createdByType":"User","createdAt":"2022-06-07T17:50:49.1312914Z","lastModifiedBy":"akkeshar@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-06-07T17:50:49.1312914Z"},"identity":{"principalId":"e430152b-3c94-43e2-baaf-8c950b609f71","tenantId":"72f988bf-86f1-41af-91ab-2d7cd011db47","type":"SystemAssigned"},"properties":{"provisioningState":"Accepted","connectivityStatus":"Connecting","agentPublicKeyCertificate":"MIICCgKCAgEAzbx/TbbVmP6r2evvWYQdv4q97wt11GmOhtcpV1pjVQRIDbgWnGtkmTdMylPUe5f9b0g8UEr+mz5xX2tFDu1QoaITDn8dOhTdV+0rx/NAri3SDPRW75sJfsy7fZPI7CXK0KxULltdXMQ6ZA9bcruT54iXKmE2AMn+3un7cHw+MlYWi/ruvQs+Crj5DljYKHfqsjnkqObdZBUmfTj2l/KpZY1aSZFJJ9VWYTojqorR6KrAtjnQFdsXP6Ft4JtKz6U2PoGiDZAGUgTlev3mrSpellKxkQLRu1N9Dz3jIOGtL7dAnyG+wJZ2h56Q6D7R04JXJUkaQVZXyM8UcmN0pA4NU823UcRJROpgnEnhFyKwNzMXejGix3411obLAlKe2yvpbkLLcbwOiZs/iwy8OoU9AdruAB/MZOaKbMM5+7Pad4yPjHrkXKLWzm2JfHQBy0uk+kpYp53mYOx2y+CscpTlKYX34KWkY0rYSjFsEAnO01MhQUM/BRfPZCUUaUV8M9b4P9Ab4We1zbUFo2VaqpEBPFgH2c/Z6OYXDiYrQCWv/9GrxeVqlzF4ZFOh7rMFxILQ9QTFKV8pA654b4ridy85O+8od7t1/VEo3jSjYBpe+KMo6MhtXcux0JfJfbbkx3ZyXgz+Bdmpav8ZsdH1EYph5pJ3KWJGSHi89bUZjZP+5F0CAwEAAQ==","distribution":"aks","infrastructure":"azure"}}' - headers: - azure-asyncoperation: - - https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/9a2fffd3-bbd6-4f3e-ae36-5a918b439706*7223452050D836CBF4609B7634E742DA6FFE9F2162D65CA91DC68BAD8BB10DEB?api-version=2021-10-01 - cache-control: - - no-cache - content-length: - - '1494' - content-type: - - application/json; charset=utf-8 - date: - - Tue, 07 Jun 2022 17:50:52 GMT - etag: - - '"1e00df99-0000-0400-0000-629f8ffb0000"' - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-providerhub-traffic: - - 'True' - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - status: - code: 201 - message: Created -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - connectedk8s connect - Connection: - - keep-alive - ParameterSetName: - - -g -n -l --tags --kube-config - User-Agent: - - AZURECLI/2.37.0 azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/9a2fffd3-bbd6-4f3e-ae36-5a918b439706*7223452050D836CBF4609B7634E742DA6FFE9F2162D65CA91DC68BAD8BB10DEB?api-version=2021-10-01 - response: - body: - string: 
'{"id":"/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/9a2fffd3-bbd6-4f3e-ae36-5a918b439706*7223452050D836CBF4609B7634E742DA6FFE9F2162D65CA91DC68BAD8BB10DEB","name":"9a2fffd3-bbd6-4f3e-ae36-5a918b439706*7223452050D836CBF4609B7634E742DA6FFE9F2162D65CA91DC68BAD8BB10DEB","resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","status":"Succeeded","startTime":"2022-06-07T17:50:50.7759304Z","endTime":"2022-06-07T17:50:56.8730495Z","properties":null}' + string: '{"error":{"code":"SubscriptionRequestsThrottled","message":"Number + of requests for subscription ''1bfbb5d0-917e-4346-9026-1d3b344417f5'' and + operation ''PUT/SUBSCRIPTIONS/RESOURCEGROUPS/PROVIDERS/MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/'' + exceeded the backend storage limit. Please try again after ''6'' seconds."}}' headers: cache-control: - no-cache + connection: + - close content-length: - - '564' + - '308' content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 17:51:22 GMT - etag: - - '"2d007b48-0000-0100-0000-629f90000000"' + - Tue, 18 Oct 2022 19:33:34 GMT expires: - '-1' pragma: - no-cache strict-transport-security: - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding x-content-type-options: - nosniff status: - code: 200 - message: OK + code: 429 + message: '' - request: - body: null + body: '{"tags": {"foo": "doo"}, "location": "eastus", "identity": {"type": "SystemAssigned"}, + "properties": {"agentPublicKeyCertificate": "MIICCgKCAgEApejXj38tfgl/M+Z8QNeMb0mWXHi+MxxHgO6yYea9Ma6FgU+a+FtuAWmpcZz8wqOOKjljSYySAmq7SuPfhnz9mD8YAJonieO+e1nkGDwOJvuqRVmZPNnoazfWaL6CEZdA4JUjclBrYpcczr3iUvsNiVhbIYkFapzAfKUWCh4M4YrrB+S4CoCVfIRwfFxs5zGXXgbiKSVg5ZOA9m9nrSUviEBDjYdyI7aHG9onBNLmVH2xoFaveh5JNpLxFlhR2IvOR3GfkkkNNxIZjhdM6ys4jyoL69bJzLq8GKTkc7r55q+a+KyUL7QjYkMzxfTE3U/+ScEZK/5UK2WCvIkainKj4tICAxoVXOqBzJEJUW6BVA9PXTecFFFrpnvB38uMvIt8qJ2205LpBfeFEQWsxcHEj5sNFUC2iqC4aMxiUTWw83tGzXCOCa8KGpVWxByH6Exe6F2/co+rU4RONnJRYlMMvnwuKG/FSKNEaENT/vIy1oxSTnFEGX5SDvUJAy8dOtPFA/f2e8mw9A24AWh2ZopOc55FjKS86r38aYsFY2jwTnOjxJFDJmMOYVSXURW6C5BhQp/9t+XuxBYNqmXoY06IWCS4ZphCYQdnwgwHS6qFwSQBLizY5xYj7GBYIC/2gNWjl3rwsqMw115jy/gW3E5e65/z6OI8TQb9p7moRBxECNUCAwEAAQ==", + "distribution": "aks", "infrastructure": "azure"}}' headers: Accept: - - '*/*' + - application/json Accept-Encoding: - gzip, deflate CommandName: - connectedk8s connect Connection: - keep-alive + Content-Length: + - '889' + Content-Type: + - application/json ParameterSetName: - -g -n -l --tags --kube-config User-Agent: - - AZURECLI/2.37.0 azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) - method: GET + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) + method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002?api-version=2021-10-01 response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","name":"cc-000002","type":"microsoft.kubernetes/connectedclusters","location":"eastus","tags":{"foo":"doo"},"systemData":{"createdBy":"akkeshar@microsoft.com","createdByType":"User","createdAt":"2022-06-07T17:50:49.1312914Z","lastModifiedBy":"akkeshar@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-06-07T17:50:49.1312914Z"},"identity":{"principalId":"e430152b-3c94-43e2-baaf-8c950b609f71","tenantId":"72f988bf-86f1-41af-91ab-2d7cd011db47","type":"SystemAssigned"},"properties":{"provisioningState":"Succeeded","connectivityStatus":"Connecting","agentPublicKeyCertificate":"MIICCgKCAgEAzbx/TbbVmP6r2evvWYQdv4q97wt11GmOhtcpV1pjVQRIDbgWnGtkmTdMylPUe5f9b0g8UEr+mz5xX2tFDu1QoaITDn8dOhTdV+0rx/NAri3SDPRW75sJfsy7fZPI7CXK0KxULltdXMQ6ZA9bcruT54iXKmE2AMn+3un7cHw+MlYWi/ruvQs+Crj5DljYKHfqsjnkqObdZBUmfTj2l/KpZY1aSZFJJ9VWYTojqorR6KrAtjnQFdsXP6Ft4JtKz6U2PoGiDZAGUgTlev3mrSpellKxkQLRu1N9Dz3jIOGtL7dAnyG+wJZ2h56Q6D7R04JXJUkaQVZXyM8UcmN0pA4NU823UcRJROpgnEnhFyKwNzMXejGix3411obLAlKe2yvpbkLLcbwOiZs/iwy8OoU9AdruAB/MZOaKbMM5+7Pad4yPjHrkXKLWzm2JfHQBy0uk+kpYp53mYOx2y+CscpTlKYX34KWkY0rYSjFsEAnO01MhQUM/BRfPZCUUaUV8M9b4P9Ab4We1zbUFo2VaqpEBPFgH2c/Z6OYXDiYrQCWv/9GrxeVqlzF4ZFOh7rMFxILQ9QTFKV8pA654b4ridy85O+8od7t1/VEo3jSjYBpe+KMo6MhtXcux0JfJfbbkx3ZyXgz+Bdmpav8ZsdH1EYph5pJ3KWJGSHi89bUZjZP+5F0CAwEAAQ==","distribution":"aks","infrastructure":"azure"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","name":"cc-000002","type":"microsoft.kubernetes/connectedclusters","location":"eastus","tags":{"foo":"doo"},"systemData":{"createdBy":"akkeshar@microsoft.com","createdByType":"User","createdAt":"2022-10-18T19:33:22.0897444Z","lastModifiedBy":"akkeshar@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-18T19:33:43.8142892Z"},"identity":{"principalId":"52d3dd05-7e9e-4a16-80d0-610121a4f15f","tenantId":"72f988bf-86f1-41af-91ab-2d7cd011db47","type":"SystemAssigned"},"properties":{"provisioningState":"Succeeded","connectivityStatus":"Connecting","agentPublicKeyCertificate":"MIICCgKCAgEApejXj38tfgl/M+Z8QNeMb0mWXHi+MxxHgO6yYea9Ma6FgU+a+FtuAWmpcZz8wqOOKjljSYySAmq7SuPfhnz9mD8YAJonieO+e1nkGDwOJvuqRVmZPNnoazfWaL6CEZdA4JUjclBrYpcczr3iUvsNiVhbIYkFapzAfKUWCh4M4YrrB+S4CoCVfIRwfFxs5zGXXgbiKSVg5ZOA9m9nrSUviEBDjYdyI7aHG9onBNLmVH2xoFaveh5JNpLxFlhR2IvOR3GfkkkNNxIZjhdM6ys4jyoL69bJzLq8GKTkc7r55q+a+KyUL7QjYkMzxfTE3U/+ScEZK/5UK2WCvIkainKj4tICAxoVXOqBzJEJUW6BVA9PXTecFFFrpnvB38uMvIt8qJ2205LpBfeFEQWsxcHEj5sNFUC2iqC4aMxiUTWw83tGzXCOCa8KGpVWxByH6Exe6F2/co+rU4RONnJRYlMMvnwuKG/FSKNEaENT/vIy1oxSTnFEGX5SDvUJAy8dOtPFA/f2e8mw9A24AWh2ZopOc55FjKS86r38aYsFY2jwTnOjxJFDJmMOYVSXURW6C5BhQp/9t+XuxBYNqmXoY06IWCS4ZphCYQdnwgwHS6qFwSQBLizY5xYj7GBYIC/2gNWjl3rwsqMw115jy/gW3E5e65/z6OI8TQb9p7moRBxECNUCAwEAAQ==","distribution":"aks","infrastructure":"azure"}}' headers: cache-control: - no-cache @@ -1305,9 +1356,9 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 17:51:23 GMT + - Tue, 18 Oct 2022 19:33:49 GMT etag: - - '"0f012aff-0000-0100-0000-629f90000000"' + - '"19004508-0000-0100-0000-634eff9a0000"' expires: - '-1' pragma: @@ -1322,6 +1373,8 @@ interactions: - nosniff x-ms-providerhub-traffic: - 'True' + x-ms-ratelimit-remaining-subscription-writes: + - '1199' status: code: 200 message: OK @@ -1339,7 +1392,8 @@ interactions: ParameterSetName: - -g -n -l --tags 
--kube-config User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ExtendedLocation?api-version=2021-04-01 response: @@ -1353,7 +1407,12 @@ interactions: US","West Europe","North Europe","France Central","Southeast Asia","Australia East","East US 2","West US 2","UK South","Central US","West Central US","West US","North Central US","South Central US","Korea Central","Japan East","East - Asia","West US 3","Canada Central","East US 2 EUAP"],"apiVersions":["2021-08-31-preview","2021-08-15","2021-03-15-preview","2020-07-15-privatepreview"],"capabilities":"None"},{"resourceType":"locations/operationsstatus","locations":["East + Asia","West US 3","Canada Central","East US 2 EUAP"],"apiVersions":["2021-08-31-preview","2021-08-15","2021-03-15-preview","2020-07-15-privatepreview"],"capabilities":"None"},{"resourceType":"customLocations/resourceSyncRules","locations":["East + US","West Europe","North Europe","France Central","Southeast Asia","Australia + East","East US 2","West US 2","UK South","Central US","West Central US","West + US","North Central US","South Central US","Korea Central","Japan East","East + Asia","West US 3","Canada Central","East US 2 EUAP"],"apiVersions":["2021-08-31-preview"],"defaultApiVersion":"2021-08-31-preview","capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"locations/operationsstatus","locations":["East US","West Europe","North Europe","France Central","Southeast Asia","Australia East","East US 2","West US 2","UK South","Central US","West Central US","West US","North Central US","South Central US","Korea Central","Japan East","East @@ -1361,12 +1420,7 @@ interactions: US","West Europe","North Europe","France Central","Southeast Asia","Australia East","East US 2","West US 2","UK South","Central US","West Central US","West US","North Central US","South Central US","Korea Central","Japan East","East - Asia","West US 3","Canada Central","East US 2 Euap"],"apiVersions":["2021-03-15-preview","2020-07-15-privatepreview"],"capabilities":"None"},{"resourceType":"operations","locations":[],"apiVersions":["2021-08-31-preview","2021-08-15","2021-03-15-preview","2020-07-15-privatepreview"],"capabilities":"None"},{"resourceType":"customLocations/resourceSyncRules","locations":["East - US 2 EUAP","East US","West Europe","North Europe","France Central","Southeast - Asia","Australia East","East US 2","West US 2","UK South","Central US","West - Central US","West US","North Central US","South Central US","Korea Central","Japan - East","East Asia","West US 3","Canada Central"],"apiVersions":["2021-08-31-preview"],"defaultApiVersion":"2021-08-31-preview","capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + Asia","West US 3","Canada Central","East US 2 Euap"],"apiVersions":["2021-03-15-preview","2020-07-15-privatepreview"],"capabilities":"None"},{"resourceType":"operations","locations":[],"apiVersions":["2021-08-31-preview","2021-08-15","2021-03-15-preview","2020-07-15-privatepreview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' 
headers: cache-control: - no-cache @@ -1375,7 +1429,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 17:51:24 GMT + - Tue, 18 Oct 2022 19:33:52 GMT expires: - '-1' pragma: @@ -1403,8 +1457,8 @@ interactions: ParameterSetName: - -g -n -l --tags --kube-config User-Agent: - - python/3.7.7 (Windows-10-10.0.22000-SP0) msrest/0.6.21 msrest_azure/0.6.4 - azure-graphrbac/0.60.0 Azure-SDK-For-Python AZURECLI/2.37.0 + - python/3.7.7 (Windows-10-10.0.22621-SP0) msrest/0.7.1 msrest_azure/0.6.4 azure-graphrbac/0.60.0 + Azure-SDK-For-Python AZURECLI/2.41.0 (MSI) accept-language: - en-US method: GET @@ -1427,19 +1481,19 @@ interactions: dataserviceversion: - 3.0; date: - - Tue, 07 Jun 2022 17:51:24 GMT + - Tue, 18 Oct 2022 19:33:52 GMT duration: - - '1892064' + - '771761' expires: - '-1' ocp-aad-diagnostics-server-name: - - frRfQmjwsIzDshn1inmXGupkH+QmybVgokKJazbVaj4= + - kyiXU9UyZBvxdApP2Z1mil26isS/c4qbWNOtJuFTG7c= ocp-aad-session-key: - - cv5Fhhc0lBVZHXY59ovZPi1PBF8wGPobHqLu4KgQ2PiKooF7zZRJzSwBY6_W9SOgHfCwu1EpSPFPbrR70FmS0YwyKk-2teIM_ryAMVx_OgZ4Q51mY7JxpBzaN0RizGHN.bGFRoiYXyHLwjb6cQvgBWE19x_GvX5_3rTHCMIG0sZg + - I7TnfezQXWrTS-SFQkhhDtBgSvr17Qw_1HZHSGJnTIqlRWWSniLtBLTEbf_Mj3FekR7SaUdGun_5YbImDAjWshRVaw8jybCvpDUJHQXhrkWfRIekGeukraqkDc5Fwmnv.wgRHfdhMc7xt66HEhyYlt7A6PhimbrHCnospbEOkRwE pragma: - no-cache request-id: - - 25328b38-54e8-4b3a-9a46-8d84eaae4579 + - bf0af4b5-00f1-49be-a1ac-70b041cc1c3d strict-transport-security: - max-age=31536000; includeSubDomains x-aspnet-version: @@ -1467,23 +1521,24 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.37.0 azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002?api-version=2022-05-01-preview + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002?api-version=2022-10-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","name":"cc-000002","type":"microsoft.kubernetes/connectedclusters","location":"eastus","tags":{"foo":"doo"},"systemData":{"createdBy":"akkeshar@microsoft.com","createdByType":"User","createdAt":"2022-06-07T17:50:49.1312914Z","lastModifiedBy":"64b12d6e-6549-484c-8cc6-6281839ba394","lastModifiedByType":"Application","lastModifiedAt":"2022-06-07T17:52:09.7422401Z"},"identity":{"principalId":"e430152b-3c94-43e2-baaf-8c950b609f71","tenantId":"72f988bf-86f1-41af-91ab-2d7cd011db47","type":"SystemAssigned"},"properties":{"provisioningState":"Succeeded","connectivityStatus":"Connected","privateLinkState":"Disabled","agentPublicKeyCertificate":"MIICCgKCAgEAzbx/TbbVmP6r2evvWYQdv4q97wt11GmOhtcpV1pjVQRIDbgWnGtkmTdMylPUe5f9b0g8UEr+mz5xX2tFDu1QoaITDn8dOhTdV+0rx/NAri3SDPRW75sJfsy7fZPI7CXK0KxULltdXMQ6ZA9bcruT54iXKmE2AMn+3un7cHw+MlYWi/ruvQs+Crj5DljYKHfqsjnkqObdZBUmfTj2l/KpZY1aSZFJJ9VWYTojqorR6KrAtjnQFdsXP6Ft4JtKz6U2PoGiDZAGUgTlev3mrSpellKxkQLRu1N9Dz3jIOGtL7dAnyG+wJZ2h56Q6D7R04JXJUkaQVZXyM8UcmN0pA4NU823UcRJROpgnEnhFyKwNzMXejGix3411obLAlKe2yvpbkLLcbwOiZs/iwy8OoU9AdruAB/MZOaKbMM5+7Pad4yPjHrkXKLWzm2JfHQBy0uk+kpYp53mYOx2y+CscpTlKYX34KWkY0rYSjFsEAnO01MhQUM/BRfPZCUUaUV8M9b4P9Ab4We1zbUFo2VaqpEBPFgH2c/Z6OYXDiYrQCWv/9GrxeVqlzF4ZFOh7rMFxILQ9QTFKV8pA654b4ridy85O+8od7t1/VEo3jSjYBpe+KMo6MhtXcux0JfJfbbkx3ZyXgz+Bdmpav8ZsdH1EYph5pJ3KWJGSHi89bUZjZP+5F0CAwEAAQ==","distribution":"aks","infrastructure":"azure","kubernetesVersion":"1.22.6","totalNodeCount":1,"agentVersion":"1.6.16","totalCoreCount":2,"lastConnectivityTime":"2022-06-07T17:52:00.696Z","managedIdentityCertificateExpirationTime":"2022-09-05T17:45:00Z"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","name":"cc-000002","type":"microsoft.kubernetes/connectedclusters","location":"eastus","tags":{"foo":"doo"},"systemData":{"createdBy":"akkeshar@microsoft.com","createdByType":"User","createdAt":"2022-10-18T19:33:22.0897444Z","lastModifiedBy":"64b12d6e-6549-484c-8cc6-6281839ba394","lastModifiedByType":"Application","lastModifiedAt":"2022-10-18T19:39:26.5172978Z"},"identity":{"principalId":"52d3dd05-7e9e-4a16-80d0-610121a4f15f","tenantId":"72f988bf-86f1-41af-91ab-2d7cd011db47","type":"SystemAssigned"},"properties":{"provisioningState":"Succeeded","connectivityStatus":"Connected","privateLinkState":"Disabled","azureHybridBenefit":"NotApplicable","agentPublicKeyCertificate":"MIICCgKCAgEApejXj38tfgl/M+Z8QNeMb0mWXHi+MxxHgO6yYea9Ma6FgU+a+FtuAWmpcZz8wqOOKjljSYySAmq7SuPfhnz9mD8YAJonieO+e1nkGDwOJvuqRVmZPNnoazfWaL6CEZdA4JUjclBrYpcczr3iUvsNiVhbIYkFapzAfKUWCh4M4YrrB+S4CoCVfIRwfFxs5zGXXgbiKSVg5ZOA9m9nrSUviEBDjYdyI7aHG9onBNLmVH2xoFaveh5JNpLxFlhR2IvOR3GfkkkNNxIZjhdM6ys4jyoL69bJzLq8GKTkc7r55q+a+KyUL7QjYkMzxfTE3U/+ScEZK/5UK2WCvIkainKj4tICAxoVXOqBzJEJUW6BVA9PXTecFFFrpnvB38uMvIt8qJ2205LpBfeFEQWsxcHEj5sNFUC2iqC4aMxiUTWw83tGzXCOCa8KGpVWxByH6Exe6F2/co+rU4RONnJRYlMMvnwuKG/FSKNEaENT/vIy1oxSTnFEGX5SDvUJAy8dOtPFA/f2e8mw9A24AWh2ZopOc55FjKS86r38aYsFY2jwTnOjxJFDJmMOYVSXURW6C5BhQp/9t+XuxBYNqmXoY06IWCS4ZphCYQdnwgwHS6qFwSQBLizY5xYj7GBYIC/2gNWjl3rwsqMw115jy/gW3E5e65/z6OI8TQb9p7moRBxECNUCAwEAAQ==","distribution":"aks","infrastructure":"azure","kubernetesVersion":"1.23.12","totalNodeCount":1,"agentVersion":"1.8.14","totalCoreCount":4,"lastConnectivityTime":"2022-10-18T19:39:21.852Z","managedIdentityCertificateExpirationTime":"2023-01-16T19:28:00Z"}}' headers: 
cache-control: - no-cache content-length: - - '1752' + - '1790' content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 17:54:19 GMT + - Tue, 18 Oct 2022 19:40:38 GMT etag: - - '"7800125e-0000-0200-0000-629f90490000"' + - '"19008a0d-0000-0100-0000-634f00ee0000"' expires: - '-1' pragma: @@ -1509,29 +1564,30 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://cli-test-a-akkeshar-1bfbb5-7c7ab37f.hcp.westeurope.azmk8s.io/apis/networking.k8s.io/v1/ + uri: https://cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io/version/ response: body: - string: '{"kind":"APIResourceList","apiVersion":"v1","groupVersion":"networking.k8s.io/v1","resources":[{"name":"ingressclasses","singularName":"","namespaced":false,"kind":"IngressClass","verbs":["create","delete","deletecollection","get","list","patch","update","watch"],"storageVersionHash":"l/iqIbDgFyQ="},{"name":"ingresses","singularName":"","namespaced":true,"kind":"Ingress","verbs":["create","delete","deletecollection","get","list","patch","update","watch"],"shortNames":["ing"],"storageVersionHash":"39NQlfNR+bo="},{"name":"ingresses/status","singularName":"","namespaced":true,"kind":"Ingress","verbs":["get","patch","update"]},{"name":"networkpolicies","singularName":"","namespaced":true,"kind":"NetworkPolicy","verbs":["create","delete","deletecollection","get","list","patch","update","watch"],"shortNames":["netpol"],"storageVersionHash":"YpfwF18m1G8="}]} - - ' + string: "{\n \"major\": \"1\",\n \"minor\": \"23\",\n \"gitVersion\": \"v1.23.12\",\n + \ \"gitCommit\": \"c6939792865ef0f70f92006081690d77411c8ed5\",\n \"gitTreeState\": + \"clean\",\n \"buildDate\": \"2022-09-21T21:46:35Z\",\n \"goVersion\": \"go1.17.13\",\n + \ \"compiler\": \"gc\",\n \"platform\": \"linux/amd64\"\n}" headers: audit-id: - - 228a39f8-5ce4-46a9-9ffd-2a4b1aa3c3c7 + - ce27647b-360b-4304-95d0-bedf457aebc0 cache-control: - no-cache, private content-length: - - '864' + - '265' content-type: - application/json date: - - Tue, 07 Jun 2022 17:54:20 GMT + - Tue, 18 Oct 2022 19:40:40 GMT x-kubernetes-pf-flowschema-uid: - - 3b9ec461-e128-4d99-abcc-65ad56c2f58e + - 4338b148-01d8-4f61-8b84-7cdc7ec27482 x-kubernetes-pf-prioritylevel-uid: - - 93094910-4d9a-46c6-b59b-961811b7418b + - aa4e26b3-f7bb-4992-a47d-272b134f6779 status: code: 200 message: OK @@ -1543,29 +1599,29 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://cli-test-a-akkeshar-1bfbb5-7c7ab37f.hcp.westeurope.azmk8s.io/api/v1/namespaces/azure-arc/configmaps/azure-clusterconfig + uri: https://cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io/api/v1/namespaces/azure-arc/configmaps/azure-clusterconfig response: body: - string: 
'{"kind":"ConfigMap","apiVersion":"v1","metadata":{"name":"azure-clusterconfig","namespace":"azure-arc","uid":"19e3f6e5-8be6-4b1b-9d9e-3c39a782d1aa","resourceVersion":"1702","creationTimestamp":"2022-06-07T17:51:40Z","labels":{"app.kubernetes.io/managed-by":"Helm"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:51:40Z","fieldsType":"FieldsV1","fieldsV1":{"f:data":{".":{},"f:ARC_AGENT_HELM_CHART_NAME":{},"f:ARC_AGENT_RELEASE_TRAIN":{},"f:AZURE_ARC_AGENT_VERSION":{},"f:AZURE_ARC_AUTOUPDATE":{},"f:AZURE_ARC_HELM_NAMESPACE":{},"f:AZURE_ARC_RELEASE_NAME":{},"f:AZURE_ENVIRONMENT":{},"f:AZURE_REGION":{},"f:AZURE_RESOURCE_GROUP":{},"f:AZURE_RESOURCE_NAME":{},"f:AZURE_SUBSCRIPTION_ID":{},"f:AZURE_TENANT_ID":{},"f:CLUSTER_CONNECT_AGENT_ENABLED":{},"f:CLUSTER_TYPE":{},"f:DEBUG_LOGGING":{},"f:EXTENSION_OPERATOR_ENABLED":{},"f:FLUX_CLIENT_DEFAULT_LOCATION":{},"f:FLUX_UPSTREAM_SERVICE_ENABLED":{},"f:GITOPS_ENABLED":{},"f:HELM_AUTO_UPDATE_CHECK_FREQUENCY_IN_MINUTES":{},"f:IS_CLIENT_SECRET_A_TOKEN":{},"f:KUBERNETES_DISTRO":{},"f:KUBERNETES_INFRA":{},"f:MANAGED_IDENTITY_AUTH":{},"f:MAX_ENTRIES_PER_STORE":{},"f:MAX_STORES":{},"f:NO_AUTH_HEADER_DATA_PLANE":{},"f:ONBOARDING_SECRET_NAME":{},"f:ONBOARDING_SECRET_NAMESPACE":{},"f:RESOURCE_SYNC_ENABLE_CHUNKED_SYNC":{},"f:RESOURCE_SYNC_LIST_CHUNK_SIZE":{},"f:RP_NAMESPACE":{},"f:TAGS":{}},"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:app.kubernetes.io/managed-by":{}}}}}]},"data":{"ARC_AGENT_HELM_CHART_NAME":"azure-arc-k8sagents","ARC_AGENT_RELEASE_TRAIN":"stable","AZURE_ARC_AGENT_VERSION":"1.6.16","AZURE_ARC_AUTOUPDATE":"true","AZURE_ARC_HELM_NAMESPACE":"default","AZURE_ARC_RELEASE_NAME":"azure-arc","AZURE_ENVIRONMENT":"AZUREPUBLICCLOUD","AZURE_REGION":"eastus","AZURE_RESOURCE_GROUP":"akkeshar","AZURE_RESOURCE_NAME":"cc-000002","AZURE_SUBSCRIPTION_ID":"1bfbb5d0-917e-4346-9026-1d3b344417f5","AZURE_TENANT_ID":"72f988bf-86f1-41af-91ab-2d7cd011db47","CLUSTER_CONNECT_AGENT_ENABLED":"true","CLUSTER_TYPE":"ConnectedClusters","DEBUG_LOGGING":"false","EXTENSION_OPERATOR_ENABLED":"true","FLUX_CLIENT_DEFAULT_LOCATION":"mcr.microsoft.com/azurearck8s/arc-preview/fluxctl:0.2.0","FLUX_UPSTREAM_SERVICE_ENABLED":"true","GITOPS_ENABLED":"true","HELM_AUTO_UPDATE_CHECK_FREQUENCY_IN_MINUTES":"60","IS_CLIENT_SECRET_A_TOKEN":"false","KUBERNETES_DISTRO":"aks","KUBERNETES_INFRA":"azure","MANAGED_IDENTITY_AUTH":"true","MAX_ENTRIES_PER_STORE":"680","MAX_STORES":"30","NO_AUTH_HEADER_DATA_PLANE":"false","ONBOARDING_SECRET_NAME":"azure-arc-connect-privatekey","ONBOARDING_SECRET_NAMESPACE":"azure-arc","RESOURCE_SYNC_ENABLE_CHUNKED_SYNC":"false","RESOURCE_SYNC_LIST_CHUNK_SIZE":"200","RP_NAMESPACE":"Microsoft.Kubernetes","TAGS":"map[]"}} + string: 
'{"kind":"ConfigMap","apiVersion":"v1","metadata":{"name":"azure-clusterconfig","namespace":"azure-arc","uid":"957e4d71-b152-4205-a04f-7cfd0aa6ca43","resourceVersion":"1840","creationTimestamp":"2022-10-18T19:34:04Z","labels":{"app.kubernetes.io/managed-by":"Helm"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:34:04Z","fieldsType":"FieldsV1","fieldsV1":{"f:data":{".":{},"f:ARC_AGENT_HELM_CHART_NAME":{},"f:ARC_AGENT_RELEASE_TRAIN":{},"f:AZURE_ARC_AGENT_VERSION":{},"f:AZURE_ARC_AUTOUPDATE":{},"f:AZURE_ARC_HELM_NAMESPACE":{},"f:AZURE_ARC_RELEASE_NAME":{},"f:AZURE_ENVIRONMENT":{},"f:AZURE_REGION":{},"f:AZURE_RESOURCE_GROUP":{},"f:AZURE_RESOURCE_MANAGER_ENDPOINT":{},"f:AZURE_RESOURCE_NAME":{},"f:AZURE_SUBSCRIPTION_ID":{},"f:AZURE_TENANT_ID":{},"f:CLUSTER_CONNECT_AGENT_ENABLED":{},"f:CLUSTER_TYPE":{},"f:CUSTOM_IDENTITY_PROVIDER_ENABLED":{},"f:DEBUG_LOGGING":{},"f:EXTENSION_OPERATOR_ENABLED":{},"f:FLUX_CLIENT_DEFAULT_LOCATION":{},"f:FLUX_UPSTREAM_SERVICE_ENABLED":{},"f:GITOPS_ENABLED":{},"f:GUARD_PKI_HOSTPATH":{},"f:HELM_AUTO_UPDATE_CHECK_FREQUENCY_IN_MINUTES":{},"f:IS_CLIENT_SECRET_A_TOKEN":{},"f:KUBERNETES_DISTRO":{},"f:KUBERNETES_INFRA":{},"f:MANAGED_IDENTITY_AUTH":{},"f:MAX_ENTRIES_PER_STORE":{},"f:MAX_STORES":{},"f:MSI_ADAPTER_ARTIFACT_PATH":{},"f:NO_AUTH_HEADER_DATA_PLANE":{},"f:ONBOARDING_SECRET_NAME":{},"f:ONBOARDING_SECRET_NAMESPACE":{},"f:RESOURCE_SYNC_ENABLE_CHUNKED_SYNC":{},"f:RESOURCE_SYNC_LIST_CHUNK_SIZE":{},"f:RP_NAMESPACE":{},"f:TAGS":{}},"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:app.kubernetes.io/managed-by":{}}}}}]},"data":{"ARC_AGENT_HELM_CHART_NAME":"azure-arc-k8sagents","ARC_AGENT_RELEASE_TRAIN":"stable","AZURE_ARC_AGENT_VERSION":"1.8.14","AZURE_ARC_AUTOUPDATE":"true","AZURE_ARC_HELM_NAMESPACE":"default","AZURE_ARC_RELEASE_NAME":"azure-arc","AZURE_ENVIRONMENT":"AZUREPUBLICCLOUD","AZURE_REGION":"eastus","AZURE_RESOURCE_GROUP":"akkeshar","AZURE_RESOURCE_MANAGER_ENDPOINT":"","AZURE_RESOURCE_NAME":"cc-000002","AZURE_SUBSCRIPTION_ID":"1bfbb5d0-917e-4346-9026-1d3b344417f5","AZURE_TENANT_ID":"72f988bf-86f1-41af-91ab-2d7cd011db47","CLUSTER_CONNECT_AGENT_ENABLED":"true","CLUSTER_TYPE":"ConnectedClusters","CUSTOM_IDENTITY_PROVIDER_ENABLED":"false","DEBUG_LOGGING":"false","EXTENSION_OPERATOR_ENABLED":"true","FLUX_CLIENT_DEFAULT_LOCATION":"mcr.microsoft.com/azurearck8s/arc-preview/fluxctl:0.2.0","FLUX_UPSTREAM_SERVICE_ENABLED":"true","GITOPS_ENABLED":"true","GUARD_PKI_HOSTPATH":"","HELM_AUTO_UPDATE_CHECK_FREQUENCY_IN_MINUTES":"60","IS_CLIENT_SECRET_A_TOKEN":"false","KUBERNETES_DISTRO":"aks","KUBERNETES_INFRA":"azure","MANAGED_IDENTITY_AUTH":"true","MAX_ENTRIES_PER_STORE":"680","MAX_STORES":"30","MSI_ADAPTER_ARTIFACT_PATH":"mcr.microsoft.com/azurearck8s/msi-adapter:1.0.2","NO_AUTH_HEADER_DATA_PLANE":"false","ONBOARDING_SECRET_NAME":"azure-arc-connect-privatekey","ONBOARDING_SECRET_NAMESPACE":"azure-arc","RESOURCE_SYNC_ENABLE_CHUNKED_SYNC":"false","RESOURCE_SYNC_LIST_CHUNK_SIZE":"200","RP_NAMESPACE":"Microsoft.Kubernetes","TAGS":"map[]"}} ' headers: audit-id: - - 51dc2df3-6a89-4f9b-8734-860544b950ea + - cd4e1193-77e7-4f96-aab0-3554e00b0496 cache-control: - no-cache, private content-type: - application/json date: - - Tue, 07 Jun 2022 17:54:22 GMT + - Tue, 18 Oct 2022 19:40:42 GMT transfer-encoding: - chunked x-kubernetes-pf-flowschema-uid: - - 
3b9ec461-e128-4d99-abcc-65ad56c2f58e + - 4338b148-01d8-4f61-8b84-7cdc7ec27482 x-kubernetes-pf-prioritylevel-uid: - - 93094910-4d9a-46c6-b59b-961811b7418b + - aa4e26b3-f7bb-4992-a47d-272b134f6779 status: code: 200 message: OK @@ -1585,7 +1641,8 @@ interactions: ParameterSetName: - -g -n --kube-config -y User-Agent: - - AZURECLI/2.37.0 azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: DELETE uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002?api-version=2021-10-01 response: @@ -1593,7 +1650,7 @@ interactions: string: 'null' headers: azure-asyncoperation: - - https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/4a2e342b-774d-4753-b606-09baf05d09d0*7223452050D836CBF4609B7634E742DA6FFE9F2162D65CA91DC68BAD8BB10DEB?api-version=2021-10-01 + - https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/31d785a1-4704-4e49-aebd-f4d7b6a802fc*CBCF23F2D03CAE8A304520933CBB04564E255DC298B25D06FA216D51D71DCFFB?api-version=2021-10-01 cache-control: - no-cache content-length: @@ -1601,13 +1658,13 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 17:54:25 GMT + - Tue, 18 Oct 2022 19:40:46 GMT etag: - - '"1e00559d-0000-0400-0000-629f90d10000"' + - '"1900dd0e-0000-0100-0000-634f013e0000"' expires: - '-1' location: - - https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/4a2e342b-774d-4753-b606-09baf05d09d0*7223452050D836CBF4609B7634E742DA6FFE9F2162D65CA91DC68BAD8BB10DEB?api-version=2021-10-01 + - https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/31d785a1-4704-4e49-aebd-f4d7b6a802fc*CBCF23F2D03CAE8A304520933CBB04564E255DC298B25D06FA216D51D71DCFFB?api-version=2021-10-01 pragma: - no-cache strict-transport-security: @@ -1635,23 +1692,71 @@ interactions: ParameterSetName: - -g -n --kube-config -y User-Agent: - - AZURECLI/2.37.0 azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/31d785a1-4704-4e49-aebd-f4d7b6a802fc*CBCF23F2D03CAE8A304520933CBB04564E255DC298B25D06FA216D51D71DCFFB?api-version=2021-10-01 + response: + body: + string: '{"id":"/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/31d785a1-4704-4e49-aebd-f4d7b6a802fc*CBCF23F2D03CAE8A304520933CBB04564E255DC298B25D06FA216D51D71DCFFB","name":"31d785a1-4704-4e49-aebd-f4d7b6a802fc*CBCF23F2D03CAE8A304520933CBB04564E255DC298B25D06FA216D51D71DCFFB","resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","status":"Succeeded","startTime":"2022-10-18T19:40:46.0823182Z","endTime":"2022-10-18T19:40:52.4423727Z","properties":null}' + headers: + cache-control: + - no-cache + content-length: + - '564' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 18 Oct 2022 19:41:16 GMT + etag: + - '"3f00748a-0000-0100-0000-634f01440000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; 
includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - connectedk8s delete + Connection: + - keep-alive + ParameterSetName: + - -g -n --kube-config -y + User-Agent: + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/4a2e342b-774d-4753-b606-09baf05d09d0*7223452050D836CBF4609B7634E742DA6FFE9F2162D65CA91DC68BAD8BB10DEB?api-version=2021-10-01 + uri: https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/31d785a1-4704-4e49-aebd-f4d7b6a802fc*CBCF23F2D03CAE8A304520933CBB04564E255DC298B25D06FA216D51D71DCFFB?api-version=2021-10-01 response: body: - string: '{"id":"/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/4a2e342b-774d-4753-b606-09baf05d09d0*7223452050D836CBF4609B7634E742DA6FFE9F2162D65CA91DC68BAD8BB10DEB","name":"4a2e342b-774d-4753-b606-09baf05d09d0*7223452050D836CBF4609B7634E742DA6FFE9F2162D65CA91DC68BAD8BB10DEB","resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","status":"Succeeded","startTime":"2022-06-07T17:54:24.712864Z","endTime":"2022-06-07T17:54:29.4205957Z","properties":null}' + string: '{"id":"/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/31d785a1-4704-4e49-aebd-f4d7b6a802fc*CBCF23F2D03CAE8A304520933CBB04564E255DC298B25D06FA216D51D71DCFFB","name":"31d785a1-4704-4e49-aebd-f4d7b6a802fc*CBCF23F2D03CAE8A304520933CBB04564E255DC298B25D06FA216D51D71DCFFB","resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","status":"Succeeded","startTime":"2022-10-18T19:40:46.0823182Z","endTime":"2022-10-18T19:40:52.4423727Z","properties":null}' headers: cache-control: - no-cache content-length: - - '563' + - '564' content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 17:54:55 GMT + - Tue, 18 Oct 2022 19:41:17 GMT etag: - - '"2d003049-0000-0100-0000-629f90d50000"' + - '"3f00748a-0000-0100-0000-634f01440000"' expires: - '-1' pragma: @@ -1675,17 +1780,17 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://cli-test-a-akkeshar-1bfbb5-7c7ab37f.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: https://cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc response: body: - string: 
'{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"2931"},"items":[{"metadata":{"name":"azure-arc","uid":"f21fce18-a772-4bd9-a31c-1fd4b0126469","resourceVersion":"2925","creationTimestamp":"2022-06-07T17:51:38Z","deletionTimestamp":"2022-06-07T17:55:09Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:51:38Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating"}}]} + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"4259"},"items":[{"metadata":{"name":"azure-arc","uid":"ebbab61c-eb81-491c-8e8b-cf74382d14f1","resourceVersion":"4254","creationTimestamp":"2022-10-18T19:34:04Z","deletionTimestamp":"2022-10-18T19:41:33Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:34:04Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating"}}]} ' headers: audit-id: - - e89acdc6-2732-4845-9b69-72f2b0f6abf6 + - 477a65d5-44b0-42ec-a26a-504239c35822 cache-control: - no-cache, private content-length: @@ -1693,11 +1798,11 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 17:55:11 GMT + - Tue, 18 Oct 2022 19:41:34 GMT x-kubernetes-pf-flowschema-uid: - - 3b9ec461-e128-4d99-abcc-65ad56c2f58e + - 4338b148-01d8-4f61-8b84-7cdc7ec27482 x-kubernetes-pf-prioritylevel-uid: - - 93094910-4d9a-46c6-b59b-961811b7418b + - aa4e26b3-f7bb-4992-a47d-272b134f6779 status: code: 200 message: OK @@ -1709,17 +1814,17 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://cli-test-a-akkeshar-1bfbb5-7c7ab37f.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: https://cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc response: body: - string: 
'{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"3003"},"items":[{"metadata":{"name":"azure-arc","uid":"f21fce18-a772-4bd9-a31c-1fd4b0126469","resourceVersion":"2925","creationTimestamp":"2022-06-07T17:51:38Z","deletionTimestamp":"2022-06-07T17:55:09Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:51:38Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating"}}]} + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"4444"},"items":[{"metadata":{"name":"azure-arc","uid":"ebbab61c-eb81-491c-8e8b-cf74382d14f1","resourceVersion":"4254","creationTimestamp":"2022-10-18T19:34:04Z","deletionTimestamp":"2022-10-18T19:41:33Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:34:04Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating"}}]} ' headers: audit-id: - - 1d22ef1d-dec2-4c84-9dc6-da25b4113f0e + - 14bd4708-7896-4ebf-8e52-5e42fe0f2254 cache-control: - no-cache, private content-length: @@ -1727,50 +1832,1061 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 17:55:16 GMT + - Tue, 18 Oct 2022 19:41:39 GMT x-kubernetes-pf-flowschema-uid: - - 3b9ec461-e128-4d99-abcc-65ad56c2f58e + - 4338b148-01d8-4f61-8b84-7cdc7ec27482 x-kubernetes-pf-prioritylevel-uid: - - 93094910-4d9a-46c6-b59b-961811b7418b + - aa4e26b3-f7bb-4992-a47d-272b134f6779 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Content-Type: + - application/json + User-Agent: + - OpenAPI-Generator/24.2.0/python + method: GET + uri: https://cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + response: + body: + string: 
'{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"4484"},"items":[{"metadata":{"name":"azure-arc","uid":"ebbab61c-eb81-491c-8e8b-cf74382d14f1","resourceVersion":"4457","creationTimestamp":"2022-10-18T19:34:04Z","deletionTimestamp":"2022-10-18T19:41:33Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:34:04Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:41:40Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"ResourcesDiscovered","message":"All + resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"ParsedGroupVersions","message":"All + legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"ContentDeleted","message":"All + content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"SomeResourcesRemain","message":"Some + resources are remaining: pods. 
has 10 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"ContentHasNoFinalizers","message":"All + content-preserving finalizers finished"}]}}]} + + ' + headers: + audit-id: + - a2d595ce-3c46-4551-9850-b84f5a731b3b + cache-control: + - no-cache, private + content-type: + - application/json + date: + - Tue, 18 Oct 2022 19:41:45 GMT + transfer-encoding: + - chunked + x-kubernetes-pf-flowschema-uid: + - 4338b148-01d8-4f61-8b84-7cdc7ec27482 + x-kubernetes-pf-prioritylevel-uid: + - aa4e26b3-f7bb-4992-a47d-272b134f6779 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Content-Type: + - application/json + User-Agent: + - OpenAPI-Generator/24.2.0/python + method: GET + uri: https://cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + response: + body: + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"4505"},"items":[{"metadata":{"name":"azure-arc","uid":"ebbab61c-eb81-491c-8e8b-cf74382d14f1","resourceVersion":"4486","creationTimestamp":"2022-10-18T19:34:04Z","deletionTimestamp":"2022-10-18T19:41:33Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:34:04Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:41:40Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"ResourcesDiscovered","message":"All + resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"ParsedGroupVersions","message":"All + legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"ContentDeleted","message":"All + content successfully deleted, may be waiting on 
finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"SomeResourcesRemain","message":"Some + resources are remaining: pods. has 7 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"ContentHasNoFinalizers","message":"All + content-preserving finalizers finished"}]}}]} + + ' + headers: + audit-id: + - 4d3df345-b708-4157-9bf7-21120efa0509 + cache-control: + - no-cache, private + content-type: + - application/json + date: + - Tue, 18 Oct 2022 19:41:50 GMT + transfer-encoding: + - chunked + x-kubernetes-pf-flowschema-uid: + - 4338b148-01d8-4f61-8b84-7cdc7ec27482 + x-kubernetes-pf-prioritylevel-uid: + - aa4e26b3-f7bb-4992-a47d-272b134f6779 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Content-Type: + - application/json + User-Agent: + - OpenAPI-Generator/24.2.0/python + method: GET + uri: https://cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + response: + body: + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"4522"},"items":[{"metadata":{"name":"azure-arc","uid":"ebbab61c-eb81-491c-8e8b-cf74382d14f1","resourceVersion":"4486","creationTimestamp":"2022-10-18T19:34:04Z","deletionTimestamp":"2022-10-18T19:41:33Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:34:04Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:41:40Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"ResourcesDiscovered","message":"All + resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"ParsedGroupVersions","message":"All + legacy kube types successfully 
parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"ContentDeleted","message":"All + content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"SomeResourcesRemain","message":"Some + resources are remaining: pods. has 7 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"ContentHasNoFinalizers","message":"All + content-preserving finalizers finished"}]}}]} + + ' + headers: + audit-id: + - 426766b5-c0b5-44e2-af46-0f008af53c95 + cache-control: + - no-cache, private + content-type: + - application/json + date: + - Tue, 18 Oct 2022 19:41:55 GMT + transfer-encoding: + - chunked + x-kubernetes-pf-flowschema-uid: + - 4338b148-01d8-4f61-8b84-7cdc7ec27482 + x-kubernetes-pf-prioritylevel-uid: + - aa4e26b3-f7bb-4992-a47d-272b134f6779 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Content-Type: + - application/json + User-Agent: + - OpenAPI-Generator/24.2.0/python + method: GET + uri: https://cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + response: + body: + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"4542"},"items":[{"metadata":{"name":"azure-arc","uid":"ebbab61c-eb81-491c-8e8b-cf74382d14f1","resourceVersion":"4486","creationTimestamp":"2022-10-18T19:34:04Z","deletionTimestamp":"2022-10-18T19:41:33Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:34:04Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:41:40Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"ResourcesDiscovered","message":"All + resources successfully 
discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"ParsedGroupVersions","message":"All + legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"ContentDeleted","message":"All + content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"SomeResourcesRemain","message":"Some + resources are remaining: pods. has 7 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"ContentHasNoFinalizers","message":"All + content-preserving finalizers finished"}]}}]} + + ' + headers: + audit-id: + - 6676cffc-25a4-4de2-bb93-d9c995a2cb01 + cache-control: + - no-cache, private + content-type: + - application/json + date: + - Tue, 18 Oct 2022 19:42:00 GMT + transfer-encoding: + - chunked + x-kubernetes-pf-flowschema-uid: + - 4338b148-01d8-4f61-8b84-7cdc7ec27482 + x-kubernetes-pf-prioritylevel-uid: + - aa4e26b3-f7bb-4992-a47d-272b134f6779 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Content-Type: + - application/json + User-Agent: + - OpenAPI-Generator/24.2.0/python + method: GET + uri: https://cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + response: + body: + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"4576"},"items":[{"metadata":{"name":"azure-arc","uid":"ebbab61c-eb81-491c-8e8b-cf74382d14f1","resourceVersion":"4486","creationTimestamp":"2022-10-18T19:34:04Z","deletionTimestamp":"2022-10-18T19:41:33Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:34:04Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:41:40Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","stat
us":"False","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"ResourcesDiscovered","message":"All + resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"ParsedGroupVersions","message":"All + legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"ContentDeleted","message":"All + content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"SomeResourcesRemain","message":"Some + resources are remaining: pods. has 7 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-10-18T19:41:40Z","reason":"ContentHasNoFinalizers","message":"All + content-preserving finalizers finished"}]}}]} + + ' + headers: + audit-id: + - 6a033e36-cc94-4b20-a7c8-30025b6a4a70 + cache-control: + - no-cache, private + content-type: + - application/json + date: + - Tue, 18 Oct 2022 19:42:05 GMT + transfer-encoding: + - chunked + x-kubernetes-pf-flowschema-uid: + - 4338b148-01d8-4f61-8b84-7cdc7ec27482 + x-kubernetes-pf-prioritylevel-uid: + - aa4e26b3-f7bb-4992-a47d-272b134f6779 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Content-Type: + - application/json + User-Agent: + - OpenAPI-Generator/24.2.0/python + method: GET + uri: https://cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + response: + body: + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"4603"},"items":[]} + + ' + headers: + audit-id: + - 23b2cf48-0d2a-4eeb-a317-560d218a947e + cache-control: + - no-cache, private + content-length: + - '92' + content-type: + - application/json + date: + - Tue, 18 Oct 2022 19:42:11 GMT + x-kubernetes-pf-flowschema-uid: + - 4338b148-01d8-4f61-8b84-7cdc7ec27482 + x-kubernetes-pf-prioritylevel-uid: + - aa4e26b3-f7bb-4992-a47d-272b134f6779 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - connectedk8s connect + Connection: + - keep-alive + ParameterSetName: + - -g -n -l --distribution --infrastructure --distribution-version --tags --kube-config + User-Agent: + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Kubernetes?api-version=2021-04-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Kubernetes","namespace":"Microsoft.Kubernetes","authorizations":[{"applicationId":"64b12d6e-6549-484c-8cc6-6281839ba394","roleDefinitionId":"1d1d44cf-68a1-4def-a2b6-cd7efc3515af"},{"applicationId":"359431ad-ece5-496b-8768-be4bbfd82f36","roleDefinitionId":"1b5c71b7-9814-4b40-b62a-23018af874d8"},{"applicationId":"0000dab9-8b21-4ba2-807f-1743968cef00","roleDefinitionId":"1b5c71b7-9814-4b40-b62a-23018af874d8"},{"applicationId":"8edd93e1-2103-40b4-bd70-6e34e586362d","roleDefinitionId":"eb67887a-31e8-4e4e-bf5b-14ff79351a6f"}],"resourceTypes":[{"resourceType":"connectedClusters","locations":["West + Europe","East US","West Central US","South Central US","Southeast Asia","UK + South","East 
US 2","West US 2","Australia East","North Europe","France Central","Central + US","West US","North Central US","Korea Central","Japan East","West US 3","East + Asia","Canada Central","East US 2 EUAP","Canada East"],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"SystemAssignedResourceIdentity, + SupportsTags, SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/operationStatuses","locations":["East + US 2 EUAP","West Europe","East US","West Central US","South Central US","Southeast + Asia","UK South","East US 2","West US 2","Australia East","North Europe","France + Central","Central US","West US","North Central US","Korea Central","Japan + East","East Asia","West US 3","Canada East","Canada Central"],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"registeredSubscriptions","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"Operations","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview","2019-11-01-preview","2019-09-01-privatepreview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + headers: + cache-control: + - no-cache + content-length: + - '2416' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 18 Oct 2022 19:42:11 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - connectedk8s connect + Connection: + - keep-alive + ParameterSetName: + - -g -n -l --distribution --infrastructure --distribution-version --tags --kube-config + User-Agent: + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration?api-version=2021-04-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration","namespace":"Microsoft.KubernetesConfiguration","authorizations":[{"applicationId":"c699bf69-fb1d-4eaf-999b-99e6b2ae4d85","roleDefinitionId":"90155430-a360-410f-af5d-89dc284d85c6"},{"applicationId":"03db181c-e9d3-4868-9097-f0b728327182","roleDefinitionId":"DE2ADB97-42D8-49C8-8FCF-DBB53EF936AC"},{"applicationId":"a0f92522-89de-4c5e-9a75-0044ccf66efd","roleDefinitionId":"b3429810-7d5c-420e-8605-cf280f3099f2"},{"applicationId":"bd9b7cd5-dac1-495f-b013-ac871e98fa5f","roleDefinitionId":"0d44c8f0-08b9-44d4-9f59-e51c83f95200"},{"applicationId":"585fc3c3-9a59-4720-8319-53cce041a605","roleDefinitionId":"4a9ce2ee-6de2-43ba-a7bd-8f316de763a7"}],"resourceTypes":[{"resourceType":"sourceControlConfigurations","locations":["East + US","West Europe","West Central US","West US 2","West US 3","South Central 
+ US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France + Central","Central US","North Central US","West US","Korea Central","East Asia","Japan + East","Canada East","Canada Central","Norway East","Germany West Central","Sweden + Central","Switzerland North","Australia Southeast","Central India","South + India","Japan West","Uk West","France South","Korea South","South Africa North","East + US 2 EUAP","Central US EUAP"],"apiVersions":["2022-07-01","2022-03-01","2021-03-01","2020-10-01-preview","2020-07-01-preview","2019-11-01-preview"],"defaultApiVersion":"2022-03-01","capabilities":"SupportsExtension"},{"resourceType":"extensions","locations":["East + US","West Europe","West Central US","West US 2","West US 3","South Central + US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France + Central","Central US","North Central US","West US","Korea Central","East Asia","Japan + East","Canada East","Canada Central","Norway East","Germany West Central","Sweden + Central","Switzerland North","Australia Southeast","Central India","South + India","Japan West","Uk West","France South","Korea South","South Africa North","East + US 2 EUAP","Central US EUAP"],"apiVersions":["2022-07-01","2022-04-02-preview","2022-03-01","2021-09-01","2021-05-01-preview","2020-07-01-preview"],"defaultApiVersion":"2022-07-01","capabilities":"SystemAssignedResourceIdentity, + SupportsExtension"},{"resourceType":"fluxConfigurations","locations":["East + US","West Europe","West Central US","West US 2","West US 3","South Central + US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France + Central","Central US","North Central US","West US","Korea Central","East Asia","Japan + East","Canada East","Canada Central","Norway East","Germany West Central","Sweden + Central","Switzerland North","Australia Southeast","Central India","South + India","Japan West","Uk West","Korea South","France South","South Africa North","East + US 2 EUAP","Central US EUAP"],"apiVersions":["2022-07-01","2022-03-01","2022-01-01-preview","2021-11-01-preview","2021-06-01-preview"],"defaultApiVersion":"2022-07-01","capabilities":"SupportsExtension"},{"resourceType":"operations","locations":[],"apiVersions":["2022-03-01","2022-01-01-preview","2021-12-01-preview","2021-11-01-preview","2021-09-01","2021-06-01-preview","2021-05-01-preview","2021-03-01","2020-10-01-preview","2020-07-01-preview","2019-11-01-preview"],"capabilities":"None"},{"resourceType":"privateLinkScopes","locations":["East + US","West Europe","West Central US","West US 2","West US 3","South Central + US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France + Central","Central US","North Central US","West US","Korea Central","East Asia","Japan + East","Canada East","Canada Central","Norway East","Germany West Central","Sweden + Central","Switzerland North","Australia Southeast","Central India","South + India","Japan West","Uk West","Korea South","France South","South Africa North","East + US 2 EUAP","Central US EUAP"],"apiVersions":["2022-04-02-preview"],"capabilities":"SupportsTags, + SupportsLocation"},{"resourceType":"privateLinkScopes/privateEndpointConnections","locations":["East + US","West Europe","West Central US","West US 2","West US 3","South Central + US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France + Central","Central US","North Central US","West US","Korea Central","East Asia","Japan + East","Canada East","Canada Central","Norway 
East","Germany West Central","Sweden + Central","Switzerland North","Australia Southeast","Central India","South + India","Japan West","Uk West","France South","Korea South","South Africa North","East + US 2 EUAP","Central US EUAP"],"apiVersions":["2022-04-02-preview"],"capabilities":"None"},{"resourceType":"privateLinkScopes/privateEndpointConnectionProxies","locations":["East + US","West Europe","West Central US","West US 2","West US 3","South Central + US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France + Central","Central US","North Central US","West US","Korea Central","East Asia","Japan + East","Canada East","Canada Central","Norway East","Germany West Central","Sweden + Central","Switzerland North","Australia Southeast","Central India","South + India","Japan West","Uk West","South Africa North","Korea South","France South","East + US 2 EUAP","Central US EUAP"],"apiVersions":["2022-04-02-preview"],"capabilities":"None"},{"resourceType":"namespaces","locations":["East + US 2 EUAP","West US 2","East US","West Europe","West Central US","West US + 3","South Central US","East US 2","North Europe","UK South","Southeast Asia","Australia + East","France Central","Central US","North Central US","West US","Korea Central","East + Asia","Japan East","Canada Central","Canada East","Norway East","Germany West + Central","Switzerland North","Sweden Central","Central India","South India","Australia + Southeast","Japan West","Uk West","France South","Korea South","South Africa + North"],"apiVersions":["2021-12-01-preview"],"defaultApiVersion":"2021-12-01-preview","capabilities":"SupportsExtension"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + headers: + cache-control: + - no-cache + content-length: + - '6074' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 18 Oct 2022 19:42:11 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Content-Type: + - application/json + User-Agent: + - OpenAPI-Generator/24.2.0/python + method: GET + uri: https://cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io/version/ + response: + body: + string: "{\n \"major\": \"1\",\n \"minor\": \"23\",\n \"gitVersion\": \"v1.23.12\",\n + \ \"gitCommit\": \"c6939792865ef0f70f92006081690d77411c8ed5\",\n \"gitTreeState\": + \"clean\",\n \"buildDate\": \"2022-09-21T21:46:35Z\",\n \"goVersion\": \"go1.17.13\",\n + \ \"compiler\": \"gc\",\n \"platform\": \"linux/amd64\"\n}" + headers: + audit-id: + - 07e1ab95-c6dd-49f1-9dd0-8811811a8361 + cache-control: + - no-cache, private + content-length: + - '265' + content-type: + - application/json + date: + - Tue, 18 Oct 2022 19:42:12 GMT + x-kubernetes-pf-flowschema-uid: + - 4338b148-01d8-4f61-8b84-7cdc7ec27482 + x-kubernetes-pf-prioritylevel-uid: + - aa4e26b3-f7bb-4992-a47d-272b134f6779 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Content-Type: + - application/json + User-Agent: + - OpenAPI-Generator/24.2.0/python + method: GET + uri: https://cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io/api/v1/nodes + response: + body: + string: 
'{"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"4610"},"items":[{"metadata":{"name":"aks-nodepool1-31947974-vmss000000","uid":"3d2ca61c-34c5-4768-83f9-aa192d32cf2f","resourceVersion":"4087","creationTimestamp":"2022-10-18T19:30:05Z","labels":{"agentpool":"nodepool1","beta.kubernetes.io/arch":"amd64","beta.kubernetes.io/instance-type":"Standard_B4ms","beta.kubernetes.io/os":"linux","failure-domain.beta.kubernetes.io/region":"westeurope","failure-domain.beta.kubernetes.io/zone":"0","kubernetes.azure.com/agentpool":"nodepool1","kubernetes.azure.com/cluster":"MC_akkeshar_cli-test-aks-000001_westeurope","kubernetes.azure.com/kubelet-identity-client-id":"d6c82141-9899-4ce3-943d-1343c5c4d69f","kubernetes.azure.com/mode":"system","kubernetes.azure.com/node-image-version":"AKSUbuntu-1804gen2containerd-2022.10.03","kubernetes.azure.com/os-sku":"Ubuntu","kubernetes.azure.com/role":"agent","kubernetes.azure.com/storageprofile":"managed","kubernetes.azure.com/storagetier":"Premium_LRS","kubernetes.io/arch":"amd64","kubernetes.io/hostname":"aks-nodepool1-31947974-vmss000000","kubernetes.io/os":"linux","kubernetes.io/role":"agent","node-role.kubernetes.io/agent":"","node.kubernetes.io/instance-type":"Standard_B4ms","storageprofile":"managed","storagetier":"Premium_LRS","topology.disk.csi.azure.com/zone":"","topology.kubernetes.io/region":"westeurope","topology.kubernetes.io/zone":"0"},"annotations":{"csi.volume.kubernetes.io/nodeid":"{\"disk.csi.azure.com\":\"aks-nodepool1-31947974-vmss000000\",\"file.csi.azure.com\":\"aks-nodepool1-31947974-vmss000000\"}","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"cloud-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:30:05Z","fieldsType":"FieldsV1","fieldsV1":{"f:spec":{"f:podCIDR":{},"f:podCIDRs":{".":{},"v:\"10.244.0.0/24\"":{}}}}},{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:30:05Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:volumes.kubernetes.io/controller-managed-attach-detach":{}},"f:labels":{".":{},"f:agentpool":{},"f:beta.kubernetes.io/arch":{},"f:beta.kubernetes.io/os":{},"f:kubernetes.azure.com/agentpool":{},"f:kubernetes.azure.com/kubelet-identity-client-id":{},"f:kubernetes.azure.com/mode":{},"f:kubernetes.azure.com/node-image-version":{},"f:kubernetes.io/arch":{},"f:kubernetes.io/hostname":{},"f:kubernetes.io/os":{}}}}},{"manager":"cloud-node-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:30:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{"f:beta.kubernetes.io/instance-type":{},"f:failure-domain.beta.kubernetes.io/region":{},"f:failure-domain.beta.kubernetes.io/zone":{},"f:node.kubernetes.io/instance-type":{},"f:topology.kubernetes.io/region":{},"f:topology.kubernetes.io/zone":{}}},"f:spec":{"f:providerID":{}}}},{"manager":"cloud-node-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:30:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{"k:{\"type\":\"NetworkUnavailable\"}":{".":{},"f:type":{}}}}},"subresource":"status"},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:30:16Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl":{}}}}},{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:30:17Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:an
notations":{"f:csi.volume.kubernetes.io/nodeid":{}},"f:labels":{"f:topology.disk.csi.azure.com/zone":{}}},"f:status":{"f:allocatable":{"f:ephemeral-storage":{}},"f:capacity":{"f:ephemeral-storage":{}},"f:conditions":{"k:{\"type\":\"DiskPressure\"}":{"f:lastHeartbeatTime":{}},"k:{\"type\":\"MemoryPressure\"}":{"f:lastHeartbeatTime":{}},"k:{\"type\":\"PIDPressure\"}":{"f:lastHeartbeatTime":{}},"k:{\"type\":\"Ready\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{}}},"f:images":{}}},"subresource":"status"},{"manager":"cloud-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:30:55Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{"k:{\"type\":\"NetworkUnavailable\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{}}}}},"subresource":"status"},{"manager":"kubectl-label","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:31:02Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{"f:kubernetes.io/role":{},"f:node-role.kubernetes.io/agent":{}}}}},{"manager":"node-problem-detector","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:36:20Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{"k:{\"type\":\"ContainerRuntimeProblem\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FilesystemCorruptionProblem\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FreezeScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FrequentContainerdRestart\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FrequentDockerRestart\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FrequentKubeletRestart\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FrequentUnregisterNetDevice\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"KernelDeadlock\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"KubeletProblem\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"PreemptScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"ReadonlyFilesystem\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"RebootScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"RedeployScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"TerminateScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"VMEventScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"
f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"podCIDR":"10.244.0.0/24","podCIDRs":["10.244.0.0/24"],"providerID":"azure:///subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mc_akkeshar_cli-test-aks-000001_westeurope/providers/Microsoft.Compute/virtualMachineScaleSets/aks-nodepool1-31947974-vmss/virtualMachines/0"},"status":{"capacity":{"cpu":"4","ephemeral-storage":"129886128Ki","hugepages-1Gi":"0","hugepages-2Mi":"0","memory":"16393220Ki","pods":"110"},"allocatable":{"cpu":"3860m","ephemeral-storage":"119703055367","hugepages-1Gi":"0","hugepages-2Mi":"0","memory":"12899332Ki","pods":"110"},"conditions":[{"type":"KernelDeadlock","status":"False","lastHeartbeatTime":"2022-10-18T19:41:20Z","lastTransitionTime":"2022-10-18T19:36:19Z","reason":"KernelHasNoDeadlock","message":"kernel + has no deadlock"},{"type":"FrequentDockerRestart","status":"False","lastHeartbeatTime":"2022-10-18T19:41:20Z","lastTransitionTime":"2022-10-18T19:36:19Z","reason":"NoFrequentDockerRestart","message":"docker + is functioning properly"},{"type":"ContainerRuntimeProblem","status":"False","lastHeartbeatTime":"2022-10-18T19:41:20Z","lastTransitionTime":"2022-10-18T19:36:19Z","reason":"ContainerRuntimeIsUp","message":"container + runtime service is up"},{"type":"PreemptScheduled","status":"False","lastHeartbeatTime":"2022-10-18T19:41:20Z","lastTransitionTime":"2022-10-18T19:36:19Z","reason":"NoPreemptScheduled","message":"VM + has no scheduled Preempt event"},{"type":"RebootScheduled","status":"False","lastHeartbeatTime":"2022-10-18T19:41:20Z","lastTransitionTime":"2022-10-18T19:36:19Z","reason":"NoRebootScheduled","message":"VM + has no scheduled Reboot event"},{"type":"FrequentUnregisterNetDevice","status":"False","lastHeartbeatTime":"2022-10-18T19:41:20Z","lastTransitionTime":"2022-10-18T19:36:19Z","reason":"NoFrequentUnregisterNetDevice","message":"node + is functioning properly"},{"type":"RedeployScheduled","status":"False","lastHeartbeatTime":"2022-10-18T19:41:20Z","lastTransitionTime":"2022-10-18T19:36:19Z","reason":"NoRedeployScheduled","message":"VM + has no scheduled Redeploy event"},{"type":"KubeletProblem","status":"False","lastHeartbeatTime":"2022-10-18T19:41:20Z","lastTransitionTime":"2022-10-18T19:36:19Z","reason":"KubeletIsUp","message":"kubelet + service is up"},{"type":"FreezeScheduled","status":"False","lastHeartbeatTime":"2022-10-18T19:41:20Z","lastTransitionTime":"2022-10-18T19:36:19Z","reason":"NoFreezeScheduled","message":"VM + has no scheduled Freeze event"},{"type":"FrequentKubeletRestart","status":"False","lastHeartbeatTime":"2022-10-18T19:41:20Z","lastTransitionTime":"2022-10-18T19:36:19Z","reason":"NoFrequentKubeletRestart","message":"kubelet + is functioning properly"},{"type":"TerminateScheduled","status":"False","lastHeartbeatTime":"2022-10-18T19:41:20Z","lastTransitionTime":"2022-10-18T19:36:19Z","reason":"NoTerminateScheduled","message":"VM + has no scheduled Terminate event"},{"type":"ReadonlyFilesystem","status":"False","lastHeartbeatTime":"2022-10-18T19:41:20Z","lastTransitionTime":"2022-10-18T19:36:19Z","reason":"FilesystemIsNotReadOnly","message":"Filesystem + is not read-only"},{"type":"FilesystemCorruptionProblem","status":"False","lastHeartbeatTime":"2022-10-18T19:41:20Z","lastTransitionTime":"2022-10-18T19:36:19Z","reason":"FilesystemIsOK","message":"Filesystem + is 
healthy"},{"type":"FrequentContainerdRestart","status":"False","lastHeartbeatTime":"2022-10-18T19:41:20Z","lastTransitionTime":"2022-10-18T19:36:19Z","reason":"NoFrequentContainerdRestart","message":"containerd + is functioning properly"},{"type":"VMEventScheduled","status":"False","lastHeartbeatTime":"2022-10-18T19:41:20Z","lastTransitionTime":"2022-10-18T19:36:19Z","reason":"NoVMEventScheduled","message":"VM + has no scheduled event"},{"type":"NetworkUnavailable","status":"False","lastHeartbeatTime":"2022-10-18T19:30:55Z","lastTransitionTime":"2022-10-18T19:30:55Z","reason":"RouteCreated","message":"RouteController + created a route"},{"type":"MemoryPressure","status":"False","lastHeartbeatTime":"2022-10-18T19:39:47Z","lastTransitionTime":"2022-10-18T19:30:05Z","reason":"KubeletHasSufficientMemory","message":"kubelet + has sufficient memory available"},{"type":"DiskPressure","status":"False","lastHeartbeatTime":"2022-10-18T19:39:47Z","lastTransitionTime":"2022-10-18T19:30:05Z","reason":"KubeletHasNoDiskPressure","message":"kubelet + has no disk pressure"},{"type":"PIDPressure","status":"False","lastHeartbeatTime":"2022-10-18T19:39:47Z","lastTransitionTime":"2022-10-18T19:30:05Z","reason":"KubeletHasSufficientPID","message":"kubelet + has sufficient PID available"},{"type":"Ready","status":"True","lastHeartbeatTime":"2022-10-18T19:39:47Z","lastTransitionTime":"2022-10-18T19:30:16Z","reason":"KubeletReady","message":"kubelet + is posting ready status. AppArmor enabled"}],"addresses":[{"type":"InternalIP","address":"10.224.0.4"},{"type":"Hostname","address":"aks-nodepool1-31947974-vmss000000"}],"daemonEndpoints":{"kubeletEndpoint":{"Port":10250}},"nodeInfo":{"machineID":"5418e6487e514bd4997f80c25ac364d2","systemUUID":"82707798-c896-4e3c-b347-9d883b5897c4","bootID":"2bed120d-372f-4a0c-9cf3-0b0fb7627eff","kernelVersion":"5.4.0-1091-azure","osImage":"Ubuntu + 18.04.6 
LTS","containerRuntimeVersion":"containerd://1.5.11+azure-2","kubeletVersion":"v1.23.12","kubeProxyVersion":"v1.23.12","operatingSystem":"linux","architecture":"amd64"},"images":[{"names":["mcr.microsoft.com/azuremonitor/containerinsights/ciprod:ciprod08102022"],"sizeBytes":397844357},{"names":["mcr.microsoft.com/azuremonitor/containerinsights/ciprod:ciprod06272022-hotfix"],"sizeBytes":357023149},{"names":["mcr.microsoft.com/azuremonitor/containerinsights/ciprod/prometheus-collector/images:5.2.0-main-09-29-2022-ca064de1"],"sizeBytes":315250960},{"names":["mcr.microsoft.com/azuremonitor/containerinsights/ciprod/prometheus-collector/images:5.1.0-main-09-23-2022-df3e2703"],"sizeBytes":315037321},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:0.49.3"],"sizeBytes":287741913},{"names":["mcr.microsoft.com/oss/calico/cni:v3.23.1"],"sizeBytes":263014840},{"names":["mcr.microsoft.com/oss/calico/cni:v3.21.4"],"sizeBytes":236345866},{"names":["mcr.microsoft.com/oss/calico/cni:v3.21.6"],"sizeBytes":227829276},{"names":["mcr.microsoft.com/oss/calico/node:v3.23.1"],"sizeBytes":221560540},{"names":["mcr.microsoft.com/oss/calico/node:v3.21.4"],"sizeBytes":216363503},{"names":["mcr.microsoft.com/oss/calico/node:v3.21.6"],"sizeBytes":215379163},{"names":["mcr.microsoft.com/oss/tigera/operator:v1.23.8"],"sizeBytes":184105789},{"names":["mcr.microsoft.com/oss/cilium/cilium:1.12.2"],"sizeBytes":166611722},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:0.19.0"],"sizeBytes":166352383},{"names":["mcr.microsoft.com/aks/hcp/hcp-tunnel-front:master.220527.2"],"sizeBytes":146994488},{"names":null,"sizeBytes":138243950},{"names":["mcr.microsoft.com/oss/calico/kube-controllers:v3.23.1"],"sizeBytes":136078571},{"names":["mcr.microsoft.com/oss/calico/typha:v3.23.1"],"sizeBytes":131467121},{"names":null,"sizeBytes":129890505},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.23.12-hotfix.20220922.1"],"sizeBytes":128992809},{"names":["mcr.microsoft.com/oss/tigera/operator:v1.24.2"],"sizeBytes":128711964},{"names":["mcr.microsoft.com/oss/calico/typha:v3.21.4"],"sizeBytes":128235133},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.2.2.5"],"sizeBytes":123925992},{"names":null,"sizeBytes":123549904},{"names":["mcr.microsoft.com/oss/calico/kube-controllers:v3.21.6"],"sizeBytes":123549280},{"names":["mcr.microsoft.com/oss/calico/typha:v3.21.6"],"sizeBytes":119713369},{"names":null,"sizeBytes":115909379},{"names":null,"sizeBytes":115897326},{"names":null,"sizeBytes":115677896},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:v1.2.1"],"sizeBytes":107169290},{"names":["mcr.microsoft.com/oss/calico/node:v3.8.9.5"],"sizeBytes":101794833},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.22.0.3"],"sizeBytes":99538753},{"names":["mcr.microsoft.com/aks/acc/sgx-attestation:3.1"],"sizeBytes":98058501},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.23.0"],"sizeBytes":95915873},{"names":["mcr.microsoft.com/oss/kubernetes/exechealthz:1.2_v0.0.5"],"sizeBytes":94348102},{"names":["mcr.microsoft.com/aks/hcp/tunnel-openvpn:master.220527.2"],"sizeBytes":92531564},{"names":["mcr.microsoft.com/containernetworking/azure-npm:v1.4.32"],"sizeBytes":90048618},{"names":["mcr.microsoft.com/containernetworking/azure-npm:v1.4.29"],"sizeBytes":89255513},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.2.2"],"sizeBytes":88551490},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.22.0"],"
sizeBytes":83173887},{"names":["mcr.microsoft.com/aks/command/runtime:master.220211.1"],"sizeBytes":82792811},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.21.0"],"sizeBytes":75345915},{"names":["mcr.microsoft.com/oss/nvidia/k8s-device-plugin:v0.9.0"],"sizeBytes":67291599},{"names":["mcr.microsoft.com/containernetworking/cni-dropgz:v0.0.2"],"sizeBytes":67202663},{"names":["mcr.microsoft.com/oss/kubernetes-csi/secrets-store/driver:v1.2.2.3"],"sizeBytes":64781810},{"names":["mcr.microsoft.com/oss/calico/cni:v3.8.9.3"],"sizeBytes":63581323},{"names":null,"sizeBytes":63271342},{"names":["mcr.microsoft.com/oss/kubernetes-csi/secrets-store/driver:v1.2.2.2"],"sizeBytes":56424516},{"names":["mcr.microsoft.com/oss/calico/kube-controllers:v3.21.4"],"sizeBytes":54638514},{"names":["mcr.microsoft.com/azurearck8s/fluent-bit@sha256:49b451905182cd85d0691d628d92f91b1294719fa30f35f020425b84f9ef3158","mcr.microsoft.com/azurearck8s/fluent-bit:1.8.14"],"sizeBytes":48052121}]}}]} + + ' + headers: + audit-id: + - 68934407-871d-4274-b594-778c3eea281e + cache-control: + - no-cache, private + content-type: + - application/json + date: + - Tue, 18 Oct 2022 19:42:12 GMT + transfer-encoding: + - chunked + x-kubernetes-pf-flowschema-uid: + - 4338b148-01d8-4f61-8b84-7cdc7ec27482 + x-kubernetes-pf-prioritylevel-uid: + - aa4e26b3-f7bb-4992-a47d-272b134f6779 + status: + code: 200 + message: OK +- request: + body: '{"spec": {"resourceAttributes": {"verb": "create", "resource": "clusterrolebindings", + "group": "rbac.authorization.k8s.io"}}}' + headers: + Accept: + - application/json + Content-Type: + - application/json + User-Agent: + - OpenAPI-Generator/24.2.0/python + method: POST + uri: https://cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io/apis/authorization.k8s.io/v1/selfsubjectaccessreviews + response: + body: + string: '{"kind":"SelfSubjectAccessReview","apiVersion":"authorization.k8s.io/v1","metadata":{"creationTimestamp":null,"managedFields":[{"manager":"OpenAPI-Generator","operation":"Update","apiVersion":"authorization.k8s.io/v1","time":"2022-10-18T19:42:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:spec":{"f:resourceAttributes":{".":{},"f:group":{},"f:resource":{},"f:verb":{}}}}}]},"spec":{"resourceAttributes":{"verb":"create","group":"rbac.authorization.k8s.io","resource":"clusterrolebindings"}},"status":{"allowed":true}} + + ' + headers: + audit-id: + - 997c9887-d177-43ce-8950-43e84520580e + cache-control: + - no-cache, private + content-length: + - '516' + content-type: + - application/json + date: + - Tue, 18 Oct 2022 19:42:13 GMT + x-kubernetes-pf-flowschema-uid: + - 4338b148-01d8-4f61-8b84-7cdc7ec27482 + x-kubernetes-pf-prioritylevel-uid: + - aa4e26b3-f7bb-4992-a47d-272b134f6779 + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - connectedk8s connect + Connection: + - keep-alive + ParameterSetName: + - -g -n -l --distribution --infrastructure --distribution-version --tags --kube-config + User-Agent: + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Kubernetes?api-version=2021-04-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Kubernetes","namespace":"Microsoft.Kubernetes","authorizations":[{"applicationId":"64b12d6e-6549-484c-8cc6-6281839ba394","roleDefinitionId":"1d1d44cf-68a1-4def-a2b6-cd7efc3515af"},{"applicationId":"359431ad-ece5-496b-8768-be4bbfd82f36","roleDefinitionId":"1b5c71b7-9814-4b40-b62a-23018af874d8"},{"applicationId":"0000dab9-8b21-4ba2-807f-1743968cef00","roleDefinitionId":"1b5c71b7-9814-4b40-b62a-23018af874d8"},{"applicationId":"8edd93e1-2103-40b4-bd70-6e34e586362d","roleDefinitionId":"eb67887a-31e8-4e4e-bf5b-14ff79351a6f"}],"resourceTypes":[{"resourceType":"connectedClusters","locations":["West + Europe","East US","West Central US","South Central US","Southeast Asia","UK + South","East US 2","West US 2","Australia East","North Europe","France Central","Central + US","West US","North Central US","Korea Central","Japan East","West US 3","East + Asia","Canada Central","East US 2 EUAP","Canada East"],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"SystemAssignedResourceIdentity, + SupportsTags, SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/operationStatuses","locations":["East + US 2 EUAP","West Europe","East US","West Central US","South Central US","Southeast + Asia","UK South","East US 2","West US 2","Australia East","North Europe","France + Central","Central US","West US","North Central US","Korea Central","Japan + East","East Asia","West US 3","Canada East","Canada Central"],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"registeredSubscriptions","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"Operations","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview","2019-11-01-preview","2019-09-01-privatepreview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + headers: + cache-control: + - no-cache + content-length: + - '2416' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 18 Oct 2022 19:42:13 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - connectedk8s connect + Connection: + - keep-alive + ParameterSetName: + - -g -n -l --distribution --infrastructure --distribution-version --tags --kube-config + User-Agent: + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002?api-version=2022-10-01-preview + response: + body: + string: '{"error":{"code":"ResourceNotFound","message":"The Resource 
''Microsoft.Kubernetes/connectedClusters/cc-000002'' + under resource group ''akkeshar'' was not found. For more details please go + to https://aka.ms/ARMResourceNotFoundFix"}}' + headers: + cache-control: + - no-cache + content-length: + - '231' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 18 Oct 2022 19:42:15 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-failure-cause: + - gateway + status: + code: 404 + message: Not Found +- request: + body: null + headers: + Accept: + - application/json + Content-Type: + - application/json + User-Agent: + - OpenAPI-Generator/24.2.0/python + method: GET + uri: https://cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io/api/v1/namespaces + response: + body: + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"4624"},"items":[{"metadata":{"name":"default","uid":"a88babaf-8a46-4d13-a7ea-0b13c4f959c5","resourceVersion":"205","creationTimestamp":"2022-10-18T19:29:04Z","labels":{"kubernetes.io/metadata.name":"default"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:29:04Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"gatekeeper-system","uid":"6007bdf3-5dc1-4e9b-9404-c218ee0b1949","resourceVersion":"3299","creationTimestamp":"2022-10-18T19:38:51Z","labels":{"addonmanager.kubernetes.io/mode":"Reconcile","admission.gatekeeper.sh/ignore":"no-self-managing","control-plane":"controller-manager","gatekeeper.sh/system":"yes","kubernetes.io/metadata.name":"gatekeeper-system"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"v1\",\"kind\":\"Namespace\",\"metadata\":{\"annotations\":{},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"Reconcile\",\"admission.gatekeeper.sh/ignore\":\"no-self-managing\",\"control-plane\":\"controller-manager\",\"gatekeeper.sh/system\":\"yes\"},\"name\":\"gatekeeper-system\"}}\n"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:38:51Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{}},"f:labels":{".":{},"f:addonmanager.kubernetes.io/mode":{},"f:admission.gatekeeper.sh/ignore":{},"f:control-plane":{},"f:gatekeeper.sh/system":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"kube-node-lease","uid":"6c4617c4-8f9f-45ef-82a7-bf190d3dcb13","resourceVersion":"39","creationTimestamp":"2022-10-18T19:29:01Z","labels":{"kubernetes.io/metadata.name":"kube-node-lease"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:29:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"kube-public","uid":"1b6b5b56-14cf-4fbe-abe7-89fd6cf48873","resourceVersion":"22","creationTimestamp":"2022-10-18T19:29:01Z","labels":{"kubernetes.io/metadata.name":"kube-public"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:29:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata"
:{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"kube-system","uid":"2acc8b84-760b-48a4-a41e-e94aa3ad57c1","resourceVersion":"567","creationTimestamp":"2022-10-18T19:29:01Z","labels":{"addonmanager.kubernetes.io/mode":"Reconcile","control-plane":"true","kubernetes.io/cluster-service":"true","kubernetes.io/metadata.name":"kube-system"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"v1\",\"kind\":\"Namespace\",\"metadata\":{\"annotations\":{},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"Reconcile\",\"control-plane\":\"true\",\"kubernetes.io/cluster-service\":\"true\"},\"name\":\"kube-system\"}}\n"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:29:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:29:22Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{}},"f:labels":{"f:addonmanager.kubernetes.io/mode":{},"f:control-plane":{},"f:kubernetes.io/cluster-service":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}}]} + + ' + headers: + audit-id: + - ae3a1563-4c41-456b-858b-887f502195cf + cache-control: + - no-cache, private + content-type: + - application/json + date: + - Tue, 18 Oct 2022 19:42:16 GMT + transfer-encoding: + - chunked + x-kubernetes-pf-flowschema-uid: + - 4338b148-01d8-4f61-8b84-7cdc7ec27482 + x-kubernetes-pf-prioritylevel-uid: + - aa4e26b3-f7bb-4992-a47d-272b134f6779 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - connectedk8s connect + Connection: + - keep-alive + ParameterSetName: + - -g -n -l --distribution --infrastructure --distribution-version --tags --kube-config + User-Agent: + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar?api-version=2021-04-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar","name":"akkeshar","type":"Microsoft.Resources/resourceGroups","location":"eastus","tags":{"Created":"20210721"},"properties":{"provisioningState":"Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '243' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 18 Oct 2022 19:42:16 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - connectedk8s connect + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -g -n -l --distribution --infrastructure --distribution-version --tags --kube-config + User-Agent: + - python/3.7.7 (Windows-10-10.0.22621-SP0) AZURECLI/2.41.0 (MSI) + method: POST + uri: 
https://eastus.dp.kubernetesconfiguration.azure.com/azure-arc-k8sagents/GetLatestHelmPackagePath?api-version=2019-11-01-preview&releaseTrain=stable + response: + body: + string: '{"repositoryPath":"mcr.microsoft.com/azurearck8s/batch1/stable/azure-arc-k8sagents:1.8.14"}' + headers: + api-supported-versions: + - 2019-11-01-Preview + connection: + - close + content-length: + - '91' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 18 Oct 2022 19:42:17 GMT + strict-transport-security: + - max-age=15724800; includeSubDomains + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"tags": {"foo": "doo"}, "location": "eastus", "identity": {"type": "SystemAssigned"}, + "properties": {"agentPublicKeyCertificate": "MIICCgKCAgEAlGPP5Hn/u2zHU7y02UTOBk+PuhSXG8e62N0zfCCQrtNXieB7hDfopMgi9+dyhtRzlcTHnjXCFuPFmW9Tmo2Fo5jE2569dhY9v7GGpoBvErhNjJy6PB/mMHsoZxwEGQiUxn8BuAUwZQ51pHfWp/VC98KONg7J3pYI8q7mMJ3kFtOkMqVvDMt4XLCt4hmwXD/jSgVBSUQVs15zKx8T3lzIEcSWMJnvPYoKR4Ax0YcQ1SAO+UMFDdMj0ou/ttsY+TPWarbIlBqaQ1Cm842X0ez1fhkSrEOncF+t0Kpt2qFY9ugMQdQRM9DSWvaVy6iDReZrR+WYsTyaMnJ9ugR9B5TN4xwLptmeNSv0GxJhWTiStGwDLKRtbZRfkl+ITiCdqY/R3ITPqzpl1SX6Cs0a8hwfmI8iqxYgh32HnsLyw9Tic9JEjpi03ERumPIN1ZHbdkcJwLu4fD3mzc/5ofuuB5tZbj8AVC9lWQtAX1Xp0PZQn8IZQ6wCSiW3jCtqjxpbNyGNDHAY8txspgNj8aJIjoMGumNoTGDuUEI07jEwnan8Oro1nYNnqy3V6pi6hvQAeAK9kgSrxkKaQpIi+8cSVe952khCZgzY02AYvbNJkM8+gWZ9U3pUae0erMQA+pMjfHSmK7+eQxjYhC/eqIX7wEBG9TqAC56GpI5KajmnqPxMOmsCAwEAAQ==", + "distribution": "aks_management", "distributionVersion": "1.0", "infrastructure": + "azure_stack_hci"}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - connectedk8s connect + Connection: + - keep-alive + Content-Length: + - '940' + Content-Type: + - application/json + ParameterSetName: + - -g -n -l --distribution --infrastructure --distribution-version --tags --kube-config + User-Agent: + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002?api-version=2022-10-01-preview + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","name":"cc-000002","type":"microsoft.kubernetes/connectedclusters","location":"eastus","tags":{"foo":"doo"},"systemData":{"createdBy":"akkeshar@microsoft.com","createdByType":"User","createdAt":"2022-10-18T19:42:47.3232672Z","lastModifiedBy":"akkeshar@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-18T19:42:47.3232672Z"},"identity":{"principalId":"5aee9fac-8d11-43ae-87eb-73e6726f340b","tenantId":"72f988bf-86f1-41af-91ab-2d7cd011db47","type":"SystemAssigned"},"properties":{"provisioningState":"Accepted","connectivityStatus":"Connecting","privateLinkState":"Disabled","azureHybridBenefit":"NotApplicable","agentPublicKeyCertificate":"MIICCgKCAgEAlGPP5Hn/u2zHU7y02UTOBk+PuhSXG8e62N0zfCCQrtNXieB7hDfopMgi9+dyhtRzlcTHnjXCFuPFmW9Tmo2Fo5jE2569dhY9v7GGpoBvErhNjJy6PB/mMHsoZxwEGQiUxn8BuAUwZQ51pHfWp/VC98KONg7J3pYI8q7mMJ3kFtOkMqVvDMt4XLCt4hmwXD/jSgVBSUQVs15zKx8T3lzIEcSWMJnvPYoKR4Ax0YcQ1SAO+UMFDdMj0ou/ttsY+TPWarbIlBqaQ1Cm842X0ez1fhkSrEOncF+t0Kpt2qFY9ugMQdQRM9DSWvaVy6iDReZrR+WYsTyaMnJ9ugR9B5TN4xwLptmeNSv0GxJhWTiStGwDLKRtbZRfkl+ITiCdqY/R3ITPqzpl1SX6Cs0a8hwfmI8iqxYgh32HnsLyw9Tic9JEjpi03ERumPIN1ZHbdkcJwLu4fD3mzc/5ofuuB5tZbj8AVC9lWQtAX1Xp0PZQn8IZQ6wCSiW3jCtqjxpbNyGNDHAY8txspgNj8aJIjoMGumNoTGDuUEI07jEwnan8Oro1nYNnqy3V6pi6hvQAeAK9kgSrxkKaQpIi+8cSVe952khCZgzY02AYvbNJkM8+gWZ9U3pUae0erMQA+pMjfHSmK7+eQxjYhC/eqIX7wEBG9TqAC56GpI5KajmnqPxMOmsCAwEAAQ==","distribution":"aks_management","distributionVersion":"1.0","infrastructure":"azure_stack_hci"}}' + headers: + azure-asyncoperation: + - https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/bfd5d174-5209-4770-b6f7-d49f0db03521*CBCF23F2D03CAE8A304520933CBB04564E255DC298B25D06FA216D51D71DCFFB?api-version=2022-10-01-preview + cache-control: + - no-cache + content-length: + - '1610' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 18 Oct 2022 19:42:51 GMT + etag: + - '"1900e410-0000-0100-0000-634f01ba0000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - connectedk8s connect + Connection: + - keep-alive + ParameterSetName: + - -g -n -l --distribution --infrastructure --distribution-version --tags --kube-config + User-Agent: + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/bfd5d174-5209-4770-b6f7-d49f0db03521*CBCF23F2D03CAE8A304520933CBB04564E255DC298B25D06FA216D51D71DCFFB?api-version=2022-10-01-preview + response: + body: + string: 
'{"id":"/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/bfd5d174-5209-4770-b6f7-d49f0db03521*CBCF23F2D03CAE8A304520933CBB04564E255DC298B25D06FA216D51D71DCFFB","name":"bfd5d174-5209-4770-b6f7-d49f0db03521*CBCF23F2D03CAE8A304520933CBB04564E255DC298B25D06FA216D51D71DCFFB","resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","status":"Succeeded","startTime":"2022-10-18T19:42:49.8205312Z","endTime":"2022-10-18T19:42:58.3098361Z","properties":null}' + headers: + cache-control: + - no-cache + content-length: + - '564' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 18 Oct 2022 19:43:22 GMT + etag: + - '"3f00d58c-0000-0100-0000-634f01c20000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - connectedk8s connect + Connection: + - keep-alive + ParameterSetName: + - -g -n -l --distribution --infrastructure --distribution-version --tags --kube-config + User-Agent: + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002?api-version=2022-10-01-preview + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","name":"cc-000002","type":"microsoft.kubernetes/connectedclusters","location":"eastus","tags":{"foo":"doo"},"systemData":{"createdBy":"akkeshar@microsoft.com","createdByType":"User","createdAt":"2022-10-18T19:42:47.3232672Z","lastModifiedBy":"akkeshar@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-18T19:42:47.3232672Z"},"identity":{"principalId":"5aee9fac-8d11-43ae-87eb-73e6726f340b","tenantId":"72f988bf-86f1-41af-91ab-2d7cd011db47","type":"SystemAssigned"},"properties":{"provisioningState":"Succeeded","connectivityStatus":"Connecting","privateLinkState":"Disabled","azureHybridBenefit":"NotApplicable","agentPublicKeyCertificate":"MIICCgKCAgEAlGPP5Hn/u2zHU7y02UTOBk+PuhSXG8e62N0zfCCQrtNXieB7hDfopMgi9+dyhtRzlcTHnjXCFuPFmW9Tmo2Fo5jE2569dhY9v7GGpoBvErhNjJy6PB/mMHsoZxwEGQiUxn8BuAUwZQ51pHfWp/VC98KONg7J3pYI8q7mMJ3kFtOkMqVvDMt4XLCt4hmwXD/jSgVBSUQVs15zKx8T3lzIEcSWMJnvPYoKR4Ax0YcQ1SAO+UMFDdMj0ou/ttsY+TPWarbIlBqaQ1Cm842X0ez1fhkSrEOncF+t0Kpt2qFY9ugMQdQRM9DSWvaVy6iDReZrR+WYsTyaMnJ9ugR9B5TN4xwLptmeNSv0GxJhWTiStGwDLKRtbZRfkl+ITiCdqY/R3ITPqzpl1SX6Cs0a8hwfmI8iqxYgh32HnsLyw9Tic9JEjpi03ERumPIN1ZHbdkcJwLu4fD3mzc/5ofuuB5tZbj8AVC9lWQtAX1Xp0PZQn8IZQ6wCSiW3jCtqjxpbNyGNDHAY8txspgNj8aJIjoMGumNoTGDuUEI07jEwnan8Oro1nYNnqy3V6pi6hvQAeAK9kgSrxkKaQpIi+8cSVe952khCZgzY02AYvbNJkM8+gWZ9U3pUae0erMQA+pMjfHSmK7+eQxjYhC/eqIX7wEBG9TqAC56GpI5KajmnqPxMOmsCAwEAAQ==","distribution":"AKS_Management","distributionVersion":"1.0","infrastructure":"azure_stack_hci"}}' + headers: + cache-control: + - no-cache + content-length: + - '1611' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 18 Oct 2022 19:43:22 GMT + etag: + - '"19000611-0000-0100-0000-634f01c20000"' + expires: + - '-1' + pragma: + - no-cache + 
strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - connectedk8s connect + Connection: + - keep-alive + ParameterSetName: + - -g -n -l --distribution --infrastructure --distribution-version --tags --kube-config + User-Agent: + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ExtendedLocation?api-version=2021-04-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ExtendedLocation","namespace":"Microsoft.ExtendedLocation","authorizations":[{"applicationId":"bc313c14-388c-4e7d-a58e-70017303ee3b","roleDefinitionId":"a775b938-2819-4dd0-8067-01f6e3b06392"},{"applicationId":"319f651f-7ddb-4fc6-9857-7aef9250bd05","roleDefinitionId":"0981f4e0-04a7-4e31-bd2b-b2ac2fc6ba4e"}],"resourceTypes":[{"resourceType":"locations","locations":[],"apiVersions":["2021-03-15-preview","2020-07-15-privatepreview"],"capabilities":"None"},{"resourceType":"customLocations","locations":["East + US","West Europe","North Europe","France Central","Southeast Asia","Australia + East","East US 2","West US 2","UK South","Central US","West Central US","West + US","North Central US","South Central US","Korea Central","Japan East","East + Asia","West US 3","Canada Central","East US 2 EUAP"],"apiVersions":["2021-08-31-preview","2021-08-15","2021-03-15-preview","2020-07-15-privatepreview"],"defaultApiVersion":"2021-08-15","capabilities":"SystemAssignedResourceIdentity, + SupportsTags, SupportsLocation"},{"resourceType":"customLocations/enabledResourceTypes","locations":["East + US","West Europe","North Europe","France Central","Southeast Asia","Australia + East","East US 2","West US 2","UK South","Central US","West Central US","West + US","North Central US","South Central US","Korea Central","Japan East","East + Asia","West US 3","Canada Central","East US 2 EUAP"],"apiVersions":["2021-08-31-preview","2021-08-15","2021-03-15-preview","2020-07-15-privatepreview"],"capabilities":"None"},{"resourceType":"customLocations/resourceSyncRules","locations":["East + US","West Europe","North Europe","France Central","Southeast Asia","Australia + East","East US 2","West US 2","UK South","Central US","West Central US","West + US","North Central US","South Central US","Korea Central","Japan East","East + Asia","West US 3","Canada Central","East US 2 EUAP"],"apiVersions":["2021-08-31-preview"],"defaultApiVersion":"2021-08-31-preview","capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"locations/operationsstatus","locations":["East + US","West Europe","North Europe","France Central","Southeast Asia","Australia + East","East US 2","West US 2","UK South","Central US","West Central US","West + US","North Central US","South Central US","Korea Central","Japan East","East + Asia","West US 3","Canada Central","East US 2 Euap"],"apiVersions":["2021-03-15-preview","2020-07-15-privatepreview"],"capabilities":"None"},{"resourceType":"locations/operationresults","locations":["East + US","West Europe","North Europe","France 
Central","Southeast Asia","Australia + East","East US 2","West US 2","UK South","Central US","West Central US","West + US","North Central US","South Central US","Korea Central","Japan East","East + Asia","West US 3","Canada Central","East US 2 Euap"],"apiVersions":["2021-03-15-preview","2020-07-15-privatepreview"],"capabilities":"None"},{"resourceType":"operations","locations":[],"apiVersions":["2021-08-31-preview","2021-08-15","2021-03-15-preview","2020-07-15-privatepreview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + headers: + cache-control: + - no-cache + content-length: + - '3236' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 18 Oct 2022 19:43:23 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - connectedk8s connect + Connection: + - keep-alive + ParameterSetName: + - -g -n -l --distribution --infrastructure --distribution-version --tags --kube-config + User-Agent: + - python/3.7.7 (Windows-10-10.0.22621-SP0) msrest/0.7.1 msrest_azure/0.6.4 azure-graphrbac/0.60.0 + Azure-SDK-For-Python AZURECLI/2.41.0 (MSI) + accept-language: + - en-US + method: GET + uri: https://graph.windows.net/00000000-0000-0000-0000-000000000000/servicePrincipals?$filter=displayName%20eq%20%27Custom%20Locations%20RP%27&api-version=1.6 + response: + body: + string: '{"odata.metadata":"https://graph.windows.net/00000000-0000-0000-0000-000000000000/$metadata#directoryObjects","value":[{"odata.type":"Microsoft.DirectoryServices.ServicePrincipal","objectType":"ServicePrincipal","objectId":"51dfe1e8-70c6-4de5-a08e-e18aff23d815","deletionTimestamp":null,"accountEnabled":true,"addIns":[],"alternativeNames":[],"appDisplayName":"Custom + Locations RP","appId":"bc313c14-388c-4e7d-a58e-70017303ee3b","applicationTemplateId":null,"appOwnerTenantId":"f8cdef31-a31e-4b4a-93e4-5f571e91255a","appRoleAssignmentRequired":false,"appRoles":[],"displayName":"Custom + Locations RP","errorUrl":null,"homepage":null,"informationalUrls":{"termsOfService":null,"support":null,"privacy":null,"marketing":null},"keyCredentials":[],"logoutUrl":null,"notificationEmailAddresses":[],"oauth2Permissions":[],"passwordCredentials":[],"preferredSingleSignOnMode":null,"preferredTokenSigningKeyEndDateTime":null,"preferredTokenSigningKeyThumbprint":null,"publisherName":"Microsoft + Services","replyUrls":[],"samlMetadataUrl":null,"samlSingleSignOnSettings":null,"servicePrincipalNames":["bc313c14-388c-4e7d-a58e-70017303ee3b"],"servicePrincipalType":"Application","signInAudience":"AzureADMultipleOrgs","tags":[],"tokenEncryptionKeyId":null}]}' + headers: + access-control-allow-origin: + - '*' + cache-control: + - no-cache + content-length: + - '1246' + content-type: + - application/json; odata=minimalmetadata; streaming=true; charset=utf-8 + dataserviceversion: + - 3.0; + date: + - Tue, 18 Oct 2022 19:43:23 GMT + duration: + - '716480' + expires: + - '-1' + ocp-aad-diagnostics-server-name: + - N+bNecr59Jz6oaX37slXejtcg9Gha9KWzja/9Zm7oyc= + ocp-aad-session-key: + - NEd2gx1GqCpeB9Bdnt-ruID5gIvXFrRyQODQdvgdxQW7f6V7lx_BcFJ-Xz8BZIW4_MOrqdJptzyYEsFGoQM8PB5jSx4GhhznEUbbDpI8ChUxvDzITACookf33sb4EvFo.l-TMLXhLmY67wsCljrTbg7GDFWK8u3145N_qATTjIqI + pragma: + - no-cache + request-id: + - 
e66c8dfe-aed6-4048-b8c6-17bebd6a146a + strict-transport-security: + - max-age=31536000; includeSubDomains + x-aspnet-version: + - 4.0.30319 + x-ms-dirapi-data-contract-version: + - '1.6' + x-ms-resource-unit: + - '1' + x-powered-by: + - ASP.NET status: code: 200 message: OK - request: - body: null + body: '{"properties": {"azureHybridBenefit": "True"}}' headers: Accept: - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - connectedk8s update + Connection: + - keep-alive + Content-Length: + - '46' Content-Type: - application/json + ParameterSetName: + - -g -n --azure-hybrid-benefit --kube-config --yes User-Agent: - - OpenAPI-Generator/11.0.0/python - method: GET - uri: https://cli-test-a-akkeshar-1bfbb5-7c7ab37f.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) + method: PATCH + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002?api-version=2022-10-01-preview response: body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"3158"},"items":[{"metadata":{"name":"azure-arc","uid":"f21fce18-a772-4bd9-a31c-1fd4b0126469","resourceVersion":"3139","creationTimestamp":"2022-06-07T17:51:38Z","deletionTimestamp":"2022-06-07T17:55:09Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:51:38Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:55:19Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"ResourcesDiscovered","message":"All - resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"ParsedGroupVersions","message":"All - legacy kube types successfully 
parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"ContentDeleted","message":"All - content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"SomeResourcesRemain","message":"Some - resources are remaining: pods. has 10 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"ContentHasNoFinalizers","message":"All - content-preserving finalizers finished"}]}}]} - - ' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","name":"cc-000002","type":"microsoft.kubernetes/connectedclusters","location":"eastus","tags":{"foo":"doo"},"systemData":{"createdBy":"akkeshar@microsoft.com","createdByType":"User","createdAt":"2022-10-18T19:42:47.3232672Z","lastModifiedBy":"akkeshar@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-18T19:48:22.7749265Z"},"identity":{"principalId":"5aee9fac-8d11-43ae-87eb-73e6726f340b","tenantId":"72f988bf-86f1-41af-91ab-2d7cd011db47","type":"SystemAssigned"},"properties":{"connectivityStatus":"Connected","privateLinkState":"Disabled","azureHybridBenefit":"True","agentPublicKeyCertificate":"MIICCgKCAgEAlGPP5Hn/u2zHU7y02UTOBk+PuhSXG8e62N0zfCCQrtNXieB7hDfopMgi9+dyhtRzlcTHnjXCFuPFmW9Tmo2Fo5jE2569dhY9v7GGpoBvErhNjJy6PB/mMHsoZxwEGQiUxn8BuAUwZQ51pHfWp/VC98KONg7J3pYI8q7mMJ3kFtOkMqVvDMt4XLCt4hmwXD/jSgVBSUQVs15zKx8T3lzIEcSWMJnvPYoKR4Ax0YcQ1SAO+UMFDdMj0ou/ttsY+TPWarbIlBqaQ1Cm842X0ez1fhkSrEOncF+t0Kpt2qFY9ugMQdQRM9DSWvaVy6iDReZrR+WYsTyaMnJ9ugR9B5TN4xwLptmeNSv0GxJhWTiStGwDLKRtbZRfkl+ITiCdqY/R3ITPqzpl1SX6Cs0a8hwfmI8iqxYgh32HnsLyw9Tic9JEjpi03ERumPIN1ZHbdkcJwLu4fD3mzc/5ofuuB5tZbj8AVC9lWQtAX1Xp0PZQn8IZQ6wCSiW3jCtqjxpbNyGNDHAY8txspgNj8aJIjoMGumNoTGDuUEI07jEwnan8Oro1nYNnqy3V6pi6hvQAeAK9kgSrxkKaQpIi+8cSVe952khCZgzY02AYvbNJkM8+gWZ9U3pUae0erMQA+pMjfHSmK7+eQxjYhC/eqIX7wEBG9TqAC56GpI5KajmnqPxMOmsCAwEAAQ==","kubernetesVersion":"1.23.12","totalNodeCount":1,"totalCoreCount":4,"agentVersion":"1.8.14","distribution":"AKS_Management","distributionVersion":"1.0","infrastructure":"azure_stack_hci","lastConnectivityTime":"2022-10-18T19:43:51.362Z","provisioningState":"Succeeded"}}' headers: - audit-id: - - 2c158f3f-6fcf-4637-bff3-9c78c373ac64 cache-control: - - no-cache, private + - no-cache + content-length: + - '1743' content-type: - - application/json + - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 17:55:21 GMT + - Tue, 18 Oct 2022 19:48:23 GMT + etag: + - '"19000616-0000-0100-0000-634f03070000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains transfer-encoding: - chunked - x-kubernetes-pf-flowschema-uid: - - 3b9ec461-e128-4d99-abcc-65ad56c2f58e - x-kubernetes-pf-prioritylevel-uid: - - 93094910-4d9a-46c6-b59b-961811b7418b + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + x-ms-ratelimit-remaining-subscription-writes: + - '1199' status: code: 200 message: OK @@ -1782,34 +2898,30 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://cli-test-a-akkeshar-1bfbb5-7c7ab37f.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: 
https://cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io/version/ response: body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"3178"},"items":[{"metadata":{"name":"azure-arc","uid":"f21fce18-a772-4bd9-a31c-1fd4b0126469","resourceVersion":"3171","creationTimestamp":"2022-06-07T17:51:38Z","deletionTimestamp":"2022-06-07T17:55:09Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:51:38Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:55:19Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"ResourcesDiscovered","message":"All - resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"ParsedGroupVersions","message":"All - legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"ContentDeleted","message":"All - content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"SomeResourcesRemain","message":"Some - resources are remaining: pods. 
has 7 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"ContentHasNoFinalizers","message":"All - content-preserving finalizers finished"}]}}]} - - ' + string: "{\n \"major\": \"1\",\n \"minor\": \"23\",\n \"gitVersion\": \"v1.23.12\",\n + \ \"gitCommit\": \"c6939792865ef0f70f92006081690d77411c8ed5\",\n \"gitTreeState\": + \"clean\",\n \"buildDate\": \"2022-09-21T21:46:35Z\",\n \"goVersion\": \"go1.17.13\",\n + \ \"compiler\": \"gc\",\n \"platform\": \"linux/amd64\"\n}" headers: audit-id: - - 0d158e11-9cff-4441-afc5-8d2e0fd37530 + - 31c61713-c686-42d6-b26f-ae042770a580 cache-control: - no-cache, private + content-length: + - '265' content-type: - application/json date: - - Tue, 07 Jun 2022 17:55:26 GMT - transfer-encoding: - - chunked + - Tue, 18 Oct 2022 19:48:26 GMT x-kubernetes-pf-flowschema-uid: - - 3b9ec461-e128-4d99-abcc-65ad56c2f58e + - 4338b148-01d8-4f61-8b84-7cdc7ec27482 x-kubernetes-pf-prioritylevel-uid: - - 93094910-4d9a-46c6-b59b-961811b7418b + - aa4e26b3-f7bb-4992-a47d-272b134f6779 status: code: 200 message: OK @@ -1821,34 +2933,29 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://cli-test-a-akkeshar-1bfbb5-7c7ab37f.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: https://cli-test-a-akkeshar-1bfbb5-d5fa5d83.hcp.westeurope.azmk8s.io/api/v1/namespaces/azure-arc/configmaps/azure-clusterconfig response: body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"3198"},"items":[{"metadata":{"name":"azure-arc","uid":"f21fce18-a772-4bd9-a31c-1fd4b0126469","resourceVersion":"3171","creationTimestamp":"2022-06-07T17:51:38Z","deletionTimestamp":"2022-06-07T17:55:09Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:51:38Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:55:19Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscover
yFailure","status":"False","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"ResourcesDiscovered","message":"All - resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"ParsedGroupVersions","message":"All - legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"ContentDeleted","message":"All - content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"SomeResourcesRemain","message":"Some - resources are remaining: pods. has 7 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"ContentHasNoFinalizers","message":"All - content-preserving finalizers finished"}]}}]} + string: '{"kind":"ConfigMap","apiVersion":"v1","metadata":{"name":"azure-clusterconfig","namespace":"azure-arc","uid":"bd2398d1-fdb0-4ece-8137-e9b5327e4d59","resourceVersion":"4921","creationTimestamp":"2022-10-18T19:43:35Z","labels":{"app.kubernetes.io/managed-by":"Helm"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:43:35Z","fieldsType":"FieldsV1","fieldsV1":{"f:data":{".":{},"f:ARC_AGENT_HELM_CHART_NAME":{},"f:ARC_AGENT_RELEASE_TRAIN":{},"f:AZURE_ARC_AGENT_VERSION":{},"f:AZURE_ARC_AUTOUPDATE":{},"f:AZURE_ARC_HELM_NAMESPACE":{},"f:AZURE_ARC_RELEASE_NAME":{},"f:AZURE_ENVIRONMENT":{},"f:AZURE_REGION":{},"f:AZURE_RESOURCE_GROUP":{},"f:AZURE_RESOURCE_MANAGER_ENDPOINT":{},"f:AZURE_RESOURCE_NAME":{},"f:AZURE_SUBSCRIPTION_ID":{},"f:AZURE_TENANT_ID":{},"f:CLUSTER_CONNECT_AGENT_ENABLED":{},"f:CLUSTER_TYPE":{},"f:CUSTOM_IDENTITY_PROVIDER_ENABLED":{},"f:DEBUG_LOGGING":{},"f:EXTENSION_OPERATOR_ENABLED":{},"f:FLUX_CLIENT_DEFAULT_LOCATION":{},"f:FLUX_UPSTREAM_SERVICE_ENABLED":{},"f:GITOPS_ENABLED":{},"f:GUARD_PKI_HOSTPATH":{},"f:HELM_AUTO_UPDATE_CHECK_FREQUENCY_IN_MINUTES":{},"f:IS_CLIENT_SECRET_A_TOKEN":{},"f:KUBERNETES_DISTRO":{},"f:KUBERNETES_INFRA":{},"f:MANAGED_IDENTITY_AUTH":{},"f:MAX_ENTRIES_PER_STORE":{},"f:MAX_STORES":{},"f:MSI_ADAPTER_ARTIFACT_PATH":{},"f:NO_AUTH_HEADER_DATA_PLANE":{},"f:ONBOARDING_SECRET_NAME":{},"f:ONBOARDING_SECRET_NAMESPACE":{},"f:RESOURCE_SYNC_ENABLE_CHUNKED_SYNC":{},"f:RESOURCE_SYNC_LIST_CHUNK_SIZE":{},"f:RP_NAMESPACE":{},"f:TAGS":{}},"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:app.kubernetes.io/managed-by":{}}}}}]},"data":{"ARC_AGENT_HELM_CHART_NAME":"azure-arc-k8sagents","ARC_AGENT_RELEASE_TRAIN":"stable","AZURE_ARC_AGENT_VERSION":"1.8.14","AZURE_ARC_AUTOUPDATE":"true","AZURE_ARC_HELM_NAMESPACE":"default","AZURE_ARC_RELEASE_NAME":"azure-arc","AZURE_ENVIRONMENT":"AZUREPUBLICCLOUD","AZURE_REGION":"eastus","AZURE_RESOURCE_GROUP":"akkeshar","AZURE_RESOURCE_MANAGER_ENDPOINT":"","AZURE_RESOURCE_NAME":"cc-000002","AZURE_SUBSCRIPTION_ID":"1bfbb5d0-917e-4346-9026-1d3b344417f5","AZURE_TENANT_ID":"72f988bf-86f1-41af-91ab-2d7cd011db47","CLUSTER_CONNECT_AGENT_ENABLED":"true","CLUSTER_TYPE":"ConnectedClusters","CUSTOM_IDENTITY_PROVIDER_ENABLED":"false","DEBUG_LOGGING":"false","EXTENSION_OPERATOR_ENABLED":"true","FLUX_CLIENT_DEFAULT_LOCATION":"mcr.microsoft.com/azurearck8s/arc-preview/fluxctl:0.2.0",
"FLUX_UPSTREAM_SERVICE_ENABLED":"true","GITOPS_ENABLED":"true","GUARD_PKI_HOSTPATH":"","HELM_AUTO_UPDATE_CHECK_FREQUENCY_IN_MINUTES":"60","IS_CLIENT_SECRET_A_TOKEN":"false","KUBERNETES_DISTRO":"aks_management","KUBERNETES_INFRA":"azure_stack_hci","MANAGED_IDENTITY_AUTH":"true","MAX_ENTRIES_PER_STORE":"680","MAX_STORES":"30","MSI_ADAPTER_ARTIFACT_PATH":"mcr.microsoft.com/azurearck8s/msi-adapter:1.0.2","NO_AUTH_HEADER_DATA_PLANE":"false","ONBOARDING_SECRET_NAME":"azure-arc-connect-privatekey","ONBOARDING_SECRET_NAMESPACE":"azure-arc","RESOURCE_SYNC_ENABLE_CHUNKED_SYNC":"false","RESOURCE_SYNC_LIST_CHUNK_SIZE":"200","RP_NAMESPACE":"Microsoft.Kubernetes","TAGS":"map[]"}} ' headers: audit-id: - - d4a45e7b-f0ee-47d8-a8eb-a9426bc91314 + - 87358c16-dd02-4d6f-9199-fcdfc5f29c65 cache-control: - no-cache, private content-type: - application/json date: - - Tue, 07 Jun 2022 17:55:32 GMT + - Tue, 18 Oct 2022 19:48:33 GMT transfer-encoding: - chunked x-kubernetes-pf-flowschema-uid: - - 3b9ec461-e128-4d99-abcc-65ad56c2f58e + - 4338b148-01d8-4f61-8b84-7cdc7ec27482 x-kubernetes-pf-prioritylevel-uid: - - 93094910-4d9a-46c6-b59b-961811b7418b + - aa4e26b3-f7bb-4992-a47d-272b134f6779 status: code: 200 message: OK @@ -1857,37 +2964,47 @@ interactions: headers: Accept: - application/json - Content-Type: - - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - connectedk8s update + Connection: + - keep-alive + ParameterSetName: + - -g -n --azure-hybrid-benefit --kube-config --yes User-Agent: - - OpenAPI-Generator/11.0.0/python + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://cli-test-a-akkeshar-1bfbb5-7c7ab37f.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002?api-version=2022-10-01-preview response: body: - string: 
'{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"3214"},"items":[{"metadata":{"name":"azure-arc","uid":"f21fce18-a772-4bd9-a31c-1fd4b0126469","resourceVersion":"3171","creationTimestamp":"2022-06-07T17:51:38Z","deletionTimestamp":"2022-06-07T17:55:09Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:51:38Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:55:19Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"ResourcesDiscovered","message":"All - resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"ParsedGroupVersions","message":"All - legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"ContentDeleted","message":"All - content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"SomeResourcesRemain","message":"Some - resources are remaining: pods. 
has 7 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"ContentHasNoFinalizers","message":"All - content-preserving finalizers finished"}]}}]} - - ' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","name":"cc-000002","type":"microsoft.kubernetes/connectedclusters","location":"eastus","tags":{"foo":"doo"},"systemData":{"createdBy":"akkeshar@microsoft.com","createdByType":"User","createdAt":"2022-10-18T19:42:47.3232672Z","lastModifiedBy":"akkeshar@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-18T19:48:22.7749265Z"},"identity":{"principalId":"5aee9fac-8d11-43ae-87eb-73e6726f340b","tenantId":"72f988bf-86f1-41af-91ab-2d7cd011db47","type":"SystemAssigned"},"properties":{"connectivityStatus":"Connected","privateLinkState":"Disabled","azureHybridBenefit":"True","agentPublicKeyCertificate":"MIICCgKCAgEAlGPP5Hn/u2zHU7y02UTOBk+PuhSXG8e62N0zfCCQrtNXieB7hDfopMgi9+dyhtRzlcTHnjXCFuPFmW9Tmo2Fo5jE2569dhY9v7GGpoBvErhNjJy6PB/mMHsoZxwEGQiUxn8BuAUwZQ51pHfWp/VC98KONg7J3pYI8q7mMJ3kFtOkMqVvDMt4XLCt4hmwXD/jSgVBSUQVs15zKx8T3lzIEcSWMJnvPYoKR4Ax0YcQ1SAO+UMFDdMj0ou/ttsY+TPWarbIlBqaQ1Cm842X0ez1fhkSrEOncF+t0Kpt2qFY9ugMQdQRM9DSWvaVy6iDReZrR+WYsTyaMnJ9ugR9B5TN4xwLptmeNSv0GxJhWTiStGwDLKRtbZRfkl+ITiCdqY/R3ITPqzpl1SX6Cs0a8hwfmI8iqxYgh32HnsLyw9Tic9JEjpi03ERumPIN1ZHbdkcJwLu4fD3mzc/5ofuuB5tZbj8AVC9lWQtAX1Xp0PZQn8IZQ6wCSiW3jCtqjxpbNyGNDHAY8txspgNj8aJIjoMGumNoTGDuUEI07jEwnan8Oro1nYNnqy3V6pi6hvQAeAK9kgSrxkKaQpIi+8cSVe952khCZgzY02AYvbNJkM8+gWZ9U3pUae0erMQA+pMjfHSmK7+eQxjYhC/eqIX7wEBG9TqAC56GpI5KajmnqPxMOmsCAwEAAQ==","kubernetesVersion":"1.23.12","totalNodeCount":1,"totalCoreCount":4,"agentVersion":"1.8.14","distribution":"AKS_Management","distributionVersion":"1.0","infrastructure":"azure_stack_hci","lastConnectivityTime":"2022-10-18T19:43:51.362Z","provisioningState":"Succeeded"}}' headers: - audit-id: - - 529af010-c6c2-44de-8d45-dcd802c40542 cache-control: - - no-cache, private + - no-cache + content-length: + - '1743' content-type: - - application/json + - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 17:55:37 GMT + - Tue, 18 Oct 2022 19:48:33 GMT + etag: + - '"19000616-0000-0100-0000-634f03070000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains transfer-encoding: - chunked - x-kubernetes-pf-flowschema-uid: - - 3b9ec461-e128-4d99-abcc-65ad56c2f58e - x-kubernetes-pf-prioritylevel-uid: - - 93094910-4d9a-46c6-b59b-961811b7418b + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' status: code: 200 message: OK @@ -1896,37 +3013,47 @@ interactions: headers: Accept: - application/json - Content-Type: - - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - connectedk8s update + Connection: + - keep-alive + ParameterSetName: + - -g -n --azure-hybrid-benefit --kube-config --yes User-Agent: - - OpenAPI-Generator/11.0.0/python + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://cli-test-a-akkeshar-1bfbb5-7c7ab37f.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002?api-version=2022-10-01-preview response: 
body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"3254"},"items":[{"metadata":{"name":"azure-arc","uid":"f21fce18-a772-4bd9-a31c-1fd4b0126469","resourceVersion":"3171","creationTimestamp":"2022-06-07T17:51:38Z","deletionTimestamp":"2022-06-07T17:55:09Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:51:38Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-06-07T17:55:19Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"ResourcesDiscovered","message":"All - resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"ParsedGroupVersions","message":"All - legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"ContentDeleted","message":"All - content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"SomeResourcesRemain","message":"Some - resources are remaining: pods. 
has 7 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-06-07T17:55:19Z","reason":"ContentHasNoFinalizers","message":"All - content-preserving finalizers finished"}]}}]} - - ' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","name":"cc-000002","type":"microsoft.kubernetes/connectedclusters","location":"eastus","tags":{"foo":"doo"},"systemData":{"createdBy":"akkeshar@microsoft.com","createdByType":"User","createdAt":"2022-10-18T19:42:47.3232672Z","lastModifiedBy":"akkeshar@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-18T19:48:22.7749265Z"},"identity":{"principalId":"5aee9fac-8d11-43ae-87eb-73e6726f340b","tenantId":"72f988bf-86f1-41af-91ab-2d7cd011db47","type":"SystemAssigned"},"properties":{"connectivityStatus":"Connected","privateLinkState":"Disabled","azureHybridBenefit":"True","agentPublicKeyCertificate":"MIICCgKCAgEAlGPP5Hn/u2zHU7y02UTOBk+PuhSXG8e62N0zfCCQrtNXieB7hDfopMgi9+dyhtRzlcTHnjXCFuPFmW9Tmo2Fo5jE2569dhY9v7GGpoBvErhNjJy6PB/mMHsoZxwEGQiUxn8BuAUwZQ51pHfWp/VC98KONg7J3pYI8q7mMJ3kFtOkMqVvDMt4XLCt4hmwXD/jSgVBSUQVs15zKx8T3lzIEcSWMJnvPYoKR4Ax0YcQ1SAO+UMFDdMj0ou/ttsY+TPWarbIlBqaQ1Cm842X0ez1fhkSrEOncF+t0Kpt2qFY9ugMQdQRM9DSWvaVy6iDReZrR+WYsTyaMnJ9ugR9B5TN4xwLptmeNSv0GxJhWTiStGwDLKRtbZRfkl+ITiCdqY/R3ITPqzpl1SX6Cs0a8hwfmI8iqxYgh32HnsLyw9Tic9JEjpi03ERumPIN1ZHbdkcJwLu4fD3mzc/5ofuuB5tZbj8AVC9lWQtAX1Xp0PZQn8IZQ6wCSiW3jCtqjxpbNyGNDHAY8txspgNj8aJIjoMGumNoTGDuUEI07jEwnan8Oro1nYNnqy3V6pi6hvQAeAK9kgSrxkKaQpIi+8cSVe952khCZgzY02AYvbNJkM8+gWZ9U3pUae0erMQA+pMjfHSmK7+eQxjYhC/eqIX7wEBG9TqAC56GpI5KajmnqPxMOmsCAwEAAQ==","kubernetesVersion":"1.23.12","totalNodeCount":1,"totalCoreCount":4,"agentVersion":"1.8.14","distribution":"AKS_Management","distributionVersion":"1.0","infrastructure":"azure_stack_hci","lastConnectivityTime":"2022-10-18T19:43:51.362Z","provisioningState":"Succeeded"}}' headers: - audit-id: - - 5403cc45-3480-48df-af70-addd983e38c4 cache-control: - - no-cache, private + - no-cache + content-length: + - '1743' content-type: - - application/json + - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 17:55:42 GMT + - Tue, 18 Oct 2022 19:48:35 GMT + etag: + - '"19000616-0000-0100-0000-634f03070000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains transfer-encoding: - chunked - x-kubernetes-pf-flowschema-uid: - - 3b9ec461-e128-4d99-abcc-65ad56c2f58e - x-kubernetes-pf-prioritylevel-uid: - - 93094910-4d9a-46c6-b59b-961811b7418b + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' status: code: 200 message: OK @@ -1934,33 +3061,39 @@ interactions: body: null headers: Accept: - - application/json - Content-Type: - - application/json + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - connectedk8s update + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -g -n --azure-hybrid-benefit --kube-config --yes User-Agent: - - OpenAPI-Generator/11.0.0/python - method: GET - uri: https://cli-test-a-akkeshar-1bfbb5-7c7ab37f.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + - python/3.7.7 (Windows-10-10.0.22621-SP0) AZURECLI/2.41.0 (MSI) + method: POST + uri: https://eastus.dp.kubernetesconfiguration.azure.com/azure-arc-k8sagents/GetLatestHelmPackagePath?api-version=2019-11-01-preview&releaseTrain=stable response: body: - string: 
'{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"3275"},"items":[]} - - ' + string: '{"repositoryPath":"mcr.microsoft.com/azurearck8s/batch1/stable/azure-arc-k8sagents:1.8.14"}' headers: - audit-id: - - 460bebce-b8e1-4331-b1d9-b297f1930838 - cache-control: - - no-cache, private + api-supported-versions: + - 2019-11-01-Preview + connection: + - close content-length: - - '92' + - '91' content-type: - - application/json + - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 17:55:47 GMT - x-kubernetes-pf-flowschema-uid: - - 3b9ec461-e128-4d99-abcc-65ad56c2f58e - x-kubernetes-pf-prioritylevel-uid: - - 93094910-4d9a-46c6-b59b-961811b7418b + - Tue, 18 Oct 2022 19:48:37 GMT + strict-transport-security: + - max-age=15724800; includeSubDomains + x-content-type-options: + - nosniff status: code: 200 message: OK @@ -1980,26 +3113,26 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.ContainerService/managedClusters/cli-test-aks-000001?api-version=2022-04-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.ContainerService/managedClusters/cli-test-aks-000001?api-version=2022-07-01 response: body: string: '' headers: azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/7c71969a-961b-46f4-ba67-f234c9ddb743?api-version=2017-08-31 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/737e1278-64a1-42be-ac82-faead7ab039c?api-version=2017-08-31 cache-control: - no-cache content-length: - '0' date: - - Tue, 07 Jun 2022 17:55:49 GMT + - Tue, 18 Oct 2022 19:49:16 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operationresults/7c71969a-961b-46f4-ba67-f234c9ddb743?api-version=2017-08-31 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operationresults/737e1278-64a1-42be-ac82-faead7ab039c?api-version=2017-08-31 pragma: - no-cache server: @@ -2027,14 +3160,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/7c71969a-961b-46f4-ba67-f234c9ddb743?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/737e1278-64a1-42be-ac82-faead7ab039c?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"9a96717c-1b96-f446-ba67-f234c9ddb743\",\n \"status\": - \"InProgress\",\n 
\"startTime\": \"2022-06-07T17:55:50.4466666Z\"\n }" + string: "{\n \"name\": \"78127e73-a164-be42-ac82-faead7ab039c\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:49:16.6089821Z\"\n }" headers: cache-control: - no-cache @@ -2043,7 +3176,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 17:56:20 GMT + - Tue, 18 Oct 2022 19:49:46 GMT expires: - '-1' pragma: @@ -2075,14 +3208,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/7c71969a-961b-46f4-ba67-f234c9ddb743?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/737e1278-64a1-42be-ac82-faead7ab039c?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"9a96717c-1b96-f446-ba67-f234c9ddb743\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-06-07T17:55:50.4466666Z\"\n }" + string: "{\n \"name\": \"78127e73-a164-be42-ac82-faead7ab039c\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:49:16.6089821Z\"\n }" headers: cache-control: - no-cache @@ -2091,7 +3224,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 17:56:51 GMT + - Tue, 18 Oct 2022 19:50:17 GMT expires: - '-1' pragma: @@ -2123,14 +3256,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/7c71969a-961b-46f4-ba67-f234c9ddb743?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/737e1278-64a1-42be-ac82-faead7ab039c?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"9a96717c-1b96-f446-ba67-f234c9ddb743\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-06-07T17:55:50.4466666Z\"\n }" + string: "{\n \"name\": \"78127e73-a164-be42-ac82-faead7ab039c\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:49:16.6089821Z\"\n }" headers: cache-control: - no-cache @@ -2139,7 +3272,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 17:57:21 GMT + - Tue, 18 Oct 2022 19:50:47 GMT expires: - '-1' pragma: @@ -2171,14 +3304,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/7c71969a-961b-46f4-ba67-f234c9ddb743?api-version=2017-08-31 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/737e1278-64a1-42be-ac82-faead7ab039c?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"9a96717c-1b96-f446-ba67-f234c9ddb743\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-06-07T17:55:50.4466666Z\"\n }" + string: "{\n \"name\": \"78127e73-a164-be42-ac82-faead7ab039c\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:49:16.6089821Z\"\n }" headers: cache-control: - no-cache @@ -2187,7 +3320,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 17:57:51 GMT + - Tue, 18 Oct 2022 19:51:17 GMT expires: - '-1' pragma: @@ -2219,14 +3352,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/7c71969a-961b-46f4-ba67-f234c9ddb743?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/737e1278-64a1-42be-ac82-faead7ab039c?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"9a96717c-1b96-f446-ba67-f234c9ddb743\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-06-07T17:55:50.4466666Z\"\n }" + string: "{\n \"name\": \"78127e73-a164-be42-ac82-faead7ab039c\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:49:16.6089821Z\"\n }" headers: cache-control: - no-cache @@ -2235,7 +3368,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 17:58:21 GMT + - Tue, 18 Oct 2022 19:51:48 GMT expires: - '-1' pragma: @@ -2267,14 +3400,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/7c71969a-961b-46f4-ba67-f234c9ddb743?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/737e1278-64a1-42be-ac82-faead7ab039c?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"9a96717c-1b96-f446-ba67-f234c9ddb743\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-06-07T17:55:50.4466666Z\"\n }" + string: "{\n \"name\": \"78127e73-a164-be42-ac82-faead7ab039c\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:49:16.6089821Z\"\n }" headers: cache-control: - no-cache @@ -2283,7 +3416,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 17:58:52 GMT + - Tue, 18 Oct 2022 19:52:17 GMT expires: - '-1' pragma: @@ -2315,14 +3448,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 
Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/7c71969a-961b-46f4-ba67-f234c9ddb743?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/737e1278-64a1-42be-ac82-faead7ab039c?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"9a96717c-1b96-f446-ba67-f234c9ddb743\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-06-07T17:55:50.4466666Z\"\n }" + string: "{\n \"name\": \"78127e73-a164-be42-ac82-faead7ab039c\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:49:16.6089821Z\"\n }" headers: cache-control: - no-cache @@ -2331,7 +3464,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 17:59:22 GMT + - Tue, 18 Oct 2022 19:52:48 GMT expires: - '-1' pragma: @@ -2363,14 +3496,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/7c71969a-961b-46f4-ba67-f234c9ddb743?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/737e1278-64a1-42be-ac82-faead7ab039c?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"9a96717c-1b96-f446-ba67-f234c9ddb743\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-06-07T17:55:50.4466666Z\"\n }" + string: "{\n \"name\": \"78127e73-a164-be42-ac82-faead7ab039c\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:49:16.6089821Z\"\n }" headers: cache-control: - no-cache @@ -2379,7 +3512,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 17:59:53 GMT + - Tue, 18 Oct 2022 19:53:19 GMT expires: - '-1' pragma: @@ -2411,14 +3544,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/7c71969a-961b-46f4-ba67-f234c9ddb743?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/737e1278-64a1-42be-ac82-faead7ab039c?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"9a96717c-1b96-f446-ba67-f234c9ddb743\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-06-07T17:55:50.4466666Z\"\n }" + string: "{\n \"name\": \"78127e73-a164-be42-ac82-faead7ab039c\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:49:16.6089821Z\"\n }" headers: cache-control: - no-cache @@ -2427,7 +3560,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 18:00:23 GMT + - Tue, 18 Oct 2022 19:53:48 GMT expires: - 
'-1' pragma: @@ -2459,14 +3592,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/7c71969a-961b-46f4-ba67-f234c9ddb743?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/737e1278-64a1-42be-ac82-faead7ab039c?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"9a96717c-1b96-f446-ba67-f234c9ddb743\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-06-07T17:55:50.4466666Z\"\n }" + string: "{\n \"name\": \"78127e73-a164-be42-ac82-faead7ab039c\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:49:16.6089821Z\"\n }" headers: cache-control: - no-cache @@ -2475,7 +3608,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 18:00:54 GMT + - Tue, 18 Oct 2022 19:54:19 GMT expires: - '-1' pragma: @@ -2507,14 +3640,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/7c71969a-961b-46f4-ba67-f234c9ddb743?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/737e1278-64a1-42be-ac82-faead7ab039c?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"9a96717c-1b96-f446-ba67-f234c9ddb743\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-06-07T17:55:50.4466666Z\"\n }" + string: "{\n \"name\": \"78127e73-a164-be42-ac82-faead7ab039c\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:49:16.6089821Z\"\n }" headers: cache-control: - no-cache @@ -2523,7 +3656,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 18:01:24 GMT + - Tue, 18 Oct 2022 19:54:50 GMT expires: - '-1' pragma: @@ -2555,14 +3688,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/7c71969a-961b-46f4-ba67-f234c9ddb743?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/737e1278-64a1-42be-ac82-faead7ab039c?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"9a96717c-1b96-f446-ba67-f234c9ddb743\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-06-07T17:55:50.4466666Z\"\n }" + string: "{\n \"name\": 
\"78127e73-a164-be42-ac82-faead7ab039c\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:49:16.6089821Z\"\n }" headers: cache-control: - no-cache @@ -2571,7 +3704,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 18:01:55 GMT + - Tue, 18 Oct 2022 19:55:19 GMT expires: - '-1' pragma: @@ -2603,14 +3736,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/7c71969a-961b-46f4-ba67-f234c9ddb743?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/737e1278-64a1-42be-ac82-faead7ab039c?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"9a96717c-1b96-f446-ba67-f234c9ddb743\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-06-07T17:55:50.4466666Z\"\n }" + string: "{\n \"name\": \"78127e73-a164-be42-ac82-faead7ab039c\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:49:16.6089821Z\"\n }" headers: cache-control: - no-cache @@ -2619,7 +3752,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 18:02:25 GMT + - Tue, 18 Oct 2022 19:55:50 GMT expires: - '-1' pragma: @@ -2651,14 +3784,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/7c71969a-961b-46f4-ba67-f234c9ddb743?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/737e1278-64a1-42be-ac82-faead7ab039c?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"9a96717c-1b96-f446-ba67-f234c9ddb743\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-06-07T17:55:50.4466666Z\"\n }" + string: "{\n \"name\": \"78127e73-a164-be42-ac82-faead7ab039c\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:49:16.6089821Z\"\n }" headers: cache-control: - no-cache @@ -2667,7 +3800,7 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 18:02:55 GMT + - Tue, 18 Oct 2022 19:56:21 GMT expires: - '-1' pragma: @@ -2699,24 +3832,24 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/7c71969a-961b-46f4-ba67-f234c9ddb743?api-version=2017-08-31 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/737e1278-64a1-42be-ac82-faead7ab039c?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"9a96717c-1b96-f446-ba67-f234c9ddb743\",\n \"status\": - \"Succeeded\",\n \"startTime\": \"2022-06-07T17:55:50.4466666Z\",\n \"endTime\": - \"2022-06-07T18:03:07.719248Z\"\n }" + string: "{\n \"name\": \"78127e73-a164-be42-ac82-faead7ab039c\",\n \"status\": + \"Succeeded\",\n \"startTime\": \"2022-10-18T19:49:16.6089821Z\",\n \"endTime\": + \"2022-10-18T19:56:36.2462239Z\"\n }" headers: cache-control: - no-cache content-length: - - '169' + - '170' content-type: - application/json date: - - Tue, 07 Jun 2022 18:03:25 GMT + - Tue, 18 Oct 2022 19:56:51 GMT expires: - '-1' pragma: @@ -2750,24 +3883,24 @@ interactions: ParameterSetName: - -g -n -f User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 - (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.ContainerService/managedClusters/akkeshar/listClusterUserCredential?api-version=2021-08-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.ContainerService/managedClusters/tempaks/listClusterUserCredential?api-version=2021-08-01 response: body: string: "{\n \"kubeconfigs\": [\n {\n \"name\": \"clusterUser\",\n \"value\": - \"apiVersion: v1
clusters:
- cluster:
    certificate-authority-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUU2VENDQXRHZ0F3SUJBZ0lSQU44dDNlUHJZeGpIbk03K3J2V1o0cUV3RFFZSktvWklodmNOQVFFTEJRQXcKRFRFTE1Ba0dBMVVFQXhNQ1kyRXdJQmNOTWpJd01URXpNVE0wTlRNMFdoZ1BNakExTWpBeE1UTXhNelUxTXpSYQpNQTB4Q3pBSkJnTlZCQU1UQW1OaE1JSUNJakFOQmdrcWhraUc5dzBCQVFFRkFBT0NBZzhBTUlJQ0NnS0NBZ0VBCnVuT1cvNUJrUHJEeEVBTHhGb1M4RWYwR1VseS9zS3VaM20vcUh4eDJkVmlWNmZ3VjJycjlMcEVqU1hVeWk4c1kKSjhKWXFhMU1Ha1hNK1B3MGk1VjVkMGNCNVMzbkRKVzNCTHdOT0hHZ2pGSU53eWxnVTRXaTh3aHovR05Bblo2eQptZjRxMHp6K0FhNWR3ZnpHYWdpR0MxaS81TXRlbDVJM0Q5Z3ZMaVlhQjZqU25vR1JQY29uVVJuU29rYi9QYml1CmY4UFVNZ2szQ3FPQkptRVU3aENmTUUyYWVTdEc3MEhMOHVacEE3eVVKOWU0YTBxRkVYT3FCZVNlaXBXdVphaUgKQ0dGOU44RnAwZEtLYzVLdGlWZis4eTRzTDlvc3pJUWkxMVBTU01xQldpYnJpdnFMQXh0djRZUEVCVDlVN0ZvcwpJYlF0ME5WQXYxUFB3djhWeTR3UGR4cE5NMUorUkxyYnIyYm9HSzVoaTEwVzduejF1RERabmV4cU5uRUw1UjZ3ClNBZDBNdThQNDgwY0hWdUhta3JLcVBzeUVMOHp5NFdqNVVZdklQMVJKbU1hS1pWNy93UVpweWE3NllyczFPTkYKak10c0FEOWduV20xTUgyM3k4c1pZMUF5OW9ERG0zNmlhVTVlM1VsL2tTVW9OZmt5bW1XSjFjNjVtd201dSt4eApqZzIyckMwVVNreFVoeGszSmhORkZqaFF2eVEwVWdpb1cvL0pFMngyRExYeUE4Z1BadFdadUc0Wjc5QWtMQTFZCkprMnc0SVlpNmNWNjdWRnp4eWl0ekxGV0pCS3ZoNng4V1Q5OG4xSjNEazk3QmVQMHZ6K1VtOEo4dm1PNlpsZTUKdDlJVEg1REc3Zy9VK1NENUV0Q3BkdG10ckhVMHpkTTZXb3k5U1F3anRjOENBd0VBQWFOQ01FQXdEZ1lEVlIwUApBUUgvQkFRREFnS2tNQThHQTFVZEV3RUIvd1FGTUFNQkFmOHdIUVlEVlIwT0JCWUVGQmVVSEFzcXdMWVROK2llCld0N3Y2d3A5d084K01BMEdDU3FHU0liM0RRRUJDd1VBQTRJQ0FRQ0NzeFdZei95Z2lWRnVWbnF5UHFuRS9leXAKMENoY2NxcXIzblEwV3RTV3BJZEV3Z0FvN0Vwc3lDL2RqaHhyYVhvcmxaQ0dyT2NzWHVsVWZJSlhCY1dmSGRIdwozbXg4b3F0S3hseDJESXhkWDFIWFpCVGgxZUhTWXZEK25mOXhqdnVJU0Nzdm0xdUJ1NHdQL0FKV1ZtZHBTaEkyCng5cjdoSlVZMVVOOHZLd0NYdnJETVpRMHBCQ3hOS2RMMUE3M1JiU3I4YVIzSUxUYW5TMlZuQXQ5UjFWNU11SVAKekJuZGtqZ3c0RUxpWnZENnhhTWRwQnNDdEQwNE1CWmtRVlIvTE5iU1VUSkt4SHdNZGxGSFNiazhDSE1LNE5lLwpWYXhqOXFVYWhSa1BXN1JsZWJiRmNHL1VMN0VkL1lNWEV1ZHEvMUFQQW8zSC95WnNlV0czMmRvSFJEaVVteUIrCnBmeklZaDZtdHNPSEVQSUF0bWc2SnY0K29tNE1HNDY4WVQ5YTBQdTIvWXNYMXdiQ1o4b1NFblViN1ZGeTZHZ08KaE5DMjl3NTBjYWNPOVFGKzIyUnhpbkcvTWdoRThUdUJMc2pobUh5OERTYXkvV041RUxYdjBsUk1rTEJOSWIwRQp4cXN2STVRWWswL1UxZTVTRDRzL3RCbEZ2K2lpV013TDRjOUVZMWhaVzBHd2pQS1pOa1VXTFlCZGprSkNCWnB3CmNCeVh4THBVNWtDN2xPeFIwcTVtRDdaWlRoVDU3Z3RybGR3THhmbHlzSnp1dFZXcDZLQmR4TXYrdEdka2lIQVkKSXZWeUhCY2V6dWw5NHIySXpPS3IxeS84a0R2NG5ES3VBd2Fac3N6MDNKTFZsK0NGN3Fwb0VjczJwOHk5dzNlLwpwc1Q0SS92WnlSODNnZHg1dWc9PQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg==
    server: https://akkeshar-dns-08147f89.hcp.eastus2euap.azmk8s.io:443
  name: akkeshar
contexts:
- context:
    cluster: akkeshar
    user: clusterUser_akkeshar_akkeshar
  name: akkeshar
current-context: akkeshar
kind: Config
preferences: {}
users:
- name: clusterUser_akkeshar_akkeshar
  user:
    client-certificate-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUZIakNDQXdhZ0F3SUJBZ0lSQUxnVGs1dTVnRFZMWEVFaDJvMEtWMWt3RFFZSktvWklodmNOQVFFTEJRQXcKRFRFTE1Ba0dBMVVFQXhNQ1kyRXdIaGNOTWpJd01URXpNVE0wTlRNMFdoY05NalF3TVRFek1UTTFOVE0wV2pBdwpNUmN3RlFZRFZRUUtFdzV6ZVhOMFpXMDZiV0Z6ZEdWeWN6RVZNQk1HQTFVRUF4TU1iV0Z6ZEdWeVkyeHBaVzUwCk1JSUNJakFOQmdrcWhraUc5dzBCQVFFRkFBT0NBZzhBTUlJQ0NnS0NBZ0VBcVZiU2lIL1pjK3BqellkNy9DUk4KaWpvWGhDb0p2a2QyU29WL1pTOGEySjUxeC9xR3ZsWFdOU2xVd2NZMWh1Yk5wNldwV0twMjluM1Q4bUErdnU1aAp5bDZYMEFUYWhETnpTZVZPOWpVWlpoQzFYd01sYlFBaHl1Mzkzc005T3dsWDVSNDVHWTB1TG5WZ2NLSlQ0aGFVClZyVDhYcEd5NU9WVkwyWmp4VGVWcnpjYlBsUGNMSFBUTm9TeG1uNk1Bdkx4L1dIRkE0blY0cm5iM25HOHVBWTMKZHpNUVY2bmhvZmxDSTBvd2ZGYlA0YnNiZ3Z5VmNpbkduSGNZN2Z1Q2xRN3NJZmRyQVc1Mjk5bHU0dWNEa2xveQpFbklrYmFFTkNxallITm81RElWWlV2K1NzZkpFb1lTUGhKbk05Sy9hTVNTR2UvU1kxVDlKeUNaSEhXVldpbDJvCkV5cFBJdUxNN01QSHpXMXdRdERTNDF6M3pCZmpIbnhzdVpETEpqaFRGUG1wUjJIYXZyLzlzZ2R1d21ZSmtJWU4KN0VEbWVqbFNRRE85NGk5RGRwaFltdFV1R2xhNTdRWkFWZTBTcEJWamZVLzU2NW8zZlJJM2VabE9NdTlFWFJraQpXN0NxaThKQVZuVnBUblh1bVRyNDdlOGRJbVY2bXhnR1E4U2h1YWszZnVacTlQWHZyQkRVdTFNOE5QbnNxOHJpCnkxdGN4WDNOdE1jVnNvTlk5RkNUV01SR3BrTTZHWjRvVjRkTmlpOStJMm91UjZ2Rm54T0hrK3NnUGQ1Z1JTNXYKVEw1T1N1UDJQQXBaUS9aQVMwdHhtU1g3azhEM05USHAwdmhPTVFDSWpiOEQrbXhyTy9jMWU0cUFTdTB5VCtYZwoxZTcrd1NpeTVNSER5WXorZTJTRWk1a0NBd0VBQWFOV01GUXdEZ1lEVlIwUEFRSC9CQVFEQWdXZ01CTUdBMVVkCkpRUU1NQW9HQ0NzR0FRVUZCd01DTUF3R0ExVWRFd0VCL3dRQ01BQXdId1lEVlIwakJCZ3dGb0FVRjVRY0N5ckEKdGhNMzZKNWEzdS9yQ24zQTd6NHdEUVlKS29aSWh2Y05BUUVMQlFBRGdnSUJBSjA4MEJKOXZTMXllQmdiVkNHWQpuakY2TlEyZTludzEzb1hvb1BLbDQyV0RacGhLRU94Nk52eXFrOHBLelVUM3ZpYkoybzlWdjlHNGFrb3RmVUtOCkZ4SHg3M1JDbDlDdVIvOW5RNmtKRkk4OU1OQjVpMzJqR1RWUXRzZk83S0RkMktwMTlidGUvNWtKT0dSRzRERjIKWVY1SzZHZmNsY2JrUzlQN0hBcXUzUTFRVVMvM2ZJbUlUZ29uUVJXRUtRQ0dGVHgxaW9yOUJBeVZEakkwVGVUMgphUjZJbkFDSTU2U1NkM1FZWllCdGVPN21PYVZyNk5JOXV1SjYrTENFcHdXL1BLZHIzZGZneDZzSDVKakY1YTJDCmNGZ09vcGtOVExBZHM4TDA2VEN1elRZaGhJakNRZnFyaXFmVUR1c3lmZDViQmUrVWVoZC9wbUk1am5QaDloL0EKVExDMkxXT0VoSTBsMG14Q0IwNnEwaHBFZVNYdUQ0OHU4WFNBd2tPZExhRFo1Mlhub3l6Kyt1YkxjaGRLTVVPbQpEQjl0MUZEa294SXBOTERoSStQcnFNSWFQWndsM0duUndkNnFmOW9rWGxtVEtKWlNSZjR5V2plRFdiL09COTEyCll4aEdQT3JpV0xzVklyeDZ1bFdLQlY5VmVXZ2lsYmRlMXJ5djhMb0JzemdrTzVHaEd6RXM1OWNtL2NzN0ZoazUKVW5QWXFicVdWU2d0dFJOY2NtcUhtaCtaTHk2VEtUVEswZFJIY3gzL3kvSkhoUTh1MDUxUkp1Q1p6VjZRMFVKbQpQUjNmR09WMXNVL2cwRzd2OEs3S2Mxa2Q1YlE3M3BXaXJLTlVRN3NPNDlaOUF1ZFFyRld2bE1NQW5udjRnMmNrCnJEbnQ3cXU1UG5TUGxSRVdNT0o2WWw4MgotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg==
    client-key-data: LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlKS0FJQkFBS0NBZ0VBcVZiU2lIL1pjK3BqellkNy9DUk5pam9YaENvSnZrZDJTb1YvWlM4YTJKNTF4L3FHCnZsWFdOU2xVd2NZMWh1Yk5wNldwV0twMjluM1Q4bUErdnU1aHlsNlgwQVRhaEROelNlVk85alVaWmhDMVh3TWwKYlFBaHl1Mzkzc005T3dsWDVSNDVHWTB1TG5WZ2NLSlQ0aGFVVnJUOFhwR3k1T1ZWTDJaanhUZVZyemNiUGxQYwpMSFBUTm9TeG1uNk1Bdkx4L1dIRkE0blY0cm5iM25HOHVBWTNkek1RVjZuaG9mbENJMG93ZkZiUDRic2JndnlWCmNpbkduSGNZN2Z1Q2xRN3NJZmRyQVc1Mjk5bHU0dWNEa2xveUVuSWtiYUVOQ3FqWUhObzVESVZaVXYrU3NmSkUKb1lTUGhKbk05Sy9hTVNTR2UvU1kxVDlKeUNaSEhXVldpbDJvRXlwUEl1TE03TVBIelcxd1F0RFM0MXozekJmagpIbnhzdVpETEpqaFRGUG1wUjJIYXZyLzlzZ2R1d21ZSmtJWU43RURtZWpsU1FETzk0aTlEZHBoWW10VXVHbGE1CjdRWkFWZTBTcEJWamZVLzU2NW8zZlJJM2VabE9NdTlFWFJraVc3Q3FpOEpBVm5WcFRuWHVtVHI0N2U4ZEltVjYKbXhnR1E4U2h1YWszZnVacTlQWHZyQkRVdTFNOE5QbnNxOHJpeTF0Y3hYM050TWNWc29OWTlGQ1RXTVJHcGtNNgpHWjRvVjRkTmlpOStJMm91UjZ2Rm54T0hrK3NnUGQ1Z1JTNXZUTDVPU3VQMlBBcFpRL1pBUzB0eG1TWDdrOEQzCk5USHAwdmhPTVFDSWpiOEQrbXhyTy9jMWU0cUFTdTB5VCtYZzFlNyt3U2l5NU1IRHlZeitlMlNFaTVrQ0F3RUEKQVFLQ0FnQVlocTZ2Zm5Uc1NRSnpmakFDOE84YUdoZlYySkRZc2xqN3FpSXRjWWtkM3JXSVpVeW12Si90eXpLdgpiaG5LbEJzdE1OMDZMbFpVbFJ6aGVmY2NpRUk1b1VHenEwQks3WGpCTnlrY2w4dzU4Q21VdlV2cVlaNkRXYnp2ClNxL1BvU2JOMUhiSXNKNER0SlJuTzl1U1gxclV4Qm5aU0pqMGRoTEVicnY4a0hScGtEMnQ1VTk2VUx4Q3BTMWsKQnZnWVpUK1BSMTNWaVYwbEhXNHU1YUpEdFFMTGpPWE5ESm5WYjlpWWErSmgzK0R6L0xtYld5SDZDdnJ0Sm9MQgoxUyt3anh5OFBxMk00NGJ2bHRwc21yRDF2WUtMRVdVZFM3b3IxUkNYL1ROT0VQSkRCeWtrQk1iVDdmTXJiV3MxCjlWSlEwODEybnBLVU1EcHJkci9PRGlOZW52bjhzRDF3RTk5SXdaVElQa291NjI5empUMzFoY0p5YXhrQTVBLy8KTUdFWTduNmFYb3hYb0FNbFlVMWhTZ0wwY2tsTlJCN2t5ZGloRi8xWi9oeXoxMWtBNDAvbXplWGpQS2U0eDZWZQozNC8yRmZ4VzliNUlNWHlKZm1vM0FTMjFONnhrcWxmWUgvbWd1cVVKaUtWNWFaT2pxQVBkend5SUpKNjV5Skd4Cmo0bXFZUzM2cUt4VE8wSkM3NFBSZ3NYTDYrakEzcmwzNkI3UmpRb1JOV0IwdXo0SWozMHJRMGdkN2xPYzEyWUUKMHlUS29YZUtvd1A0QTR2Wldxa01JQkJMYWhBbHBuQUFJUlF5Skt2eGc3dXFCM2ZTbXBPNE8xSS9lTTF4MmY0RQp3UUpwTEJsTVY2RDgraXJlc2cxT3NPYVdxdUdSZ1lqUzQ5eGtwbWNGRzlubEtnZW9FUUtDQVFFQXhqbFlLcEZVCk1VeTJLWkpNU1c3K3lJZW41Skg3YzU4OXhnRFJNZG1xQjVPNnBwN2VNUlFKVDRpV0NIVkJqeG5wRFdQcHZFZEQKY0ZQU052ellHVkZ4cmZPb3BnQkpvak5qcE5TR1cyMVRQYS82bWZBUGFKMFkweW5kcmRDMFRaNlBxUFFpdUNVTgp3VnBrMFVVZFFPaFlzaEh2NXY3R0NEcWpldmszN0lhNEo3NUNZUWdXaW5vRlZiZ2VKSjN4aDBDTTF4VDdjUnVMClk1N3hrQWFKdnhna3VsOGZKWHR4SWlIWFdiejdKMDFJUmp6aklxaTBLdmZHU1RRaEhUUzAyU1J4MFFSMytjZGIKODJEakMrcDRoZjhrUlA3ZVhqZk5PNnlLdkxDa0JMdHZ6bkJQWE51OElRTEI2WEJvOUcwQk5QSkt4emF2L2ZlOAowTnpId3pJbEJBOFhod0tDQVFFQTJySTFzNXgwbit5andCUm84anhVQUdLd2ozS1RNREhRQkpjYjhrcitiZFhnCmhmbkx1bEJTMVdKR0RyWlZGSkszYmt6c1llMDlzTXIrU3J0Sy90Zm5IemxPNTRKN0ZUZllGWmE1emRmT05uaWcKeWpLT0pBY0tmM3o0N1MyVWxZZit4SmprZFoyeE8yYUJ0WVNrUGVTa2RYcGZac3Q0UU9sV0ZjZ3FpVnhCSEx5UApjdk1oUGJYcklKeFZxRmNSMFNlUU1qcHk4b1B4bmtUYU5aK2FDelFkanZ0cFZEMjFYVHpsRXFBV1kvUVNVSEJkCmRGUms3eVZvc2x6ODVtQ25yZnRhNlVDVnQ2KzBseThEMHNrWmtHMkx5dXE1Z3RSQ2NmUVJWa1B2bk1mODNGWXEKSG1tQ1Z6b2xSTk1FcGxETzY5UCtuSTJWb0IvTGFZVi9FVm1HZmxITDN3S0NBUUEzSnBsTExxZ1lGMW5QY3k3YQpkNUZYeGJhR3Q3OFlTa1BycFNxUERocHFoVVczT2hoajIwbEQ0YUtzczhNTHg0KzU5QVNDSitteGhRREYzODlZCksvclBCUzMxRWJ0cTc1Vlh0UEcwMmxRQW84ZTdzTGo0eUV5eDQ4SVRyQytlTVBHVGRtbDZob1N2T1RFM3NGMFYKUkEvNWExZ3VFdENTVlk1S2xyQmlsSkVFTGNGV01lUGpuMU9BcC90ckFLN0VqTWNpeUdtMDdJNzVwcTFhL3hhdwpIazJKdFJGN1lqNzEyaWlLaFRQem1XTDBkOWhTT2xIMmFuMjJ2RlJDUmZuVE9IcWFSdjRDZHQ4MGRjNTE4RVNjCk00S3AxNmVTMnorMHpLckZHUGhwZ0tUNDhQS0R5WGNzVUJJWG12ajRtS052VWNKUWJ2ekVVbnNzT1dFekErNmMKbEg1ZEFvSUJBUUNiWGVYVzN6emZiTDk4czBOSGJHS0ZENWU5b1RrK0VQVm1mVy9tTnk4QUd1VHlhVjg1MHR5MgpLYVg1eDhFejUzT1hVZURSUlRrQmc0VWExR3BZR0V4akcxU2FGbEdZNWw2R0g5bUtpbGsreTMzM2gra0JZVnNzCjdUenZTdVYzUWZGVi9BUzZKam
lHS2JIV29oQUl4SW10eUMxRW15S29ndHljaHVMcFNQZ2RFVHliL214TUtoU0wKeTJUelBrQXpKZmRFSjlBSHlJTDczSkJsYmlpQzh4aFl2dlc2V2tpaG42UmlFZ2MySld4cFp4cU5qWE5Uam5FTgozZy94SHhPZy82UTdkNTJsckpjUW5OOExmY0RDb1lDaHZSWGtBeVhkVFBhUUlaMTE5WUlIaXROTFhjdXlhbVdWCnVwQmdFNktkV2JOVnh5UllhSjRiTGpMeXdYWlNBWXhqQW9JQkFDYUw1WDdISVA3dngwU0ZTNnd3UUlHeWt5NnMKN1VINmxONGE1YWVGY0poZklKVUNjQmxxUkxMdHVOc211Z3V6U04zYlAzMk41L0hIRWx2bWEveEQ0WitMc0tNMQp4QzdsVWFUVndXcjZuM3c3Z3lPUjNvUGN6eEpLTVRqK0JpVTZ4NkFoUHRER1lBOFYwdVd0a0V1c2xVQS8rL0lUCnQ3bDJ0R2VlTUc3SGxpYk5JSFZZQkk4eStET1lBcGJUUUZwbkMxcEpJQ3hnR3hZMy9uN01Gc0ROOE5EMXh4VkMKcXVoalM3elJaRzRtUUtsYWNXVXV1NnNaa1ZUM2V3dkw5aWoxNkJSWEdxTmtnVnJmTnFkUG9LMEluYzV0WDRPUwpVSWhSdlkzaTA5SGpvdGRXOVBCNVdxOTJ4UjhZK3M0YVlxOVd6UEV6ZTNJZlNCRTRqR0YwMERTRmFCST0KLS0tLS1FTkQgUlNBIFBSSVZBVEUgS0VZLS0tLS0K
    token: 778f03773389ca95dd1a77d36713198561bf5ccd49192d8107603c90f2e6f0e63823a681e44e91fb1ac807e2d5bc56801373412463f67cb3cca0778f2907e4ca
\"\n + \"apiVersion: v1
clusters:
- cluster:
    certificate-authority-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUU1ekNDQXMrZ0F3SUJBZ0lQQkl2dEZLOTg1VWt1N3htOFFxK2RNQTBHQ1NxR1NJYjNEUUVCQ3dVQU1BMHgKQ3pBSkJnTlZCQU1UQW1OaE1DQVhEVEl5TVRBeE1EQXpORGd6TmxvWUR6SXdOVEl4TURFd01ETTFPRE0yV2pBTgpNUXN3Q1FZRFZRUURFd0pqWVRDQ0FpSXdEUVlKS29aSWh2Y05BUUVCQlFBRGdnSVBBRENDQWdvQ2dnSUJBUDF0Cm5heHNCRUs3WTlMaGZ6Y3Z1Sm93TG5hckZlNUtxdVhXMG9hZzBPK0hzcSt4Q1Z4bForOTMvMHRhdklJbzF4TnAKTkd1VGJqejIrTHVBUWdCM2tibVdncnZGNk9HZ0l3MFVIV2Z4TllrOFhOS2UweG1wVzkwcERWM0RJMVdjQUZrSQphb0N6bytrRFRhaFNxelJNUVdlWk9nUFdDNzdWYnpuK0l6Qi9MdTNsUUJGRDBnZ0p5VFh4djA4akR0S3d0ajNLCm15QlI4R2owWHorejU2V05aMEFVWk9ybWhITVErY0dlZzhNQnhqWm1jLy9yU1BKUlVXV28rbU1ZbkZ2cEJiRkcKRERNOUl0eDhnOHoxYjVxd2lIMHlqRHNRVjNEZ2VxTGYycXdtakdpOG8wTS9LVzhYUlFYZVRUS2k1Q1RaZzFEdApWQ1JhTGxCZFl6ZkZGTktvU3hBWXB1NHM3T08vakNDNlZhSlRocDIwTGFGS043ZjcxckdwQ043QVZJaUY0VlZxCkFiRlRhTTJLRjNrcmxvUDBkZDNublFDTDdpRW0wazU1dWJNVStpZmcxUlc0VlNUbUU3UnlNdW9DQ2NpbEVDVEYKQ1ZBcTJDWHRsNjZpMUxjWmgvR1UyckZzZzJCVTlFYWl2RzNXREtCbExUMWU4U0VhTFRBRnZuTGxvbCt3dE1JNgpMcTJBbVlWbE1xNmgyNU5jYnpZRnBNc3VwS1laQ09MSVNnRCsxSHQ0S2xMdzI5TmpTVmJQWUlMM0lCR0twcjI0CkU1ZHplajFWbDREa3QveHJ0Z1AxbFFBVy93VUw1OGRhdEY5ck5jRFF5bXNsL1dUVk41SWFDYkx0dEl4dWpJeVcKNkc1TjZUd0p2SGI2RVZVMW9qMHpTZVNxQm5yOUd5dFFlM3dMLy85eEFnTUJBQUdqUWpCQU1BNEdBMVVkRHdFQgovd1FFQXdJQ3BEQVBCZ05WSFJNQkFmOEVCVEFEQVFIL01CMEdBMVVkRGdRV0JCUVBabUY1Z01KdmE5b2lQSmM2ClFJd1MyZzZjS2pBTkJna3Foa2lHOXcwQkFRc0ZBQU9DQWdFQWsvaGRaVnFVVXB4aG55S2VEWTBkOFI3VHBQTUgKOElMRCtKbW9XdGNYMmNCQkdUSGRPcXBlNGQwMnppNURiWWhtRkplYWc0bmNlYkFtV1MyRGJrc3dnODlLT2lOaApTdnF3RHllK0t4ajhHLyt0THM4WnBpekpqUVEwU09YMVhDa3o1OGZkdFQ4QjNCWllPeGZ1cVhCQ0l1bjhNUDZXCitGcW54NEpFNVk2Mm05NGVYcHNOUlhEMVdHaDlZSS96VFRlb1RnU3Z6Zjc1Um1JTE5xd05OeWkzb1g1emNvRTEKOFp5WUVKTFFtUEtxbktiNk9xQlBXL0RZcmxMU2w0Q0NaUHZsSFFkUkZrV2F2ak9hanVrbk5VckpINktrTnU5UAprUWQrakxhTHMvNVcxUVE1eGRRVWRSSlNhbHZGNW9WTXZHSTBISHdWKyt2d25NNDJIeDYwR0k5UzFOc0xhemZ2Clo2cTdtZG1jRHdDNjVwdVk1S1AxbTcxaUlMQmNOb1MwLy9JTGVPMDNFZy96K2tKMlRxMkFtMzRWUnYxNjlIQ2oKN29QSTF4MVdxWk9HR05xUDJkbFVXOTZ3a1BpWVJndWQzN212T1V0bkpNMHVsZGNrRlJJUytNbDlYcXU3RXZLbApGZWtoZ2UyM2JQcm5vSWUrS1QrM1Fsc2JoZlBjSTR0WjJFL0dVQUltL3RhWEFpZUJFb24renpSVDBLSkFHYmxFCmpQcENvRnF5WGNsdjRYeHNjSzlzZGlzdDVWWkFzckQzdllxTWo2cXhlUC9nWFFxbkNEYW40K2lZQ0l1L29rSW8Ka2FJeFZPa0o4K2hSb09GRFZpR2FrUEwzeWM2QWEzcU5yOG9WLzM0QWRaZ2doSmdVUHlveldUL1hrOXRQUFY5VgpZa3NXVFdHTUR2TDBid2c9Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K
    server: https://tempaks-dns-0f8c9536.hcp.southcentralus.azmk8s.io:443
  name: tempaks
contexts:
- context:
    cluster: tempaks
    user: clusterUser_akkeshar_tempaks
  name: tempaks
current-context: tempaks
kind: Config
preferences: {}
users:
- name: clusterUser_akkeshar_tempaks
  user:
    client-certificate-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUZIakNDQXdhZ0F3SUJBZ0lSQUk3UkpJRFQ5RjZqMzVpZ3pua240K0V3RFFZSktvWklodmNOQVFFTEJRQXcKRFRFTE1Ba0dBMVVFQXhNQ1kyRXdIaGNOTWpJeE1ERXdNRE0wT0RNMldoY05NalF4TURFd01ETTFPRE0yV2pBdwpNUmN3RlFZRFZRUUtFdzV6ZVhOMFpXMDZiV0Z6ZEdWeWN6RVZNQk1HQTFVRUF4TU1iV0Z6ZEdWeVkyeHBaVzUwCk1JSUNJakFOQmdrcWhraUc5dzBCQVFFRkFBT0NBZzhBTUlJQ0NnS0NBZ0VBbDFIVHhmVU5nck93UlB3TlJwdGkKR1BLeWpEOVFxQWcwUTJnUFpBblZPWXdWU2U2bmtBSllFR01aNFBJRnRTUCtldzZXVnVEZ290cU9qQ2ltZzVVdworZWg0ZkR5elpwc2xLa3RFcWZNTTlIMXFZQmNyL2RnTnNXUjBJUGtURWVhZmNaZFl4Vi9rM2tack5yOU1OeXNHCm00VXNxZ2l6QndnYnM4WEhWNlBFMDNSbVlIRDNJT3R1K2laKzhHYi9GUFFlRUloV1pUaFBGUmovd3UrMUZLQloKMXA3N3FvUjk1aWQ0U2dXZUVLUmRLNXh0R25LT1JSeTRObWp5ek5Jc3lBVXZIbzhLQVQ0VzEyL3ZrUzJNVXFXKwpZMm40bTZZZFUweXdIL21oUlJIQ1JnZ3krQkhRQlJjNkdMTU1KcHVaNEFvZkVaejVwNGxRcHB4QXRqYXJ1cEJuCkpGSVc3S25EWEtaRXpoSWJpZmtLd3ZrUGk5Q2hsekx6NmxvNDNBaHQxaDEvSWRieXhSN0pHM0pMY05mMXpwbG8KaUJudnBCZ3lBTFlNQUhrNVNVdDkvWlNoNGN1VGlCRjJ5OHMzN01uR2ZWN0FYTG5lRFdVNS96WUlYajYxYmZTQQpLSkhBL2RmbFJhcnM2WWszZU5LRlJ5QkZ4MmdKZnVNWjlkY0R2NFFMaCtRRTRhR1JzbmtPU0d6cm40dVBKaUh0CmQzZHNCWUs5QWtDYWZVcVZDQWlUWkZ4dW9jakl1OWRrUm5ZRGF0dWdOMnY3R3ltbG1mWjdyT1VhUHVkekpIaE0KTjlOczRrQTk1cjRCb0tIMTQzUmJNRStlWGNmUzBwUm5tR20yVElpdVh1d25SWTRFZ1RYM3N1aVhxRTRZTTBPTgpiSGxRVm53QWxNZGJsZVh6bitMVDhFTUNBd0VBQWFOV01GUXdEZ1lEVlIwUEFRSC9CQVFEQWdXZ01CTUdBMVVkCkpRUU1NQW9HQ0NzR0FRVUZCd01DTUF3R0ExVWRFd0VCL3dRQ01BQXdId1lEVlIwakJCZ3dGb0FVRDJaaGVZREMKYjJ2YUlqeVhPa0NNRXRvT25Db3dEUVlKS29aSWh2Y05BUUVMQlFBRGdnSUJBT2lXNGdsOHBHQmtXVStlVk5aSQoyd2ExRWliTk10MVd2cWs5amk5VFNiQ1ZVRFBjcUF1WTh4dXgrYVM5UVkvd2ZZa0MwMno3SXBGeXhXeWUxSTg2CnMrUEI0QmtDbGEwMy93OHpFTEtRRFZYaWZ2bVFWR25KbmpmNXlva1A0RVNPRldFZnBxNVRZUUd2eCt4TnhuNngKVDhoOU1nd3NtVWNNTGlqdit1blkxR2tOUTFhbkdXV3lMVnQwTU5SYVFwaE9LVFQ5Y0NtSlFyd2EwUDhRWUJvMQpIbFdtbndHRGtFVEs5MzlEYVdrUUl6OGMvRytBSi9SRHd5Nmo4QzJsRXBIVWxGRU5sU1RzQ0toNGFKaVorV2VMCmxLdXNDdWE0SW82RWcrYTk2d2UyWjNPemlzTm1uMUZjR1VTNzBNUzdjNHh0ZDk2NEhVaURWOXUweEtZcjQ5Z2QKNkxyQXFqOUczcGNSQ1VtUmh0NTg5YkxpV3FKVE5VVjRzQWt6UEtnOGVyUGNlMTFuRFRGbHRFQm50M1hSYk5RZgpBVllmTWdteEdKSERmL3c5YThQNUEvUUZhOHpDVEQrSENJcXpabEFoeEFEMFp4R010cjNHMnk0cXAxZ3JCaFZHCnk0RzhlT3JNL1hheFQ2NEtQLytRQ0lnQkdhZUovak1WNGRXVSthTzE3bU4rREVrdlFJc2VwL3FyOUM5cEtla3AKc3o4dWFDcFpRbGZBSFlJYzd5WFJ0UUl3cUtHY3J2cG9SY2RzREFoM0d1Zkh0N2pJMjAyNDVhYXB4OVFaVzNxTwp5OHFoaHo1RTluZVJHZlJsVkMwTjhqbnhrUWliUGxtQ0dnQXdVYU50MTRCUTdIckVQLzJFTHNXdFBOOTVhMlRMCit4dlkzMDBBM0h6ZzRXWENKWjVRTXlTRgotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg==
    client-key-data: LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlKS1FJQkFBS0NBZ0VBbDFIVHhmVU5nck93UlB3TlJwdGlHUEt5akQ5UXFBZzBRMmdQWkFuVk9Zd1ZTZTZuCmtBSllFR01aNFBJRnRTUCtldzZXVnVEZ290cU9qQ2ltZzVVdytlaDRmRHl6WnBzbEtrdEVxZk1NOUgxcVlCY3IKL2RnTnNXUjBJUGtURWVhZmNaZFl4Vi9rM2tack5yOU1OeXNHbTRVc3FnaXpCd2diczhYSFY2UEUwM1JtWUhEMwpJT3R1K2laKzhHYi9GUFFlRUloV1pUaFBGUmovd3UrMUZLQloxcDc3cW9SOTVpZDRTZ1dlRUtSZEs1eHRHbktPClJSeTRObWp5ek5Jc3lBVXZIbzhLQVQ0VzEyL3ZrUzJNVXFXK1kybjRtNllkVTB5d0gvbWhSUkhDUmdneStCSFEKQlJjNkdMTU1KcHVaNEFvZkVaejVwNGxRcHB4QXRqYXJ1cEJuSkZJVzdLbkRYS1pFemhJYmlma0t3dmtQaTlDaApsekx6NmxvNDNBaHQxaDEvSWRieXhSN0pHM0pMY05mMXpwbG9pQm52cEJneUFMWU1BSGs1U1V0OS9aU2g0Y3VUCmlCRjJ5OHMzN01uR2ZWN0FYTG5lRFdVNS96WUlYajYxYmZTQUtKSEEvZGZsUmFyczZZazNlTktGUnlCRngyZ0oKZnVNWjlkY0R2NFFMaCtRRTRhR1JzbmtPU0d6cm40dVBKaUh0ZDNkc0JZSzlBa0NhZlVxVkNBaVRaRnh1b2NqSQp1OWRrUm5ZRGF0dWdOMnY3R3ltbG1mWjdyT1VhUHVkekpIaE1OOU5zNGtBOTVyNEJvS0gxNDNSYk1FK2VYY2ZTCjBwUm5tR20yVElpdVh1d25SWTRFZ1RYM3N1aVhxRTRZTTBPTmJIbFFWbndBbE1kYmxlWHpuK0xUOEVNQ0F3RUEKQVFLQ0FnQnVYbCtoZm5ocFFaaXE2RkRpbEFqNysyRWhqRGpQSnBUQUtaRTRQVGZKcnBRRWhjNnY2aFhEZGhUMQpEbkg5U2hDZVB1aUcyNlRkQTU1L09sTWhuRnJKbkxic252V1hQRWZpVGwzNXUyNVNrWHRhTk95QVRPckgzV1lhCmVyRmZHQ0Jwb0tNOTZ4Q3EweC8ySVpkOGthUlRJTktQL3FKZDJac2pCM203UVpjWlNFZzN4WTFxOUczUWhPMkgKZVFoS2h6ZTZoVWJHczBoek1mV3lnRlpLZ2g2a3ZQY0F5M3hzdDVNMElsN0U4cTJHVnFUNWFsNjhxQkx3bEtHcgpwUXpHbzRsWHQ2cnJkeGIvOGdRQmx3QkhjOTZtOGk5RGorakp6aUNVWWVZdjA2RW9wV2hQcWVaWklEVGV1UkYrCnczVjdoTVplZVdsYXVud1oyZFFWVTFEMFZjajBCTWFMclo5TU1JSndhWEd6ZVFGdkZGM1p6RERIc2t1eGZHeHUKVGpCYmtyOTQxa2dKdnFpMGFOYXNDRkZUYmZvbGtEY2xwUnJpVnZZOTkvSHBiZktFU3JPRkd2THpKeHZ6cHE4awowbXBnNlQ0VXB0VXNsT2xrK1ZBWnJWWG0wdUIwbkZCVEpJVFcydU0rSjVITGJvV2FYNy9mNkQ5L2dhMkNOUDNZCnhHZ0RmaTU3dWl5NXNGeVYzMXRXOFNQYTNoQW9jWmphVEtHU0I3RldDV0p2N09MOHlpdWFXTXdhYTJBNjJJQ2QKd1FXUkhQa1NHeHFuY0FZV1grZHFTZDNBelRnRCt0VXNEWFQ1RjFySUp4RittcldHWkc5dmk0RFJOQVRvcU1Bagpib2Qrck5wVXBpbXVLQVR5Wmdyd3RkNVNMUGduSURJSnpORUJCTjNoVkFXWkMyblFhUUtDQVFFQXlHOVY3MlBiClZma0ppN1dkMlF3L1FvUU56cTZFYVg4WklLV3Y4YjA0SjNwNmtuWDZiVGpheG5RaWNsSGpoeUZKVDRBMy9Ia2QKb2VVbFhOT2JmVFByU2ZFcFVwMnVnMDFYSTZqWHR2bm1OT0lwWnBha3lBaDYzaDNQSlM3cDF2WTdZS0diNTRwcwpKMXNnM21WNjRENU1FMXNGMm1pcVhMUG9hSitnSkx2WkF1RHgrY2NKNnE2RE8vK2dVL20wc2o1Y2owekZJZ2crCmpCZGRHSDhCVURkRXU1Vm03emlPYWxvSlpINmQrVjlzRVQ5QnhEQWpERjBLRjFYSTFic2c4TlhZczdCWlR4T3cKM3I5UVFyc0JTNXMzRjFRSSsrVHIyN1VOcjRWOWRQNDB3d3hBbVNvS0ZLWmJWUkUyRnpYdmlsQjR1c2dVWDcrdAo5S2ZHcWZIYys0TGNqUUtDQVFFQXdVVFVpOHZNNmRkWUZqTDBSRmU4dnNnMllJa3dqdFovZ202YkoxNXRTTEpiCmFieGpmUnFGZ01nM0RBVFNtdzQ4OFBBUUl1VlRLOEdBcy84N0dnaHhoSE5tbHRWUEhVaGhQTC9MSjBBN0Y5dDYKL2xvTjdhT1o2V2VEc0treitmQmtNdEMyY0hzWDVZM0lkWnVpSG1GQ3huZ05rdzBkZGdyeDE1aFFSRnBJUXM4MgpTVHk2Q3FCZUZNM2Rkc1phN1drU3lmN2ZXVkpQWXdhWVRPZHVjY2ViQk44dXRrb0tTU05yU2NQMlQ1R1BlMlFQCmttNDU3V3AyZmZ0VlA3MGIwbTkrZHlOVVovNU81RFB2M2JQeEZYNzlLQXJtZm9vMFErSENEcU5Kc1pYNFBBRlUKM245aksvS3JoNXQzeTc4RS8zd2k0R0RjNmJXQ05aT3JmZml1OENZVUR3S0NBUUFmNWdncEp4MDdHMzAzYk5vSQpOdmpDWHozTFRON3A0ejVlZ2IvMG13YWEwZFU4QmVhckIwZGFHaFY1V0NyNzBJSllVckdXVHJnMmtiOVdmSjdjCldDejBBWnR2K0lSVGhUVi9DVm43VnQ0YVFId1NSWnRIOG9HTFkvY1psZFBGVUQ2MjdFRmhvaGdBVTNKeHNualIKdzFoY0pEcFVQaUFPek5zSis0MkRDeHBJYVFCWm5pR241Y29nL0ZmSnhZM0thRDVXMkFMdm5aeEdzMWt2a3RiawpxMGNYQzNuc1RIWHQyeEp2bWU4T1J1OXQ5MEZDTmQ3K0VyaU9haXFpU1R1KzJHbTZmekJYYzJ6TXBtbmhsTjFwCjZQUGxsSXd4ZW9kRzJoT3BnWGNyN1dEOFFiRHVMMUNYWkxwWVdYcmdTc2NxbmcvNHVlcjBLZzlIUm9SSmU2S04KckkvcEFvSUJBUUNUaXFjei9hWjJVZlltSkVvRHJuTWh0aGlGQTlaMUJOMGN4ckZka2FHZUlNTFdhb0lNN0dWRQpsaFFuOGJxUnNvSGxjVzd3Sk4ra2hMbHJNQXAvYWppMjQ1NUt3WmIvQk5WdW1nQk4rNlBoSjZXeWJTODYzbTFPCkdMOFk2Mk1KaUhJRnRQM0J5c2
JJY0wrNndOQTNud3BxRnhaTnozSlJQak8zNGNoTkZoc2FIYlFVbnU1cjFGZGUKVEhhb0NPODdWTDZLUkM5ZnNnMTJ6SDhTSG85YS93V2g1M3R5bi8xUUxtRG1WRnBDRGg1YUZWNHA3RW4rckFxLwpURktkTVJIL1NTeGphUzR6b1ptNmJzZk1HV1dQZ24xaXgwZUZESjZ2djdYMGxNVmFjK0dvZFYvRDU3M2V2QWdrCm94UUdEOUtSODhOd1JhSUFMSmUxclFEN1R4REZ0bHdUQW9JQkFRQ1lTMVdmS1MwaXYxVENadHNQa3JtSHUvMDMKRjUvOHF5aFRnVCs1Sk1kU2hzTHJqbFQxSWlQTzdkVTBYdkxPcm1OTkVaS21oUGdnOVVuUXVnb0NOMFVROVJqYQoyVDMxNTBPQkJ3VG9zL0RxcmswWEswc3k0aERDaTBBRXRhUVNUalJBY1U4QWl3SlJCVzczb1RSb0tJRmVnMlYvCiszb3paaTAzdW1IY1NFU2JJWG1xM2doZjdtL0d4SkE0OGVNdVFrZUpWejJMc1dkRTRlUWxWNnVCQkZMZE1UZ3oKMzZJRGZKOUxGT1dHOHhBNUhmeWpUeS8xa2ptM283enltYjF6enZpVWxCVHRLcEtuQlJGSUthVDdSa0Z1bHE5aApuVHF1Nzh3VlRBbWJyZVJ6TStwRXhJUE5MK3k0UHJxUUQ5a2g0N1RBSVBOSFFUa3hQNnc2VkVyeEFnZWQKLS0tLS1FTkQgUlNBIFBSSVZBVEUgS0VZLS0tLS0K
    token: d41a70f246ac7ce46039981efa7cfcde63e43198c71c927b5f73edb8cc5f250bb84895f9d56742f89ca5dfda59bb0180fe16809a2b5eeb07a2f1a2c17500f3e2
\"\n \ }\n ]\n }" headers: cache-control: - no-cache content-length: - - '12996' + - '12980' content-type: - application/json date: - - Tue, 07 Jun 2022 18:03:30 GMT + - Tue, 18 Oct 2022 19:56:54 GMT expires: - '-1' pragma: @@ -2799,9 +3932,10 @@ interactions: Connection: - keep-alive ParameterSetName: - - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id + - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id --yes User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Kubernetes?api-version=2021-04-01 response: @@ -2810,21 +3944,21 @@ interactions: Europe","East US","West Central US","South Central US","Southeast Asia","UK South","East US 2","West US 2","Australia East","North Europe","France Central","Central US","West US","North Central US","Korea Central","Japan East","West US 3","East - Asia","East US 2 EUAP","Canada East","Canada Central"],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"SystemAssignedResourceIdentity, - SupportsTags, SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/operationStatuses","locations":["East + Asia","Canada Central","East US 2 EUAP","Canada East"],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"SystemAssignedResourceIdentity, + SupportsTags, SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/operationStatuses","locations":["East US 2 EUAP","West Europe","East US","West Central US","South Central US","Southeast Asia","UK South","East US 2","West US 2","Australia East","North Europe","France Central","Central US","West US","North Central US","Korea Central","Japan - East","East Asia","West US 3","Canada East","Canada Central"],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"registeredSubscriptions","locations":[],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"Operations","locations":[],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview","2019-11-01-preview","2019-09-01-privatepreview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + East","East Asia","West US 3","Canada East","Canada 
Central"],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"registeredSubscriptions","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"Operations","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview","2019-11-01-preview","2019-09-01-privatepreview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache content-length: - - '2311' + - '2416' content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 18:03:30 GMT + - Tue, 18 Oct 2022 19:56:55 GMT expires: - '-1' pragma: @@ -2850,9 +3984,10 @@ interactions: Connection: - keep-alive ParameterSetName: - - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id + - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id --yes User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration?api-version=2021-04-01 response: @@ -2862,38 +3997,62 @@ interactions: US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France Central","Central US","North Central US","West US","Korea Central","East Asia","Japan East","Canada East","Canada Central","Norway East","Germany West Central","Sweden - Central","Switzerland North","Australia Southeast","Central India","East US - 2 EUAP","Central US EUAP"],"apiVersions":["2022-03-01","2021-03-01","2020-10-01-preview","2020-07-01-preview","2019-11-01-preview"],"defaultApiVersion":"2022-03-01","capabilities":"SupportsExtension"},{"resourceType":"extensions","locations":["East + Central","Switzerland North","Australia Southeast","Central India","South + India","Japan West","Uk West","France South","Korea South","South Africa North","East + US 2 EUAP","Central US EUAP"],"apiVersions":["2022-07-01","2022-03-01","2021-03-01","2020-10-01-preview","2020-07-01-preview","2019-11-01-preview"],"defaultApiVersion":"2022-03-01","capabilities":"SupportsExtension"},{"resourceType":"extensions","locations":["East US","West Europe","West Central US","West US 2","West US 3","South Central US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France Central","Central US","North Central US","West US","Korea Central","East Asia","Japan East","Canada East","Canada Central","Norway East","Germany West Central","Sweden - Central","Switzerland North","Australia Southeast","Central India","East US - 2 EUAP","Central US EUAP"],"apiVersions":["2022-04-02-preview","2022-03-01","2021-09-01","2021-05-01-preview","2020-07-01-preview"],"defaultApiVersion":"2022-03-01","capabilities":"SystemAssignedResourceIdentity, + Central","Switzerland North","Australia Southeast","Central India","South + India","Japan West","Uk West","France South","Korea South","South Africa North","East + US 2 EUAP","Central US 
EUAP"],"apiVersions":["2022-07-01","2022-04-02-preview","2022-03-01","2021-09-01","2021-05-01-preview","2020-07-01-preview"],"defaultApiVersion":"2022-07-01","capabilities":"SystemAssignedResourceIdentity, SupportsExtension"},{"resourceType":"fluxConfigurations","locations":["East US","West Europe","West Central US","West US 2","West US 3","South Central US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France Central","Central US","North Central US","West US","Korea Central","East Asia","Japan East","Canada East","Canada Central","Norway East","Germany West Central","Sweden - Central","Switzerland North","Australia Southeast","Central India","East US - 2 EUAP","Central US EUAP"],"apiVersions":["2022-03-01","2022-01-01-preview","2021-11-01-preview","2021-06-01-preview"],"defaultApiVersion":"2022-03-01","capabilities":"SupportsExtension"},{"resourceType":"operations","locations":[],"apiVersions":["2022-03-01","2022-01-01-preview","2021-12-01-preview","2021-11-01-preview","2021-09-01","2021-06-01-preview","2021-05-01-preview","2021-03-01","2020-10-01-preview","2020-07-01-preview","2019-11-01-preview"],"capabilities":"None"},{"resourceType":"namespaces","locations":["East + Central","Switzerland North","Australia Southeast","Central India","South + India","Japan West","Uk West","Korea South","France South","South Africa North","East + US 2 EUAP","Central US EUAP"],"apiVersions":["2022-07-01","2022-03-01","2022-01-01-preview","2021-11-01-preview","2021-06-01-preview"],"defaultApiVersion":"2022-07-01","capabilities":"SupportsExtension"},{"resourceType":"operations","locations":[],"apiVersions":["2022-03-01","2022-01-01-preview","2021-12-01-preview","2021-11-01-preview","2021-09-01","2021-06-01-preview","2021-05-01-preview","2021-03-01","2020-10-01-preview","2020-07-01-preview","2019-11-01-preview"],"capabilities":"None"},{"resourceType":"privateLinkScopes","locations":["East + US","West Europe","West Central US","West US 2","West US 3","South Central + US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France + Central","Central US","North Central US","West US","Korea Central","East Asia","Japan + East","Canada East","Canada Central","Norway East","Germany West Central","Sweden + Central","Switzerland North","Australia Southeast","Central India","South + India","Japan West","Uk West","Korea South","France South","South Africa North","East + US 2 EUAP","Central US EUAP"],"apiVersions":["2022-04-02-preview"],"capabilities":"SupportsTags, + SupportsLocation"},{"resourceType":"privateLinkScopes/privateEndpointConnections","locations":["East + US","West Europe","West Central US","West US 2","West US 3","South Central + US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France + Central","Central US","North Central US","West US","Korea Central","East Asia","Japan + East","Canada East","Canada Central","Norway East","Germany West Central","Sweden + Central","Switzerland North","Australia Southeast","Central India","South + India","Japan West","Uk West","France South","Korea South","South Africa North","East + US 2 EUAP","Central US EUAP"],"apiVersions":["2022-04-02-preview"],"capabilities":"None"},{"resourceType":"privateLinkScopes/privateEndpointConnectionProxies","locations":["East + US","West Europe","West Central US","West US 2","West US 3","South Central + US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France + Central","Central US","North Central US","West US","Korea Central","East 
Asia","Japan + East","Canada East","Canada Central","Norway East","Germany West Central","Sweden + Central","Switzerland North","Australia Southeast","Central India","South + India","Japan West","Uk West","South Africa North","Korea South","France South","East + US 2 EUAP","Central US EUAP"],"apiVersions":["2022-04-02-preview"],"capabilities":"None"},{"resourceType":"namespaces","locations":["East US 2 EUAP","West US 2","East US","West Europe","West Central US","West US 3","South Central US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France Central","Central US","North Central US","West US","Korea Central","East - Asia","Japan East"],"apiVersions":["2021-12-01-preview"],"defaultApiVersion":"2021-12-01-preview","capabilities":"SupportsExtension"},{"resourceType":"privateLinkScopes","locations":["East - US 2 EUAP"],"apiVersions":["2022-04-02-preview"],"capabilities":"SupportsTags, - SupportsLocation"},{"resourceType":"privateLinkScopes/privateEndpointConnections","locations":["East - US 2 EUAP"],"apiVersions":["2022-04-02-preview"],"capabilities":"None"},{"resourceType":"privateLinkScopes/privateEndpointConnectionProxies","locations":["East - US 2 EUAP"],"apiVersions":["2022-04-02-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + Asia","Japan East","Canada Central","Canada East","Norway East","Germany West + Central","Switzerland North","Sweden Central","Central India","South India","Australia + Southeast","Japan West","Uk West","France South","Korea South","South Africa + North"],"apiVersions":["2021-12-01-preview"],"defaultApiVersion":"2021-12-01-preview","capabilities":"SupportsExtension"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache content-length: - - '4029' + - '6074' content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 18:03:30 GMT + - Tue, 18 Oct 2022 19:56:55 GMT expires: - '-1' pragma: @@ -2915,29 +4074,30 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://akkeshar-dns-08147f89.hcp.eastus2euap.azmk8s.io/apis/networking.k8s.io/v1/ + uri: https://tempaks-dns-0f8c9536.hcp.southcentralus.azmk8s.io/version/ response: body: - string: '{"kind":"APIResourceList","apiVersion":"v1","groupVersion":"networking.k8s.io/v1","resources":[{"name":"ingressclasses","singularName":"","namespaced":false,"kind":"IngressClass","verbs":["create","delete","deletecollection","get","list","patch","update","watch"],"storageVersionHash":"l/iqIbDgFyQ="},{"name":"ingresses","singularName":"","namespaced":true,"kind":"Ingress","verbs":["create","delete","deletecollection","get","list","patch","update","watch"],"shortNames":["ing"],"storageVersionHash":"39NQlfNR+bo="},{"name":"ingresses/status","singularName":"","namespaced":true,"kind":"Ingress","verbs":["get","patch","update"]},{"name":"networkpolicies","singularName":"","namespaced":true,"kind":"NetworkPolicy","verbs":["create","delete","deletecollection","get","list","patch","update","watch"],"shortNames":["netpol"],"storageVersionHash":"YpfwF18m1G8="}]} - - ' + string: "{\n \"major\": \"1\",\n \"minor\": \"24\",\n \"gitVersion\": \"v1.24.6\",\n + \ \"gitCommit\": \"b39bf148cd654599a52e867485c02c4f9d28b312\",\n \"gitTreeState\": + \"clean\",\n \"buildDate\": \"2022-09-21T21:46:51Z\",\n \"goVersion\": \"go1.18.6\",\n + \ \"compiler\": \"gc\",\n \"platform\": 
\"linux/amd64\"\n}" headers: audit-id: - - 300422a0-ba2b-4ef6-b253-6c3ce12b3395 + - 369feda7-ec34-41e6-8dc5-b7e44e1e37a7 cache-control: - no-cache, private content-length: - - '864' + - '263' content-type: - application/json date: - - Tue, 07 Jun 2022 18:03:33 GMT + - Tue, 18 Oct 2022 19:56:56 GMT x-kubernetes-pf-flowschema-uid: - - 08843a36-c7a9-489b-a782-1dc805dc9f54 + - 9c9284bf-ac50-497c-86a8-5f4f7d857b28 x-kubernetes-pf-prioritylevel-uid: - - e6f4d88d-c43f-4941-a57f-7f1230896bdc + - f041de6f-3328-46ec-b36d-f51c7cd89b61 status: code: 200 message: OK @@ -2949,69 +4109,71 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python - method: GET - uri: https://akkeshar-dns-08147f89.hcp.eastus2euap.azmk8s.io/api/v1/nodes - response: - body: - string: '{"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"42261839"},"items":[{"metadata":{"name":"aks-agentpool-40341301-vmss000000","uid":"d93ff350-d287-419b-ad99-41ca212bab81","resourceVersion":"42261806","creationTimestamp":"2022-01-13T13:58:08Z","labels":{"agentpool":"agentpool","beta.kubernetes.io/arch":"amd64","beta.kubernetes.io/instance-type":"Standard_B4ms","beta.kubernetes.io/os":"linux","failure-domain.beta.kubernetes.io/region":"eastus2euap","failure-domain.beta.kubernetes.io/zone":"0","kubernetes.azure.com/agentpool":"agentpool","kubernetes.azure.com/cluster":"MC_akkeshar_akkeshar_eastus2euap","kubernetes.azure.com/mode":"system","kubernetes.azure.com/node-image-version":"AKSUbuntu-1804gen2containerd-2022.01.07","kubernetes.azure.com/os-sku":"Ubuntu","kubernetes.azure.com/role":"agent","kubernetes.azure.com/storageprofile":"managed","kubernetes.azure.com/storagetier":"Premium_LRS","kubernetes.io/arch":"amd64","kubernetes.io/hostname":"aks-agentpool-40341301-vmss000000","kubernetes.io/os":"linux","kubernetes.io/role":"agent","node-role.kubernetes.io/agent":"","node.kubernetes.io/instance-type":"Standard_B4ms","storageprofile":"managed","storagetier":"Premium_LRS","topology.disk.csi.azure.com/zone":"","topology.kubernetes.io/region":"eastus2euap","topology.kubernetes.io/zone":"0"},"annotations":{"csi.volume.kubernetes.io/nodeid":"{\"disk.csi.azure.com\":\"aks-agentpool-40341301-vmss000000\",\"file.csi.azure.com\":\"aks-agentpool-40341301-vmss000000\"}","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubectl-label","operation":"Update","apiVersion":"v1","time":"2022-01-13T13:58:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{"f:kubernetes.io/role":{},"f:node-role.kubernetes.io/agent":{}}}}},{"manager":"node-problem-detector","operation":"Update","apiVersion":"v1","time":"2022-01-13T14:01:36Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{"k:{\"type\":\"ContainerRuntimeProblem\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FilesystemCorruptionProblem\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FreezeScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FrequentContainerdRestart\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FrequentDockerRestart\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"
f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FrequentKubeletRestart\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FrequentUnregisterNetDevice\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"KernelDeadlock\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"KubeletProblem\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"PreemptScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"ReadonlyFilesystem\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"RebootScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"RedeployScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"TerminateScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-05-24T22:30:36Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl":{}}},"f:spec":{"f:podCIDR":{},"f:podCIDRs":{".":{},"v:\"10.244.2.0/24\"":{}}},"f:status":{"f:conditions":{"k:{\"type\":\"NetworkUnavailable\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}}},{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2022-05-24T22:30:40Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:csi.volume.kubernetes.io/nodeid":{},"f:volumes.kubernetes.io/controller-managed-attach-detach":{}},"f:labels":{".":{},"f:agentpool":{},"f:beta.kubernetes.io/arch":{},"f:beta.kubernetes.io/instance-type":{},"f:beta.kubernetes.io/os":{},"f:failure-domain.beta.kubernetes.io/region":{},"f:failure-domain.beta.kubernetes.io/zone":{},"f:kubernetes.azure.com/agentpool":{},"f:kubernetes.azure.com/cluster":{},"f:kubernetes.azure.com/mode":{},"f:kubernetes.azure.com/node-image-version":{},"f:kubernetes.azure.com/os-sku":{},"f:kubernetes.azure.com/role":{},"f:kubernetes.azure.com/storageprofile":{},"f:kubernetes.azure.com/storagetier":{},"f:kubernetes.io/arch":{},"f:kubernetes.io/hostname":{},"f:kubernetes.io/os":{},"f:node.kubernetes.io/instance-type":{},"f:storageprofile":{},"f:storagetier":{},"f:topology.disk.csi.azure.com/zone":{},"f:topology.kubernetes.io/region":{},"f:topology.kubernetes.io/zone":{}}},"f:spec":{"f:providerID":{}},"f:status":{"f:allocatable":{"f:attachable-volumes-azure-disk":{},"f:ephemeral-storage":{},"f:memory":{}},"f:capacity":{"f:attachable-volumes-azure-disk":{},"f:memory":{}},"f:conditions":{"k:{\"type\":\"DiskPressure\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{}},"k:{\"type\":\"MemoryPressure\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{}},"k:{\"type\":\"PIDPressure\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:
status":{}},"k:{\"type\":\"Ready\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{}}},"f:images":{},"f:nodeInfo":{"f:bootID":{},"f:kernelVersion":{}}}}}]},"spec":{"podCIDR":"10.244.2.0/24","podCIDRs":["10.244.2.0/24"],"providerID":"azure:///subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mc_akkeshar_akkeshar_eastus2euap/providers/Microsoft.Compute/virtualMachineScaleSets/aks-agentpool-40341301-vmss/virtualMachines/0"},"status":{"capacity":{"attachable-volumes-azure-disk":"8","cpu":"4","ephemeral-storage":"129900528Ki","hugepages-1Gi":"0","hugepages-2Mi":"0","memory":"16393292Ki","pods":"110"},"allocatable":{"attachable-volumes-azure-disk":"8","cpu":"3860m","ephemeral-storage":"119716326407","hugepages-1Gi":"0","hugepages-2Mi":"0","memory":"12899404Ki","pods":"110"},"conditions":[{"type":"PreemptScheduled","status":"False","lastHeartbeatTime":"2022-06-07T18:01:36Z","lastTransitionTime":"2022-06-03T09:50:27Z","reason":"NoPreemptScheduled","message":"VM - has no scheduled Preempt event"},{"type":"FreezeScheduled","status":"False","lastHeartbeatTime":"2022-06-07T18:01:36Z","lastTransitionTime":"2022-06-04T14:42:30Z","reason":"NoFreezeScheduled","message":"VM - has no scheduled Freeze event"},{"type":"KernelDeadlock","status":"False","lastHeartbeatTime":"2022-06-07T18:01:36Z","lastTransitionTime":"2022-04-25T20:02:23Z","reason":"KernelHasNoDeadlock","message":"kernel - has no deadlock"},{"type":"KubeletProblem","status":"False","lastHeartbeatTime":"2022-06-07T18:01:36Z","lastTransitionTime":"2022-04-25T20:02:53Z","reason":"KubeletIsUp","message":"kubelet - service is up"},{"type":"FrequentKubeletRestart","status":"False","lastHeartbeatTime":"2022-06-07T18:01:36Z","lastTransitionTime":"2022-04-25T20:02:23Z","reason":"NoFrequentKubeletRestart","message":"kubelet - is functioning properly"},{"type":"ReadonlyFilesystem","status":"False","lastHeartbeatTime":"2022-06-07T18:01:36Z","lastTransitionTime":"2022-04-25T20:02:23Z","reason":"FilesystemIsNotReadOnly","message":"Filesystem - is not read-only"},{"type":"FrequentDockerRestart","status":"False","lastHeartbeatTime":"2022-06-07T18:01:36Z","lastTransitionTime":"2022-04-25T20:02:23Z","reason":"NoFrequentDockerRestart","message":"docker - is functioning properly"},{"type":"RebootScheduled","status":"False","lastHeartbeatTime":"2022-06-07T18:01:36Z","lastTransitionTime":"2022-06-03T09:50:29Z","reason":"NoRebootScheduled","message":"VM - has no scheduled Reboot event"},{"type":"ContainerRuntimeProblem","status":"False","lastHeartbeatTime":"2022-06-07T18:01:36Z","lastTransitionTime":"2022-04-25T20:02:23Z","reason":"ContainerRuntimeIsUp","message":"container - runtime service is up"},{"type":"FrequentUnregisterNetDevice","status":"False","lastHeartbeatTime":"2022-06-07T18:01:36Z","lastTransitionTime":"2022-04-25T20:02:23Z","reason":"NoFrequentUnregisterNetDevice","message":"node - is functioning properly"},{"type":"FilesystemCorruptionProblem","status":"False","lastHeartbeatTime":"2022-06-07T18:01:36Z","lastTransitionTime":"2022-04-25T20:02:23Z","reason":"FilesystemIsOK","message":"Filesystem - is healthy"},{"type":"TerminateScheduled","status":"False","lastHeartbeatTime":"2022-06-07T18:01:36Z","lastTransitionTime":"2022-06-03T09:50:27Z","reason":"NoTerminateScheduled","message":"VM - has no scheduled Terminate 
event"},{"type":"FrequentContainerdRestart","status":"False","lastHeartbeatTime":"2022-06-07T18:01:36Z","lastTransitionTime":"2022-04-25T20:02:23Z","reason":"NoFrequentContainerdRestart","message":"containerd - is functioning properly"},{"type":"RedeployScheduled","status":"False","lastHeartbeatTime":"2022-06-07T18:01:36Z","lastTransitionTime":"2022-06-03T09:50:28Z","reason":"NoRedeployScheduled","message":"VM - has no scheduled Redeploy event"},{"type":"NetworkUnavailable","status":"False","lastHeartbeatTime":"2022-01-13T13:59:11Z","lastTransitionTime":"2022-01-13T13:59:11Z","reason":"RouteCreated","message":"RouteController - created a route"},{"type":"MemoryPressure","status":"False","lastHeartbeatTime":"2022-06-07T18:03:25Z","lastTransitionTime":"2022-05-24T22:30:40Z","reason":"KubeletHasSufficientMemory","message":"kubelet - has sufficient memory available"},{"type":"DiskPressure","status":"False","lastHeartbeatTime":"2022-06-07T18:03:25Z","lastTransitionTime":"2022-05-24T22:30:40Z","reason":"KubeletHasNoDiskPressure","message":"kubelet - has no disk pressure"},{"type":"PIDPressure","status":"False","lastHeartbeatTime":"2022-06-07T18:03:25Z","lastTransitionTime":"2022-05-24T22:30:40Z","reason":"KubeletHasSufficientPID","message":"kubelet - has sufficient PID available"},{"type":"Ready","status":"True","lastHeartbeatTime":"2022-06-07T18:03:25Z","lastTransitionTime":"2022-05-24T22:30:40Z","reason":"KubeletReady","message":"kubelet - is posting ready status. AppArmor enabled"}],"addresses":[{"type":"Hostname","address":"aks-agentpool-40341301-vmss000000"},{"type":"InternalIP","address":"10.240.0.4"}],"daemonEndpoints":{"kubeletEndpoint":{"Port":10250}},"nodeInfo":{"machineID":"f23b0be567cf4ac9bbb09642cae7ba1d","systemUUID":"163e0b8b-a136-42a0-ab05-593dc48ac32f","bootID":"81b6db8e-4025-44e9-9cac-ef5167d777b7","kernelVersion":"5.4.0-1077-azure","osImage":"Ubuntu - 18.04.6 
LTS","containerRuntimeVersion":"containerd://1.4.9+azure","kubeletVersion":"v1.21.7","kubeProxyVersion":"v1.21.7","operatingSystem":"linux","architecture":"amd64"},"images":[{"names":["arck8sconformance.azurecr.io/arck8sconformance/platform@sha256:21ee537e2fb0e941e76c290fabae22cfdbf1ccc221ed16a544a28053a045526f","arck8sconformance.azurecr.io/arck8sconformance/platform:0.1.5"],"sizeBytes":585230961},{"names":["arck8sconformance.azurecr.io/arck8sconformance/agentcleanup@sha256:566ba1079509cbfdcc0c424b2f5efd91244a903f561bb0acefa3f800a8d7d26a","arck8sconformance.azurecr.io/arck8sconformance/agentcleanup:0.1.0"],"sizeBytes":416596253},{"names":["arck8sconformance.azurecr.io/samples/demo@sha256:d3e4626750d487861d95121319c15506a6a16fde5ed8212001464d0cdfe9d507","arck8sconformance.azurecr.io/samples/demo:v0.1.0"],"sizeBytes":347305950},{"names":["mcr.microsoft.com/azuremonitor/containerinsights/ciprod:ciprod10132021"],"sizeBytes":331573875},{"names":["devconformance.azurecr.io/platform@sha256:6b941358914a9f7ccd35a8d3b6c18fb3c966a1f6c167b05112bca750a0db7488","devconformance.azurecr.io/platform:v3"],"sizeBytes":323251482},{"names":["devconformance.azurecr.io/platform@sha256:72669df54f99b7a458eef7fc51e01b71f374d183504361d41c48a97e174f5156","devconformance.azurecr.io/platform:v2"],"sizeBytes":323129910},{"names":["arck8sconformance.azurecr.io/arck8sconformance/clusterconnect@sha256:75657e3a8662d5d9605fb05888f19e3e286768441dd984669ac9d4d620a7dedb","arck8sconformance.azurecr.io/arck8sconformance/clusterconnect:0.1.6"],"sizeBytes":288092397},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:0.49.3"],"sizeBytes":287741913},{"names":["arck8sconformance.azurecr.io/arck8sconformance/clusterconnect@sha256:24aca0a85bab7c5e7d9ae36ed9e7ae03be31aa6dfa15c3716e859e133015bb5b","arck8sconformance.azurecr.io/arck8sconformance/clusterconnect:0.1.5"],"sizeBytes":287662629},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:1.0.4"],"sizeBytes":287652512},{"names":["mcr.microsoft.com/oss/kubernetes/dashboard:v2.4.0"],"sizeBytes":224434239},{"names":["mcr.microsoft.com/oss/calico/node:v3.21.2"],"sizeBytes":218251246},{"names":["mcr.microsoft.com/oss/calico/node:v3.21.0"],"sizeBytes":192425455},{"names":["mcr.microsoft.com/oss/tigera/operator:v1.23.3"],"sizeBytes":187302206},{"names":["mcr.microsoft.com/oss/tigera/operator:v1.23.1"],"sizeBytes":187298621},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:0.19.0"],"sizeBytes":166352383},{"names":["mcr.microsoft.com/oss/cilium/cilium:1.10.3.3"],"sizeBytes":155490598},{"names":["mcr.microsoft.com/oss/cilium/cilium:1.10.5"],"sizeBytes":149643815},{"names":["mcr.microsoft.com/aks/hcp/hcp-tunnel-front:master.211104.1"],"sizeBytes":149514464},{"names":["mcr.microsoft.com/aks/hcp/hcp-tunnel-front:master.211013.1"],"sizeBytes":149493900},{"names":["mcr.microsoft.com/oss/calico/typha:v3.21.2"],"sizeBytes":129053019},{"names":["mcr.microsoft.com/oss/calico/typha:v3.21.0"],"sizeBytes":129012567},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy@sha256:167efdfcc4acc5e701d30ec02dfdbb79d2e0c700a479b3a064e44018a81e42ee","mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.21.7-hotfix.20220420.1"],"sizeBytes":114188361},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy@sha256:51a38b152239634fdcd4bf204219f33052799b8eea38750f90adde102fb81543","mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.21.7-hotfix.20220330.2"],"sizeBytes":114082872},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy@sha256:71fa8a1035ec0
7b17bf06fc1e1a81caf766168462cfea25eca321e758c98f3a7","mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.21.7-hotfix.20220310.1"],"sizeBytes":107270314},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi@sha256:a98a3496233207d97b546d855835f075f2fca6d28867048475a8f0f1a3bc0c01","mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.16.0"],"sizeBytes":107139720},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi@sha256:91971fff7385e4750583b9a6f0b6695500b5f27fbb7c2537fcbae1e0851bef73","mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.15.0"],"sizeBytes":107050187},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.8.0.1"],"sizeBytes":106828430},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy@sha256:652b7ef6952f2216c201ed2ac734f9d478b78bd5e758825b7c2491dbd06a3a2c","mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.21.7"],"sizeBytes":105353130},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy@sha256:438bf62e7fcf507fb876cbf4c5c18026d2b89387d823e2f1127777f4798b170d","mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.21.7-hotfix.20220204"],"sizeBytes":105352620},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy@sha256:acde96e8226b650f91eb8c4ff046143f68694e7f9a1ce721db6675e94b923f8d","mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.21.7-hotfix.20220130"],"sizeBytes":105352620},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:1.0.5"],"sizeBytes":103502273},{"names":["mcr.microsoft.com/oss/calico/node:v3.8.9.5"],"sizeBytes":101794833},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi@sha256:423eb6cf602c064c8b2deefead5ceadd6324ed41b3d995dab5d0f6f0f4d4710f","mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.10.0"],"sizeBytes":100891776},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi@sha256:049c2d0e56212e90bae8898fd6f2d8acbb71767a61101ee17ee606065cdc3468","mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.14.0"],"sizeBytes":100400591},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.2.0.6"],"sizeBytes":100397012},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.9.0"],"sizeBytes":99726350},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi@sha256:7d8d8c5dd6b95a672fa81fa91f6196e163d30ea7ec372f9d9fc946c81c51508a","mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.13.0"],"sizeBytes":98712192},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi@sha256:37b93243e548467a1e96c5287e38fb747b989a4f65103505d30503de47f58ff9","mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.12.0"],"sizeBytes":98693402},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi@sha256:704e723596b0421e2cf6ad0194a40136256ac6fb4659cee683331f47e055e166","mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.11.0"],"sizeBytes":98680012},{"names":["mcr.microsoft.com/oss/fluxcd/flux@sha256:eaeb1920dc666efb07cd2c7c046109dfa301760510992f61581500643820074b","mcr.microsoft.com/oss/fluxcd/flux:1.21.2"],"sizeBytes":98617286},{"names":["mcr.microsoft.com/oss/cilium/operator:1.10.3"],"sizeBytes":98395697},{"names":["mcr.microsoft.com/aks/hcp/tunnel-openvpn:master.210623.2"],"sizeBytes":96125176},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi@sha256:ed06ff12566e4bc41b5392fb07a9c65f5b8571844e07f74544a022b5220345f6","mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.15.0"],"sizeBytes":95462502},{"names":["mcr.microsoft.com/oss/kubernetes/exechealthz:1.2_v0.0.5"],"sizeBytes":94348102},{"names":["mcr.microsoft.com/aks/acc/sgx-attestation:2.0"],"sizeBytes":91841669},
{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi@sha256:9e2ecabcf9dd9943e6600eb9fb460f45b4dc61af7cabe95d115082a029db2aaf","mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.9.0"],"sizeBytes":89210341},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.2.0"],"sizeBytes":89103171},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi@sha256:e2c22c5bda7501ec23753b5afedcc3d2e7cfcf1b443eb0f75e8998f9084a5c6c","mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.13.0"],"sizeBytes":88689791},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.8.0"],"sizeBytes":88364750}]}},{"metadata":{"name":"aks-agentpool-40341301-vmss000001","uid":"98bc447c-d33a-44fe-8e15-320f78186ce9","resourceVersion":"42261278","creationTimestamp":"2022-01-13T13:57:58Z","labels":{"agentpool":"agentpool","beta.kubernetes.io/arch":"amd64","beta.kubernetes.io/instance-type":"Standard_B4ms","beta.kubernetes.io/os":"linux","failure-domain.beta.kubernetes.io/region":"eastus2euap","failure-domain.beta.kubernetes.io/zone":"0","kubernetes.azure.com/agentpool":"agentpool","kubernetes.azure.com/cluster":"MC_akkeshar_akkeshar_eastus2euap","kubernetes.azure.com/mode":"system","kubernetes.azure.com/node-image-version":"AKSUbuntu-1804gen2containerd-2022.01.07","kubernetes.azure.com/os-sku":"Ubuntu","kubernetes.azure.com/role":"agent","kubernetes.azure.com/storageprofile":"managed","kubernetes.azure.com/storagetier":"Premium_LRS","kubernetes.io/arch":"amd64","kubernetes.io/hostname":"aks-agentpool-40341301-vmss000001","kubernetes.io/os":"linux","kubernetes.io/role":"agent","node-role.kubernetes.io/agent":"","node.kubernetes.io/instance-type":"Standard_B4ms","storageprofile":"managed","storagetier":"Premium_LRS","topology.disk.csi.azure.com/zone":"","topology.kubernetes.io/region":"eastus2euap","topology.kubernetes.io/zone":"0"},"annotations":{"csi.volume.kubernetes.io/nodeid":"{\"disk.csi.azure.com\":\"aks-agentpool-40341301-vmss000001\",\"file.csi.azure.com\":\"aks-agentpool-40341301-vmss000001\"}","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubectl-label","operation":"Update","apiVersion":"v1","time":"2022-01-13T13:58:05Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{"f:kubernetes.io/role":{},"f:node-role.kubernetes.io/agent":{}}}}},{"manager":"node-problem-detector","operation":"Update","apiVersion":"v1","time":"2022-01-13T14:01:34Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{"k:{\"type\":\"ContainerRuntimeProblem\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FilesystemCorruptionProblem\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FreezeScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FrequentContainerdRestart\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FrequentDockerRestart\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FrequentKubeletRestart\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FrequentUnregisterNetDevice\"}":{".":{},"f:lastHea
rtbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"KernelDeadlock\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"KubeletProblem\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"PreemptScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"ReadonlyFilesystem\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"RebootScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"RedeployScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"TerminateScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-05-24T22:30:45Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl":{}}},"f:spec":{"f:podCIDR":{},"f:podCIDRs":{".":{},"v:\"10.244.0.0/24\"":{}}},"f:status":{"f:conditions":{"k:{\"type\":\"NetworkUnavailable\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}}},{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2022-05-24T22:30:50Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:csi.volume.kubernetes.io/nodeid":{},"f:volumes.kubernetes.io/controller-managed-attach-detach":{}},"f:labels":{".":{},"f:agentpool":{},"f:beta.kubernetes.io/arch":{},"f:beta.kubernetes.io/instance-type":{},"f:beta.kubernetes.io/os":{},"f:failure-domain.beta.kubernetes.io/region":{},"f:failure-domain.beta.kubernetes.io/zone":{},"f:kubernetes.azure.com/agentpool":{},"f:kubernetes.azure.com/cluster":{},"f:kubernetes.azure.com/mode":{},"f:kubernetes.azure.com/node-image-version":{},"f:kubernetes.azure.com/os-sku":{},"f:kubernetes.azure.com/role":{},"f:kubernetes.azure.com/storageprofile":{},"f:kubernetes.azure.com/storagetier":{},"f:kubernetes.io/arch":{},"f:kubernetes.io/hostname":{},"f:kubernetes.io/os":{},"f:node.kubernetes.io/instance-type":{},"f:storageprofile":{},"f:storagetier":{},"f:topology.disk.csi.azure.com/zone":{},"f:topology.kubernetes.io/region":{},"f:topology.kubernetes.io/zone":{}}},"f:spec":{"f:providerID":{}},"f:status":{"f:allocatable":{"f:attachable-volumes-azure-disk":{},"f:memory":{}},"f:capacity":{"f:attachable-volumes-azure-disk":{},"f:memory":{}},"f:conditions":{"k:{\"type\":\"DiskPressure\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{}},"k:{\"type\":\"MemoryPressure\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{}},"k:{\"type\":\"PIDPressure\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{}},"k:{\"type\":\"Ready\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{}}},"f:images":{},"f:nodeInfo":{"f:bootID":{},"f:kernelVersion":{}}}}}]},"spec":{"podCIDR":"10.244.0.0/24","podCIDRs":["10.244.0.0/24"],"providerID":"azure:/
//subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mc_akkeshar_akkeshar_eastus2euap/providers/Microsoft.Compute/virtualMachineScaleSets/aks-agentpool-40341301-vmss/virtualMachines/1"},"status":{"capacity":{"attachable-volumes-azure-disk":"8","cpu":"4","ephemeral-storage":"129900528Ki","hugepages-1Gi":"0","hugepages-2Mi":"0","memory":"16393288Ki","pods":"110"},"allocatable":{"attachable-volumes-azure-disk":"8","cpu":"3860m","ephemeral-storage":"119716326407","hugepages-1Gi":"0","hugepages-2Mi":"0","memory":"12899400Ki","pods":"110"},"conditions":[{"type":"FreezeScheduled","status":"False","lastHeartbeatTime":"2022-06-07T17:59:58Z","lastTransitionTime":"2022-06-04T17:16:26Z","reason":"NoFreezeScheduled","message":"VM - has no scheduled Freeze event"},{"type":"RedeployScheduled","status":"False","lastHeartbeatTime":"2022-06-07T17:59:58Z","lastTransitionTime":"2022-05-14T01:27:23Z","reason":"NoRedeployScheduled","message":"VM - has no scheduled Redeploy event"},{"type":"RebootScheduled","status":"False","lastHeartbeatTime":"2022-06-07T17:59:58Z","lastTransitionTime":"2022-05-14T01:27:23Z","reason":"NoRebootScheduled","message":"VM - has no scheduled Reboot event"},{"type":"FrequentDockerRestart","status":"False","lastHeartbeatTime":"2022-06-07T17:59:58Z","lastTransitionTime":"2022-05-14T01:27:23Z","reason":"NoFrequentDockerRestart","message":"docker - is functioning properly"},{"type":"KernelDeadlock","status":"False","lastHeartbeatTime":"2022-06-07T17:59:58Z","lastTransitionTime":"2022-05-14T01:27:23Z","reason":"KernelHasNoDeadlock","message":"kernel - has no deadlock"},{"type":"FrequentContainerdRestart","status":"False","lastHeartbeatTime":"2022-06-07T17:59:58Z","lastTransitionTime":"2022-05-14T01:27:23Z","reason":"NoFrequentContainerdRestart","message":"containerd - is functioning properly"},{"type":"KubeletProblem","status":"False","lastHeartbeatTime":"2022-06-07T17:59:58Z","lastTransitionTime":"2022-05-14T01:27:53Z","reason":"KubeletIsUp","message":"kubelet - service is up"},{"type":"ReadonlyFilesystem","status":"False","lastHeartbeatTime":"2022-06-07T17:59:58Z","lastTransitionTime":"2022-05-14T01:27:23Z","reason":"FilesystemIsNotReadOnly","message":"Filesystem - is not read-only"},{"type":"FrequentUnregisterNetDevice","status":"False","lastHeartbeatTime":"2022-06-07T17:59:58Z","lastTransitionTime":"2022-05-14T01:27:23Z","reason":"NoFrequentUnregisterNetDevice","message":"node - is functioning properly"},{"type":"FilesystemCorruptionProblem","status":"False","lastHeartbeatTime":"2022-06-07T17:59:58Z","lastTransitionTime":"2022-05-14T01:27:23Z","reason":"FilesystemIsOK","message":"Filesystem - is healthy"},{"type":"TerminateScheduled","status":"False","lastHeartbeatTime":"2022-06-07T17:59:58Z","lastTransitionTime":"2022-05-14T01:27:23Z","reason":"NoTerminateScheduled","message":"VM - has no scheduled Terminate event"},{"type":"FrequentKubeletRestart","status":"False","lastHeartbeatTime":"2022-06-07T17:59:58Z","lastTransitionTime":"2022-05-14T01:27:23Z","reason":"NoFrequentKubeletRestart","message":"kubelet - is functioning properly"},{"type":"ContainerRuntimeProblem","status":"False","lastHeartbeatTime":"2022-06-07T17:59:58Z","lastTransitionTime":"2022-05-14T01:27:23Z","reason":"ContainerRuntimeIsUp","message":"container - runtime service is up"},{"type":"PreemptScheduled","status":"False","lastHeartbeatTime":"2022-06-07T17:59:58Z","lastTransitionTime":"2022-06-03T10:52:53Z","reason":"NoPreemptScheduled","message":"VM - has no scheduled Preempt 
event"},{"type":"NetworkUnavailable","status":"False","lastHeartbeatTime":"2022-01-13T13:58:41Z","lastTransitionTime":"2022-01-13T13:58:41Z","reason":"RouteCreated","message":"RouteController - created a route"},{"type":"MemoryPressure","status":"False","lastHeartbeatTime":"2022-06-07T18:00:44Z","lastTransitionTime":"2022-05-24T22:30:50Z","reason":"KubeletHasSufficientMemory","message":"kubelet - has sufficient memory available"},{"type":"DiskPressure","status":"False","lastHeartbeatTime":"2022-06-07T18:00:44Z","lastTransitionTime":"2022-05-24T22:30:50Z","reason":"KubeletHasNoDiskPressure","message":"kubelet - has no disk pressure"},{"type":"PIDPressure","status":"False","lastHeartbeatTime":"2022-06-07T18:00:44Z","lastTransitionTime":"2022-05-24T22:30:50Z","reason":"KubeletHasSufficientPID","message":"kubelet - has sufficient PID available"},{"type":"Ready","status":"True","lastHeartbeatTime":"2022-06-07T18:00:44Z","lastTransitionTime":"2022-05-24T22:30:50Z","reason":"KubeletReady","message":"kubelet - is posting ready status. AppArmor enabled"}],"addresses":[{"type":"Hostname","address":"aks-agentpool-40341301-vmss000001"},{"type":"InternalIP","address":"10.240.0.5"}],"daemonEndpoints":{"kubeletEndpoint":{"Port":10250}},"nodeInfo":{"machineID":"68c68ce60961449592a49b380dc3cdb5","systemUUID":"93349a55-c2d8-4ca4-817f-429444dbd221","bootID":"69e1667e-3dc3-4f1f-a922-1f045ea546ce","kernelVersion":"5.4.0-1078-azure","osImage":"Ubuntu - 18.04.6 LTS","containerRuntimeVersion":"containerd://1.4.9+azure","kubeletVersion":"v1.21.7","kubeProxyVersion":"v1.21.7","operatingSystem":"linux","architecture":"amd64"},"images":[{"names":["arck8sconformance.azurecr.io/samples/demo@sha256:d3e4626750d487861d95121319c15506a6a16fde5ed8212001464d0cdfe9d507","arck8sconformance.azurecr.io/samples/demo:v0.1.0"],"sizeBytes":347305950},{"names":["mcr.microsoft.com/azuremonitor/containerinsights/ciprod:ciprod10132021"],"sizeBytes":331573875},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:0.49.3"],"sizeBytes":287741913},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:1.0.4"],"sizeBytes":287652512},{"names":["mcr.microsoft.com/oss/kubernetes/dashboard:v2.4.0"],"sizeBytes":224434239},{"names":["mcr.microsoft.com/oss/calico/node:v3.21.2"],"sizeBytes":218251246},{"names":["mcr.microsoft.com/oss/calico/node:v3.21.0"],"sizeBytes":192425455},{"names":["mcr.microsoft.com/oss/tigera/operator:v1.23.3"],"sizeBytes":187302206},{"names":["mcr.microsoft.com/oss/tigera/operator:v1.23.1"],"sizeBytes":187298621},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:0.19.0"],"sizeBytes":166352383},{"names":["mcr.microsoft.com/oss/cilium/cilium:1.10.3.3"],"sizeBytes":155490598},{"names":["mcr.microsoft.com/oss/cilium/cilium:1.10.5"],"sizeBytes":149643815},{"names":["mcr.microsoft.com/aks/hcp/hcp-tunnel-front:master.211104.1"],"sizeBytes":149514464},{"names":["mcr.microsoft.com/aks/hcp/hcp-tunnel-front:master.211013.1"],"sizeBytes":149493900},{"names":["mcr.microsoft.com/oss/calico/typha:v3.21.2"],"sizeBytes":129053019},{"names":["mcr.microsoft.com/oss/calico/typha:v3.21.0"],"sizeBytes":129012567},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy@sha256:167efdfcc4acc5e701d30ec02dfdbb79d2e0c700a479b3a064e44018a81e42ee","mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.21.7-hotfix.20220420.1"],"sizeBytes":114188361},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy@sha256:51a38b152239634fdcd4bf204219f33052799b8eea38750f90adde102fb81543","mcr.microsoft.com
/oss/kubernetes/kube-proxy:v1.21.7-hotfix.20220330.2"],"sizeBytes":114082872},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy@sha256:71fa8a1035ec07b17bf06fc1e1a81caf766168462cfea25eca321e758c98f3a7","mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.21.7-hotfix.20220310.1"],"sizeBytes":107270314},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi@sha256:a98a3496233207d97b546d855835f075f2fca6d28867048475a8f0f1a3bc0c01","mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.16.0"],"sizeBytes":107139720},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi@sha256:91971fff7385e4750583b9a6f0b6695500b5f27fbb7c2537fcbae1e0851bef73","mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.15.0"],"sizeBytes":107050187},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.8.0.1"],"sizeBytes":106828430},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy@sha256:652b7ef6952f2216c201ed2ac734f9d478b78bd5e758825b7c2491dbd06a3a2c","mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.21.7"],"sizeBytes":105353130},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy@sha256:438bf62e7fcf507fb876cbf4c5c18026d2b89387d823e2f1127777f4798b170d","mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.21.7-hotfix.20220204"],"sizeBytes":105352620},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy@sha256:acde96e8226b650f91eb8c4ff046143f68694e7f9a1ce721db6675e94b923f8d","mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.21.7-hotfix.20220130"],"sizeBytes":105352620},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:1.0.5"],"sizeBytes":103502273},{"names":["mcr.microsoft.com/oss/calico/node:v3.8.9.5"],"sizeBytes":101794833},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi@sha256:423eb6cf602c064c8b2deefead5ceadd6324ed41b3d995dab5d0f6f0f4d4710f","mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.10.0"],"sizeBytes":100891776},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi@sha256:049c2d0e56212e90bae8898fd6f2d8acbb71767a61101ee17ee606065cdc3468","mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.14.0"],"sizeBytes":100400591},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.2.0.6"],"sizeBytes":100397012},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.9.0"],"sizeBytes":99726350},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi@sha256:7d8d8c5dd6b95a672fa81fa91f6196e163d30ea7ec372f9d9fc946c81c51508a","mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.13.0"],"sizeBytes":98712192},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi@sha256:37b93243e548467a1e96c5287e38fb747b989a4f65103505d30503de47f58ff9","mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.12.0"],"sizeBytes":98693402},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi@sha256:704e723596b0421e2cf6ad0194a40136256ac6fb4659cee683331f47e055e166","mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.11.0"],"sizeBytes":98680012},{"names":["mcr.microsoft.com/oss/cilium/operator:1.10.3"],"sizeBytes":98395697},{"names":["mcr.microsoft.com/aks/hcp/tunnel-openvpn:master.210623.2"],"sizeBytes":96125176},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi@sha256:ed06ff12566e4bc41b5392fb07a9c65f5b8571844e07f74544a022b5220345f6","mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.15.0"],"sizeBytes":95462502},{"names":["mcr.microsoft.com/oss/kubernetes/exechealthz:1.2_v0.0.5"],"sizeBytes":94348102},{"names":["mcr.microsoft.com/aks/acc/sgx-attestation:2.0"],"sizeBytes":91841669},{"names":["mcr.microsoft.com/os
s/kubernetes-csi/azurefile-csi@sha256:9e2ecabcf9dd9943e6600eb9fb460f45b4dc61af7cabe95d115082a029db2aaf","mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.9.0"],"sizeBytes":89210341},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.2.0"],"sizeBytes":89103171},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi@sha256:e2c22c5bda7501ec23753b5afedcc3d2e7cfcf1b443eb0f75e8998f9084a5c6c","mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.13.0"],"sizeBytes":88689791},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.8.0"],"sizeBytes":88364750},{"names":["mcr.microsoft.com/azure-application-gateway/kubernetes-ingress:1.5.0-rc1"],"sizeBytes":88039673},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.7.0"],"sizeBytes":87489567},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi@sha256:42a10ba5bee0900c3c4d9c85439eb52c664cb23045cadb1804d289f3fdafb46e","mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.12.0"],"sizeBytes":86984035},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi@sha256:e64dcf287286db2388e2a9af2f7a6e307fd34e6dbf201e05368112004a178ed3","mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.11.0"],"sizeBytes":86960638},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi@sha256:51a9443246d8aa2778a51352fa8321c264c0bc71db1bfecb94abd59fcbc866a5","mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.10.0"],"sizeBytes":86953572},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi@sha256:bee5ea3684be60088e97191f67300aa987a8eac403a80f6a205852ed21f81590","mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.18.0"],"sizeBytes":85633800},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi@sha256:03c648dca5092cb489b68be03ec054ac86654148784ae3636518c81ed3460207","mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.17.0"],"sizeBytes":85541532}]}}]} + - OpenAPI-Generator/24.2.0/python + method: GET + uri: https://tempaks-dns-0f8c9536.hcp.southcentralus.azmk8s.io/api/v1/nodes + response: + body: + string: 
'{"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"2976771"},"items":[{"metadata":{"name":"aks-agentpool-35222091-vmss000000","uid":"ead90674-ebc8-4237-8b11-cba2c3f902c1","resourceVersion":"2976674","creationTimestamp":"2022-10-10T04:01:39Z","labels":{"agentpool":"agentpool","beta.kubernetes.io/arch":"amd64","beta.kubernetes.io/instance-type":"Standard_B4ms","beta.kubernetes.io/os":"linux","failure-domain.beta.kubernetes.io/region":"southcentralus","failure-domain.beta.kubernetes.io/zone":"0","kubernetes.azure.com/agentpool":"agentpool","kubernetes.azure.com/cluster":"MC_akkeshar_tempaks_southcentralus","kubernetes.azure.com/kubelet-identity-client-id":"a7082775-1b69-4810-99a4-7ddaeac55b5b","kubernetes.azure.com/mode":"system","kubernetes.azure.com/node-image-version":"AKSUbuntu-1804gen2containerd-2022.09.22","kubernetes.azure.com/os-sku":"Ubuntu","kubernetes.azure.com/role":"agent","kubernetes.azure.com/storageprofile":"managed","kubernetes.azure.com/storagetier":"Premium_LRS","kubernetes.io/arch":"amd64","kubernetes.io/hostname":"aks-agentpool-35222091-vmss000000","kubernetes.io/os":"linux","kubernetes.io/role":"agent","node-role.kubernetes.io/agent":"","node.kubernetes.io/instance-type":"Standard_B4ms","storageprofile":"managed","storagetier":"Premium_LRS","topology.disk.csi.azure.com/zone":"","topology.kubernetes.io/region":"southcentralus","topology.kubernetes.io/zone":"0"},"annotations":{"csi.volume.kubernetes.io/nodeid":"{\"disk.csi.azure.com\":\"aks-agentpool-35222091-vmss000000\",\"file.csi.azure.com\":\"aks-agentpool-35222091-vmss000000\"}","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"cloud-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-10T04:01:39Z","fieldsType":"FieldsV1","fieldsV1":{"f:spec":{"f:podCIDR":{},"f:podCIDRs":{".":{},"v:\"10.244.0.0/24\"":{}}}}},{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2022-10-10T04:01:39Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:volumes.kubernetes.io/controller-managed-attach-detach":{}},"f:labels":{".":{},"f:agentpool":{},"f:beta.kubernetes.io/arch":{},"f:beta.kubernetes.io/os":{},"f:kubernetes.azure.com/agentpool":{},"f:kubernetes.azure.com/cluster":{},"f:kubernetes.azure.com/kubelet-identity-client-id":{},"f:kubernetes.azure.com/mode":{},"f:kubernetes.azure.com/node-image-version":{},"f:kubernetes.azure.com/os-sku":{},"f:kubernetes.azure.com/role":{},"f:kubernetes.azure.com/storageprofile":{},"f:kubernetes.azure.com/storagetier":{},"f:kubernetes.io/arch":{},"f:kubernetes.io/hostname":{},"f:kubernetes.io/os":{},"f:storageprofile":{},"f:storagetier":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-10T04:01:40Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl":{}}}}},{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2022-10-10T04:02:00Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:csi.volume.kubernetes.io/nodeid":{}},"f:labels":{"f:topology.disk.csi.azure.com/zone":{}}},"f:status":{"f:conditions":{"k:{\"type\":\"DiskPressure\"}":{"f:lastHeartbeatTime":{}},"k:{\"type\":\"MemoryPressure\"}":{"f:lastHeartbeatTime":{}},"k:{\"type\":\"PIDPressure\"}":{"f:lastHeartbeatTime":{}},"k:{\"type\":\"Ready\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{}}},"f:images":{}}},"subresou
rce":"status"},{"manager":"cloud-node-manager","operation":"Update","apiVersion":"v1","time":"2022-10-10T04:02:02Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{"f:beta.kubernetes.io/instance-type":{},"f:failure-domain.beta.kubernetes.io/region":{},"f:failure-domain.beta.kubernetes.io/zone":{},"f:node.kubernetes.io/instance-type":{},"f:topology.kubernetes.io/region":{},"f:topology.kubernetes.io/zone":{}}},"f:spec":{"f:providerID":{}}}},{"manager":"cloud-node-manager","operation":"Update","apiVersion":"v1","time":"2022-10-10T04:02:02Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{"k:{\"type\":\"NetworkUnavailable\"}":{".":{},"f:type":{}}}}},"subresource":"status"},{"manager":"kubectl-label","operation":"Update","apiVersion":"v1","time":"2022-10-10T04:02:22Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{"f:kubernetes.io/role":{},"f:node-role.kubernetes.io/agent":{}}}}},{"manager":"cloud-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-10T04:02:35Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{"k:{\"type\":\"NetworkUnavailable\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{}}}}},"subresource":"status"},{"manager":"node-problem-detector","operation":"Update","apiVersion":"v1","time":"2022-10-11T14:42:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{"k:{\"type\":\"ContainerRuntimeProblem\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FilesystemCorruptionProblem\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FreezeScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FrequentContainerdRestart\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FrequentDockerRestart\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FrequentKubeletRestart\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FrequentUnregisterNetDevice\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"KernelDeadlock\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"KubeletProblem\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"PreemptScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"ReadonlyFilesystem\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"RebootScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"RedeployScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"TerminateScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"
f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"VMEventScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"podCIDR":"10.244.0.0/24","podCIDRs":["10.244.0.0/24"],"providerID":"azure:///subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mc_akkeshar_tempaks_southcentralus/providers/Microsoft.Compute/virtualMachineScaleSets/aks-agentpool-35222091-vmss/virtualMachines/0"},"status":{"capacity":{"cpu":"4","ephemeral-storage":"129886128Ki","hugepages-1Gi":"0","hugepages-2Mi":"0","memory":"16009Mi","pods":"110"},"allocatable":{"cpu":"3860m","ephemeral-storage":"119703055367","hugepages-1Gi":"0","hugepages-2Mi":"0","memory":"12597Mi","pods":"110"},"conditions":[{"type":"FilesystemCorruptionProblem","status":"False","lastHeartbeatTime":"2022-10-18T19:56:31Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"FilesystemIsOK","message":"Filesystem + is healthy"},{"type":"RebootScheduled","status":"False","lastHeartbeatTime":"2022-10-18T19:56:31Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"NoRebootScheduled","message":"VM + has no scheduled Reboot event"},{"type":"ReadonlyFilesystem","status":"False","lastHeartbeatTime":"2022-10-18T19:56:31Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"FilesystemIsNotReadOnly","message":"Filesystem + is not read-only"},{"type":"TerminateScheduled","status":"False","lastHeartbeatTime":"2022-10-18T19:56:31Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"NoTerminateScheduled","message":"VM + has no scheduled Terminate event"},{"type":"FrequentContainerdRestart","status":"False","lastHeartbeatTime":"2022-10-18T19:56:31Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"NoFrequentContainerdRestart","message":"containerd + is functioning properly"},{"type":"KernelDeadlock","status":"False","lastHeartbeatTime":"2022-10-18T19:56:31Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"KernelHasNoDeadlock","message":"kernel + has no deadlock"},{"type":"KubeletProblem","status":"False","lastHeartbeatTime":"2022-10-18T19:56:31Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"KubeletIsUp","message":"kubelet + service is up"},{"type":"PreemptScheduled","status":"False","lastHeartbeatTime":"2022-10-18T19:56:31Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"NoPreemptScheduled","message":"VM + has no scheduled Preempt event"},{"type":"FrequentUnregisterNetDevice","status":"False","lastHeartbeatTime":"2022-10-18T19:56:31Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"NoFrequentUnregisterNetDevice","message":"node + is functioning properly"},{"type":"RedeployScheduled","status":"False","lastHeartbeatTime":"2022-10-18T19:56:31Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"NoRedeployScheduled","message":"VM + has no scheduled Redeploy event"},{"type":"ContainerRuntimeProblem","status":"False","lastHeartbeatTime":"2022-10-18T19:56:31Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"ContainerRuntimeIsUp","message":"container + runtime service is up"},{"type":"FrequentKubeletRestart","status":"False","lastHeartbeatTime":"2022-10-18T19:56:31Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"NoFrequentKubeletRestart","message":"kubelet + is functioning properly"},{"type":"FreezeScheduled","status":"False","lastHeartbeatTime":"2022-10-18T19:56:31Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"NoFreezeScheduled","message":"VM + has no scheduled Freeze 
event"},{"type":"FrequentDockerRestart","status":"False","lastHeartbeatTime":"2022-10-18T19:56:31Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"NoFrequentDockerRestart","message":"docker + is functioning properly"},{"type":"VMEventScheduled","status":"False","lastHeartbeatTime":"2022-10-18T19:56:31Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"NoVMEventScheduled","message":"VM + has no scheduled event"},{"type":"NetworkUnavailable","status":"False","lastHeartbeatTime":"2022-10-10T04:02:35Z","lastTransitionTime":"2022-10-10T04:02:35Z","reason":"RouteCreated","message":"RouteController + created a route"},{"type":"MemoryPressure","status":"False","lastHeartbeatTime":"2022-10-18T19:54:55Z","lastTransitionTime":"2022-10-10T04:01:39Z","reason":"KubeletHasSufficientMemory","message":"kubelet + has sufficient memory available"},{"type":"DiskPressure","status":"False","lastHeartbeatTime":"2022-10-18T19:54:55Z","lastTransitionTime":"2022-10-10T04:01:39Z","reason":"KubeletHasNoDiskPressure","message":"kubelet + has no disk pressure"},{"type":"PIDPressure","status":"False","lastHeartbeatTime":"2022-10-18T19:54:55Z","lastTransitionTime":"2022-10-10T04:01:39Z","reason":"KubeletHasSufficientPID","message":"kubelet + has sufficient PID available"},{"type":"Ready","status":"True","lastHeartbeatTime":"2022-10-18T19:54:55Z","lastTransitionTime":"2022-10-10T04:01:40Z","reason":"KubeletReady","message":"kubelet + is posting ready status. AppArmor enabled"}],"addresses":[{"type":"InternalIP","address":"10.224.0.4"},{"type":"Hostname","address":"aks-agentpool-35222091-vmss000000"}],"daemonEndpoints":{"kubeletEndpoint":{"Port":10250}},"nodeInfo":{"machineID":"a3c0f9b3c4c74d83ae50f188e237de01","systemUUID":"caabd586-3531-4e2e-9111-e258f1790065","bootID":"51adf7b5-ed98-4322-80b9-c937431968b1","kernelVersion":"5.4.0-1091-azure","osImage":"Ubuntu + 18.04.6 
LTS","containerRuntimeVersion":"containerd://1.6.4+azure-4","kubeletVersion":"v1.24.6","kubeProxyVersion":"v1.24.6","operatingSystem":"linux","architecture":"amd64"},"images":[{"names":["mcr.microsoft.com/azuremonitor/containerinsights/ciprod:ciprod08102022"],"sizeBytes":397844357},{"names":["mcr.microsoft.com/azuredefender/stable/low-level-init@sha256:65b99ab432f3a164e3bea2e5d0350039e597176eda3179f2a59ef4696e9d65df","mcr.microsoft.com/azuredefender/stable/low-level-init:1.3.57"],"sizeBytes":374449658},{"names":["mcr.microsoft.com/azuredefender/stable/low-level-init@sha256:239a04dd583cd552d7a37941c96422d6c8acf243dd88538ba013d337c2925426","mcr.microsoft.com/azuredefender/stable/low-level-init:1.3.49"],"sizeBytes":374180034},{"names":["mcr.microsoft.com/azuremonitor/containerinsights/ciprod:ciprod06272022-hotfix"],"sizeBytes":357023149},{"names":["mcr.microsoft.com/azuredefender/stable/low-level-collector@sha256:07e6640452537dfb75f4f30f25178a9be4151ddc7578436a6ee8843d79889fe1","mcr.microsoft.com/azuredefender/stable/low-level-collector:1.3.57"],"sizeBytes":315495474},{"names":["mcr.microsoft.com/azuredefender/stable/low-level-collector@sha256:ec066564034f34578c930bef6734ff19c92b33462165e62538842cfeb9dbc3fb","mcr.microsoft.com/azuredefender/stable/low-level-collector:1.3.49"],"sizeBytes":315042580},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:0.49.3"],"sizeBytes":287741913},{"names":["mcr.microsoft.com/oss/calico/cni:v3.23.1"],"sizeBytes":263014840},{"names":["mcr.microsoft.com/oss/calico/cni:v3.21.4"],"sizeBytes":236345866},{"names":["mcr.microsoft.com/oss/calico/cni:v3.21.6"],"sizeBytes":227829276},{"names":["mcr.microsoft.com/oss/calico/node:v3.23.1"],"sizeBytes":221560540},{"names":["mcr.microsoft.com/oss/calico/node:v3.21.4"],"sizeBytes":216363503},{"names":["mcr.microsoft.com/oss/calico/node:v3.21.6"],"sizeBytes":215379163},{"names":["mcr.microsoft.com/oss/tigera/operator:v1.23.8"],"sizeBytes":184105789},{"names":["mcr.microsoft.com/azuredefender/stable/security-publisher@sha256:d5de5c8fa8213dc5c178d7a5c9c5c5d8c7ba14c65c5fbd2d661f7670af6cbdf5","mcr.microsoft.com/azuredefender/stable/security-publisher:1.0.56"],"sizeBytes":172024457},{"names":["mcr.microsoft.com/oss/cilium/cilium:1.12.1.1"],"sizeBytes":167528909},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:0.19.0"],"sizeBytes":166352383},{"names":["mcr.microsoft.com/aks/hcp/hcp-tunnel-front:master.220527.2"],"sizeBytes":146994488},{"names":null,"sizeBytes":138243950},{"names":["mcr.microsoft.com/oss/calico/kube-controllers:v3.23.1"],"sizeBytes":136078571},{"names":["mcr.microsoft.com/oss/calico/typha:v3.23.1"],"sizeBytes":131467121},{"names":null,"sizeBytes":129890505},{"names":null,"sizeBytes":128992809},{"names":["mcr.microsoft.com/oss/tigera/operator:v1.24.2"],"sizeBytes":128711964},{"names":["mcr.microsoft.com/oss/calico/typha:v3.21.4"],"sizeBytes":128235133},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.2.2.5"],"sizeBytes":123925992},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.24.6.1"],"sizeBytes":123549904},{"names":["mcr.microsoft.com/oss/calico/kube-controllers:v3.21.6"],"sizeBytes":123549280},{"names":["mcr.microsoft.com/oss/calico/typha:v3.21.6"],"sizeBytes":119713369},{"names":null,"sizeBytes":115909379},{"names":null,"sizeBytes":115897326},{"names":null,"sizeBytes":115677896},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:v1.2.1"],"sizeBytes":107169290},{"names":["mcr.microsoft.com/oss/calico/node:v3.
8.9.5"],"sizeBytes":101794833},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi@sha256:a9d68708393eb3fbe09aa32db020c805bab952709fe6df552959c26ab2f92336","mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.22.0.3"],"sizeBytes":99538753},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.22.0.2"],"sizeBytes":99335832},{"names":["mcr.microsoft.com/aks/acc/sgx-attestation:3.1"],"sizeBytes":98058501},{"names":["mcr.microsoft.com/oss/kubernetes/exechealthz:1.2_v0.0.5"],"sizeBytes":94348102},{"names":["mcr.microsoft.com/aks/hcp/tunnel-openvpn:master.220527.2"],"sizeBytes":92531564},{"names":["mcr.microsoft.com/containernetworking/azure-npm:v1.4.29"],"sizeBytes":89255513},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.2.2"],"sizeBytes":88551490},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.22.0.1"],"sizeBytes":88352750},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.21.0"],"sizeBytes":87550430},{"names":["mcr.microsoft.com/aks/command/runtime:master.220211.1"],"sizeBytes":82792811},{"names":["mcr.microsoft.com/azure-application-gateway/kubernetes-ingress:1.5.2"],"sizeBytes":77081542},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.21.0"],"sizeBytes":75345915},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.20.0"],"sizeBytes":75152698},{"names":["mcr.microsoft.com/azure-application-gateway/kubernetes-ingress:1.4.0"],"sizeBytes":73895290},{"names":["mcr.microsoft.com/oss/nvidia/k8s-device-plugin:v0.9.0"],"sizeBytes":67291599},{"names":["mcr.microsoft.com/containernetworking/cni-dropgz:v0.0.2"],"sizeBytes":67202663}]}},{"metadata":{"name":"aks-agentpool-35222091-vmss000001","uid":"336238a8-144e-4068-a997-7963fa960709","resourceVersion":"2976629","creationTimestamp":"2022-10-10T04:02:15Z","labels":{"agentpool":"agentpool","beta.kubernetes.io/arch":"amd64","beta.kubernetes.io/instance-type":"Standard_B4ms","beta.kubernetes.io/os":"linux","failure-domain.beta.kubernetes.io/region":"southcentralus","failure-domain.beta.kubernetes.io/zone":"0","kubernetes.azure.com/agentpool":"agentpool","kubernetes.azure.com/cluster":"MC_akkeshar_tempaks_southcentralus","kubernetes.azure.com/kubelet-identity-client-id":"a7082775-1b69-4810-99a4-7ddaeac55b5b","kubernetes.azure.com/mode":"system","kubernetes.azure.com/node-image-version":"AKSUbuntu-1804gen2containerd-2022.09.22","kubernetes.azure.com/os-sku":"Ubuntu","kubernetes.azure.com/role":"agent","kubernetes.azure.com/storageprofile":"managed","kubernetes.azure.com/storagetier":"Premium_LRS","kubernetes.io/arch":"amd64","kubernetes.io/hostname":"aks-agentpool-35222091-vmss000001","kubernetes.io/os":"linux","kubernetes.io/role":"agent","node-role.kubernetes.io/agent":"","node.kubernetes.io/instance-type":"Standard_B4ms","storageprofile":"managed","storagetier":"Premium_LRS","topology.disk.csi.azure.com/zone":"","topology.kubernetes.io/region":"southcentralus","topology.kubernetes.io/zone":"0"},"annotations":{"csi.volume.kubernetes.io/nodeid":"{\"disk.csi.azure.com\":\"aks-agentpool-35222091-vmss000001\",\"file.csi.azure.com\":\"aks-agentpool-35222091-vmss000001\"}","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"cloud-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-10T04:02:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:spec":{"f:podCIDR":{},"f:podCIDRs":{".":{},"v:\"10.244.1.0/24\"":{}}}}},{"manager":"kubelet","operation":"Update","apiVersion":"v1"
,"time":"2022-10-10T04:02:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:volumes.kubernetes.io/controller-managed-attach-detach":{}},"f:labels":{".":{},"f:agentpool":{},"f:beta.kubernetes.io/arch":{},"f:beta.kubernetes.io/os":{},"f:kubernetes.azure.com/agentpool":{},"f:kubernetes.azure.com/cluster":{},"f:kubernetes.azure.com/kubelet-identity-client-id":{},"f:kubernetes.azure.com/mode":{},"f:kubernetes.azure.com/node-image-version":{},"f:kubernetes.azure.com/os-sku":{},"f:kubernetes.azure.com/role":{},"f:kubernetes.azure.com/storageprofile":{},"f:kubernetes.azure.com/storagetier":{},"f:kubernetes.io/arch":{},"f:kubernetes.io/hostname":{},"f:kubernetes.io/os":{},"f:storageprofile":{},"f:storagetier":{}}}}},{"manager":"kubectl-label","operation":"Update","apiVersion":"v1","time":"2022-10-10T04:02:22Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{"f:kubernetes.io/role":{},"f:node-role.kubernetes.io/agent":{}}}}},{"manager":"cloud-node-manager","operation":"Update","apiVersion":"v1","time":"2022-10-10T04:02:24Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{"f:beta.kubernetes.io/instance-type":{},"f:failure-domain.beta.kubernetes.io/region":{},"f:failure-domain.beta.kubernetes.io/zone":{},"f:node.kubernetes.io/instance-type":{},"f:topology.kubernetes.io/region":{},"f:topology.kubernetes.io/zone":{}}},"f:spec":{"f:providerID":{}}}},{"manager":"cloud-node-manager","operation":"Update","apiVersion":"v1","time":"2022-10-10T04:02:24Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{"k:{\"type\":\"NetworkUnavailable\"}":{".":{},"f:type":{}}}}},"subresource":"status"},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-10T04:02:25Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl":{}}}}},{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2022-10-10T04:02:27Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:csi.volume.kubernetes.io/nodeid":{}},"f:labels":{"f:topology.disk.csi.azure.com/zone":{}}},"f:status":{"f:allocatable":{"f:ephemeral-storage":{}},"f:capacity":{"f:ephemeral-storage":{}},"f:conditions":{"k:{\"type\":\"DiskPressure\"}":{"f:lastHeartbeatTime":{}},"k:{\"type\":\"MemoryPressure\"}":{"f:lastHeartbeatTime":{}},"k:{\"type\":\"PIDPressure\"}":{"f:lastHeartbeatTime":{}},"k:{\"type\":\"Ready\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{}}},"f:images":{}}},"subresource":"status"},{"manager":"cloud-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-10T04:03:35Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{"k:{\"type\":\"NetworkUnavailable\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{}}}}},"subresource":"status"},{"manager":"node-problem-detector","operation":"Update","apiVersion":"v1","time":"2022-10-11T14:42:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{"k:{\"type\":\"ContainerRuntimeProblem\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FilesystemCorruptionProblem\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FreezeScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:
{\"type\":\"FrequentContainerdRestart\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FrequentDockerRestart\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FrequentKubeletRestart\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"FrequentUnregisterNetDevice\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"KernelDeadlock\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"KubeletProblem\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"PreemptScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"ReadonlyFilesystem\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"RebootScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"RedeployScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"TerminateScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"VMEventScheduled\"}":{".":{},"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"podCIDR":"10.244.1.0/24","podCIDRs":["10.244.1.0/24"],"providerID":"azure:///subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mc_akkeshar_tempaks_southcentralus/providers/Microsoft.Compute/virtualMachineScaleSets/aks-agentpool-35222091-vmss/virtualMachines/1"},"status":{"capacity":{"cpu":"4","ephemeral-storage":"129886128Ki","hugepages-1Gi":"0","hugepages-2Mi":"0","memory":"16393220Ki","pods":"110"},"allocatable":{"cpu":"3860m","ephemeral-storage":"119703055367","hugepages-1Gi":"0","hugepages-2Mi":"0","memory":"12899332Ki","pods":"110"},"conditions":[{"type":"KernelDeadlock","status":"False","lastHeartbeatTime":"2022-10-18T19:56:19Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"KernelHasNoDeadlock","message":"kernel + has no deadlock"},{"type":"RebootScheduled","status":"False","lastHeartbeatTime":"2022-10-18T19:56:19Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"NoRebootScheduled","message":"VM + has no scheduled Reboot event"},{"type":"PreemptScheduled","status":"False","lastHeartbeatTime":"2022-10-18T19:56:19Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"NoPreemptScheduled","message":"VM + has no scheduled Preempt event"},{"type":"FrequentDockerRestart","status":"False","lastHeartbeatTime":"2022-10-18T19:56:19Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"NoFrequentDockerRestart","message":"docker + is functioning properly"},{"type":"FrequentContainerdRestart","status":"False","lastHeartbeatTime":"2022-10-18T19:56:19Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"NoFrequentContainerdRestart","message":"containerd + is functioning 
properly"},{"type":"FrequentKubeletRestart","status":"False","lastHeartbeatTime":"2022-10-18T19:56:19Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"NoFrequentKubeletRestart","message":"kubelet + is functioning properly"},{"type":"FreezeScheduled","status":"False","lastHeartbeatTime":"2022-10-18T19:56:19Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"NoFreezeScheduled","message":"VM + has no scheduled Freeze event"},{"type":"ContainerRuntimeProblem","status":"False","lastHeartbeatTime":"2022-10-18T19:56:19Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"ContainerRuntimeIsUp","message":"container + runtime service is up"},{"type":"FilesystemCorruptionProblem","status":"False","lastHeartbeatTime":"2022-10-18T19:56:19Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"FilesystemIsOK","message":"Filesystem + is healthy"},{"type":"VMEventScheduled","status":"False","lastHeartbeatTime":"2022-10-18T19:56:19Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"NoVMEventScheduled","message":"VM + has no scheduled event"},{"type":"TerminateScheduled","status":"False","lastHeartbeatTime":"2022-10-18T19:56:19Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"NoTerminateScheduled","message":"VM + has no scheduled Terminate event"},{"type":"KubeletProblem","status":"False","lastHeartbeatTime":"2022-10-18T19:56:19Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"KubeletIsUp","message":"kubelet + service is up"},{"type":"FrequentUnregisterNetDevice","status":"False","lastHeartbeatTime":"2022-10-18T19:56:19Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"NoFrequentUnregisterNetDevice","message":"node + is functioning properly"},{"type":"ReadonlyFilesystem","status":"False","lastHeartbeatTime":"2022-10-18T19:56:19Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"FilesystemIsNotReadOnly","message":"Filesystem + is not read-only"},{"type":"RedeployScheduled","status":"False","lastHeartbeatTime":"2022-10-18T19:56:19Z","lastTransitionTime":"2022-10-15T19:52:59Z","reason":"NoRedeployScheduled","message":"VM + has no scheduled Redeploy event"},{"type":"NetworkUnavailable","status":"False","lastHeartbeatTime":"2022-10-10T04:03:35Z","lastTransitionTime":"2022-10-10T04:03:35Z","reason":"RouteCreated","message":"RouteController + created a route"},{"type":"MemoryPressure","status":"False","lastHeartbeatTime":"2022-10-18T19:54:35Z","lastTransitionTime":"2022-10-10T04:02:15Z","reason":"KubeletHasSufficientMemory","message":"kubelet + has sufficient memory available"},{"type":"DiskPressure","status":"False","lastHeartbeatTime":"2022-10-18T19:54:35Z","lastTransitionTime":"2022-10-10T04:02:15Z","reason":"KubeletHasNoDiskPressure","message":"kubelet + has no disk pressure"},{"type":"PIDPressure","status":"False","lastHeartbeatTime":"2022-10-18T19:54:35Z","lastTransitionTime":"2022-10-10T04:02:15Z","reason":"KubeletHasSufficientPID","message":"kubelet + has sufficient PID available"},{"type":"Ready","status":"True","lastHeartbeatTime":"2022-10-18T19:54:35Z","lastTransitionTime":"2022-10-10T04:02:25Z","reason":"KubeletReady","message":"kubelet + is posting ready status. 
AppArmor enabled"}],"addresses":[{"type":"InternalIP","address":"10.224.0.5"},{"type":"Hostname","address":"aks-agentpool-35222091-vmss000001"}],"daemonEndpoints":{"kubeletEndpoint":{"Port":10250}},"nodeInfo":{"machineID":"51b2ad9a5c98406c914f404679bd58fa","systemUUID":"0ef5b1a0-7adb-4086-b8aa-0b11b5316e73","bootID":"4df3389b-ca13-43e7-b0d7-c304803b6e69","kernelVersion":"5.4.0-1091-azure","osImage":"Ubuntu + 18.04.6 LTS","containerRuntimeVersion":"containerd://1.6.4+azure-4","kubeletVersion":"v1.24.6","kubeProxyVersion":"v1.24.6","operatingSystem":"linux","architecture":"amd64"},"images":[{"names":["mcr.microsoft.com/azuremonitor/containerinsights/ciprod:ciprod08102022"],"sizeBytes":397844357},{"names":["mcr.microsoft.com/azuredefender/stable/low-level-init@sha256:65b99ab432f3a164e3bea2e5d0350039e597176eda3179f2a59ef4696e9d65df","mcr.microsoft.com/azuredefender/stable/low-level-init:1.3.57"],"sizeBytes":374449658},{"names":["mcr.microsoft.com/azuredefender/stable/low-level-init@sha256:239a04dd583cd552d7a37941c96422d6c8acf243dd88538ba013d337c2925426","mcr.microsoft.com/azuredefender/stable/low-level-init:1.3.49"],"sizeBytes":374180034},{"names":["mcr.microsoft.com/azuremonitor/containerinsights/ciprod:ciprod06272022-hotfix"],"sizeBytes":357023149},{"names":["devconformance.azurecr.io/ocdev@sha256:39d8d9ae4b6b1de87211b584b6374054e9c51c080cce825dd0b4ad56e8376a72","devconformance.azurecr.io/ocdev:v3"],"sizeBytes":330270905},{"names":["devconformance.azurecr.io/ocdev@sha256:db49de7ddae80473a3bfbf53c146f3df3908e1759e8cc44a6bf7d8e37d174e8e","devconformance.azurecr.io/ocdev:v2"],"sizeBytes":330270862},{"names":["mcr.microsoft.com/azuredefender/stable/low-level-collector@sha256:07e6640452537dfb75f4f30f25178a9be4151ddc7578436a6ee8843d79889fe1","mcr.microsoft.com/azuredefender/stable/low-level-collector:1.3.57"],"sizeBytes":315495474},{"names":["mcr.microsoft.com/azuredefender/stable/low-level-collector@sha256:ec066564034f34578c930bef6734ff19c92b33462165e62538842cfeb9dbc3fb","mcr.microsoft.com/azuredefender/stable/low-level-collector:1.3.49"],"sizeBytes":315042580},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:0.49.3"],"sizeBytes":287741913},{"names":["mcr.microsoft.com/oss/calico/cni:v3.23.1"],"sizeBytes":263014840},{"names":["mcr.microsoft.com/oss/calico/cni:v3.21.4"],"sizeBytes":236345866},{"names":["mcr.microsoft.com/oss/calico/cni:v3.21.6"],"sizeBytes":227829276},{"names":["mcr.microsoft.com/oss/calico/node:v3.23.1"],"sizeBytes":221560540},{"names":["mcr.microsoft.com/oss/calico/node:v3.21.4"],"sizeBytes":216363503},{"names":["mcr.microsoft.com/oss/calico/node:v3.21.6"],"sizeBytes":215379163},{"names":["mcr.microsoft.com/oss/tigera/operator:v1.23.8"],"sizeBytes":184105789},{"names":["mcr.microsoft.com/azuredefender/stable/security-publisher@sha256:d5de5c8fa8213dc5c178d7a5c9c5c5d8c7ba14c65c5fbd2d661f7670af6cbdf5","mcr.microsoft.com/azuredefender/stable/security-publisher:1.0.56"],"sizeBytes":172024457},{"names":["mcr.microsoft.com/oss/cilium/cilium:1.12.1.1"],"sizeBytes":167528909},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:0.19.0"],"sizeBytes":166352383},{"names":["mcr.microsoft.com/aks/hcp/hcp-tunnel-front:master.220527.2"],"sizeBytes":146994488},{"names":null,"sizeBytes":138243950},{"names":["mcr.microsoft.com/oss/calico/kube-controllers:v3.23.1"],"sizeBytes":136078571},{"names":["mcr.microsoft.com/oss/calico/typha:v3.23.1"],"sizeBytes":131467121},{"names":null,"sizeBytes":129890505},{"names":null,"sizeBytes":128992809},{"names":[
"mcr.microsoft.com/oss/tigera/operator:v1.24.2"],"sizeBytes":128711964},{"names":["mcr.microsoft.com/oss/calico/typha:v3.21.4"],"sizeBytes":128235133},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.2.2.5"],"sizeBytes":123925992},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.24.6.1"],"sizeBytes":123549904},{"names":["mcr.microsoft.com/oss/calico/kube-controllers:v3.21.6"],"sizeBytes":123549280},{"names":["mcr.microsoft.com/oss/calico/typha:v3.21.6"],"sizeBytes":119713369},{"names":null,"sizeBytes":115909379},{"names":null,"sizeBytes":115897326},{"names":null,"sizeBytes":115677896},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:v1.2.1"],"sizeBytes":107169290},{"names":["mcr.microsoft.com/oss/calico/node:v3.8.9.5"],"sizeBytes":101794833},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi@sha256:a9d68708393eb3fbe09aa32db020c805bab952709fe6df552959c26ab2f92336","mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.22.0.3"],"sizeBytes":99538753},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.22.0.2"],"sizeBytes":99335832},{"names":["mcr.microsoft.com/aks/acc/sgx-attestation:3.1"],"sizeBytes":98058501},{"names":["mcr.microsoft.com/oss/kubernetes/exechealthz:1.2_v0.0.5"],"sizeBytes":94348102},{"names":["mcr.microsoft.com/aks/hcp/tunnel-openvpn:master.220527.2"],"sizeBytes":92531564},{"names":["mcr.microsoft.com/containernetworking/azure-npm:v1.4.29"],"sizeBytes":89255513},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.2.2"],"sizeBytes":88551490},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.22.0.1"],"sizeBytes":88352750},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.21.0"],"sizeBytes":87550430},{"names":["mcr.microsoft.com/aks/command/runtime:master.220211.1"],"sizeBytes":82792811},{"names":["mcr.microsoft.com/azure-application-gateway/kubernetes-ingress:1.5.2"],"sizeBytes":77081542},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.21.0"],"sizeBytes":75345915},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.20.0"],"sizeBytes":75152698},{"names":["mcr.microsoft.com/azure-application-gateway/kubernetes-ingress:1.4.0"],"sizeBytes":73895290}]}}]} ' headers: audit-id: - - 0fcab4ed-a75b-43ae-a440-79f320bbb4ae + - 52658d55-9664-440d-a821-0904be6ca8ce cache-control: - no-cache, private content-type: - application/json date: - - Tue, 07 Jun 2022 18:03:34 GMT + - Tue, 18 Oct 2022 19:56:57 GMT transfer-encoding: - chunked x-kubernetes-pf-flowschema-uid: - - 08843a36-c7a9-489b-a782-1dc805dc9f54 + - 9c9284bf-ac50-497c-86a8-5f4f7d857b28 x-kubernetes-pf-prioritylevel-uid: - - e6f4d88d-c43f-4941-a57f-7f1230896bdc + - f041de6f-3328-46ec-b36d-f51c7cd89b61 status: code: 200 message: OK @@ -3024,17 +4186,17 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: POST - uri: https://akkeshar-dns-08147f89.hcp.eastus2euap.azmk8s.io/apis/authorization.k8s.io/v1/selfsubjectaccessreviews + uri: https://tempaks-dns-0f8c9536.hcp.southcentralus.azmk8s.io/apis/authorization.k8s.io/v1/selfsubjectaccessreviews response: body: - string: 
'{"kind":"SelfSubjectAccessReview","apiVersion":"authorization.k8s.io/v1","metadata":{"creationTimestamp":null,"managedFields":[{"manager":"OpenAPI-Generator","operation":"Update","apiVersion":"authorization.k8s.io/v1","time":"2022-06-07T18:03:35Z","fieldsType":"FieldsV1","fieldsV1":{"f:spec":{"f:resourceAttributes":{".":{},"f:group":{},"f:resource":{},"f:verb":{}}}}}]},"spec":{"resourceAttributes":{"verb":"create","group":"rbac.authorization.k8s.io","resource":"clusterrolebindings"}},"status":{"allowed":true}} + string: '{"kind":"SelfSubjectAccessReview","apiVersion":"authorization.k8s.io/v1","metadata":{"creationTimestamp":null,"managedFields":[{"manager":"OpenAPI-Generator","operation":"Update","apiVersion":"authorization.k8s.io/v1","time":"2022-10-18T19:56:59Z","fieldsType":"FieldsV1","fieldsV1":{"f:spec":{"f:resourceAttributes":{".":{},"f:group":{},"f:resource":{},"f:verb":{}}}}}]},"spec":{"resourceAttributes":{"verb":"create","group":"rbac.authorization.k8s.io","resource":"clusterrolebindings"}},"status":{"allowed":true}} ' headers: audit-id: - - fdc50849-4300-4f0e-b021-4e8474e24755 + - 72dc6989-25d4-43dc-8034-0ff3cc01383d cache-control: - no-cache, private content-length: @@ -3042,11 +4204,11 @@ interactions: content-type: - application/json date: - - Tue, 07 Jun 2022 18:03:35 GMT + - Tue, 18 Oct 2022 19:56:59 GMT x-kubernetes-pf-flowschema-uid: - - 08843a36-c7a9-489b-a782-1dc805dc9f54 + - 9c9284bf-ac50-497c-86a8-5f4f7d857b28 x-kubernetes-pf-prioritylevel-uid: - - e6f4d88d-c43f-4941-a57f-7f1230896bdc + - f041de6f-3328-46ec-b36d-f51c7cd89b61 status: code: 201 message: Created @@ -3055,33 +4217,50 @@ interactions: headers: Accept: - application/json - Content-Type: - - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - connectedk8s connect + Connection: + - keep-alive + ParameterSetName: + - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id --yes User-Agent: - - OpenAPI-Generator/11.0.0/python + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://akkeshar-dns-08147f89.hcp.eastus2euap.azmk8s.io/version/ + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Kubernetes?api-version=2021-04-01 response: body: - string: "{\n \"major\": \"1\",\n \"minor\": \"21\",\n \"gitVersion\": \"v1.21.7\",\n - \ \"gitCommit\": \"a326522ffdc578d1ac5c14cf8d0160feda9b13fc\",\n \"gitTreeState\": - \"clean\",\n \"buildDate\": \"2022-04-21T07:40:45Z\",\n \"goVersion\": \"go1.16.10\",\n - \ \"compiler\": \"gc\",\n \"platform\": \"linux/amd64\"\n}" + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Kubernetes","namespace":"Microsoft.Kubernetes","authorizations":[{"applicationId":"64b12d6e-6549-484c-8cc6-6281839ba394","roleDefinitionId":"1d1d44cf-68a1-4def-a2b6-cd7efc3515af"},{"applicationId":"359431ad-ece5-496b-8768-be4bbfd82f36","roleDefinitionId":"1b5c71b7-9814-4b40-b62a-23018af874d8"},{"applicationId":"0000dab9-8b21-4ba2-807f-1743968cef00","roleDefinitionId":"1b5c71b7-9814-4b40-b62a-23018af874d8"},{"applicationId":"8edd93e1-2103-40b4-bd70-6e34e586362d","roleDefinitionId":"eb67887a-31e8-4e4e-bf5b-14ff79351a6f"}],"resourceTypes":[{"resourceType":"connectedClusters","locations":["West + Europe","East US","West Central US","South Central US","Southeast Asia","UK + South","East US 2","West US 2","Australia East","North Europe","France Central","Central + US","West US","North Central US","Korea 
Central","Japan East","West US 3","East + Asia","Canada Central","East US 2 EUAP","Canada East"],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"SystemAssignedResourceIdentity, + SupportsTags, SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/operationStatuses","locations":["East + US 2 EUAP","West Europe","East US","West Central US","South Central US","Southeast + Asia","UK South","East US 2","West US 2","Australia East","North Europe","France + Central","Central US","West US","North Central US","Korea Central","Japan + East","East Asia","West US 3","Canada East","Canada Central"],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"registeredSubscriptions","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"Operations","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview","2019-11-01-preview","2019-09-01-privatepreview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: - audit-id: - - fc4cd8fb-2ff2-4058-b231-149850df12e4 cache-control: - - no-cache, private + - no-cache content-length: - - '264' + - '2416' content-type: - - application/json + - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 18:03:36 GMT - x-kubernetes-pf-flowschema-uid: - - 08843a36-c7a9-489b-a782-1dc805dc9f54 - x-kubernetes-pf-prioritylevel-uid: - - e6f4d88d-c43f-4941-a57f-7f1230896bdc + - Tue, 18 Oct 2022 19:56:59 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff status: code: 200 message: OK @@ -3097,38 +4276,33 @@ interactions: Connection: - keep-alive ParameterSetName: - - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id + - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id --yes User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridcompute/7.0.0 Python/3.7.7 (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Kubernetes?api-version=2021-04-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.HybridCompute/privateLinkScopes/temppls?api-version=2021-03-25-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Kubernetes","namespace":"Microsoft.Kubernetes","authorizations":[{"applicationId":"64b12d6e-6549-484c-8cc6-6281839ba394","roleDefinitionId":"1d1d44cf-68a1-4def-a2b6-cd7efc3515af"},{"applicationId":"359431ad-ece5-496b-8768-be4bbfd82f36","roleDefinitionId":"1b5c71b7-9814-4b40-b62a-23018af874d8"},{"applicationId":"0000dab9-8b21-4ba2-807f-1743968cef00","roleDefinitionId":"1b5c71b7-9814-4b40-b62a-23018af874d8"},{"applicationId":"8edd93e1-2103-40b4-bd70-6e34e586362d","roleDefinitionId":"eb67887a-31e8-4e4e-bf5b-14ff79351a6f"}],"resourceTypes":[{"resourceType":"connectedClusters","locations":["West - Europe","East US","West Central US","South Central US","Southeast Asia","UK - South","East US 2","West US 2","Australia East","North Europe","France Central","Central - US","West US","North Central US","Korea Central","Japan East","West US 3","East - Asia","East US 2 EUAP","Canada East","Canada Central"],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"SystemAssignedResourceIdentity, - SupportsTags, SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/operationStatuses","locations":["East - US 2 EUAP","West Europe","East US","West Central US","South Central US","Southeast - Asia","UK South","East US 2","West US 2","Australia East","North Europe","France - Central","Central US","West US","North Central US","Korea Central","Japan - East","East Asia","West US 3","Canada East","Canada Central"],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"registeredSubscriptions","locations":[],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"Operations","locations":[],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview","2019-11-01-preview","2019-09-01-privatepreview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.HybridCompute/privateLinkScopes/temppls","name":"temppls","type":"Microsoft.HybridCompute/privateLinkScopes","location":"eastus2euap","properties":{"privateLinkScopeId":"22683d8c-0c2c-4516-b3fd-466e958437be","publicNetworkAccess":"Disabled","provisioningState":"Succeeded"},"tags":{}}' headers: cache-control: - no-cache content-length: - - '2311' + - '387' content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 18:03:35 GMT + - Tue, 18 Oct 2022 19:57:01 GMT expires: - '-1' pragma: - no-cache + server: + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked vary: - Accept-Encoding x-content-type-options: @@ -3148,11 +4322,12 @@ interactions: Connection: - keep-alive ParameterSetName: - - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id + - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id --yes User-Agent: - - AZURECLI/2.37.0 azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + 
(Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cliplscc?api-version=2022-05-01-preview + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cliplscc?api-version=2022-10-01-preview response: body: string: '{"error":{"code":"ResourceNotFound","message":"The Resource ''Microsoft.Kubernetes/connectedClusters/cliplscc'' @@ -3166,7 +4341,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 18:03:39 GMT + - Tue, 18 Oct 2022 19:57:03 GMT expires: - '-1' pragma: @@ -3188,29 +4363,29 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://akkeshar-dns-08147f89.hcp.eastus2euap.azmk8s.io/api/v1/namespaces + uri: https://tempaks-dns-0f8c9536.hcp.southcentralus.azmk8s.io/api/v1/namespaces response: body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"42261857"},"items":[{"metadata":{"name":"default","uid":"378b4c98-7cf9-4087-a47c-6964c6df767b","resourceVersion":"207","creationTimestamp":"2022-01-13T13:56:24Z","labels":{"kubernetes.io/metadata.name":"default"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-01-13T13:56:24Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"gatekeeper-system","uid":"dcafa84d-af5f-49ec-adef-a54c1c5b434d","resourceVersion":"36233709","creationTimestamp":"2022-05-17T16:35:37Z","labels":{"addonmanager.kubernetes.io/mode":"Reconcile","admission.gatekeeper.sh/ignore":"no-self-managing","control-plane":"controller-manager","gatekeeper.sh/system":"yes","kubernetes.io/metadata.name":"gatekeeper-system"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"v1\",\"kind\":\"Namespace\",\"metadata\":{\"annotations\":{},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"Reconcile\",\"admission.gatekeeper.sh/ignore\":\"no-self-managing\",\"control-plane\":\"controller-manager\",\"gatekeeper.sh/system\":\"yes\"},\"name\":\"gatekeeper-system\"}}\n"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"v1","time":"2022-05-17T16:35:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{}},"f:labels":{".":{},"f:addonmanager.kubernetes.io/mode":{},"f:admission.gatekeeper.sh/ignore":{},"f:control-plane":{},"f:gatekeeper.sh/system":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"kube-node-lease","uid":"03ccdf6b-d52e-41ad-bdcc-cdb3a51e6f86","resourceVersion":"32","creationTimestamp":"2022-01-13T13:56:21Z","labels":{"kubernetes.io/metadata.name":"kube-node-lease"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-01-13T13:56:21Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"kube-public","uid":"23c13bf5-359c-4ed5-9a29-ab3ac1d142c9","resourceVersion":"
13","creationTimestamp":"2022-01-13T13:56:21Z","labels":{"kubernetes.io/metadata.name":"kube-public"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-01-13T13:56:21Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"kube-system","uid":"4d6af319-0ca3-4229-a277-0672e6a186db","resourceVersion":"516","creationTimestamp":"2022-01-13T13:56:21Z","labels":{"addonmanager.kubernetes.io/mode":"Reconcile","control-plane":"true","kubernetes.io/cluster-service":"true","kubernetes.io/metadata.name":"kube-system"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"v1\",\"kind\":\"Namespace\",\"metadata\":{\"annotations\":{},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"Reconcile\",\"control-plane\":\"true\",\"kubernetes.io/cluster-service\":\"true\"},\"name\":\"kube-system\"}}\n"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-01-13T13:56:21Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"v1","time":"2022-01-13T13:56:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{}},"f:labels":{"f:addonmanager.kubernetes.io/mode":{},"f:control-plane":{},"f:kubernetes.io/cluster-service":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}}]} + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"2976802"},"items":[{"metadata":{"name":"default","uid":"be0c79fc-159c-4921-8607-0f09c4b7eaca","resourceVersion":"197","creationTimestamp":"2022-10-10T03:59:23Z","labels":{"kubernetes.io/metadata.name":"default"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-10-10T03:59:23Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"gatekeeper-system","uid":"347409a6-22fe-40d7-9a84-519b89e0f4f3","resourceVersion":"3249","creationTimestamp":"2022-10-10T04:09:44Z","labels":{"addonmanager.kubernetes.io/mode":"Reconcile","admission.gatekeeper.sh/ignore":"no-self-managing","control-plane":"controller-manager","gatekeeper.sh/system":"yes","kubernetes.io/metadata.name":"gatekeeper-system"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"v1\",\"kind\":\"Namespace\",\"metadata\":{\"annotations\":{},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"Reconcile\",\"admission.gatekeeper.sh/ignore\":\"no-self-managing\",\"control-plane\":\"controller-manager\",\"gatekeeper.sh/system\":\"yes\"},\"name\":\"gatekeeper-system\"}}\n"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"v1","time":"2022-10-10T04:09:44Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{}},"f:labels":{".":{},"f:addonmanager.kubernetes.io/mode":{},"f:admission.gatekeeper.sh/ignore":{},"f:control-plane":{},"f:gatekeeper.sh/system":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"kube-node-lease","uid":"872cd4f
1-0123-43c5-ba77-f497adca8a60","resourceVersion":"19","creationTimestamp":"2022-10-10T03:59:21Z","labels":{"kubernetes.io/metadata.name":"kube-node-lease"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-10-10T03:59:21Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"kube-public","uid":"2aa82866-8647-4b91-b72a-7bc969c090c7","resourceVersion":"5","creationTimestamp":"2022-10-10T03:59:21Z","labels":{"kubernetes.io/metadata.name":"kube-public"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-10-10T03:59:21Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"kube-system","uid":"853f3c4d-905a-480e-b679-2058626f72f2","resourceVersion":"481","creationTimestamp":"2022-10-10T03:59:21Z","labels":{"addonmanager.kubernetes.io/mode":"Reconcile","control-plane":"true","kubernetes.io/cluster-service":"true","kubernetes.io/metadata.name":"kube-system"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"v1\",\"kind\":\"Namespace\",\"metadata\":{\"annotations\":{},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"Reconcile\",\"control-plane\":\"true\",\"kubernetes.io/cluster-service\":\"true\"},\"name\":\"kube-system\"}}\n"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-10-10T03:59:21Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"v1","time":"2022-10-10T03:59:41Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{}},"f:labels":{"f:addonmanager.kubernetes.io/mode":{},"f:control-plane":{},"f:kubernetes.io/cluster-service":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}}]} ' headers: audit-id: - - d56107ff-892a-4e6f-81d6-31159f00af6e + - 42bbc76a-9851-4060-821a-21b3e6b776ba cache-control: - no-cache, private content-type: - application/json date: - - Tue, 07 Jun 2022 18:03:40 GMT + - Tue, 18 Oct 2022 19:57:05 GMT transfer-encoding: - chunked x-kubernetes-pf-flowschema-uid: - - 08843a36-c7a9-489b-a782-1dc805dc9f54 + - 9c9284bf-ac50-497c-86a8-5f4f7d857b28 x-kubernetes-pf-prioritylevel-uid: - - e6f4d88d-c43f-4941-a57f-7f1230896bdc + - f041de6f-3328-46ec-b36d-f51c7cd89b61 status: code: 200 message: OK @@ -3226,9 +4401,10 @@ interactions: Connection: - keep-alive ParameterSetName: - - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id + - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id --yes User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar?api-version=2021-04-01 response: @@ -3242,7 +4418,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 18:03:40 GMT + - Tue, 18 Oct 2022 19:57:05 GMT expires: - '-1' pragma: @@ -3270,14 +4446,14 @@ 
interactions: Content-Length: - '0' ParameterSetName: - - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id + - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id --yes User-Agent: - - python/3.7.7 (Windows-10-10.0.22000-SP0) AZURECLI/2.37.0 + - python/3.7.7 (Windows-10-10.0.22621-SP0) AZURECLI/2.41.0 (MSI) method: POST - uri: https://eastus.dp.kubernetesconfiguration.azure.com/azure-arc-k8sagents/GetLatestHelmPackagePath?api-version=2019-11-01-preview&releaseTrain=stable + uri: https://eastus2euap.dp.kubernetesconfiguration.azure.com/azure-arc-k8sagents/GetLatestHelmPackagePath?api-version=2019-11-01-preview&releaseTrain=stable response: body: - string: '{"repositoryPath":"mcr.microsoft.com/azurearck8s/batch1/stable/azure-arc-k8sagents:1.6.16"}' + string: '{"repositoryPath":"mcr.microsoft.com/azurearck8s/canary/stable/azure-arc-k8sagents:1.8.14"}' headers: api-supported-versions: - 2019-11-01-Preview @@ -3288,7 +4464,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 18:03:41 GMT + - Tue, 18 Oct 2022 19:57:07 GMT strict-transport-security: - max-age=15724800; includeSubDomains x-content-type-options: @@ -3297,10 +4473,10 @@ interactions: code: 200 message: OK - request: - body: '{"tags": {"foo": "doo"}, "location": "eastus", "identity": {"type": "SystemAssigned"}, - "properties": {"agentPublicKeyCertificate": "MIICCgKCAgEAsv0RDagHtwzZ8MMjn6rcn86hyyzSCnM24phfO8UxvSL2PMSMvZiKDtGvkSJ48H6Zp8zeVxK66P7kTS35wbQtfAExuN0YHHKKh3PcDYMb+0Vqym0V3yGa5jRz7RI6QnB5Qf4cGCKC6777fdq9CEYH2SZnyMr9xg/dVWe7RSHnzufltQLZYdXFG2VK37CbJTBgfS1narI2o9iBPUmh6ZlUwJJsQ2bpC8KotS2QIWMVUaX8rF2Wq0UtMp1NMc6yXghr6Vn6z6HLPieZmJsorrjyagzvdapJTvdMNhxvbNTwa/ghV3dUVSfxg4NDVmkG9G67gqx67UNHJZ/F4NHmAzrzCdHDT9bETFFYsdW5rcR/J637jZGv+yVK2k8TYc1aqMh22PvIpKb1oDG9OYdOPOc6xt1dXsU8ufQNTauC+kj8bP1+fo+sFgB/PEp/I1INCh+AXDuaLBIF3lff+iaOoOWU6XCP/Del9xwCWgeiWzgM4pYEInA9wYLi61wHt2hUy6/dkFGTj22iiDF/xNVbLwjztGAEnqlGmCCJ+zDA+hrnj98oDDUIn87UIB/hjoiBokUp2p6QCln72TkovVkXvFj8XyhzdJOs0FMartX0CB0LtRfie5VtFBv7rH5fn57peEMrYv5042zqNUVXPG/o1TLkYJ/9QiQmtJ+VohXlAR+t2p8CAwEAAQ==", + body: '{"tags": {"foo": "doo"}, "location": "eastus2euap", "identity": {"type": + "SystemAssigned"}, "properties": {"agentPublicKeyCertificate": "MIICCgKCAgEAsdZ9iZT5FXZBxxqv5xG8sf0LOxll1gD5GchTYOmGuJ6ApWz4vepGQ76pX7yAh4+0glnenYCLZmacvv5DJKaXUjKf6BF0Dt6zIT8Wfr5sG7cKprlRWO1+YWHrSdP4L1cgZ8Mwe7sa9FGGhr8WuwQd9b8Ujau47+nBNY2GwtEK9dtA5HnLoa/O2u1ViJBbnwhBHGUuMUmbmr9JsPub9k4QjQjmMtcY7VeyLemSATFerqH5/OmGKDX4JD6XGVd1rD1EGqCkaGa1floKxx5/wouUs6Uerz6kr3/SSTMNqBjJwBuFobfOp7tRwnoQEzs8VdVv7M3C3J27H6/rCzyvOLIMHtXRis+80mszgJu7qEFLmTMtcB/11T5qFpbPLU/M0hwhRCQ51THmGSe77HPfiMxux6J0X+D1QL2YbwEWp4Ijm4KCE3fUE8XapVcMLL7XInCmGAi1shMPtMYevhr2lu+YMRWJ3H2ey1CVKO2ItuHsBWHB2mh7v+Qb9CYaqPZp31oIbBbEJYj7/CiHg+JhsTNCQx+Jb13tXyXVQ6gDBmk1qRyHYxn1jA+u8iXqZgN0J34UxiC001OJNyHZtf5OuYcg6LwiT2YpcGNQ2Tii7GRYB3OqnPQb730UNFXghz7yLkPGYbQswmZcr0SPDfXq1XdseJDi5UWoyEN0mtryaPwaJC8CAwEAAQ==", "distribution": "aks", "infrastructure": "azure", "privateLinkState": "Enabled", - "privateLinkScopeResourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar-pls/providers/Microsoft.HybridCompute/privateLinkScopes/testpls"}}' + "privateLinkScopeResourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.HybridCompute/privateLinkScopes/temppls"}}' headers: Accept: - application/json @@ -3311,31 +4487,32 @@ interactions: Connection: - keep-alive Content-Length: - - '1093' + - '1094' Content-Type: - 
application/json ParameterSetName: - - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id + - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id --yes User-Agent: - - AZURECLI/2.37.0 azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cliplscc?api-version=2022-05-01-preview + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cliplscc?api-version=2022-10-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cliplscc","name":"cliplscc","type":"microsoft.kubernetes/connectedclusters","location":"eastus","tags":{"foo":"doo"},"systemData":{"createdBy":"akkeshar@microsoft.com","createdByType":"User","createdAt":"2022-06-07T18:04:15.1402577Z","lastModifiedBy":"akkeshar@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-06-07T18:04:15.1402577Z"},"identity":{"principalId":"89181e76-2e9d-4546-8b41-168e985fca0c","tenantId":"72f988bf-86f1-41af-91ab-2d7cd011db47","type":"SystemAssigned"},"properties":{"provisioningState":"Accepted","connectivityStatus":"Connecting","privateLinkState":"Enabled","agentPublicKeyCertificate":"MIICCgKCAgEAsv0RDagHtwzZ8MMjn6rcn86hyyzSCnM24phfO8UxvSL2PMSMvZiKDtGvkSJ48H6Zp8zeVxK66P7kTS35wbQtfAExuN0YHHKKh3PcDYMb+0Vqym0V3yGa5jRz7RI6QnB5Qf4cGCKC6777fdq9CEYH2SZnyMr9xg/dVWe7RSHnzufltQLZYdXFG2VK37CbJTBgfS1narI2o9iBPUmh6ZlUwJJsQ2bpC8KotS2QIWMVUaX8rF2Wq0UtMp1NMc6yXghr6Vn6z6HLPieZmJsorrjyagzvdapJTvdMNhxvbNTwa/ghV3dUVSfxg4NDVmkG9G67gqx67UNHJZ/F4NHmAzrzCdHDT9bETFFYsdW5rcR/J637jZGv+yVK2k8TYc1aqMh22PvIpKb1oDG9OYdOPOc6xt1dXsU8ufQNTauC+kj8bP1+fo+sFgB/PEp/I1INCh+AXDuaLBIF3lff+iaOoOWU6XCP/Del9xwCWgeiWzgM4pYEInA9wYLi61wHt2hUy6/dkFGTj22iiDF/xNVbLwjztGAEnqlGmCCJ+zDA+hrnj98oDDUIn87UIB/hjoiBokUp2p6QCln72TkovVkXvFj8XyhzdJOs0FMartX0CB0LtRfie5VtFBv7rH5fn57peEMrYv5042zqNUVXPG/o1TLkYJ/9QiQmtJ+VohXlAR+t2p8CAwEAAQ==","distribution":"aks","infrastructure":"azure","privateLinkScopeResourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar-pls/providers/Microsoft.HybridCompute/privateLinkScopes/testpls"}}' + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cliplscc","name":"cliplscc","type":"microsoft.kubernetes/connectedclusters","location":"eastus2euap","tags":{"foo":"doo"},"systemData":{"createdBy":"akkeshar@microsoft.com","createdByType":"User","createdAt":"2022-10-18T19:57:27.0172265Z","lastModifiedBy":"akkeshar@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-18T19:57:27.0172265Z"},"identity":{"principalId":"86c750e8-9f80-4398-a0e9-ef5c804237d1","tenantId":"72f988bf-86f1-41af-91ab-2d7cd011db47","type":"SystemAssigned"},"properties":{"provisioningState":"Accepted","connectivityStatus":"Connecting","privateLinkState":"Enabled","azureHybridBenefit":"NotApplicable","agentPublicKeyCertificate":"MIICCgKCAgEAsdZ9iZT5FXZBxxqv5xG8sf0LOxll1gD5GchTYOmGuJ6ApWz4vepGQ76pX7yAh4+0glnenYCLZmacvv5DJKaXUjKf6BF0Dt6zIT8Wfr5sG7cKprlRWO1+YWHrSdP4L1cgZ8Mwe7sa9FGGhr8WuwQd9b8Ujau47+nBNY2GwtEK9dtA5HnLoa/O2u1ViJBbnwhBHGUuMUmbmr9JsPub9k4QjQjmMtcY7VeyLemSATFerqH5/OmGKDX4JD6XGVd1rD1EGqCkaGa1floKxx5/wouUs6Uerz6kr3/SSTMNqBjJwBuFobfOp7tRwnoQEzs8VdVv7M3C3J27H6/rCzyvOLIMHtXRis+80mszgJu7qEFLmTMtcB/11T5qFpbPLU/M0hwhRCQ51THmGSe77HPfiMxux6J0X+D1QL2YbwEWp4Ijm4KCE3fUE8XapVcMLL7XInCmGAi1shMPtMYevhr2lu+YMRWJ3H2ey1CVKO2ItuHsBWHB2mh7v+Qb9CYaqPZp31oIbBbEJYj7/CiHg+JhsTNCQx+Jb13tXyXVQ6gDBmk1qRyHYxn1jA+u8iXqZgN0J34UxiC001OJNyHZtf5OuYcg6LwiT2YpcGNQ2Tii7GRYB3OqnPQb730UNFXghz7yLkPGYbQswmZcr0SPDfXq1XdseJDi5UWoyEN0mtryaPwaJC8CAwEAAQ==","distribution":"aks","infrastructure":"azure","privateLinkScopeResourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.HybridCompute/privateLinkScopes/temppls"}}' headers: azure-asyncoperation: - - https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/c076ac8a-20b0-43d6-a8cd-fcb614a1205c*8738D9B038F603442BFE6B89D055B1E438DA9BC8B386B2FE9015CECB2D2CC8DB?api-version=2022-05-01-preview + - https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS2EUAP/operationStatuses/3ec56f1a-23ca-45e0-9d0f-2cc18e2730a8*8738D9B038F603442BFE6B89D055B1E438DA9BC8B386B2FE9015CECB2D2CC8DB?api-version=2022-10-01-preview cache-control: - no-cache content-length: - - '1686' + - '1724' content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 18:04:20 GMT + - Tue, 18 Oct 2022 19:57:31 GMT etag: - - '"1001c50b-0000-0100-0000-629f93220000"' + - '"010044ea-0000-3400-0000-634f052a0000"' expires: - '-1' pragma: @@ -3363,25 +4540,26 @@ interactions: Connection: - keep-alive ParameterSetName: - - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id + - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id --yes User-Agent: - - AZURECLI/2.37.0 azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/c076ac8a-20b0-43d6-a8cd-fcb614a1205c*8738D9B038F603442BFE6B89D055B1E438DA9BC8B386B2FE9015CECB2D2CC8DB?api-version=2022-05-01-preview + uri: https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS2EUAP/operationStatuses/3ec56f1a-23ca-45e0-9d0f-2cc18e2730a8*8738D9B038F603442BFE6B89D055B1E438DA9BC8B386B2FE9015CECB2D2CC8DB?api-version=2022-10-01-preview response: body: - string: 
'{"id":"/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/c076ac8a-20b0-43d6-a8cd-fcb614a1205c*8738D9B038F603442BFE6B89D055B1E438DA9BC8B386B2FE9015CECB2D2CC8DB","name":"c076ac8a-20b0-43d6-a8cd-fcb614a1205c*8738D9B038F603442BFE6B89D055B1E438DA9BC8B386B2FE9015CECB2D2CC8DB","resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cliplscc","status":"Succeeded","startTime":"2022-06-07T18:04:18.033749Z","endTime":"2022-06-07T18:04:25.5540321Z","properties":null}' + string: '{"id":"/providers/Microsoft.Kubernetes/locations/EASTUS2EUAP/operationStatuses/3ec56f1a-23ca-45e0-9d0f-2cc18e2730a8*8738D9B038F603442BFE6B89D055B1E438DA9BC8B386B2FE9015CECB2D2CC8DB","name":"3ec56f1a-23ca-45e0-9d0f-2cc18e2730a8*8738D9B038F603442BFE6B89D055B1E438DA9BC8B386B2FE9015CECB2D2CC8DB","resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cliplscc","status":"Succeeded","startTime":"2022-10-18T19:57:29.665549Z","endTime":"2022-10-18T19:57:40.0832662Z","properties":null}' headers: cache-control: - no-cache content-length: - - '559' + - '564' content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 18:04:51 GMT + - Tue, 18 Oct 2022 19:58:02 GMT etag: - - '"2d000b4b-0000-0100-0000-629f93290000"' + - '"0200a814-0000-3400-0000-634f05340000"' expires: - '-1' pragma: @@ -3409,25 +4587,26 @@ interactions: Connection: - keep-alive ParameterSetName: - - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id + - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id --yes User-Agent: - - AZURECLI/2.37.0 azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cliplscc?api-version=2022-05-01-preview + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cliplscc?api-version=2022-10-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cliplscc","name":"cliplscc","type":"microsoft.kubernetes/connectedclusters","location":"eastus","tags":{"foo":"doo"},"systemData":{"createdBy":"akkeshar@microsoft.com","createdByType":"User","createdAt":"2022-06-07T18:04:15.1402577Z","lastModifiedBy":"akkeshar@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-06-07T18:04:15.1402577Z"},"identity":{"principalId":"89181e76-2e9d-4546-8b41-168e985fca0c","tenantId":"72f988bf-86f1-41af-91ab-2d7cd011db47","type":"SystemAssigned"},"properties":{"provisioningState":"Succeeded","connectivityStatus":"Connecting","privateLinkState":"Enabled","agentPublicKeyCertificate":"MIICCgKCAgEAsv0RDagHtwzZ8MMjn6rcn86hyyzSCnM24phfO8UxvSL2PMSMvZiKDtGvkSJ48H6Zp8zeVxK66P7kTS35wbQtfAExuN0YHHKKh3PcDYMb+0Vqym0V3yGa5jRz7RI6QnB5Qf4cGCKC6777fdq9CEYH2SZnyMr9xg/dVWe7RSHnzufltQLZYdXFG2VK37CbJTBgfS1narI2o9iBPUmh6ZlUwJJsQ2bpC8KotS2QIWMVUaX8rF2Wq0UtMp1NMc6yXghr6Vn6z6HLPieZmJsorrjyagzvdapJTvdMNhxvbNTwa/ghV3dUVSfxg4NDVmkG9G67gqx67UNHJZ/F4NHmAzrzCdHDT9bETFFYsdW5rcR/J637jZGv+yVK2k8TYc1aqMh22PvIpKb1oDG9OYdOPOc6xt1dXsU8ufQNTauC+kj8bP1+fo+sFgB/PEp/I1INCh+AXDuaLBIF3lff+iaOoOWU6XCP/Del9xwCWgeiWzgM4pYEInA9wYLi61wHt2hUy6/dkFGTj22iiDF/xNVbLwjztGAEnqlGmCCJ+zDA+hrnj98oDDUIn87UIB/hjoiBokUp2p6QCln72TkovVkXvFj8XyhzdJOs0FMartX0CB0LtRfie5VtFBv7rH5fn57peEMrYv5042zqNUVXPG/o1TLkYJ/9QiQmtJ+VohXlAR+t2p8CAwEAAQ==","distribution":"aks","infrastructure":"azure","privateLinkScopeResourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar-pls/providers/Microsoft.HybridCompute/privateLinkScopes/testpls"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cliplscc","name":"cliplscc","type":"microsoft.kubernetes/connectedclusters","location":"eastus2euap","tags":{"foo":"doo"},"systemData":{"createdBy":"akkeshar@microsoft.com","createdByType":"User","createdAt":"2022-10-18T19:57:27.0172265Z","lastModifiedBy":"akkeshar@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-18T19:57:27.0172265Z"},"identity":{"principalId":"86c750e8-9f80-4398-a0e9-ef5c804237d1","tenantId":"72f988bf-86f1-41af-91ab-2d7cd011db47","type":"SystemAssigned"},"properties":{"provisioningState":"Succeeded","connectivityStatus":"Connecting","privateLinkState":"Enabled","azureHybridBenefit":"NotApplicable","agentPublicKeyCertificate":"MIICCgKCAgEAsdZ9iZT5FXZBxxqv5xG8sf0LOxll1gD5GchTYOmGuJ6ApWz4vepGQ76pX7yAh4+0glnenYCLZmacvv5DJKaXUjKf6BF0Dt6zIT8Wfr5sG7cKprlRWO1+YWHrSdP4L1cgZ8Mwe7sa9FGGhr8WuwQd9b8Ujau47+nBNY2GwtEK9dtA5HnLoa/O2u1ViJBbnwhBHGUuMUmbmr9JsPub9k4QjQjmMtcY7VeyLemSATFerqH5/OmGKDX4JD6XGVd1rD1EGqCkaGa1floKxx5/wouUs6Uerz6kr3/SSTMNqBjJwBuFobfOp7tRwnoQEzs8VdVv7M3C3J27H6/rCzyvOLIMHtXRis+80mszgJu7qEFLmTMtcB/11T5qFpbPLU/M0hwhRCQ51THmGSe77HPfiMxux6J0X+D1QL2YbwEWp4Ijm4KCE3fUE8XapVcMLL7XInCmGAi1shMPtMYevhr2lu+YMRWJ3H2ey1CVKO2ItuHsBWHB2mh7v+Qb9CYaqPZp31oIbBbEJYj7/CiHg+JhsTNCQx+Jb13tXyXVQ6gDBmk1qRyHYxn1jA+u8iXqZgN0J34UxiC001OJNyHZtf5OuYcg6LwiT2YpcGNQ2Tii7GRYB3OqnPQb730UNFXghz7yLkPGYbQswmZcr0SPDfXq1XdseJDi5UWoyEN0mtryaPwaJC8CAwEAAQ==","distribution":"AKS","infrastructure":"azure","privateLinkScopeResourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.HybridCompute/privateLinkScopes/temppls"}}' headers: cache-control: - no-cache content-length: - - '1687' + - '1725' content-type: - application/json; charset=utf-8 date: 
- - Tue, 07 Jun 2022 18:04:52 GMT + - Tue, 18 Oct 2022 19:58:03 GMT etag: - - '"1001f80b-0000-0100-0000-629f93290000"' + - '"010053ea-0000-3400-0000-634f05340000"' expires: - '-1' pragma: @@ -3457,9 +4636,10 @@ interactions: Connection: - keep-alive ParameterSetName: - - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id + - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id --yes User-Agent: - - AZURECLI/2.37.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ExtendedLocation?api-version=2021-04-01 response: @@ -3473,7 +4653,12 @@ interactions: US","West Europe","North Europe","France Central","Southeast Asia","Australia East","East US 2","West US 2","UK South","Central US","West Central US","West US","North Central US","South Central US","Korea Central","Japan East","East - Asia","West US 3","Canada Central","East US 2 EUAP"],"apiVersions":["2021-08-31-preview","2021-08-15","2021-03-15-preview","2020-07-15-privatepreview"],"capabilities":"None"},{"resourceType":"locations/operationsstatus","locations":["East + Asia","West US 3","Canada Central","East US 2 EUAP"],"apiVersions":["2021-08-31-preview","2021-08-15","2021-03-15-preview","2020-07-15-privatepreview"],"capabilities":"None"},{"resourceType":"customLocations/resourceSyncRules","locations":["East + US","West Europe","North Europe","France Central","Southeast Asia","Australia + East","East US 2","West US 2","UK South","Central US","West Central US","West + US","North Central US","South Central US","Korea Central","Japan East","East + Asia","West US 3","Canada Central","East US 2 EUAP"],"apiVersions":["2021-08-31-preview"],"defaultApiVersion":"2021-08-31-preview","capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"locations/operationsstatus","locations":["East US","West Europe","North Europe","France Central","Southeast Asia","Australia East","East US 2","West US 2","UK South","Central US","West Central US","West US","North Central US","South Central US","Korea Central","Japan East","East @@ -3481,12 +4666,7 @@ interactions: US","West Europe","North Europe","France Central","Southeast Asia","Australia East","East US 2","West US 2","UK South","Central US","West Central US","West US","North Central US","South Central US","Korea Central","Japan East","East - Asia","West US 3","Canada Central","East US 2 Euap"],"apiVersions":["2021-03-15-preview","2020-07-15-privatepreview"],"capabilities":"None"},{"resourceType":"operations","locations":[],"apiVersions":["2021-08-31-preview","2021-08-15","2021-03-15-preview","2020-07-15-privatepreview"],"capabilities":"None"},{"resourceType":"customLocations/resourceSyncRules","locations":["East - US 2 EUAP","East US","West Europe","North Europe","France Central","Southeast - Asia","Australia East","East US 2","West US 2","UK South","Central US","West - Central US","West US","North Central US","South Central US","Korea Central","Japan - East","East Asia","West US 3","Canada Central"],"apiVersions":["2021-08-31-preview"],"defaultApiVersion":"2021-08-31-preview","capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, 
SupportsLocation"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + Asia","West US 3","Canada Central","East US 2 Euap"],"apiVersions":["2021-03-15-preview","2020-07-15-privatepreview"],"capabilities":"None"},{"resourceType":"operations","locations":[],"apiVersions":["2021-08-31-preview","2021-08-15","2021-03-15-preview","2020-07-15-privatepreview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache @@ -3495,7 +4675,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 18:04:51 GMT + - Tue, 18 Oct 2022 19:58:03 GMT expires: - '-1' pragma: @@ -3521,10 +4701,10 @@ interactions: Connection: - keep-alive ParameterSetName: - - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id + - -g -n -l --tags --kube-config --enable-private-link --pls-arm-id --yes User-Agent: - - python/3.7.7 (Windows-10-10.0.22000-SP0) msrest/0.6.21 msrest_azure/0.6.4 - azure-graphrbac/0.60.0 Azure-SDK-For-Python AZURECLI/2.37.0 + - python/3.7.7 (Windows-10-10.0.22621-SP0) msrest/0.7.1 msrest_azure/0.6.4 azure-graphrbac/0.60.0 + Azure-SDK-For-Python AZURECLI/2.41.0 (MSI) accept-language: - en-US method: GET @@ -3547,19 +4727,19 @@ interactions: dataserviceversion: - 3.0; date: - - Tue, 07 Jun 2022 18:04:53 GMT + - Tue, 18 Oct 2022 19:58:03 GMT duration: - - '1259786' + - '1392532' expires: - '-1' ocp-aad-diagnostics-server-name: - - frRfQmjwsIzDshn1inmXGupkH+QmybVgokKJazbVaj4= + - qFC8iPsJb7IP2W9UCWL1qXzD/gTfvayGLcO79vOOKcc= ocp-aad-session-key: - - znDLfkuF4a8BHwmEaJGXzKt06aFQ9cdFUVQw5EPQ3pe4xmagc31jFPmkCJo-DhHSml2gstVMu-35Za4PDDj4RmmPgmEpy2vkwffHXJsrZRtpM4MFRrdis_jnm6LQpd0M.GGnqS5-T4Ewwa1Z_HONdWwu7dE9H8iI_1wjN9zFY5Pc + - oBj3V580lHt1I8NQC-MLU_-E_WW6TL1qGFwOJGmE6K7idxg_mNKVZIavLxB0342vh-2Une3vkUHU1EmHM6RK6YMAuKjDDZgcOwx_V5z0FSHalslGT8zPLkZeruYsVMDK.wGjKQxX4xATrrLZGrmuYQy9UgXDzODzcklzc5wqt1pI pragma: - no-cache request-id: - - 4de2d564-3af6-41a2-bdb5-746523210e33 + - bef25c12-8577-4d85-9529-de65d94a754a strict-transport-security: - max-age=31536000; includeSubDomains x-aspnet-version: @@ -3581,29 +4761,30 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://akkeshar-dns-08147f89.hcp.eastus2euap.azmk8s.io/apis/networking.k8s.io/v1/ + uri: https://tempaks-dns-0f8c9536.hcp.southcentralus.azmk8s.io/version/ response: body: - string: '{"kind":"APIResourceList","apiVersion":"v1","groupVersion":"networking.k8s.io/v1","resources":[{"name":"ingressclasses","singularName":"","namespaced":false,"kind":"IngressClass","verbs":["create","delete","deletecollection","get","list","patch","update","watch"],"storageVersionHash":"l/iqIbDgFyQ="},{"name":"ingresses","singularName":"","namespaced":true,"kind":"Ingress","verbs":["create","delete","deletecollection","get","list","patch","update","watch"],"shortNames":["ing"],"storageVersionHash":"39NQlfNR+bo="},{"name":"ingresses/status","singularName":"","namespaced":true,"kind":"Ingress","verbs":["get","patch","update"]},{"name":"networkpolicies","singularName":"","namespaced":true,"kind":"NetworkPolicy","verbs":["create","delete","deletecollection","get","list","patch","update","watch"],"shortNames":["netpol"],"storageVersionHash":"YpfwF18m1G8="}]} - - ' + string: "{\n \"major\": \"1\",\n \"minor\": \"24\",\n \"gitVersion\": \"v1.24.6\",\n + \ \"gitCommit\": \"b39bf148cd654599a52e867485c02c4f9d28b312\",\n 
\"gitTreeState\": + \"clean\",\n \"buildDate\": \"2022-09-21T21:46:51Z\",\n \"goVersion\": \"go1.18.6\",\n + \ \"compiler\": \"gc\",\n \"platform\": \"linux/amd64\"\n}" headers: audit-id: - - 0fb05ddc-5d7c-4b9c-abd6-cacaf2123cbf + - deec8bf0-0c1c-4518-8e54-399f7dfad540 cache-control: - no-cache, private content-length: - - '864' + - '263' content-type: - application/json date: - - Tue, 07 Jun 2022 18:06:07 GMT + - Tue, 18 Oct 2022 19:59:19 GMT x-kubernetes-pf-flowschema-uid: - - 08843a36-c7a9-489b-a782-1dc805dc9f54 + - 9c9284bf-ac50-497c-86a8-5f4f7d857b28 x-kubernetes-pf-prioritylevel-uid: - - e6f4d88d-c43f-4941-a57f-7f1230896bdc + - f041de6f-3328-46ec-b36d-f51c7cd89b61 status: code: 200 message: OK @@ -3615,29 +4796,29 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://akkeshar-dns-08147f89.hcp.eastus2euap.azmk8s.io/api/v1/namespaces/azure-arc/configmaps/azure-clusterconfig + uri: https://tempaks-dns-0f8c9536.hcp.southcentralus.azmk8s.io/api/v1/namespaces/azure-arc/configmaps/azure-clusterconfig response: body: - string: '{"kind":"ConfigMap","apiVersion":"v1","metadata":{"name":"azure-clusterconfig","namespace":"azure-arc","uid":"33cbd434-6bcf-484a-a78f-8ac90f7ce0fc","resourceVersion":"42262155","creationTimestamp":"2022-06-07T18:05:07Z","labels":{"app.kubernetes.io/managed-by":"Helm"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-06-07T18:05:07Z","fieldsType":"FieldsV1","fieldsV1":{"f:data":{".":{},"f:ARC_AGENT_HELM_CHART_NAME":{},"f:ARC_AGENT_RELEASE_TRAIN":{},"f:AZURE_ARC_AGENT_VERSION":{},"f:AZURE_ARC_AUTOUPDATE":{},"f:AZURE_ARC_HELM_NAMESPACE":{},"f:AZURE_ARC_RELEASE_NAME":{},"f:AZURE_ENVIRONMENT":{},"f:AZURE_REGION":{},"f:AZURE_RESOURCE_GROUP":{},"f:AZURE_RESOURCE_NAME":{},"f:AZURE_SUBSCRIPTION_ID":{},"f:AZURE_TENANT_ID":{},"f:CLUSTER_CONNECT_AGENT_ENABLED":{},"f:CLUSTER_TYPE":{},"f:DEBUG_LOGGING":{},"f:EXTENSION_OPERATOR_ENABLED":{},"f:FLUX_CLIENT_DEFAULT_LOCATION":{},"f:FLUX_UPSTREAM_SERVICE_ENABLED":{},"f:GITOPS_ENABLED":{},"f:HELM_AUTO_UPDATE_CHECK_FREQUENCY_IN_MINUTES":{},"f:IS_CLIENT_SECRET_A_TOKEN":{},"f:KUBERNETES_DISTRO":{},"f:KUBERNETES_INFRA":{},"f:MANAGED_IDENTITY_AUTH":{},"f:MAX_ENTRIES_PER_STORE":{},"f:MAX_STORES":{},"f:NO_AUTH_HEADER_DATA_PLANE":{},"f:ONBOARDING_SECRET_NAME":{},"f:ONBOARDING_SECRET_NAMESPACE":{},"f:RESOURCE_SYNC_ENABLE_CHUNKED_SYNC":{},"f:RESOURCE_SYNC_LIST_CHUNK_SIZE":{},"f:RP_NAMESPACE":{},"f:TAGS":{}},"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:app.kubernetes.io/managed-by":{}}}}}]},"data":{"ARC_AGENT_HELM_CHART_NAME":"azure-arc-k8sagents","ARC_AGENT_RELEASE_TRAIN":"stable","AZURE_ARC_AGENT_VERSION":"1.6.16","AZURE_ARC_AUTOUPDATE":"true","AZURE_ARC_HELM_NAMESPACE":"default","AZURE_ARC_RELEASE_NAME":"azure-arc","AZURE_ENVIRONMENT":"AZUREPUBLICCLOUD","AZURE_REGION":"eastus","AZURE_RESOURCE_GROUP":"akkeshar","AZURE_RESOURCE_NAME":"cliplscc","AZURE_SUBSCRIPTION_ID":"1bfbb5d0-917e-4346-9026-1d3b344417f5","AZURE_TENANT_ID":"72f988bf-86f1-41af-91ab-2d7cd011db47","CLUSTER_CONNECT_AGENT_ENABLED":"false","CLUSTER_TYPE":"ConnectedClusters","DEBUG_LOGGING":"false","EXTENSION_OPERATOR_ENABLED":"true","FLUX_CLIENT_DEFAULT_LOCATION":"mcr.microsoft.com/azurearck8s/arc-preview/fluxctl:0.2.0","FLUX_UPSTREAM_SERVICE_ENABLED":"true","GIT
OPS_ENABLED":"true","HELM_AUTO_UPDATE_CHECK_FREQUENCY_IN_MINUTES":"60","IS_CLIENT_SECRET_A_TOKEN":"false","KUBERNETES_DISTRO":"aks","KUBERNETES_INFRA":"azure","MANAGED_IDENTITY_AUTH":"true","MAX_ENTRIES_PER_STORE":"680","MAX_STORES":"30","NO_AUTH_HEADER_DATA_PLANE":"false","ONBOARDING_SECRET_NAME":"azure-arc-connect-privatekey","ONBOARDING_SECRET_NAMESPACE":"azure-arc","RESOURCE_SYNC_ENABLE_CHUNKED_SYNC":"false","RESOURCE_SYNC_LIST_CHUNK_SIZE":"200","RP_NAMESPACE":"Microsoft.Kubernetes","TAGS":"map[]"}} + string: '{"kind":"ConfigMap","apiVersion":"v1","metadata":{"name":"azure-clusterconfig","namespace":"azure-arc","uid":"ec61a30c-7e2d-462f-abd5-588cfc16ec20","resourceVersion":"2977078","creationTimestamp":"2022-10-18T19:58:18Z","labels":{"app.kubernetes.io/managed-by":"Helm"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:58:18Z","fieldsType":"FieldsV1","fieldsV1":{"f:data":{".":{},"f:ARC_AGENT_HELM_CHART_NAME":{},"f:ARC_AGENT_RELEASE_TRAIN":{},"f:AZURE_ARC_AGENT_VERSION":{},"f:AZURE_ARC_AUTOUPDATE":{},"f:AZURE_ARC_HELM_NAMESPACE":{},"f:AZURE_ARC_RELEASE_NAME":{},"f:AZURE_ENVIRONMENT":{},"f:AZURE_REGION":{},"f:AZURE_RESOURCE_GROUP":{},"f:AZURE_RESOURCE_MANAGER_ENDPOINT":{},"f:AZURE_RESOURCE_NAME":{},"f:AZURE_SUBSCRIPTION_ID":{},"f:AZURE_TENANT_ID":{},"f:CLUSTER_CONNECT_AGENT_ENABLED":{},"f:CLUSTER_TYPE":{},"f:CUSTOM_IDENTITY_PROVIDER_ENABLED":{},"f:DEBUG_LOGGING":{},"f:EXTENSION_OPERATOR_ENABLED":{},"f:FLUX_CLIENT_DEFAULT_LOCATION":{},"f:FLUX_UPSTREAM_SERVICE_ENABLED":{},"f:GITOPS_ENABLED":{},"f:GUARD_PKI_HOSTPATH":{},"f:HELM_AUTO_UPDATE_CHECK_FREQUENCY_IN_MINUTES":{},"f:IS_CLIENT_SECRET_A_TOKEN":{},"f:KUBERNETES_DISTRO":{},"f:KUBERNETES_INFRA":{},"f:MANAGED_IDENTITY_AUTH":{},"f:MAX_ENTRIES_PER_STORE":{},"f:MAX_STORES":{},"f:MSI_ADAPTER_ARTIFACT_PATH":{},"f:NO_AUTH_HEADER_DATA_PLANE":{},"f:ONBOARDING_SECRET_NAME":{},"f:ONBOARDING_SECRET_NAMESPACE":{},"f:RESOURCE_SYNC_ENABLE_CHUNKED_SYNC":{},"f:RESOURCE_SYNC_LIST_CHUNK_SIZE":{},"f:RP_NAMESPACE":{},"f:TAGS":{}},"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:app.kubernetes.io/managed-by":{}}}}}]},"data":{"ARC_AGENT_HELM_CHART_NAME":"azure-arc-k8sagents","ARC_AGENT_RELEASE_TRAIN":"stable","AZURE_ARC_AGENT_VERSION":"1.8.14","AZURE_ARC_AUTOUPDATE":"true","AZURE_ARC_HELM_NAMESPACE":"default","AZURE_ARC_RELEASE_NAME":"azure-arc","AZURE_ENVIRONMENT":"AZUREPUBLICCLOUD","AZURE_REGION":"eastus2euap","AZURE_RESOURCE_GROUP":"akkeshar","AZURE_RESOURCE_MANAGER_ENDPOINT":"","AZURE_RESOURCE_NAME":"cliplscc","AZURE_SUBSCRIPTION_ID":"1bfbb5d0-917e-4346-9026-1d3b344417f5","AZURE_TENANT_ID":"72f988bf-86f1-41af-91ab-2d7cd011db47","CLUSTER_CONNECT_AGENT_ENABLED":"false","CLUSTER_TYPE":"ConnectedClusters","CUSTOM_IDENTITY_PROVIDER_ENABLED":"false","DEBUG_LOGGING":"false","EXTENSION_OPERATOR_ENABLED":"true","FLUX_CLIENT_DEFAULT_LOCATION":"mcr.microsoft.com/azurearck8s/arc-preview/fluxctl:0.2.0","FLUX_UPSTREAM_SERVICE_ENABLED":"true","GITOPS_ENABLED":"true","GUARD_PKI_HOSTPATH":"","HELM_AUTO_UPDATE_CHECK_FREQUENCY_IN_MINUTES":"60","IS_CLIENT_SECRET_A_TOKEN":"false","KUBERNETES_DISTRO":"aks","KUBERNETES_INFRA":"azure","MANAGED_IDENTITY_AUTH":"true","MAX_ENTRIES_PER_STORE":"680","MAX_STORES":"30","MSI_ADAPTER_ARTIFACT_PATH":"mcr.microsoft.com/azurearck8s/msi-adapter:1.0.2","NO_AUTH_HEADER_DATA_PLANE":"false","ONBOARDING_SECRET_NAME":"azure
-arc-connect-privatekey","ONBOARDING_SECRET_NAMESPACE":"azure-arc","RESOURCE_SYNC_ENABLE_CHUNKED_SYNC":"false","RESOURCE_SYNC_LIST_CHUNK_SIZE":"200","RP_NAMESPACE":"Microsoft.Kubernetes","TAGS":"map[]"}} ' headers: audit-id: - - 2e50b257-9592-41be-9c6c-e21a5c2565b4 + - 048b8197-57f9-4f3d-8372-d25fa09d8f4f cache-control: - no-cache, private content-type: - application/json date: - - Tue, 07 Jun 2022 18:06:10 GMT + - Tue, 18 Oct 2022 19:59:22 GMT transfer-encoding: - chunked x-kubernetes-pf-flowschema-uid: - - 08843a36-c7a9-489b-a782-1dc805dc9f54 + - 9c9284bf-ac50-497c-86a8-5f4f7d857b28 x-kubernetes-pf-prioritylevel-uid: - - e6f4d88d-c43f-4941-a57f-7f1230896bdc + - f041de6f-3328-46ec-b36d-f51c7cd89b61 status: code: 200 message: OK @@ -3657,7 +4838,8 @@ interactions: ParameterSetName: - -g -n --kube-config -y User-Agent: - - AZURECLI/2.37.0 azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: DELETE uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cliplscc?api-version=2021-10-01 response: @@ -3665,7 +4847,7 @@ interactions: string: 'null' headers: azure-asyncoperation: - - https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/8de5d722-8613-43ab-a4f4-15dd6fef0450*8738D9B038F603442BFE6B89D055B1E438DA9BC8B386B2FE9015CECB2D2CC8DB?api-version=2021-10-01 + - https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS2EUAP/operationStatuses/e47d7e24-5f6f-49e6-accb-72bfc91eb5cc*8738D9B038F603442BFE6B89D055B1E438DA9BC8B386B2FE9015CECB2D2CC8DB?api-version=2021-10-01 cache-control: - no-cache content-length: @@ -3673,13 +4855,13 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 18:06:13 GMT + - Tue, 18 Oct 2022 19:59:26 GMT etag: - - '"1001b50d-0000-0100-0000-629f93960000"' + - '"0100bdea-0000-3400-0000-634f059f0000"' expires: - '-1' location: - - https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/8de5d722-8613-43ab-a4f4-15dd6fef0450*8738D9B038F603442BFE6B89D055B1E438DA9BC8B386B2FE9015CECB2D2CC8DB?api-version=2021-10-01 + - https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS2EUAP/operationStatuses/e47d7e24-5f6f-49e6-accb-72bfc91eb5cc*8738D9B038F603442BFE6B89D055B1E438DA9BC8B386B2FE9015CECB2D2CC8DB?api-version=2021-10-01 pragma: - no-cache strict-transport-security: @@ -3707,23 +4889,24 @@ interactions: ParameterSetName: - -g -n --kube-config -y User-Agent: - - AZURECLI/2.37.0 azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/8de5d722-8613-43ab-a4f4-15dd6fef0450*8738D9B038F603442BFE6B89D055B1E438DA9BC8B386B2FE9015CECB2D2CC8DB?api-version=2021-10-01 + uri: https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS2EUAP/operationStatuses/e47d7e24-5f6f-49e6-accb-72bfc91eb5cc*8738D9B038F603442BFE6B89D055B1E438DA9BC8B386B2FE9015CECB2D2CC8DB?api-version=2021-10-01 response: body: - string: 
'{"id":"/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/8de5d722-8613-43ab-a4f4-15dd6fef0450*8738D9B038F603442BFE6B89D055B1E438DA9BC8B386B2FE9015CECB2D2CC8DB","name":"8de5d722-8613-43ab-a4f4-15dd6fef0450*8738D9B038F603442BFE6B89D055B1E438DA9BC8B386B2FE9015CECB2D2CC8DB","resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cliplscc","status":"Succeeded","startTime":"2022-06-07T18:06:13.695811Z","endTime":"2022-06-07T18:06:18.1844452Z","properties":null}' + string: '{"id":"/providers/Microsoft.Kubernetes/locations/EASTUS2EUAP/operationStatuses/e47d7e24-5f6f-49e6-accb-72bfc91eb5cc*8738D9B038F603442BFE6B89D055B1E438DA9BC8B386B2FE9015CECB2D2CC8DB","name":"e47d7e24-5f6f-49e6-accb-72bfc91eb5cc*8738D9B038F603442BFE6B89D055B1E438DA9BC8B386B2FE9015CECB2D2CC8DB","resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cliplscc","status":"Succeeded","startTime":"2022-10-18T19:59:26.5310039Z","endTime":"2022-10-18T19:59:32.8503909Z","properties":null}' headers: cache-control: - no-cache content-length: - - '559' + - '565' content-type: - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 18:06:45 GMT + - Tue, 18 Oct 2022 19:59:58 GMT etag: - - '"2d005a4b-0000-0100-0000-629f939a0000"' + - '"0200fa14-0000-3400-0000-634f05a40000"' expires: - '-1' pragma: @@ -3743,33 +4926,46 @@ interactions: body: null headers: Accept: - - application/json - Content-Type: - - application/json + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - connectedk8s delete + Connection: + - keep-alive + ParameterSetName: + - -g -n --kube-config -y User-Agent: - - OpenAPI-Generator/11.0.0/python + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://akkeshar-dns-08147f89.hcp.eastus2euap.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS2EUAP/operationStatuses/e47d7e24-5f6f-49e6-accb-72bfc91eb5cc*8738D9B038F603442BFE6B89D055B1E438DA9BC8B386B2FE9015CECB2D2CC8DB?api-version=2021-10-01 response: body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"42262866"},"items":[{"metadata":{"name":"azure-arc","uid":"0c6cbc64-097d-4713-b2c7-3a9819c8e77d","resourceVersion":"42262856","creationTimestamp":"2022-06-07T18:05:06Z","deletionTimestamp":"2022-06-07T18:06:57Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-06-07T18:05:06Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating"}}]} - - ' + string: 
'{"id":"/providers/Microsoft.Kubernetes/locations/EASTUS2EUAP/operationStatuses/e47d7e24-5f6f-49e6-accb-72bfc91eb5cc*8738D9B038F603442BFE6B89D055B1E438DA9BC8B386B2FE9015CECB2D2CC8DB","name":"e47d7e24-5f6f-49e6-accb-72bfc91eb5cc*8738D9B038F603442BFE6B89D055B1E438DA9BC8B386B2FE9015CECB2D2CC8DB","resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/akkeshar/providers/Microsoft.Kubernetes/connectedClusters/cliplscc","status":"Succeeded","startTime":"2022-10-18T19:59:26.5310039Z","endTime":"2022-10-18T19:59:32.8503909Z","properties":null}' headers: - audit-id: - - eb37844d-ea86-4014-9bc4-c91d7ed7dc65 cache-control: - - no-cache, private + - no-cache content-length: - - '1022' + - '565' content-type: - - application/json + - application/json; charset=utf-8 date: - - Tue, 07 Jun 2022 18:06:59 GMT - x-kubernetes-pf-flowschema-uid: - - 08843a36-c7a9-489b-a782-1dc805dc9f54 - x-kubernetes-pf-prioritylevel-uid: - - e6f4d88d-c43f-4941-a57f-7f1230896bdc + - Tue, 18 Oct 2022 19:59:58 GMT + etag: + - '"0200fa14-0000-3400-0000-634f05a40000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff status: code: 200 message: OK @@ -3781,34 +4977,29 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://akkeshar-dns-08147f89.hcp.eastus2euap.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: https://tempaks-dns-0f8c9536.hcp.southcentralus.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc response: body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"42263015"},"items":[{"metadata":{"name":"azure-arc","uid":"0c6cbc64-097d-4713-b2c7-3a9819c8e77d","resourceVersion":"42263013","creationTimestamp":"2022-06-07T18:05:06Z","deletionTimestamp":"2022-06-07T18:06:57Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-06-07T18:05:06Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-06-07T18:07:04Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"
f:status":{},"f:type":{}}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ResourcesDiscovered","message":"All - resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ParsedGroupVersions","message":"All - legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ContentDeleted","message":"All - content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"SomeResourcesRemain","message":"Some - resources are remaining: pods. has 8 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ContentHasNoFinalizers","message":"All - content-preserving finalizers finished"}]}}]} + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"2977978"},"items":[{"metadata":{"name":"azure-arc","uid":"064efd70-3b51-499a-807a-356532eae430","resourceVersion":"2977971","creationTimestamp":"2022-10-18T19:58:17Z","deletionTimestamp":"2022-10-18T20:00:12Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:58:17Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating"}}]} ' headers: audit-id: - - db6e6631-3101-4d26-bd1e-d43e72c99de8 + - 5a73dcf8-db22-4731-9f69-4cad4494d370 cache-control: - no-cache, private + content-length: + - '1020' content-type: - application/json date: - - Tue, 07 Jun 2022 18:07:05 GMT - transfer-encoding: - - chunked + - Tue, 18 Oct 2022 20:00:14 GMT x-kubernetes-pf-flowschema-uid: - - 08843a36-c7a9-489b-a782-1dc805dc9f54 + - 9c9284bf-ac50-497c-86a8-5f4f7d857b28 x-kubernetes-pf-prioritylevel-uid: - - e6f4d88d-c43f-4941-a57f-7f1230896bdc + - f041de6f-3328-46ec-b36d-f51c7cd89b61 status: code: 200 message: OK @@ -3820,34 +5011,34 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://akkeshar-dns-08147f89.hcp.eastus2euap.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: https://tempaks-dns-0f8c9536.hcp.southcentralus.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc response: body: - string: 
'{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"42263035"},"items":[{"metadata":{"name":"azure-arc","uid":"0c6cbc64-097d-4713-b2c7-3a9819c8e77d","resourceVersion":"42263013","creationTimestamp":"2022-06-07T18:05:06Z","deletionTimestamp":"2022-06-07T18:06:57Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-06-07T18:05:06Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-06-07T18:07:04Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ResourcesDiscovered","message":"All - resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ParsedGroupVersions","message":"All - legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ContentDeleted","message":"All - content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"SomeResourcesRemain","message":"Some - resources are remaining: pods. 
has 8 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ContentHasNoFinalizers","message":"All + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"2978135"},"items":[{"metadata":{"name":"azure-arc","uid":"064efd70-3b51-499a-807a-356532eae430","resourceVersion":"2978135","creationTimestamp":"2022-10-18T19:58:17Z","deletionTimestamp":"2022-10-18T20:00:12Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:58:17Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T20:00:18Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"ResourcesDiscovered","message":"All + resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"ParsedGroupVersions","message":"All + legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"ContentDeleted","message":"All + content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"SomeResourcesRemain","message":"Some + resources are remaining: pods. 
has 8 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"ContentHasNoFinalizers","message":"All content-preserving finalizers finished"}]}}]} ' headers: audit-id: - - a5f613c2-3415-4b98-af33-140b981e324c + - 973cf731-af5d-4c0f-b5a7-7afbd7e18bda cache-control: - no-cache, private content-type: - application/json date: - - Tue, 07 Jun 2022 18:07:10 GMT + - Tue, 18 Oct 2022 20:00:19 GMT transfer-encoding: - chunked x-kubernetes-pf-flowschema-uid: - - 08843a36-c7a9-489b-a782-1dc805dc9f54 + - 9c9284bf-ac50-497c-86a8-5f4f7d857b28 x-kubernetes-pf-prioritylevel-uid: - - e6f4d88d-c43f-4941-a57f-7f1230896bdc + - f041de6f-3328-46ec-b36d-f51c7cd89b61 status: code: 200 message: OK @@ -3859,34 +5050,34 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://akkeshar-dns-08147f89.hcp.eastus2euap.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: https://tempaks-dns-0f8c9536.hcp.southcentralus.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc response: body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"42263060"},"items":[{"metadata":{"name":"azure-arc","uid":"0c6cbc64-097d-4713-b2c7-3a9819c8e77d","resourceVersion":"42263013","creationTimestamp":"2022-06-07T18:05:06Z","deletionTimestamp":"2022-06-07T18:06:57Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-06-07T18:05:06Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-06-07T18:07:04Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ResourcesDiscovered","message":"All - resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ParsedGroupVersions","message":"All - legacy kube types successfully 
parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ContentDeleted","message":"All - content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"SomeResourcesRemain","message":"Some - resources are remaining: pods. has 8 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ContentHasNoFinalizers","message":"All + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"2978165"},"items":[{"metadata":{"name":"azure-arc","uid":"064efd70-3b51-499a-807a-356532eae430","resourceVersion":"2978164","creationTimestamp":"2022-10-18T19:58:17Z","deletionTimestamp":"2022-10-18T20:00:12Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:58:17Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T20:00:18Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"ResourcesDiscovered","message":"All + resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"ParsedGroupVersions","message":"All + legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"ContentDeleted","message":"All + content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"SomeResourcesRemain","message":"Some + resources are remaining: pods. 
has 5 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"ContentHasNoFinalizers","message":"All content-preserving finalizers finished"}]}}]} ' headers: audit-id: - - c439e98c-9bb2-4cfc-a38e-10888ffe1a5c + - 7c5dfee6-98c5-47a9-b91d-2b9e30378cfc cache-control: - no-cache, private content-type: - application/json date: - - Tue, 07 Jun 2022 18:07:16 GMT + - Tue, 18 Oct 2022 20:00:24 GMT transfer-encoding: - chunked x-kubernetes-pf-flowschema-uid: - - 08843a36-c7a9-489b-a782-1dc805dc9f54 + - 9c9284bf-ac50-497c-86a8-5f4f7d857b28 x-kubernetes-pf-prioritylevel-uid: - - e6f4d88d-c43f-4941-a57f-7f1230896bdc + - f041de6f-3328-46ec-b36d-f51c7cd89b61 status: code: 200 message: OK @@ -3898,34 +5089,34 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://akkeshar-dns-08147f89.hcp.eastus2euap.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: https://tempaks-dns-0f8c9536.hcp.southcentralus.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc response: body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"42263074"},"items":[{"metadata":{"name":"azure-arc","uid":"0c6cbc64-097d-4713-b2c7-3a9819c8e77d","resourceVersion":"42263013","creationTimestamp":"2022-06-07T18:05:06Z","deletionTimestamp":"2022-06-07T18:06:57Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-06-07T18:05:06Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-06-07T18:07:04Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ResourcesDiscovered","message":"All - resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ParsedGroupVersions","message":"All - legacy kube types successfully 
parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ContentDeleted","message":"All - content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"SomeResourcesRemain","message":"Some - resources are remaining: pods. has 8 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ContentHasNoFinalizers","message":"All + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"2978185"},"items":[{"metadata":{"name":"azure-arc","uid":"064efd70-3b51-499a-807a-356532eae430","resourceVersion":"2978164","creationTimestamp":"2022-10-18T19:58:17Z","deletionTimestamp":"2022-10-18T20:00:12Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:58:17Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T20:00:18Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"ResourcesDiscovered","message":"All + resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"ParsedGroupVersions","message":"All + legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"ContentDeleted","message":"All + content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"SomeResourcesRemain","message":"Some + resources are remaining: pods. 
has 5 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"ContentHasNoFinalizers","message":"All content-preserving finalizers finished"}]}}]} ' headers: audit-id: - - 54f09ce8-aebf-4874-aae4-38a15b7f9209 + - 34b7f658-ed21-4dce-ab91-7734c5da4864 cache-control: - no-cache, private content-type: - application/json date: - - Tue, 07 Jun 2022 18:07:21 GMT + - Tue, 18 Oct 2022 20:00:30 GMT transfer-encoding: - chunked x-kubernetes-pf-flowschema-uid: - - 08843a36-c7a9-489b-a782-1dc805dc9f54 + - 9c9284bf-ac50-497c-86a8-5f4f7d857b28 x-kubernetes-pf-prioritylevel-uid: - - e6f4d88d-c43f-4941-a57f-7f1230896bdc + - f041de6f-3328-46ec-b36d-f51c7cd89b61 status: code: 200 message: OK @@ -3937,34 +5128,34 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://akkeshar-dns-08147f89.hcp.eastus2euap.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: https://tempaks-dns-0f8c9536.hcp.southcentralus.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc response: body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"42263094"},"items":[{"metadata":{"name":"azure-arc","uid":"0c6cbc64-097d-4713-b2c7-3a9819c8e77d","resourceVersion":"42263094","creationTimestamp":"2022-06-07T18:05:06Z","deletionTimestamp":"2022-06-07T18:06:57Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-06-07T18:05:06Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-06-07T18:07:04Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ResourcesDiscovered","message":"All - resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ParsedGroupVersions","message":"All - legacy kube types successfully 
parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ContentDeleted","message":"All - content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"SomeResourcesRemain","message":"Some - resources are remaining: pods. has 5 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ContentHasNoFinalizers","message":"All + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"2978203"},"items":[{"metadata":{"name":"azure-arc","uid":"064efd70-3b51-499a-807a-356532eae430","resourceVersion":"2978164","creationTimestamp":"2022-10-18T19:58:17Z","deletionTimestamp":"2022-10-18T20:00:12Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:58:17Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T20:00:18Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"ResourcesDiscovered","message":"All + resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"ParsedGroupVersions","message":"All + legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"ContentDeleted","message":"All + content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"SomeResourcesRemain","message":"Some + resources are remaining: pods. 
has 5 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"ContentHasNoFinalizers","message":"All content-preserving finalizers finished"}]}}]} ' headers: audit-id: - - a07e4258-e16c-4077-8696-f026b75238e7 + - 309a6b3a-4d6d-43d2-bf54-7d29ea9fc71f cache-control: - no-cache, private content-type: - application/json date: - - Tue, 07 Jun 2022 18:07:26 GMT + - Tue, 18 Oct 2022 20:00:35 GMT transfer-encoding: - chunked x-kubernetes-pf-flowschema-uid: - - 08843a36-c7a9-489b-a782-1dc805dc9f54 + - 9c9284bf-ac50-497c-86a8-5f4f7d857b28 x-kubernetes-pf-prioritylevel-uid: - - e6f4d88d-c43f-4941-a57f-7f1230896bdc + - f041de6f-3328-46ec-b36d-f51c7cd89b61 status: code: 200 message: OK @@ -3976,35 +5167,34 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://akkeshar-dns-08147f89.hcp.eastus2euap.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: https://tempaks-dns-0f8c9536.hcp.southcentralus.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc response: body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"42263118"},"items":[{"metadata":{"name":"azure-arc","uid":"0c6cbc64-097d-4713-b2c7-3a9819c8e77d","resourceVersion":"42263118","creationTimestamp":"2022-06-07T18:05:06Z","deletionTimestamp":"2022-06-07T18:06:57Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-06-07T18:05:06Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-06-07T18:07:04Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ResourcesDiscovered","message":"All - resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ParsedGroupVersions","message":"All - legacy kube types successfully 
parsed"},{"type":"NamespaceDeletionContentFailure","status":"True","lastTransitionTime":"2022-06-07T18:07:32Z","reason":"ContentDeletionFailed","message":"Failed - to delete all resource types, 1 remaining: unexpected items still remain in - namespace: azure-arc for gvr: /v1, Resource=pods"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"SomeResourcesRemain","message":"Some - resources are remaining: pods. has 4 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-06-07T18:07:04Z","reason":"ContentHasNoFinalizers","message":"All + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"2978226"},"items":[{"metadata":{"name":"azure-arc","uid":"064efd70-3b51-499a-807a-356532eae430","resourceVersion":"2978164","creationTimestamp":"2022-10-18T19:58:17Z","deletionTimestamp":"2022-10-18T20:00:12Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:58:17Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T20:00:18Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"ResourcesDiscovered","message":"All + resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"ParsedGroupVersions","message":"All + legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"ContentDeleted","message":"All + content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"SomeResourcesRemain","message":"Some + resources are remaining: pods. 
has 5 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-10-18T20:00:18Z","reason":"ContentHasNoFinalizers","message":"All content-preserving finalizers finished"}]}}]} ' headers: audit-id: - - 81fdd11f-d15b-45ea-a284-e8d94b1d537e + - 6345a4bd-8229-451e-9fcc-f79bba772e45 cache-control: - no-cache, private content-type: - application/json date: - - Tue, 07 Jun 2022 18:07:32 GMT + - Tue, 18 Oct 2022 20:00:40 GMT transfer-encoding: - chunked x-kubernetes-pf-flowschema-uid: - - 08843a36-c7a9-489b-a782-1dc805dc9f54 + - 9c9284bf-ac50-497c-86a8-5f4f7d857b28 x-kubernetes-pf-prioritylevel-uid: - - e6f4d88d-c43f-4941-a57f-7f1230896bdc + - f041de6f-3328-46ec-b36d-f51c7cd89b61 status: code: 200 message: OK @@ -4016,29 +5206,29 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://akkeshar-dns-08147f89.hcp.eastus2euap.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: https://tempaks-dns-0f8c9536.hcp.southcentralus.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc response: body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"42263146"},"items":[]} + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"2978264"},"items":[]} ' headers: audit-id: - - 2220869c-20fd-455f-8613-3d09f9cb07da + - b16763d7-820d-4410-8359-397e908bb6ee cache-control: - no-cache, private content-length: - - '96' + - '95' content-type: - application/json date: - - Tue, 07 Jun 2022 18:07:37 GMT + - Tue, 18 Oct 2022 20:00:46 GMT x-kubernetes-pf-flowschema-uid: - - 08843a36-c7a9-489b-a782-1dc805dc9f54 + - 9c9284bf-ac50-497c-86a8-5f4f7d857b28 x-kubernetes-pf-prioritylevel-uid: - - e6f4d88d-c43f-4941-a57f-7f1230896bdc + - f041de6f-3328-46ec-b36d-f51c7cd89b61 status: code: 200 message: OK diff --git a/src/connectedk8s/azext_connectedk8s/tests/latest/recordings/test_forcedelete.yaml b/src/connectedk8s/azext_connectedk8s/tests/latest/recordings/test_forcedelete.yaml index 95da9c9a42f..adca06b664f 100644 --- a/src/connectedk8s/azext_connectedk8s/tests/latest/recordings/test_forcedelete.yaml +++ b/src/connectedk8s/azext_connectedk8s/tests/latest/recordings/test_forcedelete.yaml @@ -1,18 +1,61 @@ interactions: +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - -g -n -s -l -c --generate-ssh-keys + User-Agent: + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/rohanazuregroup?api-version=2021-04-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rohanazuregroup","name":"rohanazuregroup","type":"Microsoft.Resources/resourceGroups","location":"eastus","tags":{"Created":"20220718"},"properties":{"provisioningState":"Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '257' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 18 Oct 2022 19:26:59 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK - request: body: 
'{"location": "westeurope", "identity": {"type": "SystemAssigned"}, "properties": {"kubernetesVersion": "", "dnsPrefix": "test-force-rohanazuregroup-1bfbb5", - "agentPoolProfiles": [{"count": 1, "vmSize": "Standard_B2s", "osDiskSizeGB": - 0, "osType": "Linux", "enableAutoScaling": false, "type": "VirtualMachineScaleSets", - "mode": "System", "orchestratorVersion": "", "upgradeSettings": {}, "enableNodePublicIP": - false, "scaleSetPriority": "Regular", "scaleSetEvictionPolicy": "Delete", "spotMaxPrice": - -1.0, "nodeTaints": [], "enableEncryptionAtHost": false, "enableUltraSSD": false, - "enableFIPS": false, "name": "nodepool1"}], "linuxProfile": {"adminUsername": - "azureuser", "ssh": {"publicKeys": [{"keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCvp8/0L+JGBNPdl8292HKmvItwPYykyQQx9YDYu2b8YSoveRvXiHoxu4jGq+UykW0mhXVjoKh54DD7qkh+ryMrAarhtLSjTuF6CVk9X8zrcxjQm0mr1xDtjNpd7R/NIU+KxDhn7ITKfal+SpXEC4634eOzPc4YqsKULVUCrdXk9rA/0CpE4KWO2YWwYV1MmWD2uLEpiUzLwgefuHcEH8S6hOUC5veiPc7AuD4lX0efogvvObE+5tqhvNCYvO2NG7x51rBlMgmcLYehNTeCFQLpUdFm014yfk9l8JQv2hQ/cFbCRzA/zYEPSAOOb1VSMYxTVk51wt1mrcb+wMzpsBQv"}]}}, - "addonProfiles": {}, "enableRBAC": true, "networkProfile": {"networkPlugin": - "kubenet", "podCidr": "10.244.0.0/16", "serviceCidr": "10.0.0.0/16", "dnsServiceIP": - "10.0.0.10", "dockerBridgeCidr": "172.17.0.1/16", "outboundType": "loadBalancer", - "loadBalancerSku": "standard"}, "disableLocalAccounts": false}}' + "agentPoolProfiles": [{"count": 1, "vmSize": "Standard_B4ms", "osType": "Linux", + "type": "VirtualMachineScaleSets", "mode": "System", "enableNodePublicIP": false, + "scaleSetPriority": "Regular", "scaleSetEvictionPolicy": "Delete", "spotMaxPrice": + -1.0, "enableEncryptionAtHost": false, "enableUltraSSD": false, "enableFIPS": + false, "name": "nodepool1"}], "linuxProfile": {"adminUsername": "azureuser", + "ssh": {"publicKeys": [{"keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDknmXRBGa/GuPCkpyydwCNedhfwINfrO674LWcBih2UjjJc5yULl9cD9LsYMWOzHVqM7H7RFxaONyq46h9vgxB/1XAeJUGc2jS8GS+vsS83bXX6vVrwa8wVeD380SJcF87oH3xf7/v2hlKv3drXi7xPE2JBjTHIOJJ6OxX+bAFXBqd1dPvnX1X7kEyX6vvjvuQrp7rFDbLq/eRpmng7kykodASQkUFZlt5+gH/U/z/a/DRoTocgzNqGl9RmesNtslQJs17Vn/JIJMM55qcRCEKoJ3Fq/Osnx3tHNA3G/vTs/+sVgh0tZmM6oIMRfTKzJskSZkMZOd8KtK/7ROCZO72izRmzwTFwFvRe/I7iHQ4PrjeKAqKDvgHJ/0LlaHmIYysZI21OTo6HcoX4HmA4RsIybNAM5SWeMMGiGe94/LYPk9sgB3o8aMv/nI/hr6vA28c2nso7itOuNcH1GZalAnbCObNv7QqVZ23FPlCjV9GXWCDCnQeCoIispJCrf68N5s= + fareast\\akkeshar@AkashLaptop\n"}]}}, "addonProfiles": {}, "enableRBAC": true, + "enablePodSecurityPolicy": false, "networkProfile": {"networkPlugin": "kubenet", + "podCidr": "10.244.0.0/16", "serviceCidr": "10.0.0.0/16", "dnsServiceIP": "10.0.0.10", + "dockerBridgeCidr": "172.17.0.1/16", "outboundType": "loadBalancer", "loadBalancerSku": + "standard"}, "disableLocalAccounts": false}}' headers: Accept: - application/json @@ -23,66 +66,60 @@ interactions: Connection: - keep-alive Content-Length: - - '1402' + - '1526' Content-Type: - application/json ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rohanazuregroup/providers/Microsoft.ContainerService/managedClusters/test-force-delete000001?api-version=2022-04-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rohanazuregroup/providers/Microsoft.ContainerService/managedClusters/test-force-delete000001?api-version=2021-08-01 response: body: string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/rohanazuregroup/providers/Microsoft.ContainerService/managedClusters/test-force-delete000001\",\n \ \"location\": \"westeurope\",\n \"name\": \"test-force-delete000001\",\n \ \"type\": \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \"provisioningState\": \"Creating\",\n \"powerState\": {\n \"code\": - \"Running\"\n },\n \"kubernetesVersion\": \"1.23.8\",\n \"currentKubernetesVersion\": - \"1.23.8\",\n \"dnsPrefix\": \"test-force-rohanazuregroup-1bfbb5\",\n \"fqdn\": - \"test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io\",\n - \ \"azurePortalFQDN\": \"test-force-rohanazuregroup-1bfbb5-6069f968.portal.hcp.westeurope.azmk8s.io\",\n + \"Running\"\n },\n \"kubernetesVersion\": \"1.23.12\",\n \"dnsPrefix\": + \"test-force-rohanazuregroup-1bfbb5\",\n \"fqdn\": \"test-force-rohanazuregroup-1bfbb5-4f0f44fc.hcp.westeurope.azmk8s.io\",\n + \ \"azurePortalFQDN\": \"test-force-rohanazuregroup-1bfbb5-4f0f44fc.portal.hcp.westeurope.azmk8s.io\",\n \ \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \"count\": - 1,\n \"vmSize\": \"Standard_B2s\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": + 1,\n \"vmSize\": \"Standard_B4ms\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": \"Managed\",\n \"kubeletDiskType\": \"OS\",\n \"maxPods\": 110,\n - \ \"type\": \"VirtualMachineScaleSets\",\n \"enableAutoScaling\": false,\n - \ \"provisioningState\": \"Creating\",\n \"powerState\": {\n \"code\": - \"Running\"\n },\n \"orchestratorVersion\": \"1.23.8\",\n \"currentOrchestratorVersion\": - \"1.23.8\",\n \"enableNodePublicIP\": false,\n \"mode\": \"System\",\n + \ \"type\": \"VirtualMachineScaleSets\",\n \"provisioningState\": \"Creating\",\n + \ \"powerState\": {\n \"code\": \"Running\"\n },\n \"orchestratorVersion\": + \"1.23.12\",\n \"enableNodePublicIP\": false,\n \"mode\": \"System\",\n \ \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\": - \"AKSUbuntu-1804gen2containerd-2022.08.23\",\n \"upgradeSettings\": {},\n - \ \"enableFIPS\": false\n }\n ],\n \"linuxProfile\": {\n \"adminUsername\": - \"azureuser\",\n \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCvp8/0L+JGBNPdl8292HKmvItwPYykyQQx9YDYu2b8YSoveRvXiHoxu4jGq+UykW0mhXVjoKh54DD7qkh+ryMrAarhtLSjTuF6CVk9X8zrcxjQm0mr1xDtjNpd7R/NIU+KxDhn7ITKfal+SpXEC4634eOzPc4YqsKULVUCrdXk9rA/0CpE4KWO2YWwYV1MmWD2uLEpiUzLwgefuHcEH8S6hOUC5veiPc7AuD4lX0efogvvObE+5tqhvNCYvO2NG7x51rBlMgmcLYehNTeCFQLpUdFm014yfk9l8JQv2hQ/cFbCRzA/zYEPSAOOb1VSMYxTVk51wt1mrcb+wMzpsBQv\"\n - \ }\n ]\n }\n },\n \"servicePrincipalProfile\": {\n \"clientId\": - \"msi\"\n },\n \"nodeResourceGroup\": \"MC_rohanazuregroup_test-force-delete000001_westeurope\",\n - \ \"enableRBAC\": true,\n \"networkProfile\": {\n \"networkPlugin\": - \"kubenet\",\n \"loadBalancerSku\": \"standard\",\n \"loadBalancerProfile\": - {\n \"managedOutboundIPs\": {\n \"count\": 1\n }\n },\n \"podCidr\": - \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\",\n \"dnsServiceIP\": - \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\",\n \"outboundType\": - \"loadBalancer\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\n ],\n \"serviceCidrs\": - [\n 
\"10.0.0.0/16\"\n ],\n \"ipFamilies\": [\n \"IPv4\"\n ]\n - \ },\n \"maxAgentPools\": 100,\n \"disableLocalAccounts\": false,\n \"securityProfile\": - {},\n \"storageProfile\": {\n \"diskCSIDriver\": {\n \"enabled\": - true\n },\n \"fileCSIDriver\": {\n \"enabled\": true\n },\n \"snapshotController\": - {\n \"enabled\": true\n }\n }\n },\n \"identity\": {\n \"type\": - \"SystemAssigned\",\n \"principalId\": \"4a5e0669-a457-4181-a2f4-471fb0a6a38a\",\n + \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"enableFIPS\": false\n + \ }\n ],\n \"linuxProfile\": {\n \"adminUsername\": \"azureuser\",\n + \ \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": \"ssh-rsa + AAAAB3NzaC1yc2EAAAADAQABAAABgQDknmXRBGa/GuPCkpyydwCNedhfwINfrO674LWcBih2UjjJc5yULl9cD9LsYMWOzHVqM7H7RFxaONyq46h9vgxB/1XAeJUGc2jS8GS+vsS83bXX6vVrwa8wVeD380SJcF87oH3xf7/v2hlKv3drXi7xPE2JBjTHIOJJ6OxX+bAFXBqd1dPvnX1X7kEyX6vvjvuQrp7rFDbLq/eRpmng7kykodASQkUFZlt5+gH/U/z/a/DRoTocgzNqGl9RmesNtslQJs17Vn/JIJMM55qcRCEKoJ3Fq/Osnx3tHNA3G/vTs/+sVgh0tZmM6oIMRfTKzJskSZkMZOd8KtK/7ROCZO72izRmzwTFwFvRe/I7iHQ4PrjeKAqKDvgHJ/0LlaHmIYysZI21OTo6HcoX4HmA4RsIybNAM5SWeMMGiGe94/LYPk9sgB3o8aMv/nI/hr6vA28c2nso7itOuNcH1GZalAnbCObNv7QqVZ23FPlCjV9GXWCDCnQeCoIispJCrf68N5s= + fareast\\\\akkeshar@AkashLaptop\\n\"\n }\n ]\n }\n },\n \"servicePrincipalProfile\": + {\n \"clientId\": \"msi\"\n },\n \"nodeResourceGroup\": \"MC_rohanazuregroup_test-force-delete000001_westeurope\",\n + \ \"enableRBAC\": true,\n \"enablePodSecurityPolicy\": false,\n \"networkProfile\": + {\n \"networkPlugin\": \"kubenet\",\n \"loadBalancerSku\": \"standard\",\n + \ \"loadBalancerProfile\": {\n \"managedOutboundIPs\": {\n \"count\": + 1\n }\n },\n \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": + \"10.0.0.0/16\",\n \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": + \"172.17.0.1/16\",\n \"outboundType\": \"loadBalancer\"\n },\n \"maxAgentPools\": + 100,\n \"disableLocalAccounts\": false,\n \"securityProfile\": {}\n },\n + \ \"identity\": {\n \"type\": \"SystemAssigned\",\n \"principalId\": \"d6ba6cde-5447-4197-8b72-dc2ec99e93dc\",\n \ \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\": {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" headers: azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/efb1a504-c79d-473a-a02d-be8ebdd68b39?api-version=2017-08-31 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/2f0666d4-9e2d-48b8-b6c5-7959bb27cc4e?api-version=2017-08-31 cache-control: - no-cache content-length: - - '3145' + - '2926' content-type: - application/json date: - - Mon, 12 Sep 2022 09:28:08 GMT + - Tue, 18 Oct 2022 19:27:13 GMT expires: - '-1' pragma: @@ -112,14 +149,14 @@ interactions: ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/efb1a504-c79d-473a-a02d-be8ebdd68b39?api-version=2017-08-31 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/2f0666d4-9e2d-48b8-b6c5-7959bb27cc4e?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"04a5b1ef-9dc7-3a47-a02d-be8ebdd68b39\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:28:06.9757864Z\"\n }" + string: "{\n \"name\": \"d466062f-2d9e-b848-b6c5-7959bb27cc4e\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:27:12.7997972Z\"\n }" headers: cache-control: - no-cache @@ -128,7 +165,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:28:38 GMT + - Tue, 18 Oct 2022 19:27:43 GMT expires: - '-1' pragma: @@ -160,14 +197,14 @@ interactions: ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/efb1a504-c79d-473a-a02d-be8ebdd68b39?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/2f0666d4-9e2d-48b8-b6c5-7959bb27cc4e?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"04a5b1ef-9dc7-3a47-a02d-be8ebdd68b39\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:28:06.9757864Z\"\n }" + string: "{\n \"name\": \"d466062f-2d9e-b848-b6c5-7959bb27cc4e\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:27:12.7997972Z\"\n }" headers: cache-control: - no-cache @@ -176,7 +213,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:29:09 GMT + - Tue, 18 Oct 2022 19:28:14 GMT expires: - '-1' pragma: @@ -208,14 +245,14 @@ interactions: ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/efb1a504-c79d-473a-a02d-be8ebdd68b39?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/2f0666d4-9e2d-48b8-b6c5-7959bb27cc4e?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"04a5b1ef-9dc7-3a47-a02d-be8ebdd68b39\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:28:06.9757864Z\"\n }" + string: "{\n \"name\": \"d466062f-2d9e-b848-b6c5-7959bb27cc4e\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:27:12.7997972Z\"\n }" headers: cache-control: - no-cache @@ -224,7 +261,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:29:39 GMT + - Tue, 18 Oct 2022 19:28:44 GMT expires: - '-1' pragma: @@ -256,14 +293,14 @@ interactions: ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - 
AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/efb1a504-c79d-473a-a02d-be8ebdd68b39?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/2f0666d4-9e2d-48b8-b6c5-7959bb27cc4e?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"04a5b1ef-9dc7-3a47-a02d-be8ebdd68b39\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:28:06.9757864Z\"\n }" + string: "{\n \"name\": \"d466062f-2d9e-b848-b6c5-7959bb27cc4e\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:27:12.7997972Z\"\n }" headers: cache-control: - no-cache @@ -272,7 +309,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:30:10 GMT + - Tue, 18 Oct 2022 19:29:15 GMT expires: - '-1' pragma: @@ -304,14 +341,14 @@ interactions: ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/efb1a504-c79d-473a-a02d-be8ebdd68b39?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/2f0666d4-9e2d-48b8-b6c5-7959bb27cc4e?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"04a5b1ef-9dc7-3a47-a02d-be8ebdd68b39\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:28:06.9757864Z\"\n }" + string: "{\n \"name\": \"d466062f-2d9e-b848-b6c5-7959bb27cc4e\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:27:12.7997972Z\"\n }" headers: cache-control: - no-cache @@ -320,7 +357,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:30:40 GMT + - Tue, 18 Oct 2022 19:29:45 GMT expires: - '-1' pragma: @@ -352,14 +389,14 @@ interactions: ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/efb1a504-c79d-473a-a02d-be8ebdd68b39?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/2f0666d4-9e2d-48b8-b6c5-7959bb27cc4e?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"04a5b1ef-9dc7-3a47-a02d-be8ebdd68b39\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:28:06.9757864Z\"\n }" + string: "{\n \"name\": \"d466062f-2d9e-b848-b6c5-7959bb27cc4e\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:27:12.7997972Z\"\n }" headers: cache-control: - no-cache @@ -368,7 +405,7 @@ interactions: 
content-type: - application/json date: - - Mon, 12 Sep 2022 09:31:10 GMT + - Tue, 18 Oct 2022 19:30:16 GMT expires: - '-1' pragma: @@ -400,14 +437,14 @@ interactions: ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/efb1a504-c79d-473a-a02d-be8ebdd68b39?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/2f0666d4-9e2d-48b8-b6c5-7959bb27cc4e?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"04a5b1ef-9dc7-3a47-a02d-be8ebdd68b39\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:28:06.9757864Z\"\n }" + string: "{\n \"name\": \"d466062f-2d9e-b848-b6c5-7959bb27cc4e\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:27:12.7997972Z\"\n }" headers: cache-control: - no-cache @@ -416,7 +453,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:31:41 GMT + - Tue, 18 Oct 2022 19:30:46 GMT expires: - '-1' pragma: @@ -448,14 +485,14 @@ interactions: ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/efb1a504-c79d-473a-a02d-be8ebdd68b39?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/2f0666d4-9e2d-48b8-b6c5-7959bb27cc4e?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"04a5b1ef-9dc7-3a47-a02d-be8ebdd68b39\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:28:06.9757864Z\"\n }" + string: "{\n \"name\": \"d466062f-2d9e-b848-b6c5-7959bb27cc4e\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:27:12.7997972Z\"\n }" headers: cache-control: - no-cache @@ -464,7 +501,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:32:11 GMT + - Tue, 18 Oct 2022 19:31:16 GMT expires: - '-1' pragma: @@ -496,15 +533,63 @@ interactions: ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/efb1a504-c79d-473a-a02d-be8ebdd68b39?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/2f0666d4-9e2d-48b8-b6c5-7959bb27cc4e?api-version=2017-08-31 response: body: - string: "{\n \"name\": 
\"04a5b1ef-9dc7-3a47-a02d-be8ebdd68b39\",\n \"status\": - \"Succeeded\",\n \"startTime\": \"2022-09-12T09:28:06.9757864Z\",\n \"endTime\": - \"2022-09-12T09:32:20.2959997Z\"\n }" + string: "{\n \"name\": \"d466062f-2d9e-b848-b6c5-7959bb27cc4e\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:27:12.7997972Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Tue, 18 Oct 2022 19:31:47 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - -g -n -s -l -c --generate-ssh-keys + User-Agent: + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/2f0666d4-9e2d-48b8-b6c5-7959bb27cc4e?api-version=2017-08-31 + response: + body: + string: "{\n \"name\": \"d466062f-2d9e-b848-b6c5-7959bb27cc4e\",\n \"status\": + \"Succeeded\",\n \"startTime\": \"2022-10-18T19:27:12.7997972Z\",\n \"endTime\": + \"2022-10-18T19:32:07.4274835Z\"\n }" headers: cache-control: - no-cache @@ -513,7 +598,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:32:41 GMT + - Tue, 18 Oct 2022 19:32:16 GMT expires: - '-1' pragma: @@ -545,64 +630,57 @@ interactions: ParameterSetName: - -g -n -s -l -c --generate-ssh-keys User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rohanazuregroup/providers/Microsoft.ContainerService/managedClusters/test-force-delete000001?api-version=2022-04-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rohanazuregroup/providers/Microsoft.ContainerService/managedClusters/test-force-delete000001?api-version=2021-08-01 response: body: string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/rohanazuregroup/providers/Microsoft.ContainerService/managedClusters/test-force-delete000001\",\n \ \"location\": \"westeurope\",\n \"name\": \"test-force-delete000001\",\n \ \"type\": \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \"provisioningState\": \"Succeeded\",\n \"powerState\": {\n \"code\": - \"Running\"\n },\n \"kubernetesVersion\": \"1.23.8\",\n \"currentKubernetesVersion\": - \"1.23.8\",\n \"dnsPrefix\": \"test-force-rohanazuregroup-1bfbb5\",\n \"fqdn\": - \"test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io\",\n - \ \"azurePortalFQDN\": \"test-force-rohanazuregroup-1bfbb5-6069f968.portal.hcp.westeurope.azmk8s.io\",\n + \"Running\"\n },\n \"kubernetesVersion\": \"1.23.12\",\n \"dnsPrefix\": + \"test-force-rohanazuregroup-1bfbb5\",\n \"fqdn\": \"test-force-rohanazuregroup-1bfbb5-4f0f44fc.hcp.westeurope.azmk8s.io\",\n + \ \"azurePortalFQDN\": 
\"test-force-rohanazuregroup-1bfbb5-4f0f44fc.portal.hcp.westeurope.azmk8s.io\",\n \ \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \"count\": - 1,\n \"vmSize\": \"Standard_B2s\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": + 1,\n \"vmSize\": \"Standard_B4ms\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": \"Managed\",\n \"kubeletDiskType\": \"OS\",\n \"maxPods\": 110,\n - \ \"type\": \"VirtualMachineScaleSets\",\n \"enableAutoScaling\": false,\n - \ \"provisioningState\": \"Succeeded\",\n \"powerState\": {\n \"code\": - \"Running\"\n },\n \"orchestratorVersion\": \"1.23.8\",\n \"currentOrchestratorVersion\": - \"1.23.8\",\n \"enableNodePublicIP\": false,\n \"mode\": \"System\",\n + \ \"type\": \"VirtualMachineScaleSets\",\n \"provisioningState\": \"Succeeded\",\n + \ \"powerState\": {\n \"code\": \"Running\"\n },\n \"orchestratorVersion\": + \"1.23.12\",\n \"enableNodePublicIP\": false,\n \"mode\": \"System\",\n \ \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\": - \"AKSUbuntu-1804gen2containerd-2022.08.23\",\n \"upgradeSettings\": {},\n - \ \"enableFIPS\": false\n }\n ],\n \"linuxProfile\": {\n \"adminUsername\": - \"azureuser\",\n \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCvp8/0L+JGBNPdl8292HKmvItwPYykyQQx9YDYu2b8YSoveRvXiHoxu4jGq+UykW0mhXVjoKh54DD7qkh+ryMrAarhtLSjTuF6CVk9X8zrcxjQm0mr1xDtjNpd7R/NIU+KxDhn7ITKfal+SpXEC4634eOzPc4YqsKULVUCrdXk9rA/0CpE4KWO2YWwYV1MmWD2uLEpiUzLwgefuHcEH8S6hOUC5veiPc7AuD4lX0efogvvObE+5tqhvNCYvO2NG7x51rBlMgmcLYehNTeCFQLpUdFm014yfk9l8JQv2hQ/cFbCRzA/zYEPSAOOb1VSMYxTVk51wt1mrcb+wMzpsBQv\"\n - \ }\n ]\n }\n },\n \"servicePrincipalProfile\": {\n \"clientId\": - \"msi\"\n },\n \"nodeResourceGroup\": \"MC_rohanazuregroup_test-force-delete000001_westeurope\",\n - \ \"enableRBAC\": true,\n \"networkProfile\": {\n \"networkPlugin\": - \"kubenet\",\n \"loadBalancerSku\": \"Standard\",\n \"loadBalancerProfile\": - {\n \"managedOutboundIPs\": {\n \"count\": 1\n },\n \"effectiveOutboundIPs\": - [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_rohanazuregroup_test-force-delete000001_westeurope/providers/Microsoft.Network/publicIPAddresses/b5bcdb5d-bce8-47bc-a942-330276ab3f32\"\n + \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"enableFIPS\": false\n + \ }\n ],\n \"linuxProfile\": {\n \"adminUsername\": \"azureuser\",\n + \ \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": \"ssh-rsa + AAAAB3NzaC1yc2EAAAADAQABAAABgQDknmXRBGa/GuPCkpyydwCNedhfwINfrO674LWcBih2UjjJc5yULl9cD9LsYMWOzHVqM7H7RFxaONyq46h9vgxB/1XAeJUGc2jS8GS+vsS83bXX6vVrwa8wVeD380SJcF87oH3xf7/v2hlKv3drXi7xPE2JBjTHIOJJ6OxX+bAFXBqd1dPvnX1X7kEyX6vvjvuQrp7rFDbLq/eRpmng7kykodASQkUFZlt5+gH/U/z/a/DRoTocgzNqGl9RmesNtslQJs17Vn/JIJMM55qcRCEKoJ3Fq/Osnx3tHNA3G/vTs/+sVgh0tZmM6oIMRfTKzJskSZkMZOd8KtK/7ROCZO72izRmzwTFwFvRe/I7iHQ4PrjeKAqKDvgHJ/0LlaHmIYysZI21OTo6HcoX4HmA4RsIybNAM5SWeMMGiGe94/LYPk9sgB3o8aMv/nI/hr6vA28c2nso7itOuNcH1GZalAnbCObNv7QqVZ23FPlCjV9GXWCDCnQeCoIispJCrf68N5s= + fareast\\\\akkeshar@AkashLaptop\\n\"\n }\n ]\n }\n },\n \"servicePrincipalProfile\": + {\n \"clientId\": \"msi\"\n },\n \"nodeResourceGroup\": \"MC_rohanazuregroup_test-force-delete000001_westeurope\",\n + \ \"enableRBAC\": true,\n \"enablePodSecurityPolicy\": false,\n \"networkProfile\": + {\n \"networkPlugin\": \"kubenet\",\n \"loadBalancerSku\": \"Standard\",\n + \ \"loadBalancerProfile\": {\n \"managedOutboundIPs\": {\n \"count\": + 1\n },\n \"effectiveOutboundIPs\": [\n {\n 
\"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_rohanazuregroup_test-force-delete000001_westeurope/providers/Microsoft.Network/publicIPAddresses/566a7f34-3163-4b48-9b94-806026d4aec4\"\n \ }\n ]\n },\n \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\",\n \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": - \"172.17.0.1/16\",\n \"outboundType\": \"loadBalancer\",\n \"podCidrs\": - [\n \"10.244.0.0/16\"\n ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n - \ ],\n \"ipFamilies\": [\n \"IPv4\"\n ]\n },\n \"maxAgentPools\": + \"172.17.0.1/16\",\n \"outboundType\": \"loadBalancer\"\n },\n \"maxAgentPools\": 100,\n \"identityProfile\": {\n \"kubeletidentity\": {\n \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_rohanazuregroup_test-force-delete000001_westeurope/providers/Microsoft.ManagedIdentity/userAssignedIdentities/test-force-delete000001-agentpool\",\n - \ \"clientId\": \"6fbb5ca9-813b-4fcb-8493-52c0a63a05cb\",\n \"objectId\": - \"02b990c6-5c4b-497b-9e00-18c4fdadf250\"\n }\n },\n \"disableLocalAccounts\": - false,\n \"securityProfile\": {},\n \"storageProfile\": {\n \"diskCSIDriver\": - {\n \"enabled\": true\n },\n \"fileCSIDriver\": {\n \"enabled\": - true\n },\n \"snapshotController\": {\n \"enabled\": true\n }\n - \ }\n },\n \"identity\": {\n \"type\": \"SystemAssigned\",\n \"principalId\": - \"4a5e0669-a457-4181-a2f4-471fb0a6a38a\",\n \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n - \ },\n \"sku\": {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n - }" + \ \"clientId\": \"09e30c05-18bc-4655-b841-ed8680d5e0cf\",\n \"objectId\": + \"ded816ab-034b-42fa-a3be-9444e324d153\"\n }\n },\n \"disableLocalAccounts\": + false,\n \"securityProfile\": {}\n },\n \"identity\": {\n \"type\": + \"SystemAssigned\",\n \"principalId\": \"d6ba6cde-5447-4197-8b72-dc2ec99e93dc\",\n + \ \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\": + {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" headers: cache-control: - no-cache content-length: - - '3831' + - '3615' content-type: - application/json date: - - Mon, 12 Sep 2022 09:32:42 GMT + - Tue, 18 Oct 2022 19:32:17 GMT expires: - '-1' pragma: @@ -636,14 +714,14 @@ interactions: ParameterSetName: - -g -n -f User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/16.2.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rohanazuregroup/providers/Microsoft.ContainerService/managedClusters/test-force-delete000001/listClusterUserCredential?api-version=2022-04-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rohanazuregroup/providers/Microsoft.ContainerService/managedClusters/test-force-delete000001/listClusterUserCredential?api-version=2021-08-01 response: body: string: "{\n \"kubeconfigs\": [\n {\n \"name\": \"clusterUser\",\n \"value\": - \"apiVersion: v1
clusters:
- cluster:
    certificate-authority-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUU2RENDQXRDZ0F3SUJBZ0lRSTBzMW1WQTA4Tkh5ZGR3Qis1VlhtREFOQmdrcWhraUc5dzBCQVFzRkFEQU4KTVFzd0NRWURWUVFERXdKallUQWdGdzB5TWpBNU1USXdPVEU1TURoYUdBOHlNRFV5TURreE1qQTVNamt3T0ZvdwpEVEVMTUFrR0ExVUVBeE1DWTJFd2dnSWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUNEd0F3Z2dJS0FvSUNBUUMvCmdxeG54a0QzVmdDWW55YTZqU3RiMDJ5VnZtK1hYblZ1cnNydW1tc3ZFZURLWjJKbDdHM3BiOXdkT0J1RkRZcEkKOXNxWTRjQ29ZMTVveFVVWTJ5MjljWkZGMDZPRnZhVzRrSndiMitONVdYZGZCeGMrMTU4NzdHZHV6cWtFbDk5eQo4YUVoYXJpdnpxci9nWk1nM2d1bUJQYnhWTjM2TXpvNlFhUHRpWFZ6a3M5L2xNbzF0SXNsYjJuZHkxSXlKelhNCjlnM3pWTEtCTDl3dXFVZHJRakFyNlpaT0d4R2JWaVZDaFFsNDRnektaa2FQd0pmTVBDbUtmd3ZGS3RIRkVKS1cKWnErczF2RFU2b0oxVTc3Y0t6WWZ0TENJSlFDdmZUbkZLcnAvcjhHeXY3TFpwOG5qUUR2ODZxUEpkc0tGWVhCUwpHN3N0b2s4dXV2Vm9tcjBkYmJRZVpnSmVBZVNqZEdTNHNlK0xzN1o3TUM1VWM0U3pXelZvNmtoekV4UnpPRk8wClRYSWp3V3plT2xMcmtHa0NtN0Y3T0VLa3JTcUlsVTJrak5OWEVyUTlmUU1QRnlhY3ZPU3dZTDNkTVlSVldNVWEKUTRkMEdPVTh5UzZyZlhkczRGUEY3T1pqYzZzY1kwZDk0OUtoUTRKc0M3dUwvc1gxTTRnWmgraHZ1VGswb29PRAplYWw4UHRWVVNRR1RYZE5qNGpPWTdmcWdRd2YvUUVvc2NSbDJyL09saXJWenR6NkEyL3BPbG5lcjhPTGxMZDBsCjB6ZE9ZaStBUjU0K25OSk9xT3I0QWRKaVUxVitkQ2doQVBwUkd6T1VhT09OTjVEQ05yOWZZdFdpTlA0b2JsdHIKY09GQlF5V0s2QmVicVRjNGZkcVNadE5WZEs5MWUweEhoSVMxNld1NGd3SURBUUFCbzBJd1FEQU9CZ05WSFE4QgpBZjhFQkFNQ0FxUXdEd1lEVlIwVEFRSC9CQVV3QXdFQi96QWRCZ05WSFE0RUZnUVV5L1dmS01SenU0eU9SMEo2CnBOUFN4aWFkUVRBd0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dJQkFCVHpFbXBVQ2ZWY256QkgrN0tQM0lwMHhIaDcKK1NPenRaRnZqdzdkR1NoYXJnYng4azJvdG92U1ZCeTZFZnRrT0ZCaEg0NjRiSTh2c2Fna3A0OGNucUZxeDZZWQpDVUwvQ2Z0dmtPbi9aVEJwbDBRWWgvUGNMQTVyV2tmT0dqVkFpTzVRTUQ4bXk5ZVdkaU5tamVxdCtoc2JQY01WCi9FWUIrR0JsMVFrTnovR01iTERRaXBtV1VEVVVMaVNLakNwTlV3RGM3d2Vrb2g2VGsybjZ0UHFFQVU1cENtMzgKUjdMbHlwNE1ndWhWT3ZodjJxbWNqMExLb1M3WU9nNXh4bUt6NTJKNENZOEF0eFFuZkU1bDBEcEN5MEZRSkFyagpJdnpHNlZEL1lidTI1QUNOYWh5a01QZ0tLMmIrWkxuTk1INFc5eElXNGxaamdrQzNPYjUzT1FBU1JndVpnQW8yCklKQ2dBREdkNHplcml2MEpMR01vTi8zQ1VIMXMwcUgzL0NZUTE4VThmQkNlSjNWVW5KNE52UHgvWTIxYktuRWEKOEtDZHFlRnJ3cWRSVVMzbUpDMTI3ZHIvd3plRXNmd1pEOTJORmpaMlZiaTFQemVVM0hpYUp3RWtCWkFtR2NnUgp0WVhFS1pFendYeXJKM1VONjhDV1NYWTdDYjFJcGFtRWpGQXNYc2wvZGtRVjl3ejJKNnRtdWxzbGhwU20yL2RHCjN3K1I2dFhUb1c3VGVSbHdqMTcweVNPV0FNY2wxeXRyTFZwTE1jZDc4ajdJdTFOL29QTFVOU0JYV0pENlZyeFoKKzQ2bUdkcUx6MCsrQU54emZ5MGgwOEhVelVrL3VEdkhseFBNNFQ3NE1EVXh5ZUFXRHpMTXRMMDRnaFMrOFc2ZApyL0xnd0R6ZlYrVG1DMHkvCi0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K
    server: https://test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io:443
  name: test-force-deletegq6dqzs
contexts:
- context:
    cluster: test-force-deletegq6dqzs
    user: clusterUser_rohanazuregroup_test-force-deletegq6dqzs
  name: test-force-deletegq6dqzs
current-context: test-force-deletegq6dqzs
kind: Config
preferences: {}
users:
- name: clusterUser_rohanazuregroup_test-force-deletegq6dqzs
  user:
    client-certificate-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUZIVENDQXdXZ0F3SUJBZ0lRZVl0OHV3WkJzNGpITVJ3YTlLSVVuVEFOQmdrcWhraUc5dzBCQVFzRkFEQU4KTVFzd0NRWURWUVFERXdKallUQWVGdzB5TWpBNU1USXdPVEU1TURoYUZ3MHlOREE1TVRJd09USTVNRGhhTURBeApGekFWQmdOVkJBb1REbk41YzNSbGJUcHRZWE4wWlhKek1SVXdFd1lEVlFRREV3eHRZWE4wWlhKamJHbGxiblF3CmdnSWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUNEd0F3Z2dJS0FvSUNBUUNzQ2tnK29uS2l2dG1KRzhjSVNFRGsKWTIxUlh3ZkNGTzRSSTlDcWtWcmkxdkxQMjJrNXMwVEhSVVIvc29RekJMOEF4czZvSlVFVFVLejVyR2dzdEhlVwprdGFVLzcvZER0ei9KNVRJdG1kLzVXOU5zUkRBWFFudmVXanZaRFlteEhkT0RxYlVXY1d2YU1YYW43OGFadjAwCkZjNXNFVUZEL2RzMHN5c1BTYjhiQW8rUEtVWEVmcVZkOEVXMXZSdTdzWkxieUVoZS92WktYb1BzOGpTaGoyTVoKd1RXRFZvOC9DTzhjaGtKMjlwOER3Z05GOXhPamRrWGZPaWVTK0Fjc2tlY2txOUtnYmFSVUJOYWRGNmJyNHpRZQovSXc1cU9xRGF6TEVoSndPaG1pOGVYemR4V1FFM20wY2RMV2xDNStiY0d4NVVlODI3RVNuZkF2L2phSHlINnl6Cjg3aGIraFpDdUttMW0reTQzMlFEalNSTFVoaVhRYzJoT0wrM2paRDRCTUhqQjlwTnZIOS9WWi9OUEg2cWtVYTYKc2lpeFlIb2lUYm1iYnp2ZnJtaTkvZkRtdThiMVl0TnFBbXVvTTFiQXE1NXRoTE0rUFdpMWJvMnJhUklkU3ZVZgpPU2d3UlRJTm4vckZhbHYwUitKQXFBMnVVdnZnSG90aGtYanhBMUtvL3NxSWhtdkxZTjJ2Vm9vck5nVDRsWnZxClBxTEkyd2g3S3ZMczMwMkdvWkNIaXovMTBzcXpOZ0lQNml5cnlFSGUvQngxWDNReWVBc0hmUm9SZjEzWkZRUjMKSnExeFdBRzM4ekhNdzBXcXlWanFMMXhGb1JQLzhhN2xSajdZcjlDYWJhNVkyb2djVC9iRW9pVVlObDA4Qmdnegp6ekZDbS9xOTRYdmQxTnlTV3pDZ3BRSURBUUFCbzFZd1ZEQU9CZ05WSFE4QkFmOEVCQU1DQmFBd0V3WURWUjBsCkJBd3dDZ1lJS3dZQkJRVUhBd0l3REFZRFZSMFRBUUgvQkFJd0FEQWZCZ05WSFNNRUdEQVdnQlRMOVo4b3hITzcKakk1SFFucWswOUxHSnAxQk1EQU5CZ2txaGtpRzl3MEJBUXNGQUFPQ0FnRUFKVk94NStPd1JjOURrWk5kZUpHRgpNNFBpVU1MSmVXanZrcjNIWTB4czBTaFVEVlByT1J5dW5QbDZjT1A2bDlycUI0SENsQUZ6MFRRd2I0Rlo1TW4wCkd5TXA3czNqMGVZeS9kQSsvZzdpN2ZXZ1UxZG1qb3J1d3UzcWs0dEdvZ1M0YXBFeFBzUUJCcy8yaVh3cWVhakMKKzRnN1B3UXZPVmg4em5wUytvZzJMaG9MNjY3MGJFVm9lcDlYNWIvS1JuczUra1ZWQmVjMkx1dy81WmtNN2p1bQpsMysxaENNaklCdGphckRTay9jWGZiN2JOeHdWK0dJNzliMnphNVlCUzRiRFdSUXZKekVxclBkdDQreVVwaGJSClg2bEFqSFQ3eDFRbWIyZlJia2NVNU9rMUtWN3dybXQ4ZVI4MmY0UUJMU0lxd0xXdWlHV1owbmp5ampHMXIxRGQKdUgreHdWeVJKUlJMWk1VRytlbGYwd0hjazUwdXVFUUdlMEllTmc0bGdxQVRWTEVnVEo5SEljMWZQWWprc1AyUAp3blJaVllyWEROLzMrSjIyT3dBR1pSOFpkSGVDRjBtSTcrT1Bpc0pGeiszK0VzY0t0akVCZVdqT1FHdWdxZ1NuCllTbUxUb2dXSXVrUGtMS09Rc1FwV3BkRGNycm1lZ1lzOTNIMWQxOUwvbHhqYmVVMkFSbGNDTUhqTnFnbC90TUYKUmNJaTZxS0lvbFhJMXVlV0ZpZ3RlbXJZWEM4RllrQjkxT2tiR2M0UnJwWjRsQzc1TlU3T2M1L0tmV2Q4ck9TbQpEUUhDVDJIWS83WThMT3dXZlJQRG81ZnVhVnpvM0NOQ09oalVaK3NjR1RiR3poZmdEbzU4MURzSWxBcGxWNEZKCk5wc3Uzb3VnMGM2RGZVV2NKeVBxanIwPQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg==
    client-key-data: LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlKSndJQkFBS0NBZ0VBckFwSVBxSnlvcjdaaVJ2SENFaEE1R050VVY4SHdoVHVFU1BRcXBGYTR0Ynl6OXRwCk9iTkV4MFZFZjdLRU13Uy9BTWJPcUNWQkUxQ3MrYXhvTExSM2xwTFdsUCsvM1E3Yy95ZVV5TFpuZitWdlRiRVEKd0YwSjczbG83MlEySnNSM1RnNm0xRm5GcjJqRjJwKy9HbWI5TkJYT2JCRkJRLzNiTkxNckQwbS9Hd0tQanlsRgp4SDZsWGZCRnRiMGJ1N0dTMjhoSVh2NzJTbDZEN1BJMG9ZOWpHY0UxZzFhUFB3anZISVpDZHZhZkE4SURSZmNUCm8zWkYzem9ua3ZnSExKSG5KS3ZTb0cya1ZBVFduUmVtNitNMEh2eU1PYWpxZzJzeXhJU2NEb1pvdkhsODNjVmsKQk41dEhIUzFwUXVmbTNCc2VWSHZOdXhFcDN3TC80Mmg4aCtzcy9PNFcvb1dRcmlwdFp2c3VOOWtBNDBrUzFJWQpsMEhOb1RpL3Q0MlErQVRCNHdmYVRieC9mMVdmelR4K3FwRkd1cklvc1dCNklrMjVtMjg3MzY1b3ZmM3c1cnZHCjlXTFRhZ0pycUROV3dLdWViWVN6UGoxb3RXNk5xMmtTSFVyMUh6a29NRVV5RFovNnhXcGI5RWZpUUtnTnJsTDcKNEI2TFlaRjQ4UU5TcVA3S2lJWnJ5MkRkcjFhS0t6WUUrSldiNmo2aXlOc0lleXJ5N045TmhxR1FoNHMvOWRMSwpzellDRCtvc3E4aEIzdndjZFY5ME1uZ0xCMzBhRVg5ZDJSVUVkeWF0Y1ZnQnQvTXh6TU5GcXNsWTZpOWNSYUVUCi8vR3U1VVkrMksvUW1tMnVXTnFJSEUvMnhLSWxHRFpkUEFZSU04OHhRcHY2dmVGNzNkVGNrbHN3b0tVQ0F3RUEKQVFLQ0FnQkVLN3JlM3Q0UDhSSWdDMXE5eEd1NlZFK3dDN2oyUmZiajRjNUFtR0pzT29KeXJOMndXSUhyZUY0aApQNnEwMEsraTdpUmJhbyt6eUtjU05DZi9kdTlnUWdVM2UzUWFjMVpNNFBGSzYwUnQ0SXRQUE5lSGNoYUt4eEEyCnMvM2xOTHFWdXFhT3hKZlozdWx2clJjN3JnMnRRdXJOMWJTSDdhS2I2bGNvc3E5QVU2NXhLd2lUU1IvYjdtYWgKRWpOWUY1QTYrTXo4bjRrV2hRYy9yK1g5RXVWUkFrZGpLTHlRWE1qckE2UjRvVmIzVlR5WXo4UC9aTkZoR0JrdwpXb24xTmtkaU41ZTVyRmlPVm5CNFJUUUdpcnhQZndGTm84cHhrdHZHRlVvRlUydFRLOHkzMEF6SEJTQTFRMlhxCjVZVTBRY01wKzdhUGkrWFVmQW9CRksxVVZRcklpVWYzWTNDeTh0akhsMG15SGhrY1BUYW9teXRMZ3FRNzdXVHMKT1VZWTNyWXMrVW5zQStwVmNBZ0tRVy9MWmZGQ050WE5IR2gxNmZSOU5LcFRMb2tjbkhHT0U0QWgvOGdXT0RGUgp5RmVFa3grdnZTd0ticjRES2dlVUp2clQxc0FmMEtUaG1zMUVzNUZZdXFSNmw1YjQxUXlSWDRtazB4elFMM0k2CkdkdmVEQTA5cGRtQlU0bXFBU3h3cGVlcjUrQVlSVjlUdGRWcEhGY3VsRVZWZlhlemdkZzdrUnlPbU81b09vSXEKS2FaU2psaWhyTHVSby9TeXRZKzJzV2JwRTBVMnZGTVpWTWU0RHQwUE4rQ2dTZzR3YVJPb0JEMmZwRERJWWFnbApUYnhlV1RkcjNTeDVRWktKY0d6QTlzLzk3TDZ6WTlQN1Z6bEN3TlVPVHNBTTdNNlRvUUtDQVFFQXg3YlNzaGdlClVsN1c3R3BWZ2QrUnlOekJudVdlQk9SV3NVeUxhdlU3QkNKdjVMSnRGbE1LcmR3NXVibm5aODAvY3ZrTUk5YWUKS2tCMEVvYlFFNDMyNmVaUTREdWxkdWIzb2NVWFA3VDdGVDVhZXRjd0xhRHVJR3ZTUkxrZGhUSFlrR052MVhlaApNb1kwMlU3ZlFzZ1JadmpKcFM5SS9RVGE3MzVzTUFlYldGRDVxNWZDSmtNRzFyWEhPZWpyZ3ZVeER1eWFIeGxjCkszSWlEcXIzSzdQUDBJRjdUVFFtZzhWaTJ4S3dRWEFQdWpxclNlc1JpSDZkR2x4QnNnNmxEazR4ZVlyY1NidWQKa3pYMjJaK0FaK29xNVdSS1BSa0R6RWR1d3NONVFOTnM4Z3UvbU9wdWxNTjNYYlZ4SUxuWHFTcnJhT29ZNkgzVgpTdGtvWXRYUTJZcjNlUUtDQVFFQTNJYk8vZ214R3lEcmp6RTVWOU84OHlVYklxblg2TEtkRXJHK2l2c1FGallTCll5SjJhWHdrdmxmK2VXbTNZbWhISUl3dW9tTHNpaUVwRmZPcmgxOWE3bnNvVmZBYXVDT2huNjFDamhvMDZwR3cKeVVERzdkcmRSeTlYaGxOYU1ZS1ZOWmQ5MlhJWFUydU5PWkhxUS9odjBzU0hCeG9QWVU5S0E0N1Y1Vnp2dmdDNQpCcW8xREFHQ29IbURKR3BZTE1QTmdqNzYzRGV2aVlMTFVESjZkSVVGb1pITWF2MS9vN25qdTFYQkZ6NDBndDBNCkt2NXhwYWovTjhSRUlrN2tsdVFTYTNRWFFCWFpwR1Y5NlduOXdjbjN1NUlQWnpLWU5WNnAvME14WmZaZGNoMWIKdnpqWkJGR0x0N0Zob1Vha1BwMDVqRjJDVk9BckJScnNzT2dOUEgwcmpRS0NBUUFPR21iK2xKMXZpNnI1dnZpUwoxMHJnT0Zhajllc1d4VmtwNUJIa1Y3VDNNY2RkQ3d5UkN3UXlDdDg0c2x3WkJIRjZtUU8yZEMxdkptWUwrT0V3CndGR3phc1JEVVhMNTFvMmVLdURkYXRaZ1JhcjJDY1FwTkxnY0tnRUxRVmRJVktES2NYc3dRK1pxUHZmMGNKRnIKa2QxSXZFRGhIVm0ySkdXaGkvZnYzRm1uL3UvaW9qRGZxdFFNellGb0lYNmdITmgrQ1U5STZ1MkxVOS83MFM3cgpNeU4xd2U4SElLdW5tb1RFbTZrK3RiOG1rNFlORGtDS3R3WTVHbEs1VkRkcnJ6SG5OMXNmQ2RTeTFKaTkvWGwvCjNSVnlaWWFva2QrbXlWU1ZCR0RIcHlhR1lUWUNnbXVVcmNqOElSZ0FDNStqSHBqSks5N2ViQ1JueUxZRlpMdisKNE1WaEFvSUJBRVRXQzd5K2Q3c1pIbzUyOWdoVlByREREMkpWaytDK0NFdXlrZUMrRjdwbkxId25ZTUR5a0dHZQpCQUFRSUw4bC83WElMREppSUtYSlRoTGRHZGpLeWc2UkFQazZWb3pPQ29CV1MrVWhodzBGWnlLYkJyMXVTUTRVCldBVEU5ZTFvYm56cENjNmJUMj
ZwcnM2ZW5ucVREUXpFYmNXb3U4NUxPclk3bXNiYXFMY3RjSlFGQTBQNUlMWkkKMThmQkxqN2k2NzVSUEdzTE1zOEF0Y2dWeWFXZmRDK0gzRUExV3VkWGVoK2EyQWdTVWIvYmtoWEZrT0grQzVSegp2alRiWFgxQU9nT3luQ05UN3Jqak9WZG56MTd6VlZJR2VLcGszNkFoS1gxVmt4RFp3emVBdElmUERlYk1NWW1vClkrbzhxNzRqdzdmdDVHWHNCMkd6a3RoeHh0ZnYxbTBDZ2dFQUFsMXZEMUpuY093WW9QcFg2Tmw3RXBmL1o3NFEKOGFEOUVJWlpmRkZ1NGQ5Z1hyMUExZWY4ZndZWlVXVS8zbHYwZzJqb3ZCQUpEajBFOCtpS3N5T3BNQVVJRGg1SAp1UzZmbndTR29oVDY2MVNXM0swK1BxKzRIK2RmZmpKeTB5bExZWFYrbHcxVk5wU2RraUtJVDRVaWU3NSt3QzZaCnhGK1Vtc0pyQmVRWjBURTg5cmhYNERTNGxpQlJkeTZYNU54Zm1PaTVjZThlMEZZd0NzNGVVdVR5NGZ1V2JjMEQKMDJCVWVFQWdHa2RLZ3F5VmRtUko2ZmFnZ1BPK3d2K3dxYXBUQ1ZEQk5kUjhETDlKRHdOSjJZMEw1SXRBOUxyegpMOXlWVHMyN1k1dDNPc0E3Wm1xQjMzZHdCMmN6bjJsMFdNTHZKRVZObnAzRTlDWXBQNlp5V1g3eVdRPT0KLS0tLS1FTkQgUlNBIFBSSVZBVEUgS0VZLS0tLS0K
    token: 2695cba6074739b32120d4cefc6a5e4b7bcc7851e5a010b258aa0c52e465a0f50e51da5caaaf4257bd8d385044dbda45bf1dadab87b9116a4ff3245b73a36631
\"\n + \"apiVersion: v1
clusters:
- cluster:
    certificate-authority-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUU2RENDQXRDZ0F3SUJBZ0lRSXdkYlU2UitDcDZ4SkNXRTNuUm1GekFOQmdrcWhraUc5dzBCQVFzRkFEQU4KTVFzd0NRWURWUVFERXdKallUQWdGdzB5TWpFd01UZ3hPVEU0TVRGYUdBOHlNRFV5TVRBeE9ERTVNamd4TVZvdwpEVEVMTUFrR0ExVUVBeE1DWTJFd2dnSWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUNEd0F3Z2dJS0FvSUNBUUN1ClJaRGhBbm0rdkVuOFZvNHZ5OWVUWTNTQWd5bWt6dVh1eUd5bUNiSkJ3b1NQa3llcnc4cU9BbnUyYmgzWDB1U0kKMTV4VTdyZDZJSlRIZmpzZ09EbWdtNU5GbklpWU9oVHlGdTNFanZNVEtCUHB1anNWRmg0Q1lXYlI4YXRaeENJOQpUd0tjeHZ5ZWdkenBTTThLSWxEY205UU15amdqL3QwQ3pUR1EwZ25MOHRhODdSNnRqNEp2QUNjTmNIUnFDMHpFCmE5b1R1YlNjTW5kZityU3NDN2x0V3VITk5jRUIwL1ZOdmFXRmYzV2U3bExPU2pNTTVaaGx2K1pOTlg5R3l0ZVkKbHJIcHRoVmtKelNwakZtSUNMRlB6SVdFamJ1UG9PK1ppYy83cVBnaDJBS3VqWHlHWG1UeXZQOTRPcUtoWm8xNQprT0FqQzdnQ3k2TXpPS2NzR2ltcVB2V1hHaXV5MTVaRktIZlJYcXhFMTJ4Y1hyUUp0OGZzdklTcldZalJ4R0xWCjl1Q0EvdXQrTS8va0FBOVNVL3BINTFDUVBMYyt1V2VoblN6UHFIWFBLN3pYSWc4c1VtNGFCSlpEc0dlSVdRVEgKR3o0aStTbFllV2M4SXcxQWt2Wm8xWHNOamNBTGIvUDY1cHNHYm5JVlNUb285ejdmUnJ5YzhRMnBOQmU5RlVNQQpWaVhTT3J6UENtWmMxOXZjMjZUZzJtRHc5QVgzaFNNckpCNTJOSmpOcGxDRmYzdVpjNmt2dVpjQmNhcEV6Q3FkCkVNT0RIcTRZMDMrN3ZwT1ViOThrMDR2Y0NjWE5ONGxRNFpTb2R4dHFUNjlrbmpWQnNhWUJVN01MazY4dXRTYWYKazAvWGRaQUkyUE1wS210a1l2bmZOcTVKcE1uK3RnUWkycUszVGhEWmp3SURBUUFCbzBJd1FEQU9CZ05WSFE4QgpBZjhFQkFNQ0FxUXdEd1lEVlIwVEFRSC9CQVV3QXdFQi96QWRCZ05WSFE0RUZnUVVrK1c2YkwxM1MvRE5IcjMwCkFscGJtOFZXUkJRd0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dJQkFJU3NXK2dGZDYvUW5vRTJqVFpFUUNXOGhBODIKbjFIZndHZmlTdGpxRnEwMDgwa0NIS0NNdnhvN0dzdkQ2NXJhdG42cy9KWTNiM2w1L3hsN2lkMWtZazNXS0FUNgpUNUV2RG52b3ZsQUxtL0VaMVVRVkk1ZmdvU2RhR0J6ckZLYlZacE1naE9IQXRiTWJvd2NCYzU2c2ZDNUtOY2JOCk84NERUQzI0SGpYSnUwRk5taFM4K1pMQUExcERhR0JpU3hoYm8wSWMrU1dnVExDY1dPQXNGZ3cvdEIwclNjYjMKS2VYUENUSFIrZ3ZmR05KNnlwL1oyY3RSVjJwdU9uaEl0d2NUNWJTdHh6STlZWVprVGpVcU4vOC9waDNhNzRrVwprdE05Q0RMZ1FwM0x6T0hXR3oyVFZhVjV1aDFHd1c1eDl5cCsrclVlVVY3YzVWdTNubSthNUVFOHRGbGpQYStCCjV5aXJubnlxZW9rZW1ZMHhXVTEvQUdUc3pSY1EzbTYzYS9nL3VsK01qTEVtaWVGQ1ZmQksvekQ0UFpEamdCWVEKUFlHQU44Z0U4ajVCanFwZkI5NEhnTnAvVHhtZ0Z2VGhZb21QMS9UclNIY0FvMnVqdC9yc2lKMmNGZmR6ZXh6TQpZeUs2c3lUZmpxaWYvdFoyTWFTUEFFaUVpWWtnZlhuTDBWWkxRS1hsdzFCZWsvMnhUaEVYUjhvejVVVzZRY0FICjcwVGVTNUFZYlFZR05ubjlkcmdtWEZXWlZQSUVkOEl0QW94L24xeldUZjBrcHF3blZBT1lOdWlmT1RoTnBVTEoKdzFzSGRpa0pQcnpIazc5d2MvOXREZlhxOHVraGNsTHJTNFhXejFwNTIxZ1MzRVM3ZHloTGlZbGlxWm8wY3FpRApTS3ZoWTJUbHJVai8rTWxzCi0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K
    server: https://test-force-rohanazuregroup-1bfbb5-4f0f44fc.hcp.westeurope.azmk8s.io:443
  name: test-force-delete2glatwi
contexts:
- context:
    cluster: test-force-delete2glatwi
    user: clusterUser_rohanazuregroup_test-force-delete2glatwi
  name: test-force-delete2glatwi
current-context: test-force-delete2glatwi
kind: Config
preferences: {}
users:
- name: clusterUser_rohanazuregroup_test-force-delete2glatwi
  user:
    client-certificate-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUZIakNDQXdhZ0F3SUJBZ0lSQVBQYk5UUy9HeXloUUcwQWZzTFRHTGd3RFFZSktvWklodmNOQVFFTEJRQXcKRFRFTE1Ba0dBMVVFQXhNQ1kyRXdIaGNOTWpJeE1ERTRNVGt4T0RFeFdoY05NalF4TURFNE1Ua3lPREV4V2pBdwpNUmN3RlFZRFZRUUtFdzV6ZVhOMFpXMDZiV0Z6ZEdWeWN6RVZNQk1HQTFVRUF4TU1iV0Z6ZEdWeVkyeHBaVzUwCk1JSUNJakFOQmdrcWhraUc5dzBCQVFFRkFBT0NBZzhBTUlJQ0NnS0NBZ0VBeUtNSGVuTnkwaVBXY081THpCczUKK2RMbDFRS1dqUHF6b2NTMG14SmFkMS9rcU9RNHp2T05zMFRpT0hPd3VORTI1QlNMQXZHMVB2M3RPb2hZYWIyOAp6VmozelBJd2s0UVkrcnhZYXI2ZFl0ZENIUWQzeXQ5a0U0ME5BaFkrR1UxczdycXo2d2NwRjNQUFdMNlpiMzZ5CjJQUkpVYkFTVDFrYTRXeDlXVlFiTnlMVmZkZWxWbStRR3dwT1UyR3JNT1ZmTGVzbFp1ekZzRE5lOWJTanhVTXkKcFdYUUZnSjBjT0greE1CSFlIbUs4bjM2NDhzZXNUS01McXRBa1dZUVN6S3lFMHNrUnVCQVNRNjY5aEZIaGlrQQpiVk9XTGl6Ykhkb3N5YmYrcldJUzBleEhWdDNZS1Q4MDQzUXltc0JEck8zemRQSmpJaC83REVtYVBUbkFLWDEzCmRqNmJkSStnbHpzUktVWmtNc1cxaDBRTzkydjRsRldyUi90VGRNZCt1TWFqUjBxaW9JcmZnM3ZoMmJKZzhSWTEKekQvZXVnOXMrMDc2WHNjdnUrcTlvTmRUb0tlRFlCQ3ZBMldzZ1JXYUUySDNFNzB5Z2VDd0QrbG9tOS9uN093SwpIT2xxSXVyV3EveStQODROY2xoRGQwTXkzQThBb1EwYW9kVVZ3OTZSYVN4TTZwQzhzRkMyZDZHaVh3YWl0ejk4CjdJR3ZDV1JYREVkM3FkdFRWSjlCV3E2dlJ2bitISHFoUHgxc2N6czZhL21sd3g0QXdoSGxGSW5ubjFlWXo1VmMKWjREbEo4S3JzcG5EYWk5TW40Tzc4SGgybXp3M3oxUUw1NTRFc0JMS080UzVOTjZuLzVSUlBDUWthSUk2T3ZqZApLbVVyOGMybTBzdjZtQnBEaWlHaU9EOENBd0VBQWFOV01GUXdEZ1lEVlIwUEFRSC9CQVFEQWdXZ01CTUdBMVVkCkpRUU1NQW9HQ0NzR0FRVUZCd01DTUF3R0ExVWRFd0VCL3dRQ01BQXdId1lEVlIwakJCZ3dGb0FVaytXNmJMMTMKUy9ETkhyMzBBbHBibThWV1JCUXdEUVlKS29aSWh2Y05BUUVMQlFBRGdnSUJBQis4MjlVNWJ3RE8xdUxvbDlpdwpmN0ZjV0hBWXNBSExybTU4V1hyeGZjdWxMVkZDTXRsUkFjNXFKdlRodEI5Mi9BdDdhL2dYTkk2akJKdWtCRXZLCkpZcThjWTVrYlRoVVVQQWtxWUNraStDOGQyQjNHS29KSWNNK2h0VHZRZVd3NTJmaDdIVXJGcDBicHZvY1VCaEoKSkN6bDkvbm12Z3gzMjNWU3JTL1l3bmNCZ1VOdW5hOHpnckQxUlVlQUhEbEhFQVp2aitkQkFYY2pOWkpJc241KwplU1RiSXRGbWhqV3ExUFlJTE13dHZGUmRuUkNtdlI1WDFXcjMzMjNhRW5ZczhGeHJwelZyYjBzdTRnOFpOR2JGCnhkU1lORVNRdHQyVUJFQS91UW1DbXlpLzNaMkNWdEtHV0M3Zk1TRGllbms1U2prY2Qwb3F2N2YrcXFHeW80QzkKUzYzQjE2ajlSMDgwcUNndSs5S2ZZbDNSR01ZOHFCMkYvRUdPTWYrTkJzM3JoSkgzSEErZnFoT21TakpYbEdQTwo5ZXNMc25DVjhmbVVWVUlnbGxSejNXamRPRFFuTDRwclNaMnlNbnpGNENKUTVmSStkeGRvWVhINzdJNWZPeGtmCmpkbmhMM3ZMeFo4OS9RWndmenZPWnZSYkhGR1dBbUQvcGVhQWMzR08vckR4aTZ0c2NlS1VrQ1ZURFJBUjIxUVoKODlZaHJwR1U5WU4wS29LQkhWYm1LMG4xc0RoVU5heHlXbFJxMlR0S1dtbjFKWUJpeTJKL1NveUdLdjJuWkFkMgpBaWpXQ3ByMStrM2o1ZnZvSnBVdjBHbC9CbUpmakc4SHh3eHhsSVd2UTNpYzlzU3owOExjOUdPVnJHem9ZV1lCCmVKUjZackhTWTJEVVNDbjRJbG9CenNSMgotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg==
    client-key-data: LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlKS1FJQkFBS0NBZ0VBeUtNSGVuTnkwaVBXY081THpCczUrZExsMVFLV2pQcXpvY1MwbXhKYWQxL2txT1E0Cnp2T05zMFRpT0hPd3VORTI1QlNMQXZHMVB2M3RPb2hZYWIyOHpWajN6UEl3azRRWStyeFlhcjZkWXRkQ0hRZDMKeXQ5a0U0ME5BaFkrR1UxczdycXo2d2NwRjNQUFdMNlpiMzZ5MlBSSlViQVNUMWthNFd4OVdWUWJOeUxWZmRlbApWbStRR3dwT1UyR3JNT1ZmTGVzbFp1ekZzRE5lOWJTanhVTXlwV1hRRmdKMGNPSCt4TUJIWUhtSzhuMzY0OHNlCnNUS01McXRBa1dZUVN6S3lFMHNrUnVCQVNRNjY5aEZIaGlrQWJWT1dMaXpiSGRvc3liZityV0lTMGV4SFZ0M1kKS1Q4MDQzUXltc0JEck8zemRQSmpJaC83REVtYVBUbkFLWDEzZGo2YmRJK2dsenNSS1Vaa01zVzFoMFFPOTJ2NApsRldyUi90VGRNZCt1TWFqUjBxaW9JcmZnM3ZoMmJKZzhSWTF6RC9ldWc5cyswNzZYc2N2dStxOW9OZFRvS2VECllCQ3ZBMldzZ1JXYUUySDNFNzB5Z2VDd0QrbG9tOS9uN093S0hPbHFJdXJXcS95K1A4NE5jbGhEZDBNeTNBOEEKb1EwYW9kVVZ3OTZSYVN4TTZwQzhzRkMyZDZHaVh3YWl0ejk4N0lHdkNXUlhERWQzcWR0VFZKOUJXcTZ2UnZuKwpISHFoUHgxc2N6czZhL21sd3g0QXdoSGxGSW5ubjFlWXo1VmNaNERsSjhLcnNwbkRhaTlNbjRPNzhIaDJtenczCnoxUUw1NTRFc0JMS080UzVOTjZuLzVSUlBDUWthSUk2T3ZqZEttVXI4YzJtMHN2Nm1CcERpaUdpT0Q4Q0F3RUEKQVFLQ0FnRUFvZ2F1V2wvelFPcTZHWGNIZFN3amxMR1E4NFZraVROSWI0SklDOWMxZ2FQS0orNHVSa3AvaTE4OApoVFJBYWZCaXNLdzZXc1ZSaGk1UUx1Mis0SjNlWThMT1V3N2UxYkplN2RXK1hXM1Q4ZWMrTiswNCszZU4vUS95CmlsWG1Eazd0Z3BqNkJQcENYL01oWmx4OTBvRng4eXpNTFJEUXFJMmYzSjNkV0k5SWJ5MU5WWlFacklUcHpqeHoKWm5qQVkvdEg4dkFydzBQQW5mdk9Xd1JuSVQyVUgrZnB0R3lGRmtnYmswNW9GZXZ4bnNUSUlMOSs2TUNXWG5Yago3Y2VMcnY2Z3VsMzdUTlY2ZE1WVmpMUUZtNzF1MzlzWnMrNTdnVThZemtvdGZGMXdMMkR0ZDZRdUZaazZuOWpOCnFSVk5VZERUeEFTZXRBYzVtUUd6Y25BNTFBZkR1U1NDbkRmREp1ZkN3OUsyUSs3L1RQYmU1ei94VkhaNzczK1AKbW5JZDQyQnZVbFZhaWNBL0pBY1krTXdyR0M3U20rQnhEUW8xc3YwUVhockxEeHo4d3E3cEtudUxVY3Q2ZTg2WgpOdVYxeWhGa0NveDFNWFRLWmtnSS9kUGswNk9SYjhIQ0lzcW8wWUo3dWdMaDZTNmpIMERzMjNGaEJIZVdHZjV4CitkMVhwa3Y0TUIrajRPUi9TTmo5ZU1qWkZEZU4xVnlYS2hNaW1DblFHT0ZlcTlpaWp3WmpYREFyUjBBSFdleVkKLzc0YVNqWXBFd3JHWDJxdE5EUW1kSStoa2prUS9CaU9aUitJaTZmeUZrbEE2OEMzTllhOUxmTUcvaUxDT2NnNApvSHdkc2pvbzN1Z0F1UVh2b1ZuOVdDZ05YbzhCTXU1WEI0RDRHSU5CWDc4cjdIYXhsemtDZ2dFQkFQNFk5ZU1wCkd6U2ZnNEZhUno2QXZad05meVZlNzI4QlE0Ky81K2x5TWRhY2c4L0VnZm8xbmx0YTJuZFhQYVpnOGFEaGoxbXUKS2R0KzlSMlFUeVNKZUNrWlVFMi8zeDl2SnNjN3dhUHhVZzZvLzlCcVQ4VXBMcHNpZFFtMEY3a2RXdzV6QzBmNQpGT0xpMStvWHpyWUxia2svb1JWTnNjWFptMlRmdVovdU4raitJTlhDNXI3MlpDNUdsNHhyTGZGRldWQmV0ZkUrCkIySGtBQlFNVzlBb09iU3dwWWxmdVRWSHYwbzExZmdOY0x3S2RDVU1TM21QVDFqNmJSODlKY2o3RFpGb2JuSnAKR1hZdWtlREhmUXJESFVJNUtJNDRKQnphMkJEZ0psakFFSzYwY3hWKzV0SVBjSkFMWlRSWlVUcWd4SWw1TEJrMwo3UlczMnlsYXR3RXJYTXNDZ2dFQkFNb2ptUzhGZFRYUWlhS2J1ckp3UCtNVi9pYXV3RUZlLy9MS1VlaitWN0FhCkJWeW12Vnh4alV1SWpOaytSeHlqN0l2K2gwOFE2OGU4bE03aTc1SjkyNGdQT24rU0psaUJGN2VtbzhEbjVTK3gKdUpSNU9ndVF1NWdac0xzL1VyR2dMM2lqZFJWMkxiRGZlS3dKK1VrRFNhbHE5YTNNZ1FVamdINU9iVVdQeGdOaQpUMURJa3FTYjZhTlNMSWY2blJURE56Z2ZMSFR5YzdkUnFkSFg3cEtFWnI4RkNYSzEvRktreWJoOU5ZdldaR1FICjJ2bnZVaERIYlRmaEJ4QlB4a2tRUUxqbUhwc3V2SVk3NVVjcXNSZ2dCTE1uSzR1V25Nd3ZYcmpFRlltZXdTY0IKS3Vaa2ZGWHNkRHp2a3NZSEp0VkJyT2JsY09NdllBUEFJaC9UVVBIYXQ5MENnZ0VBWjB1YVdmaFIrUGlIcTBRYQpCa3lyeUE2c2kzS21mMGZTUVZQUVlWUnM4a082U2ZJbXJLS3pkNHZkbC9vOU02L2hTbmVub050dVpLNmQwcURoCnE0YSs5R1k0QVFOcEh3dytoV3ZEY25RZDJadldNdEdsRUErSHhNVWpNd2lsUkRtczFKanNFeHdzNDNjNE13L3UKbG83YkRLZnpRZ2ZOcHhLcFd6NmY1V1lnRUhXT3Y1ZDEybGVycml4QVczTU93REJpK0lzb0I2UWh1Q2ZKNkZ5Rgp6bjhyeGtxeFlNRXJOMyt0UWV2VXlmd2N3alMxdG9IakJNMi9nZnA3OWVFVHhUcFo3NnkyaVlnME01a0c4SXZvClIwRytXVGVIUnhtSGhMdHFicUhRdE8rUFdKWSsycFEySFZydGRZRk1hUldPTWZrOUxHMjRBYUI4bndHWTlKek8KSEltV05RS0NBUUVBdEtkS0FROGtxTHErTnpoc2k0cnNYZnhLenJHQkd5dFhIZHFaaDl5LzY1L2x5Tjh2LzAxdwpsTi90MlQzaWVnTTZwZ2NOVUtsenYyZVhxcXFBcC9DWllha1NnKzdQSEd3TEVWUDVwNjdZVWw3SDh0SEJBWk1SClJTYzRuckJDTElOWHliNTBKb0
VlNXFOaUNvZERabkJzeGQvVXY4WTBUSzVrNlAwWWhZR3YyMWhpUUhWeE1YSXkKUllkc1N2NjhvbEN4cWhPYVBNNXVncGlXWi9kT3ZWMWRJanpUSzlUZk05RVlneXhMemJVR3RhdWJsYUhBTzBTdwpKSytWQzR2d1QxNGFLZVZuMXhMVzVEVGxmVXYzUi9OZG4rdFM3SWJualVpTXBCNkNqU0wzeHU4eE13VDlaaUZUCjVSM0xrL1hTUytkTktuSkdDSlJQZ0NxWGxNL2IyTHE3ZFFLQ0FRQUdxZ3B3cjRXUzJ5QVpXOEFIRmpDbnJPZ2cKWFJsMkpzYzYwSFlmUnhCNlh0Q0VlR0llRVpGd3dSbWJ6WVpMM0QyaHc4QTcxSVZYdFZ0c0Zqa2lXMXQ2Z0prUgpGUFlFYzlHNWNudVRzUU5Ga1N5cFZXczMvdXRFMm9ZanhWT1hhc2VoUUh0djNIbER2Yk55MGJWclU0R2NxVmZJCjBsTHk0dUxYbjF0RTZzeGw0b2JTdkEyZ2pyU0FiSnVVVnltQjFONkk2ak5FdzdqRlAwUWdYMmxWZXA3dmZmdisKMUJtbVRhZVZiYzJiRVNZcS9HU0ducXhidGxoWWMxR1dkcVFOWWpkQSs0b1ZvZ01tbWpoQWFpYmdLeUNDb0k4SgpOQ1JjeGNxa2VUb3RRTmlrbzRYcXRJRkR3Vml1Nmw5NE92L1NSM2dialloWE15RjFzd21BWC9rbjh1QlgKLS0tLS1FTkQgUlNBIFBSSVZBVEUgS0VZLS0tLS0K
    token: c525cd4cc2cfafbb06b94021142d9affd0c94b888ec4002d718a2d6657900d1a0b077f2ab02dec59ae4c3ee0bdf40dd05d6e96a10ea9b030997969ae57a35d2e
\"\n \ }\n ]\n }" headers: cache-control: @@ -653,7 +731,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:32:44 GMT + - Tue, 18 Oct 2022 19:32:19 GMT expires: - '-1' pragma: @@ -687,7 +765,8 @@ interactions: ParameterSetName: - -g -n -l --tags --kube-config User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.10 (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Kubernetes?api-version=2021-04-01 response: @@ -696,21 +775,21 @@ interactions: Europe","East US","West Central US","South Central US","Southeast Asia","UK South","East US 2","West US 2","Australia East","North Europe","France Central","Central US","West US","North Central US","Korea Central","Japan East","West US 3","East - Asia","Canada Central","East US 2 EUAP","Canada East"],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"SystemAssignedResourceIdentity, - SupportsTags, SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/operationStatuses","locations":["East + Asia","Canada Central","East US 2 EUAP","Canada East"],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"SystemAssignedResourceIdentity, + SupportsTags, SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/operationStatuses","locations":["East US 2 EUAP","West Europe","East US","West Central US","South Central US","Southeast Asia","UK South","East US 2","West US 2","Australia East","North Europe","France Central","Central US","West US","North Central US","Korea Central","Japan - East","East Asia","West US 3","Canada East","Canada Central"],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"registeredSubscriptions","locations":[],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"Operations","locations":[],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview","2019-11-01-preview","2019-09-01-privatepreview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + East","East Asia","West US 3","Canada East","Canada 
Central"],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"registeredSubscriptions","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"Operations","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview","2019-11-01-preview","2019-09-01-privatepreview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache content-length: - - '2311' + - '2416' content-type: - application/json; charset=utf-8 date: - - Mon, 12 Sep 2022 09:32:46 GMT + - Tue, 18 Oct 2022 19:32:22 GMT expires: - '-1' pragma: @@ -738,7 +817,8 @@ interactions: ParameterSetName: - -g -n -l --tags --kube-config User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.10 (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.KubernetesConfiguration?api-version=2021-04-01 response: @@ -750,14 +830,14 @@ interactions: East","Canada East","Canada Central","Norway East","Germany West Central","Sweden Central","Switzerland North","Australia Southeast","Central India","South India","Japan West","Uk West","France South","Korea South","South Africa North","East - US 2 EUAP","Central US EUAP"],"apiVersions":["2022-03-01","2021-03-01","2020-10-01-preview","2020-07-01-preview","2019-11-01-preview"],"defaultApiVersion":"2022-03-01","capabilities":"SupportsExtension"},{"resourceType":"extensions","locations":["East + US 2 EUAP","Central US EUAP"],"apiVersions":["2022-07-01","2022-03-01","2021-03-01","2020-10-01-preview","2020-07-01-preview","2019-11-01-preview"],"defaultApiVersion":"2022-03-01","capabilities":"SupportsExtension"},{"resourceType":"extensions","locations":["East US","West Europe","West Central US","West US 2","West US 3","South Central US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France Central","Central US","North Central US","West US","Korea Central","East Asia","Japan East","Canada East","Canada Central","Norway East","Germany West Central","Sweden Central","Switzerland North","Australia Southeast","Central India","South India","Japan West","Uk West","France South","Korea South","South Africa North","East - US 2 EUAP","Central US EUAP"],"apiVersions":["2022-04-02-preview","2022-03-01","2021-09-01","2021-05-01-preview","2020-07-01-preview"],"defaultApiVersion":"2022-03-01","capabilities":"SystemAssignedResourceIdentity, + US 2 EUAP","Central US EUAP"],"apiVersions":["2022-07-01","2022-04-02-preview","2022-03-01","2021-09-01","2021-05-01-preview","2020-07-01-preview"],"defaultApiVersion":"2022-07-01","capabilities":"SystemAssignedResourceIdentity, SupportsExtension"},{"resourceType":"fluxConfigurations","locations":["East US","West Europe","West Central US","West US 2","West US 3","South Central US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France @@ -765,7 +845,7 @@ interactions: East","Canada East","Canada Central","Norway East","Germany West Central","Sweden Central","Switzerland North","Australia Southeast","Central India","South 
India","Japan West","Uk West","Korea South","France South","South Africa North","East - US 2 EUAP","Central US EUAP"],"apiVersions":["2022-03-01","2022-01-01-preview","2021-11-01-preview","2021-06-01-preview"],"defaultApiVersion":"2022-03-01","capabilities":"SupportsExtension"},{"resourceType":"operations","locations":[],"apiVersions":["2022-03-01","2022-01-01-preview","2021-12-01-preview","2021-11-01-preview","2021-09-01","2021-06-01-preview","2021-05-01-preview","2021-03-01","2020-10-01-preview","2020-07-01-preview","2019-11-01-preview"],"capabilities":"None"},{"resourceType":"privateLinkScopes","locations":["East + US 2 EUAP","Central US EUAP"],"apiVersions":["2022-07-01","2022-03-01","2022-01-01-preview","2021-11-01-preview","2021-06-01-preview"],"defaultApiVersion":"2022-07-01","capabilities":"SupportsExtension"},{"resourceType":"operations","locations":[],"apiVersions":["2022-03-01","2022-01-01-preview","2021-12-01-preview","2021-11-01-preview","2021-09-01","2021-06-01-preview","2021-05-01-preview","2021-03-01","2020-10-01-preview","2020-07-01-preview","2019-11-01-preview"],"capabilities":"None"},{"resourceType":"privateLinkScopes","locations":["East US","West Europe","West Central US","West US 2","West US 3","South Central US","East US 2","North Europe","UK South","Southeast Asia","Australia East","France Central","Central US","North Central US","West US","Korea Central","East Asia","Japan @@ -799,11 +879,11 @@ interactions: cache-control: - no-cache content-length: - - '6035' + - '6074' content-type: - application/json; charset=utf-8 date: - - Mon, 12 Sep 2022 09:32:46 GMT + - Tue, 18 Oct 2022 19:32:22 GMT expires: - '-1' pragma: @@ -825,29 +905,30 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io/apis/networking.k8s.io/v1/ + uri: https://test-force-rohanazuregroup-1bfbb5-4f0f44fc.hcp.westeurope.azmk8s.io/version/ response: body: - string: '{"kind":"APIResourceList","apiVersion":"v1","groupVersion":"networking.k8s.io/v1","resources":[{"name":"ingressclasses","singularName":"","namespaced":false,"kind":"IngressClass","verbs":["create","delete","deletecollection","get","list","patch","update","watch"],"storageVersionHash":"l/iqIbDgFyQ="},{"name":"ingresses","singularName":"","namespaced":true,"kind":"Ingress","verbs":["create","delete","deletecollection","get","list","patch","update","watch"],"shortNames":["ing"],"storageVersionHash":"39NQlfNR+bo="},{"name":"ingresses/status","singularName":"","namespaced":true,"kind":"Ingress","verbs":["get","patch","update"]},{"name":"networkpolicies","singularName":"","namespaced":true,"kind":"NetworkPolicy","verbs":["create","delete","deletecollection","get","list","patch","update","watch"],"shortNames":["netpol"],"storageVersionHash":"YpfwF18m1G8="}]} - - ' + string: "{\n \"major\": \"1\",\n \"minor\": \"23\",\n \"gitVersion\": \"v1.23.12\",\n + \ \"gitCommit\": \"c6939792865ef0f70f92006081690d77411c8ed5\",\n \"gitTreeState\": + \"clean\",\n \"buildDate\": \"2022-09-21T21:46:35Z\",\n \"goVersion\": \"go1.17.13\",\n + \ \"compiler\": \"gc\",\n \"platform\": \"linux/amd64\"\n}" headers: audit-id: - - b5a443a9-f9f7-4431-b930-8a4c71b50e48 + - 9eabe44c-a8cf-4e95-9854-f677a60933aa cache-control: - no-cache, private content-length: - - '864' + - '265' content-type: - application/json date: - - Mon, 12 Sep 2022 09:32:48 GMT + - Tue, 18 Oct 2022 19:32:23 GMT 
x-kubernetes-pf-flowschema-uid: - - ffa83d76-a56b-4d0b-bb6f-50d7f09474fb + - 92b2ef8a-fcc8-4f73-a2ca-c1502b7b9250 x-kubernetes-pf-prioritylevel-uid: - - e1255224-bf15-43fd-949c-62117a6d8625 + - a1989c01-0e94-4e3b-a68c-18561b43277e status: code: 200 message: OK @@ -859,35 +940,35 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io/api/v1/nodes + uri: https://test-force-rohanazuregroup-1bfbb5-4f0f44fc.hcp.westeurope.azmk8s.io/api/v1/nodes response: body: - string: '{"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"1354"},"items":[{"metadata":{"name":"aks-nodepool1-15603940-vmss000000","uid":"d2d73b08-0cdf-4a1f-b0ba-3e87278b7c67","resourceVersion":"1073","creationTimestamp":"2022-09-12T09:31:15Z","labels":{"agentpool":"nodepool1","beta.kubernetes.io/arch":"amd64","beta.kubernetes.io/instance-type":"Standard_B2s","beta.kubernetes.io/os":"linux","failure-domain.beta.kubernetes.io/region":"westeurope","failure-domain.beta.kubernetes.io/zone":"0","kubernetes.azure.com/agentpool":"nodepool1","kubernetes.azure.com/cluster":"MC_rohanazuregroup_test-force-delete000001_westeurope","kubernetes.azure.com/kubelet-identity-client-id":"6fbb5ca9-813b-4fcb-8493-52c0a63a05cb","kubernetes.azure.com/mode":"system","kubernetes.azure.com/node-image-version":"AKSUbuntu-1804gen2containerd-2022.08.23","kubernetes.azure.com/os-sku":"Ubuntu","kubernetes.azure.com/role":"agent","kubernetes.azure.com/storageprofile":"managed","kubernetes.azure.com/storagetier":"Premium_LRS","kubernetes.io/arch":"amd64","kubernetes.io/hostname":"aks-nodepool1-15603940-vmss000000","kubernetes.io/os":"linux","kubernetes.io/role":"agent","node-role.kubernetes.io/agent":"","node.kubernetes.io/instance-type":"Standard_B2s","storageprofile":"managed","storagetier":"Premium_LRS","topology.disk.csi.azure.com/zone":"","topology.kubernetes.io/region":"westeurope","topology.kubernetes.io/zone":"0"},"annotations":{"csi.volume.kubernetes.io/nodeid":"{\"disk.csi.azure.com\":\"aks-nodepool1-15603940-vmss000000\",\"file.csi.azure.com\":\"aks-nodepool1-15603940-vmss000000\"}","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"cloud-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:31:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:spec":{"f:podCIDR":{},"f:podCIDRs":{".":{},"v:\"10.244.0.0/24\"":{}}}}},{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:31:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:volumes.kubernetes.io/controller-managed-attach-detach":{}},"f:labels":{".":{},"f:agentpool":{},"f:beta.kubernetes.io/arch":{},"f:beta.kubernetes.io/os":{},"f:kubernetes.azure.com/agentpool":{},"f:kubernetes.azure.com/cluster":{},"f:kubernetes.azure.com/kubelet-identity-client-id":{},"f:kubernetes.azure.com/mode":{},"f:kubernetes.azure.com/node-image-version":{},"f:kubernetes.azure.com/os-sku":{},"f:kubernetes.azure.com/role":{},"f:kubernetes.azure.com/storageprofile":{},"f:kubernetes.azure.com/storagetier":{},"f:kubernetes.io/arch":{},"f:kubernetes.io/hostname":{},"f:kubernetes.io/os":{},"f:storageprofile":{},"f:storagetier":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:31:26Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations
":{"f:node.alpha.kubernetes.io/ttl":{}}}}},{"manager":"cloud-node-manager","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:31:28Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{"f:beta.kubernetes.io/instance-type":{},"f:failure-domain.beta.kubernetes.io/region":{},"f:failure-domain.beta.kubernetes.io/zone":{},"f:node.kubernetes.io/instance-type":{},"f:topology.kubernetes.io/region":{},"f:topology.kubernetes.io/zone":{}}},"f:spec":{"f:providerID":{}}}},{"manager":"cloud-node-manager","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:31:28Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{"k:{\"type\":\"NetworkUnavailable\"}":{".":{},"f:type":{}}}}},"subresource":"status"},{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:31:29Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:csi.volume.kubernetes.io/nodeid":{}},"f:labels":{"f:topology.disk.csi.azure.com/zone":{}}},"f:status":{"f:allocatable":{"f:ephemeral-storage":{}},"f:capacity":{"f:ephemeral-storage":{}},"f:conditions":{"k:{\"type\":\"DiskPressure\"}":{"f:lastHeartbeatTime":{}},"k:{\"type\":\"MemoryPressure\"}":{"f:lastHeartbeatTime":{}},"k:{\"type\":\"PIDPressure\"}":{"f:lastHeartbeatTime":{}},"k:{\"type\":\"Ready\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{}}}}},"subresource":"status"},{"manager":"cloud-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:32:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{"k:{\"type\":\"NetworkUnavailable\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{}}}}},"subresource":"status"},{"manager":"kubectl-label","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:32:14Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{"f:kubernetes.io/role":{},"f:node-role.kubernetes.io/agent":{}}}}}]},"spec":{"podCIDR":"10.244.0.0/24","podCIDRs":["10.244.0.0/24"],"providerID":"azure:///subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mc_rohanazuregroup_test-force-delete000001_westeurope/providers/Microsoft.Compute/virtualMachineScaleSets/aks-nodepool1-15603940-vmss/virtualMachines/0"},"status":{"capacity":{"cpu":"2","ephemeral-storage":"129886128Ki","hugepages-1Gi":"0","hugepages-2Mi":"0","memory":"4025808Ki","pods":"110"},"allocatable":{"cpu":"1900m","ephemeral-storage":"119703055367","hugepages-1Gi":"0","hugepages-2Mi":"0","memory":"2209232Ki","pods":"110"},"conditions":[{"type":"NetworkUnavailable","status":"False","lastHeartbeatTime":"2022-09-12T09:32:13Z","lastTransitionTime":"2022-09-12T09:32:13Z","reason":"RouteCreated","message":"RouteController - created a route"},{"type":"MemoryPressure","status":"False","lastHeartbeatTime":"2022-09-12T09:31:26Z","lastTransitionTime":"2022-09-12T09:31:15Z","reason":"KubeletHasSufficientMemory","message":"kubelet - has sufficient memory available"},{"type":"DiskPressure","status":"False","lastHeartbeatTime":"2022-09-12T09:31:26Z","lastTransitionTime":"2022-09-12T09:31:15Z","reason":"KubeletHasNoDiskPressure","message":"kubelet - has no disk pressure"},{"type":"PIDPressure","status":"False","lastHeartbeatTime":"2022-09-12T09:31:26Z","lastTransitionTime":"2022-09-12T09:31:15Z","reason":"KubeletHasSufficientPID","message":"kubelet - has sufficient PID 
available"},{"type":"Ready","status":"True","lastHeartbeatTime":"2022-09-12T09:31:26Z","lastTransitionTime":"2022-09-12T09:31:26Z","reason":"KubeletReady","message":"kubelet - is posting ready status. AppArmor enabled"}],"addresses":[{"type":"InternalIP","address":"10.224.0.4"},{"type":"Hostname","address":"aks-nodepool1-15603940-vmss000000"}],"daemonEndpoints":{"kubeletEndpoint":{"Port":10250}},"nodeInfo":{"machineID":"ed4322fa45994f3b83e51b1517430e2f","systemUUID":"89c33540-769f-497e-bd3d-998f3f92c886","bootID":"fb5c26bc-b2be-4571-9ffc-2ceeabfccaad","kernelVersion":"5.4.0-1089-azure","osImage":"Ubuntu - 18.04.6 LTS","containerRuntimeVersion":"containerd://1.5.11+azure-2","kubeletVersion":"v1.23.8","kubeProxyVersion":"v1.23.8","operatingSystem":"linux","architecture":"amd64"},"images":[{"names":["mcr.microsoft.com/azuremonitor/containerinsights/ciprod:ciprod08102022"],"sizeBytes":397844357},{"names":["mcr.microsoft.com/azuremonitor/containerinsights/ciprod:ciprod06272022-hotfix"],"sizeBytes":357023149},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:0.49.3"],"sizeBytes":287741913},{"names":["mcr.microsoft.com/oss/calico/cni:v3.23.1"],"sizeBytes":263014840},{"names":["mcr.microsoft.com/oss/calico/cni:v3.21.4"],"sizeBytes":236345866},{"names":["mcr.microsoft.com/oss/calico/cni:v3.21.6"],"sizeBytes":227829276},{"names":["mcr.microsoft.com/oss/calico/node:v3.23.1"],"sizeBytes":221560540},{"names":["mcr.microsoft.com/oss/calico/node:v3.21.4"],"sizeBytes":216363503},{"names":["mcr.microsoft.com/oss/calico/node:v3.21.6"],"sizeBytes":215379163},{"names":["mcr.microsoft.com/oss/tigera/operator:v1.23.8"],"sizeBytes":184105789},{"names":["mcr.microsoft.com/oss/cilium/cilium:v1.12.0"],"sizeBytes":166659049},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:0.19.0"],"sizeBytes":166352383},{"names":["mcr.microsoft.com/aks/hcp/hcp-tunnel-front:master.220527.2"],"sizeBytes":146994488},{"names":["mcr.microsoft.com/oss/calico/kube-controllers:v3.23.1"],"sizeBytes":136078571},{"names":["mcr.microsoft.com/oss/calico/typha:v3.23.1"],"sizeBytes":131467121},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.23.8-hotfix.20220620.2"],"sizeBytes":131132619},{"names":["mcr.microsoft.com/oss/tigera/operator:v1.24.2"],"sizeBytes":128711964},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy@sha256:fa6af244cd30025166da43044ea4a9cc54d96d1cfb4d0afab806d8b3452dc740","mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.23.8-hotfix.20220728.1"],"sizeBytes":128606434},{"names":["mcr.microsoft.com/oss/calico/typha:v3.21.4"],"sizeBytes":128235133},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.2.2.5"],"sizeBytes":123925992},{"names":["mcr.microsoft.com/oss/calico/kube-controllers:v3.21.6"],"sizeBytes":123549280},{"names":["mcr.microsoft.com/oss/calico/typha:v3.21.6"],"sizeBytes":119713369},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:v1.2.1"],"sizeBytes":107169290},{"names":["mcr.microsoft.com/oss/calico/node:v3.8.9.5"],"sizeBytes":101794833},{"names":["mcr.microsoft.com/aks/acc/sgx-attestation:3.1"],"sizeBytes":98058501},{"names":["mcr.microsoft.com/oss/kubernetes/exechealthz:1.2_v0.0.5"],"sizeBytes":94348102},{"names":["mcr.microsoft.com/aks/hcp/tunnel-openvpn:master.220527.2"],"sizeBytes":92531564},{"names":["mcr.microsoft.com/containernetworking/azure-npm:v1.4.29"],"sizeBytes":89255513},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.2.2"],"sizeBytes":88551490},{"names":["mcr.microsoft.com/oss/ku
bernetes-csi/azuredisk-csi:v1.22.0"],"sizeBytes":87645386},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.21.0"],"sizeBytes":87550430},{"names":["mcr.microsoft.com/aks/command/runtime:master.220211.1"],"sizeBytes":82792811},{"names":["mcr.microsoft.com/azure-application-gateway/kubernetes-ingress:1.5.2"],"sizeBytes":77081542},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.19.0"],"sizeBytes":75743056},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.20.0"],"sizeBytes":75152698},{"names":["mcr.microsoft.com/azure-application-gateway/kubernetes-ingress:1.4.0"],"sizeBytes":73895290},{"names":["mcr.microsoft.com/oss/nvidia/k8s-device-plugin:v0.9.0"],"sizeBytes":67291599},{"names":["mcr.microsoft.com/oss/kubernetes/dashboard:v2.0.1"],"sizeBytes":66415836},{"names":["mcr.microsoft.com/oss/calico/cni:v3.8.9.3"],"sizeBytes":63581323},{"names":["mcr.microsoft.com/oss/kubernetes-csi/secrets-store/driver:v0.0.21.4"],"sizeBytes":56238120},{"names":["mcr.microsoft.com/oss/calico/kube-controllers:v3.21.4"],"sizeBytes":54638514},{"names":["mcr.microsoft.com/oss/kubernetes-csi/secrets-store/driver:v1.2.2"],"sizeBytes":54542179},{"names":["mcr.microsoft.com/oss/azure/aad-pod-identity/nmi:v1.8.8.5"],"sizeBytes":53824335},{"names":["mcr.microsoft.com/oss/azure/aad-pod-identity/nmi:v1.8.8.4"],"sizeBytes":51968453},{"names":["mcr.microsoft.com/oss/kubernetes/kubernetes-dashboard:v1.10.1"],"sizeBytes":44907744},{"names":["mcr.microsoft.com/aks/acc/sgx-device-plugin:1.0"],"sizeBytes":42023095},{"names":["mcr.microsoft.com/oss/kubernetes/autoscaler/cluster-proportional-autoscaler:1.8.5"],"sizeBytes":41926181},{"names":["mcr.microsoft.com/aks/ip-masq-agent-v2:v0.1.3"],"sizeBytes":31272518},{"names":["mcr.microsoft.com/containernetworking/azure-cns:v1.4.29"],"sizeBytes":30470581},{"names":["mcr.microsoft.com/aks/acc/sgx-webhook:1.0"],"sizeBytes":30190110}]}}]} + string: 
'{"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"1487"},"items":[{"metadata":{"name":"aks-nodepool1-42371919-vmss000000","uid":"29bc844f-21b9-4d9c-9bbd-366b0fda8bbf","resourceVersion":"1106","creationTimestamp":"2022-10-18T19:30:09Z","labels":{"agentpool":"nodepool1","beta.kubernetes.io/arch":"amd64","beta.kubernetes.io/instance-type":"Standard_B4ms","beta.kubernetes.io/os":"linux","failure-domain.beta.kubernetes.io/region":"westeurope","failure-domain.beta.kubernetes.io/zone":"0","kubernetes.azure.com/agentpool":"nodepool1","kubernetes.azure.com/cluster":"MC_rohanazuregroup_test-force-delete000001_westeurope","kubernetes.azure.com/kubelet-identity-client-id":"09e30c05-18bc-4655-b841-ed8680d5e0cf","kubernetes.azure.com/mode":"system","kubernetes.azure.com/node-image-version":"AKSUbuntu-1804gen2containerd-2022.10.03","kubernetes.azure.com/os-sku":"Ubuntu","kubernetes.azure.com/role":"agent","kubernetes.azure.com/storageprofile":"managed","kubernetes.azure.com/storagetier":"Premium_LRS","kubernetes.io/arch":"amd64","kubernetes.io/hostname":"aks-nodepool1-42371919-vmss000000","kubernetes.io/os":"linux","kubernetes.io/role":"agent","node-role.kubernetes.io/agent":"","node.kubernetes.io/instance-type":"Standard_B4ms","storageprofile":"managed","storagetier":"Premium_LRS","topology.disk.csi.azure.com/zone":"","topology.kubernetes.io/region":"westeurope","topology.kubernetes.io/zone":"0"},"annotations":{"csi.volume.kubernetes.io/nodeid":"{\"disk.csi.azure.com\":\"aks-nodepool1-42371919-vmss000000\",\"file.csi.azure.com\":\"aks-nodepool1-42371919-vmss000000\"}","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"cloud-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:30:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:spec":{"f:podCIDR":{},"f:podCIDRs":{".":{},"v:\"10.244.0.0/24\"":{}}}}},{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:30:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:volumes.kubernetes.io/controller-managed-attach-detach":{}},"f:labels":{".":{},"f:agentpool":{},"f:beta.kubernetes.io/arch":{},"f:beta.kubernetes.io/os":{},"f:kubernetes.azure.com/agentpool":{},"f:kubernetes.azure.com/kubelet-identity-client-id":{},"f:kubernetes.azure.com/mode":{},"f:kubernetes.azure.com/node-image-version":{},"f:kubernetes.io/arch":{},"f:kubernetes.io/hostname":{},"f:kubernetes.io/os":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:30:20Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl":{}}}}},{"manager":"cloud-node-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:30:28Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{"f:beta.kubernetes.io/instance-type":{},"f:failure-domain.beta.kubernetes.io/region":{},"f:failure-domain.beta.kubernetes.io/zone":{},"f:node.kubernetes.io/instance-type":{},"f:topology.kubernetes.io/region":{},"f:topology.kubernetes.io/zone":{}}},"f:spec":{"f:providerID":{}}}},{"manager":"cloud-node-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:30:28Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{"k:{\"type\":\"NetworkUnavailable\"}":{".":{},"f:type":{}}}}},"subresource":"status"},{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:30:30Z","fieldsType":"FieldsV1","fieldsV1":{"f:metad
ata":{"f:annotations":{"f:csi.volume.kubernetes.io/nodeid":{}},"f:labels":{"f:topology.disk.csi.azure.com/zone":{}}},"f:status":{"f:allocatable":{"f:ephemeral-storage":{}},"f:capacity":{"f:ephemeral-storage":{}},"f:conditions":{"k:{\"type\":\"DiskPressure\"}":{"f:lastHeartbeatTime":{}},"k:{\"type\":\"MemoryPressure\"}":{"f:lastHeartbeatTime":{}},"k:{\"type\":\"PIDPressure\"}":{"f:lastHeartbeatTime":{}},"k:{\"type\":\"Ready\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{}}},"f:images":{}}},"subresource":"status"},{"manager":"kubectl-label","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:30:55Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{"f:kubernetes.io/role":{},"f:node-role.kubernetes.io/agent":{}}}}},{"manager":"cloud-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:31:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{"k:{\"type\":\"NetworkUnavailable\"}":{"f:lastHeartbeatTime":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{}}}}},"subresource":"status"}]},"spec":{"podCIDR":"10.244.0.0/24","podCIDRs":["10.244.0.0/24"],"providerID":"azure:///subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mc_rohanazuregroup_test-force-delete000001_westeurope/providers/Microsoft.Compute/virtualMachineScaleSets/aks-nodepool1-42371919-vmss/virtualMachines/0"},"status":{"capacity":{"cpu":"4","ephemeral-storage":"129886128Ki","hugepages-1Gi":"0","hugepages-2Mi":"0","memory":"16393220Ki","pods":"110"},"allocatable":{"cpu":"3860m","ephemeral-storage":"119703055367","hugepages-1Gi":"0","hugepages-2Mi":"0","memory":"12899332Ki","pods":"110"},"conditions":[{"type":"NetworkUnavailable","status":"False","lastHeartbeatTime":"2022-10-18T19:31:10Z","lastTransitionTime":"2022-10-18T19:31:10Z","reason":"RouteCreated","message":"RouteController + created a route"},{"type":"MemoryPressure","status":"False","lastHeartbeatTime":"2022-10-18T19:30:19Z","lastTransitionTime":"2022-10-18T19:30:09Z","reason":"KubeletHasSufficientMemory","message":"kubelet + has sufficient memory available"},{"type":"DiskPressure","status":"False","lastHeartbeatTime":"2022-10-18T19:30:19Z","lastTransitionTime":"2022-10-18T19:30:09Z","reason":"KubeletHasNoDiskPressure","message":"kubelet + has no disk pressure"},{"type":"PIDPressure","status":"False","lastHeartbeatTime":"2022-10-18T19:30:19Z","lastTransitionTime":"2022-10-18T19:30:09Z","reason":"KubeletHasSufficientPID","message":"kubelet + has sufficient PID available"},{"type":"Ready","status":"True","lastHeartbeatTime":"2022-10-18T19:30:19Z","lastTransitionTime":"2022-10-18T19:30:19Z","reason":"KubeletReady","message":"kubelet + is posting ready status. 
AppArmor enabled"}],"addresses":[{"type":"InternalIP","address":"10.224.0.4"},{"type":"Hostname","address":"aks-nodepool1-42371919-vmss000000"}],"daemonEndpoints":{"kubeletEndpoint":{"Port":10250}},"nodeInfo":{"machineID":"e3ac926052b54fa6a11ec517ac10cb28","systemUUID":"bcb557bb-90ed-4fea-afaf-f1f1967feef7","bootID":"ec2d8170-135b-4eef-b756-25b27ac19ff2","kernelVersion":"5.4.0-1091-azure","osImage":"Ubuntu + 18.04.6 LTS","containerRuntimeVersion":"containerd://1.5.11+azure-2","kubeletVersion":"v1.23.12","kubeProxyVersion":"v1.23.12","operatingSystem":"linux","architecture":"amd64"},"images":[{"names":["mcr.microsoft.com/azuremonitor/containerinsights/ciprod:ciprod08102022"],"sizeBytes":397844357},{"names":["mcr.microsoft.com/azuremonitor/containerinsights/ciprod:ciprod06272022-hotfix"],"sizeBytes":357023149},{"names":["mcr.microsoft.com/azuremonitor/containerinsights/ciprod/prometheus-collector/images:5.2.0-main-09-29-2022-ca064de1"],"sizeBytes":315250960},{"names":["mcr.microsoft.com/azuremonitor/containerinsights/ciprod/prometheus-collector/images:5.1.0-main-09-23-2022-df3e2703"],"sizeBytes":315037321},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:0.49.3"],"sizeBytes":287741913},{"names":["mcr.microsoft.com/oss/calico/cni:v3.23.1"],"sizeBytes":263014840},{"names":["mcr.microsoft.com/oss/calico/cni:v3.21.4"],"sizeBytes":236345866},{"names":["mcr.microsoft.com/oss/calico/cni:v3.21.6"],"sizeBytes":227829276},{"names":["mcr.microsoft.com/oss/calico/node:v3.23.1"],"sizeBytes":221560540},{"names":["mcr.microsoft.com/oss/calico/node:v3.21.4"],"sizeBytes":216363503},{"names":["mcr.microsoft.com/oss/calico/node:v3.21.6"],"sizeBytes":215379163},{"names":["mcr.microsoft.com/oss/tigera/operator:v1.23.8"],"sizeBytes":184105789},{"names":["mcr.microsoft.com/oss/cilium/cilium:1.12.2"],"sizeBytes":166611722},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:0.19.0"],"sizeBytes":166352383},{"names":["mcr.microsoft.com/aks/hcp/hcp-tunnel-front:master.220527.2"],"sizeBytes":146994488},{"names":null,"sizeBytes":138243950},{"names":["mcr.microsoft.com/oss/calico/kube-controllers:v3.23.1"],"sizeBytes":136078571},{"names":["mcr.microsoft.com/oss/calico/typha:v3.23.1"],"sizeBytes":131467121},{"names":null,"sizeBytes":129890505},{"names":["mcr.microsoft.com/oss/kubernetes/kube-proxy:v1.23.12-hotfix.20220922.1"],"sizeBytes":128992809},{"names":["mcr.microsoft.com/oss/tigera/operator:v1.24.2"],"sizeBytes":128711964},{"names":["mcr.microsoft.com/oss/calico/typha:v3.21.4"],"sizeBytes":128235133},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.2.2.5"],"sizeBytes":123925992},{"names":null,"sizeBytes":123549904},{"names":["mcr.microsoft.com/oss/calico/kube-controllers:v3.21.6"],"sizeBytes":123549280},{"names":["mcr.microsoft.com/oss/calico/typha:v3.21.6"],"sizeBytes":119713369},{"names":null,"sizeBytes":115909379},{"names":null,"sizeBytes":115897326},{"names":null,"sizeBytes":115677896},{"names":["mcr.microsoft.com/oss/kubernetes/ingress/nginx-ingress-controller:v1.2.1"],"sizeBytes":107169290},{"names":["mcr.microsoft.com/oss/calico/node:v3.8.9.5"],"sizeBytes":101794833},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.22.0.3"],"sizeBytes":99538753},{"names":["mcr.microsoft.com/aks/acc/sgx-attestation:3.1"],"sizeBytes":98058501},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azuredisk-csi:v1.23.0"],"sizeBytes":95915873},{"names":["mcr.microsoft.com/oss/kubernetes/exechealthz:1.2_v0.0.5"],"sizeBytes":94348102},{"names":["mcr.micro
soft.com/aks/hcp/tunnel-openvpn:master.220527.2"],"sizeBytes":92531564},{"names":["mcr.microsoft.com/containernetworking/azure-npm:v1.4.32"],"sizeBytes":90048618},{"names":["mcr.microsoft.com/containernetworking/azure-npm:v1.4.29"],"sizeBytes":89255513},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.2.2"],"sizeBytes":88551490},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.22.0"],"sizeBytes":83173887},{"names":["mcr.microsoft.com/aks/command/runtime:master.220211.1"],"sizeBytes":82792811},{"names":["mcr.microsoft.com/oss/kubernetes-csi/azurefile-csi:v1.21.0"],"sizeBytes":75345915},{"names":["mcr.microsoft.com/oss/nvidia/k8s-device-plugin:v0.9.0"],"sizeBytes":67291599},{"names":["mcr.microsoft.com/containernetworking/cni-dropgz:v0.0.2"],"sizeBytes":67202663},{"names":["mcr.microsoft.com/oss/kubernetes-csi/secrets-store/driver:v1.2.2.3"],"sizeBytes":64781810},{"names":["mcr.microsoft.com/oss/calico/cni:v3.8.9.3"],"sizeBytes":63581323},{"names":null,"sizeBytes":63271342},{"names":["mcr.microsoft.com/oss/kubernetes-csi/secrets-store/driver:v1.2.2.2"],"sizeBytes":56424516},{"names":["mcr.microsoft.com/oss/calico/kube-controllers:v3.21.4"],"sizeBytes":54638514},{"names":["mcr.microsoft.com/oss/azure/aad-pod-identity/nmi:v1.8.12.1"],"sizeBytes":46617098}]}}]} ' headers: audit-id: - - b070f1fc-1908-4219-84de-809ef52e338b + - 702df8c3-b0a8-4feb-83c9-77d6c3c5fd55 cache-control: - no-cache, private content-type: - application/json date: - - Mon, 12 Sep 2022 09:32:48 GMT + - Tue, 18 Oct 2022 19:32:24 GMT transfer-encoding: - chunked x-kubernetes-pf-flowschema-uid: - - ffa83d76-a56b-4d0b-bb6f-50d7f09474fb + - 92b2ef8a-fcc8-4f73-a2ca-c1502b7b9250 x-kubernetes-pf-prioritylevel-uid: - - e1255224-bf15-43fd-949c-62117a6d8625 + - a1989c01-0e94-4e3b-a68c-18561b43277e status: code: 200 message: OK @@ -900,17 +981,17 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: POST - uri: https://test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io/apis/authorization.k8s.io/v1/selfsubjectaccessreviews + uri: https://test-force-rohanazuregroup-1bfbb5-4f0f44fc.hcp.westeurope.azmk8s.io/apis/authorization.k8s.io/v1/selfsubjectaccessreviews response: body: - string: '{"kind":"SelfSubjectAccessReview","apiVersion":"authorization.k8s.io/v1","metadata":{"creationTimestamp":null,"managedFields":[{"manager":"OpenAPI-Generator","operation":"Update","apiVersion":"authorization.k8s.io/v1","time":"2022-09-12T09:32:49Z","fieldsType":"FieldsV1","fieldsV1":{"f:spec":{"f:resourceAttributes":{".":{},"f:group":{},"f:resource":{},"f:verb":{}}}}}]},"spec":{"resourceAttributes":{"verb":"create","group":"rbac.authorization.k8s.io","resource":"clusterrolebindings"}},"status":{"allowed":true}} + string: '{"kind":"SelfSubjectAccessReview","apiVersion":"authorization.k8s.io/v1","metadata":{"creationTimestamp":null,"managedFields":[{"manager":"OpenAPI-Generator","operation":"Update","apiVersion":"authorization.k8s.io/v1","time":"2022-10-18T19:32:24Z","fieldsType":"FieldsV1","fieldsV1":{"f:spec":{"f:resourceAttributes":{".":{},"f:group":{},"f:resource":{},"f:verb":{}}}}}]},"spec":{"resourceAttributes":{"verb":"create","group":"rbac.authorization.k8s.io","resource":"clusterrolebindings"}},"status":{"allowed":true}} ' headers: audit-id: - - 23fa0f53-2ce6-45ab-b7d6-148dcabb5c49 + - a74da1cb-28a3-483a-a13a-0ac07bab8373 cache-control: - no-cache, private content-length: @@ -918,49 +999,14 @@ interactions: 
content-type: - application/json date: - - Mon, 12 Sep 2022 09:32:49 GMT + - Tue, 18 Oct 2022 19:32:24 GMT x-kubernetes-pf-flowschema-uid: - - ffa83d76-a56b-4d0b-bb6f-50d7f09474fb + - 92b2ef8a-fcc8-4f73-a2ca-c1502b7b9250 x-kubernetes-pf-prioritylevel-uid: - - e1255224-bf15-43fd-949c-62117a6d8625 + - a1989c01-0e94-4e3b-a68c-18561b43277e status: code: 201 message: Created -- request: - body: null - headers: - Accept: - - application/json - Content-Type: - - application/json - User-Agent: - - OpenAPI-Generator/11.0.0/python - method: GET - uri: https://test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io/version/ - response: - body: - string: "{\n \"major\": \"1\",\n \"minor\": \"23\",\n \"gitVersion\": \"v1.23.8\",\n - \ \"gitCommit\": \"a35ccc8395e8a5eaa83b7b8c981677893b651cba\",\n \"gitTreeState\": - \"clean\",\n \"buildDate\": \"2022-07-28T20:52:46Z\",\n \"goVersion\": \"go1.17.11\",\n - \ \"compiler\": \"gc\",\n \"platform\": \"linux/amd64\"\n}" - headers: - audit-id: - - c46f943c-c431-4fb6-b771-1ae2849fce75 - cache-control: - - no-cache, private - content-length: - - '264' - content-type: - - application/json - date: - - Mon, 12 Sep 2022 09:32:49 GMT - x-kubernetes-pf-flowschema-uid: - - ffa83d76-a56b-4d0b-bb6f-50d7f09474fb - x-kubernetes-pf-prioritylevel-uid: - - e1255224-bf15-43fd-949c-62117a6d8625 - status: - code: 200 - message: OK - request: body: null headers: @@ -975,7 +1021,8 @@ interactions: ParameterSetName: - -g -n -l --tags --kube-config User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.10 (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Kubernetes?api-version=2021-04-01 response: @@ -984,21 +1031,21 @@ interactions: Europe","East US","West Central US","South Central US","Southeast Asia","UK South","East US 2","West US 2","Australia East","North Europe","France Central","Central US","West US","North Central US","Korea Central","Japan East","West US 3","East - Asia","Canada Central","East US 2 EUAP","Canada East"],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"SystemAssignedResourceIdentity, - SupportsTags, SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/operationStatuses","locations":["East + Asia","Canada Central","East US 2 EUAP","Canada East"],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"SystemAssignedResourceIdentity, + SupportsTags, SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/operationStatuses","locations":["East US 2 EUAP","West Europe","East US","West Central US","South Central US","Southeast Asia","UK South","East US 2","West US 2","Australia East","North Europe","France Central","Central US","West US","North Central US","Korea Central","Japan - East","East Asia","West US 3","Canada East","Canada 
Central"],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"registeredSubscriptions","locations":[],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"Operations","locations":[],"apiVersions":["2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview","2019-11-01-preview","2019-09-01-privatepreview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + East","East Asia","West US 3","Canada East","Canada Central"],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"registeredSubscriptions","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview"],"capabilities":"None"},{"resourceType":"Operations","locations":[],"apiVersions":["2022-10-01-preview","2022-05-01-preview","2021-10-01","2021-04-01-preview","2021-03-01","2020-01-01-preview","2019-11-01-preview","2019-09-01-privatepreview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache content-length: - - '2311' + - '2416' content-type: - application/json; charset=utf-8 date: - - Mon, 12 Sep 2022 09:32:49 GMT + - Tue, 18 Oct 2022 19:32:25 GMT expires: - '-1' pragma: @@ -1026,7 +1073,8 @@ interactions: ParameterSetName: - -g -n -l --tags --kube-config User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.8.10 (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/rohanazuregroup/providers/Microsoft.Kubernetes/connectedClusters/cc-000002?api-version=2021-10-01 response: @@ -1038,11 +1086,11 @@ interactions: cache-control: - no-cache content-length: - - '235' + - '238' content-type: - application/json; charset=utf-8 date: - - Mon, 12 Sep 2022 09:32:52 GMT + - Tue, 18 Oct 2022 19:32:26 GMT expires: - '-1' pragma: @@ -1064,29 +1112,29 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io/api/v1/namespaces + uri: https://test-force-rohanazuregroup-1bfbb5-4f0f44fc.hcp.westeurope.azmk8s.io/api/v1/namespaces response: body: - string: 
'{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"1372"},"items":[{"metadata":{"name":"default","uid":"90da5316-e1cb-4697-b4f6-32c1aa68e122","resourceVersion":"204","creationTimestamp":"2022-09-12T09:30:16Z","labels":{"kubernetes.io/metadata.name":"default"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:30:16Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"kube-node-lease","uid":"ccc0c937-0df1-41b2-84d0-46b0b9445827","resourceVersion":"48","creationTimestamp":"2022-09-12T09:30:14Z","labels":{"kubernetes.io/metadata.name":"kube-node-lease"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:30:14Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"kube-public","uid":"9622a719-b92b-4e47-8d7b-098442cc4973","resourceVersion":"41","creationTimestamp":"2022-09-12T09:30:14Z","labels":{"kubernetes.io/metadata.name":"kube-public"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:30:14Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"kube-system","uid":"6b8eabf3-c8af-4e63-84a2-cb88ad1571d5","resourceVersion":"568","creationTimestamp":"2022-09-12T09:30:13Z","labels":{"addonmanager.kubernetes.io/mode":"Reconcile","control-plane":"true","kubernetes.io/cluster-service":"true","kubernetes.io/metadata.name":"kube-system"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"v1\",\"kind\":\"Namespace\",\"metadata\":{\"annotations\":{},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"Reconcile\",\"control-plane\":\"true\",\"kubernetes.io/cluster-service\":\"true\"},\"name\":\"kube-system\"}}\n"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:30:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:30:35Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{}},"f:labels":{"f:addonmanager.kubernetes.io/mode":{},"f:control-plane":{},"f:kubernetes.io/cluster-service":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}}]} + string: 
'{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"1500"},"items":[{"metadata":{"name":"default","uid":"25ab1bbd-5a66-49d3-bc5f-8a7ccc041184","resourceVersion":"205","creationTimestamp":"2022-10-18T19:28:56Z","labels":{"kubernetes.io/metadata.name":"default"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:28:56Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"kube-node-lease","uid":"33590637-ef44-4221-8093-ef1446b2f046","resourceVersion":"56","creationTimestamp":"2022-10-18T19:28:55Z","labels":{"kubernetes.io/metadata.name":"kube-node-lease"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:28:55Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"kube-public","uid":"4d4bd4d5-1503-494c-a816-2679a907c29d","resourceVersion":"51","creationTimestamp":"2022-10-18T19:28:55Z","labels":{"kubernetes.io/metadata.name":"kube-public"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:28:55Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}},{"metadata":{"name":"kube-system","uid":"9261f8df-725b-4c09-b82c-4db9561b6444","resourceVersion":"624","creationTimestamp":"2022-10-18T19:28:54Z","labels":{"addonmanager.kubernetes.io/mode":"Reconcile","control-plane":"true","kubernetes.io/cluster-service":"true","kubernetes.io/metadata.name":"kube-system"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"v1\",\"kind\":\"Namespace\",\"metadata\":{\"annotations\":{},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"Reconcile\",\"control-plane\":\"true\",\"kubernetes.io/cluster-service\":\"true\"},\"name\":\"kube-system\"}}\n"},"managedFields":[{"manager":"kube-apiserver","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:28:54Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:29:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{}},"f:labels":{"f:addonmanager.kubernetes.io/mode":{},"f:control-plane":{},"f:kubernetes.io/cluster-service":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}}]} ' headers: audit-id: - - bbb93f4d-1b5f-49ef-aed8-282d5d875470 + - 00fcc19c-e084-4180-9c15-253bca5dc805 cache-control: - no-cache, private content-type: - application/json date: - - Mon, 12 Sep 2022 09:32:53 GMT + - Tue, 18 Oct 2022 19:32:27 GMT transfer-encoding: - chunked x-kubernetes-pf-flowschema-uid: - - ffa83d76-a56b-4d0b-bb6f-50d7f09474fb + - 92b2ef8a-fcc8-4f73-a2ca-c1502b7b9250 x-kubernetes-pf-prioritylevel-uid: - - e1255224-bf15-43fd-949c-62117a6d8625 + - a1989c01-0e94-4e3b-a68c-18561b43277e status: code: 200 message: OK @@ -1104,7 +1152,8 @@ interactions: ParameterSetName: - -g -n -l --tags --kube-config User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.10 
(Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/rohanazuregroup?api-version=2021-04-01 response: @@ -1118,7 +1167,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Mon, 12 Sep 2022 09:32:54 GMT + - Tue, 18 Oct 2022 19:32:27 GMT expires: - '-1' pragma: @@ -1148,12 +1197,12 @@ interactions: ParameterSetName: - -g -n -l --tags --kube-config User-Agent: - - python/3.8.10 (Windows-10-10.0.19044-SP0) AZURECLI/2.38.0 + - python/3.7.7 (Windows-10-10.0.22621-SP0) AZURECLI/2.41.0 (MSI) method: POST uri: https://eastus.dp.kubernetesconfiguration.azure.com/azure-arc-k8sagents/GetLatestHelmPackagePath?api-version=2019-11-01-preview&releaseTrain=stable response: body: - string: '{"repositoryPath":"mcr.microsoft.com/azurearck8s/batch1/stable/azure-arc-k8sagents:1.7.18"}' + string: '{"repositoryPath":"mcr.microsoft.com/azurearck8s/batch1/stable/azure-arc-k8sagents:1.8.14"}' headers: api-supported-versions: - 2019-11-01-Preview @@ -1164,7 +1213,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Mon, 12 Sep 2022 09:32:55 GMT + - Tue, 18 Oct 2022 19:32:28 GMT strict-transport-security: - max-age=15724800; includeSubDomains x-content-type-options: @@ -1174,7 +1223,59 @@ interactions: message: OK - request: body: '{"tags": {"foo": "doo"}, "location": "eastus", "identity": {"type": "SystemAssigned"}, - "properties": {"agentPublicKeyCertificate": "MIICCgKCAgEApvvyRzLiL5/TiGeFHkApCUUerzb7UGKOlOTuYiqIQs2VhOW8lsMNLo7nb5HrRI39RHo7raXu6iJbnNvXlWi+qYrm3RS0uS1h5Z/0CvHhLUFY4GndswbbMUr/kABU1CufzY3yMO3Y8vRxiU1zbA/lGdRm6C7V00Fhh2Y7bSfvo7otZHGfsqOyoYNuWBRqoauQvBNRZ0FJEssCbey3eSaqDLbr31CmLGZlITB3CXgOu6H5oAXlnyMH8SDJlZceMBOatEKNeqkzHhgLoYs79r34pbyDhyDIdkDYHAZu+UTYK36QtjmUcpIqI0bmnSXWcEX86uB2AzDWsYyI+sETMWxi35yssz58ankz93ddXndRi5JDCLuuwrkO6JioqQeUd3Hja//YHuolBbMWy3kEerzzE6AiDOw6rf8IovhR1fh7L3YxABGA5d3QjXvlJV9oNlHuQBfDFRtUvA3WDGov+XBenNlswm3gey+UD0UdogFHNFutgdObJcejNmwVpIYZDC0QmvJmhVZzgd+OHGYjNGrWg09btaVTAZLhsfQfJwKcA2NKaguxSnWhf+7x/1iXT/2k7t+nLsEYfvkwV+9mEJYJwJQLt3gKspYXzzT9k9hSgzzXvS/y1TlUx/pShTbz6cs5IJIogYSGCr8KDaZzttmB5SJOZiBfyY8jH8z6V5Ix9SECAwEAAQ==", + "properties": {"agentPublicKeyCertificate": "MIICCgKCAgEAwSnivsiO8/tNEVsHHOTOLU8ia39n2OP0FWJMrG31RNZNxE1cnm+QpzzLjclFwLAjicHmh4f/dgTxiAXBfSKZIFqzXM1bhk5mynPkgO0ohPYgo3VtsOfWd9TZN8Gfyp19HauLRl2e6sSziWjVDaGGPdx+bxkLqTT6bzdVBxoti5rD37QRFtYA/dbvzynlxRaEglKD8O2jX5yO+wzv1/dQ0Gv7QotGXnuzIvqLQ8keGw2+jCsOUIX9M2olipK9I5BB6KoSxehL0n/A/d1BXO5Pmvch9w1estFwI4vqnjVvkHnSBsH375YIFlYLwS45ZFUZVIzulMQxLORYaSuTYzh5YYqTMT7ulP9FKqLM/qIB2SMp8/s0vTGoKRBVNKWYDgHrS9xqFhOsirA2aOxhvqusiV6/ezaEcjqyKkogOADXg7YyB4Um7TkB6mi3ailqlkx0j66ZeLvIkaAQXf5i+pv1BZQtxnOExrYCn8R34mjf6UIfwjrayi33GOrZF3khBsM2m+fo0FMVZickuU/JaF1qWPtVBDjG4kagD8wdAaICjSy9sn0Gk6C5M61vg2ivQD5SuwihpCgRXlxWUeq99qu/I0FrM66b3yWmDyQT6xNEAAhB5yKVyLhTAAj80PFAQuf8xXg2gKL6fywE9c5Z7CVLibQUDV2279jIQHSQfY/ED2kCAwEAAQ==", + "distribution": "aks", "infrastructure": "azure"}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - connectedk8s connect + Connection: + - keep-alive + Content-Length: + - '889' + Content-Type: + - application/json + ParameterSetName: + - -g -n -l --tags --kube-config + User-Agent: + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) + method: PUT + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/rohanazuregroup/providers/Microsoft.Kubernetes/connectedClusters/cc-000002?api-version=2021-10-01 + response: + body: + string: '{"error":{"code":"SubscriptionRequestsThrottled","message":"Number + of requests for subscription ''1bfbb5d0-917e-4346-9026-1d3b344417f5'' and + operation ''PUT/SUBSCRIPTIONS/RESOURCEGROUPS/PROVIDERS/MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/'' + exceeded the backend storage limit. Please try again after ''6'' seconds."}}' + headers: + cache-control: + - no-cache + connection: + - close + content-length: + - '308' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 18 Oct 2022 19:33:35 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + status: + code: 429 + message: '' +- request: + body: '{"tags": {"foo": "doo"}, "location": "eastus", "identity": {"type": "SystemAssigned"}, + "properties": {"agentPublicKeyCertificate": "MIICCgKCAgEAwSnivsiO8/tNEVsHHOTOLU8ia39n2OP0FWJMrG31RNZNxE1cnm+QpzzLjclFwLAjicHmh4f/dgTxiAXBfSKZIFqzXM1bhk5mynPkgO0ohPYgo3VtsOfWd9TZN8Gfyp19HauLRl2e6sSziWjVDaGGPdx+bxkLqTT6bzdVBxoti5rD37QRFtYA/dbvzynlxRaEglKD8O2jX5yO+wzv1/dQ0Gv7QotGXnuzIvqLQ8keGw2+jCsOUIX9M2olipK9I5BB6KoSxehL0n/A/d1BXO5Pmvch9w1estFwI4vqnjVvkHnSBsH375YIFlYLwS45ZFUZVIzulMQxLORYaSuTYzh5YYqTMT7ulP9FKqLM/qIB2SMp8/s0vTGoKRBVNKWYDgHrS9xqFhOsirA2aOxhvqusiV6/ezaEcjqyKkogOADXg7YyB4Um7TkB6mi3ailqlkx0j66ZeLvIkaAQXf5i+pv1BZQtxnOExrYCn8R34mjf6UIfwjrayi33GOrZF3khBsM2m+fo0FMVZickuU/JaF1qWPtVBDjG4kagD8wdAaICjSy9sn0Gk6C5M61vg2ivQD5SuwihpCgRXlxWUeq99qu/I0FrM66b3yWmDyQT6xNEAAhB5yKVyLhTAAj80PFAQuf8xXg2gKL6fywE9c5Z7CVLibQUDV2279jIQHSQfY/ED2kCAwEAAQ==", "distribution": "aks", "infrastructure": "azure"}}' headers: Accept: @@ -1192,25 +1293,26 @@ interactions: ParameterSetName: - -g -n -l --tags --kube-config User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.8.10 (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/rohanazuregroup/providers/Microsoft.Kubernetes/connectedClusters/cc-000002?api-version=2021-10-01 response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rohanazuregroup/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","name":"cc-000002","type":"microsoft.kubernetes/connectedclusters","location":"eastus","tags":{"foo":"doo"},"systemData":{"createdBy":"rohandassani@microsoft.com","createdByType":"User","createdAt":"2022-09-12T09:33:07.2322179Z","lastModifiedBy":"rohandassani@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-09-12T09:33:07.2322179Z"},"identity":{"principalId":"948a4688-e477-45d4-83db-77cb39849349","tenantId":"72f988bf-86f1-41af-91ab-2d7cd011db47","type":"SystemAssigned"},"properties":{"provisioningState":"Accepted","connectivityStatus":"Connecting","agentPublicKeyCertificate":"MIICCgKCAgEApvvyRzLiL5/TiGeFHkApCUUerzb7UGKOlOTuYiqIQs2VhOW8lsMNLo7nb5HrRI39RHo7raXu6iJbnNvXlWi+qYrm3RS0uS1h5Z/0CvHhLUFY4GndswbbMUr/kABU1CufzY3yMO3Y8vRxiU1zbA/lGdRm6C7V00Fhh2Y7bSfvo7otZHGfsqOyoYNuWBRqoauQvBNRZ0FJEssCbey3eSaqDLbr31CmLGZlITB3CXgOu6H5oAXlnyMH8SDJlZceMBOatEKNeqkzHhgLoYs79r34pbyDhyDIdkDYHAZu+UTYK36QtjmUcpIqI0bmnSXWcEX86uB2AzDWsYyI+sETMWxi35yssz58ankz93ddXndRi5JDCLuuwrkO6JioqQeUd3Hja//YHuolBbMWy3kEerzzE6AiDOw6rf8IovhR1fh7L3YxABGA5d3QjXvlJV9oNlHuQBfDFRtUvA3WDGov+XBenNlswm3gey+UD0UdogFHNFutgdObJcejNmwVpIYZDC0QmvJmhVZzgd+OHGYjNGrWg09btaVTAZLhsfQfJwKcA2NKaguxSnWhf+7x/1iXT/2k7t+nLsEYfvkwV+9mEJYJwJQLt3gKspYXzzT9k9hSgzzXvS/y1TlUx/pShTbz6cs5IJIogYSGCr8KDaZzttmB5SJOZiBfyY8jH8z6V5Ix9SECAwEAAQ==","distribution":"aks","infrastructure":"azure"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rohanazuregroup/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","name":"cc-000002","type":"microsoft.kubernetes/connectedclusters","location":"eastus","tags":{"foo":"doo"},"systemData":{"createdBy":"akkeshar@microsoft.com","createdByType":"User","createdAt":"2022-10-18T19:33:45.0468462Z","lastModifiedBy":"akkeshar@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-18T19:33:45.0468462Z"},"identity":{"principalId":"2fed9832-d6b9-4d78-a357-8dbe0cbf3832","tenantId":"72f988bf-86f1-41af-91ab-2d7cd011db47","type":"SystemAssigned"},"properties":{"provisioningState":"Accepted","connectivityStatus":"Connecting","agentPublicKeyCertificate":"MIICCgKCAgEAwSnivsiO8/tNEVsHHOTOLU8ia39n2OP0FWJMrG31RNZNxE1cnm+QpzzLjclFwLAjicHmh4f/dgTxiAXBfSKZIFqzXM1bhk5mynPkgO0ohPYgo3VtsOfWd9TZN8Gfyp19HauLRl2e6sSziWjVDaGGPdx+bxkLqTT6bzdVBxoti5rD37QRFtYA/dbvzynlxRaEglKD8O2jX5yO+wzv1/dQ0Gv7QotGXnuzIvqLQ8keGw2+jCsOUIX9M2olipK9I5BB6KoSxehL0n/A/d1BXO5Pmvch9w1estFwI4vqnjVvkHnSBsH375YIFlYLwS45ZFUZVIzulMQxLORYaSuTYzh5YYqTMT7ulP9FKqLM/qIB2SMp8/s0vTGoKRBVNKWYDgHrS9xqFhOsirA2aOxhvqusiV6/ezaEcjqyKkogOADXg7YyB4Um7TkB6mi3ailqlkx0j66ZeLvIkaAQXf5i+pv1BZQtxnOExrYCn8R34mjf6UIfwjrayi33GOrZF3khBsM2m+fo0FMVZickuU/JaF1qWPtVBDjG4kagD8wdAaICjSy9sn0Gk6C5M61vg2ivQD5SuwihpCgRXlxWUeq99qu/I0FrM66b3yWmDyQT6xNEAAhB5yKVyLhTAAj80PFAQuf8xXg2gKL6fywE9c5Z7CVLibQUDV2279jIQHSQfY/ED2kCAwEAAQ==","distribution":"aks","infrastructure":"azure"}}' headers: azure-asyncoperation: - - https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/a47b4811-42a1-435f-8331-76f75078280f*3E3E8ED0448D67377841D90676F937ABA2515F3748B0C9B73B4D317F771CEAEB?api-version=2021-10-01 + - https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/0c47cf1b-c103-4d28-a8fd-ce13e919b99a*4A811B08FC5020265FADFA7C448AB4B7508006C59187472731D1CD57E77833C1?api-version=2021-10-01 cache-control: - no-cache content-length: - - '1503' + - 
'1501' content-type: - application/json; charset=utf-8 date: - - Mon, 12 Sep 2022 09:33:11 GMT + - Tue, 18 Oct 2022 19:33:49 GMT etag: - - '"8c0069c8-0000-0100-0000-631efcd60000"' + - '"19004808-0000-0100-0000-634eff9c0000"' expires: - '-1' pragma: @@ -1222,7 +1324,7 @@ interactions: x-ms-providerhub-traffic: - 'True' x-ms-ratelimit-remaining-subscription-writes: - - '1199' + - '1198' status: code: 201 message: Created @@ -1240,23 +1342,24 @@ interactions: ParameterSetName: - -g -n -l --tags --kube-config User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.8.10 (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/a47b4811-42a1-435f-8331-76f75078280f*3E3E8ED0448D67377841D90676F937ABA2515F3748B0C9B73B4D317F771CEAEB?api-version=2021-10-01 + uri: https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/0c47cf1b-c103-4d28-a8fd-ce13e919b99a*4A811B08FC5020265FADFA7C448AB4B7508006C59187472731D1CD57E77833C1?api-version=2021-10-01 response: body: - string: '{"id":"/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/a47b4811-42a1-435f-8331-76f75078280f*3E3E8ED0448D67377841D90676F937ABA2515F3748B0C9B73B4D317F771CEAEB","name":"a47b4811-42a1-435f-8331-76f75078280f*3E3E8ED0448D67377841D90676F937ABA2515F3748B0C9B73B4D317F771CEAEB","resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rohanazuregroup/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","status":"Succeeded","startTime":"2022-09-12T09:33:10.2284149Z","endTime":"2022-09-12T09:33:16.2935566Z","properties":null}' + string: '{"id":"/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/0c47cf1b-c103-4d28-a8fd-ce13e919b99a*4A811B08FC5020265FADFA7C448AB4B7508006C59187472731D1CD57E77833C1","name":"0c47cf1b-c103-4d28-a8fd-ce13e919b99a*4A811B08FC5020265FADFA7C448AB4B7508006C59187472731D1CD57E77833C1","resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rohanazuregroup/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","status":"Succeeded","startTime":"2022-10-18T19:33:47.7681838Z","endTime":"2022-10-18T19:33:54.3029766Z","properties":null}' headers: cache-control: - no-cache content-length: - - '568' + - '571' content-type: - application/json; charset=utf-8 date: - - Mon, 12 Sep 2022 09:33:43 GMT + - Tue, 18 Oct 2022 19:34:20 GMT etag: - - '"4600effc-0000-0100-0000-631efcdc0000"' + - '"3f006483-0000-0100-0000-634effa20000"' expires: - '-1' pragma: @@ -1286,23 +1389,24 @@ interactions: ParameterSetName: - -g -n -l --tags --kube-config User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.8.10 (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/rohanazuregroup/providers/Microsoft.Kubernetes/connectedClusters/cc-000002?api-version=2021-10-01 response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rohanazuregroup/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","name":"cc-000002","type":"microsoft.kubernetes/connectedclusters","location":"eastus","tags":{"foo":"doo"},"systemData":{"createdBy":"rohandassani@microsoft.com","createdByType":"User","createdAt":"2022-09-12T09:33:07.2322179Z","lastModifiedBy":"rohandassani@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-09-12T09:33:07.2322179Z"},"identity":{"principalId":"948a4688-e477-45d4-83db-77cb39849349","tenantId":"72f988bf-86f1-41af-91ab-2d7cd011db47","type":"SystemAssigned"},"properties":{"provisioningState":"Succeeded","connectivityStatus":"Connecting","agentPublicKeyCertificate":"MIICCgKCAgEApvvyRzLiL5/TiGeFHkApCUUerzb7UGKOlOTuYiqIQs2VhOW8lsMNLo7nb5HrRI39RHo7raXu6iJbnNvXlWi+qYrm3RS0uS1h5Z/0CvHhLUFY4GndswbbMUr/kABU1CufzY3yMO3Y8vRxiU1zbA/lGdRm6C7V00Fhh2Y7bSfvo7otZHGfsqOyoYNuWBRqoauQvBNRZ0FJEssCbey3eSaqDLbr31CmLGZlITB3CXgOu6H5oAXlnyMH8SDJlZceMBOatEKNeqkzHhgLoYs79r34pbyDhyDIdkDYHAZu+UTYK36QtjmUcpIqI0bmnSXWcEX86uB2AzDWsYyI+sETMWxi35yssz58ankz93ddXndRi5JDCLuuwrkO6JioqQeUd3Hja//YHuolBbMWy3kEerzzE6AiDOw6rf8IovhR1fh7L3YxABGA5d3QjXvlJV9oNlHuQBfDFRtUvA3WDGov+XBenNlswm3gey+UD0UdogFHNFutgdObJcejNmwVpIYZDC0QmvJmhVZzgd+OHGYjNGrWg09btaVTAZLhsfQfJwKcA2NKaguxSnWhf+7x/1iXT/2k7t+nLsEYfvkwV+9mEJYJwJQLt3gKspYXzzT9k9hSgzzXvS/y1TlUx/pShTbz6cs5IJIogYSGCr8KDaZzttmB5SJOZiBfyY8jH8z6V5Ix9SECAwEAAQ==","distribution":"AKS","infrastructure":"azure"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rohanazuregroup/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","name":"cc-000002","type":"microsoft.kubernetes/connectedclusters","location":"eastus","tags":{"foo":"doo"},"systemData":{"createdBy":"akkeshar@microsoft.com","createdByType":"User","createdAt":"2022-10-18T19:33:45.0468462Z","lastModifiedBy":"akkeshar@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-18T19:33:45.0468462Z"},"identity":{"principalId":"2fed9832-d6b9-4d78-a357-8dbe0cbf3832","tenantId":"72f988bf-86f1-41af-91ab-2d7cd011db47","type":"SystemAssigned"},"properties":{"provisioningState":"Succeeded","connectivityStatus":"Connecting","agentPublicKeyCertificate":"MIICCgKCAgEAwSnivsiO8/tNEVsHHOTOLU8ia39n2OP0FWJMrG31RNZNxE1cnm+QpzzLjclFwLAjicHmh4f/dgTxiAXBfSKZIFqzXM1bhk5mynPkgO0ohPYgo3VtsOfWd9TZN8Gfyp19HauLRl2e6sSziWjVDaGGPdx+bxkLqTT6bzdVBxoti5rD37QRFtYA/dbvzynlxRaEglKD8O2jX5yO+wzv1/dQ0Gv7QotGXnuzIvqLQ8keGw2+jCsOUIX9M2olipK9I5BB6KoSxehL0n/A/d1BXO5Pmvch9w1estFwI4vqnjVvkHnSBsH375YIFlYLwS45ZFUZVIzulMQxLORYaSuTYzh5YYqTMT7ulP9FKqLM/qIB2SMp8/s0vTGoKRBVNKWYDgHrS9xqFhOsirA2aOxhvqusiV6/ezaEcjqyKkogOADXg7YyB4Um7TkB6mi3ailqlkx0j66ZeLvIkaAQXf5i+pv1BZQtxnOExrYCn8R34mjf6UIfwjrayi33GOrZF3khBsM2m+fo0FMVZickuU/JaF1qWPtVBDjG4kagD8wdAaICjSy9sn0Gk6C5M61vg2ivQD5SuwihpCgRXlxWUeq99qu/I0FrM66b3yWmDyQT6xNEAAhB5yKVyLhTAAj80PFAQuf8xXg2gKL6fywE9c5Z7CVLibQUDV2279jIQHSQfY/ED2kCAwEAAQ==","distribution":"AKS","infrastructure":"azure"}}' headers: cache-control: - no-cache content-length: - - '1504' + - '1502' content-type: - application/json; charset=utf-8 date: - - Mon, 12 Sep 2022 09:33:44 GMT + - Tue, 18 Oct 2022 19:34:20 GMT etag: - - '"8c0080c8-0000-0100-0000-631efcdc0000"' + - '"19006508-0000-0100-0000-634effa20000"' expires: - '-1' pragma: @@ -1334,7 +1438,8 @@ interactions: ParameterSetName: - -g -n -l --tags --kube-config User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.10 (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 
(MSI) azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ExtendedLocation?api-version=2021-04-01 response: @@ -1370,7 +1475,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Mon, 12 Sep 2022 09:33:44 GMT + - Tue, 18 Oct 2022 19:34:21 GMT expires: - '-1' pragma: @@ -1398,8 +1503,8 @@ interactions: ParameterSetName: - -g -n -l --tags --kube-config User-Agent: - - python/3.8.10 (Windows-10-10.0.19044-SP0) msrest/0.7.1 msrest_azure/0.6.4 - azure-graphrbac/0.60.0 Azure-SDK-For-Python AZURECLI/2.38.0 + - python/3.7.7 (Windows-10-10.0.22621-SP0) msrest/0.7.1 msrest_azure/0.6.4 azure-graphrbac/0.60.0 + Azure-SDK-For-Python AZURECLI/2.41.0 (MSI) accept-language: - en-US method: GET @@ -1422,19 +1527,19 @@ interactions: dataserviceversion: - 3.0; date: - - Mon, 12 Sep 2022 09:33:44 GMT + - Tue, 18 Oct 2022 19:34:21 GMT duration: - - '1633660' + - '1258515' expires: - '-1' ocp-aad-diagnostics-server-name: - - OPelI7VrBtHnJkm7E0PsgwaZrjo2NsmSLt6qfy5nvUE= + - JyeM+5eFzeTHbJ5JOsOk4ZavbV40VH6ifhSb7d1ws28= ocp-aad-session-key: - - WVgCaGXmw4JkNGC3dP2abHn0qnEYIRSZE0scLUbPsBCtJCjtoVp9PmOypF3RUHZ8_NpfWsEUM87EdFJMyBByv39ixUaj3ulF0uJQycazDV9eOgAm29owm60HKFH87B5Y.9UWmP9n6EGu8RaV6gACFuoaYVkav-h6Ke1hogL7Qaf4 + - enQqG2e46fQSo8PX9si9trQHBwQrwEtn8Ofa9uagnnZIMDfjYjWm7vuE_QM1YQqymeFQZLEvetWopZQESE7LE_aHmhQ0va5bP0CEUWjVdbFl4ddHjZ1RkQDbdlTWci00.NLHVyyBxFWLy2ktv17NlF8iUOhgGnkHpqd3G8bMZhXs pragma: - no-cache request-id: - - b63bbbe6-9a81-42c0-a416-fc96e8a10a31 + - aba70e0e-3903-4074-a0e8-e71244aa10cd strict-transport-security: - max-age=31536000; includeSubDomains x-aspnet-version: @@ -1462,23 +1567,24 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.8.10 (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/rohanazuregroup/providers/Microsoft.Kubernetes/connectedClusters/cc-000002?api-version=2022-05-01-preview + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/rohanazuregroup/providers/Microsoft.Kubernetes/connectedClusters/cc-000002?api-version=2022-10-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rohanazuregroup/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","name":"cc-000002","type":"microsoft.kubernetes/connectedclusters","location":"eastus","tags":{"foo":"doo"},"systemData":{"createdBy":"rohandassani@microsoft.com","createdByType":"User","createdAt":"2022-09-12T09:33:07.2322179Z","lastModifiedBy":"64b12d6e-6549-484c-8cc6-6281839ba394","lastModifiedByType":"Application","lastModifiedAt":"2022-09-12T09:34:31.378422Z"},"identity":{"principalId":"948a4688-e477-45d4-83db-77cb39849349","tenantId":"72f988bf-86f1-41af-91ab-2d7cd011db47","type":"SystemAssigned"},"properties":{"provisioningState":"Succeeded","connectivityStatus":"Connected","privateLinkState":"Disabled","agentPublicKeyCertificate":"MIICCgKCAgEApvvyRzLiL5/TiGeFHkApCUUerzb7UGKOlOTuYiqIQs2VhOW8lsMNLo7nb5HrRI39RHo7raXu6iJbnNvXlWi+qYrm3RS0uS1h5Z/0CvHhLUFY4GndswbbMUr/kABU1CufzY3yMO3Y8vRxiU1zbA/lGdRm6C7V00Fhh2Y7bSfvo7otZHGfsqOyoYNuWBRqoauQvBNRZ0FJEssCbey3eSaqDLbr31CmLGZlITB3CXgOu6H5oAXlnyMH8SDJlZceMBOatEKNeqkzHhgLoYs79r34pbyDhyDIdkDYHAZu+UTYK36QtjmUcpIqI0bmnSXWcEX86uB2AzDWsYyI+sETMWxi35yssz58ankz93ddXndRi5JDCLuuwrkO6JioqQeUd3Hja//YHuolBbMWy3kEerzzE6AiDOw6rf8IovhR1fh7L3YxABGA5d3QjXvlJV9oNlHuQBfDFRtUvA3WDGov+XBenNlswm3gey+UD0UdogFHNFutgdObJcejNmwVpIYZDC0QmvJmhVZzgd+OHGYjNGrWg09btaVTAZLhsfQfJwKcA2NKaguxSnWhf+7x/1iXT/2k7t+nLsEYfvkwV+9mEJYJwJQLt3gKspYXzzT9k9hSgzzXvS/y1TlUx/pShTbz6cs5IJIogYSGCr8KDaZzttmB5SJOZiBfyY8jH8z6V5Ix9SECAwEAAQ==","distribution":"","infrastructure":"azure","kubernetesVersion":"1.23.8","totalNodeCount":1,"agentVersion":"1.7.18","totalCoreCount":2,"lastConnectivityTime":"2022-09-12T09:34:27.573Z"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rohanazuregroup/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","name":"cc-000002","type":"microsoft.kubernetes/connectedclusters","location":"eastus","tags":{"foo":"doo"},"systemData":{"createdBy":"akkeshar@microsoft.com","createdByType":"User","createdAt":"2022-10-18T19:33:45.0468462Z","lastModifiedBy":"64b12d6e-6549-484c-8cc6-6281839ba394","lastModifiedByType":"Application","lastModifiedAt":"2022-10-18T19:40:03.2790589Z"},"identity":{"principalId":"2fed9832-d6b9-4d78-a357-8dbe0cbf3832","tenantId":"72f988bf-86f1-41af-91ab-2d7cd011db47","type":"SystemAssigned"},"properties":{"provisioningState":"Succeeded","connectivityStatus":"Connected","privateLinkState":"Disabled","azureHybridBenefit":"NotApplicable","agentPublicKeyCertificate":"MIICCgKCAgEAwSnivsiO8/tNEVsHHOTOLU8ia39n2OP0FWJMrG31RNZNxE1cnm+QpzzLjclFwLAjicHmh4f/dgTxiAXBfSKZIFqzXM1bhk5mynPkgO0ohPYgo3VtsOfWd9TZN8Gfyp19HauLRl2e6sSziWjVDaGGPdx+bxkLqTT6bzdVBxoti5rD37QRFtYA/dbvzynlxRaEglKD8O2jX5yO+wzv1/dQ0Gv7QotGXnuzIvqLQ8keGw2+jCsOUIX9M2olipK9I5BB6KoSxehL0n/A/d1BXO5Pmvch9w1estFwI4vqnjVvkHnSBsH375YIFlYLwS45ZFUZVIzulMQxLORYaSuTYzh5YYqTMT7ulP9FKqLM/qIB2SMp8/s0vTGoKRBVNKWYDgHrS9xqFhOsirA2aOxhvqusiV6/ezaEcjqyKkogOADXg7YyB4Um7TkB6mi3ailqlkx0j66ZeLvIkaAQXf5i+pv1BZQtxnOExrYCn8R34mjf6UIfwjrayi33GOrZF3khBsM2m+fo0FMVZickuU/JaF1qWPtVBDjG4kagD8wdAaICjSy9sn0Gk6C5M61vg2ivQD5SuwihpCgRXlxWUeq99qu/I0FrM66b3yWmDyQT6xNEAAhB5yKVyLhTAAj80PFAQuf8xXg2gKL6fywE9c5Z7CVLibQUDV2279jIQHSQfY/ED2kCAwEAAQ==","distribution":"AKS","infrastructure":"azure","kubernetesVersion":"1.23.12","totalNodeCount":1,"agentVersion":"1.8.14","totalCoreCount":4,"lastConnectivityTime":"2022-10-18T19:39:51.418Z","managedIdentityCertificateExpirationTime":"2023-01-16T19:28:00Z"}}' headers: cache-control: - no-cache content-length: - - 
'1687' + - '1797' content-type: - application/json; charset=utf-8 date: - - Mon, 12 Sep 2022 09:38:29 GMT + - Tue, 18 Oct 2022 19:41:07 GMT etag: - - '"8c00a9c9-0000-0100-0000-631efd270000"' + - '"1900300e-0000-0100-0000-634f01130000"' expires: - '-1' pragma: @@ -1504,29 +1610,30 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io/apis/networking.k8s.io/v1/ + uri: https://test-force-rohanazuregroup-1bfbb5-4f0f44fc.hcp.westeurope.azmk8s.io/version/ response: body: - string: '{"kind":"APIResourceList","apiVersion":"v1","groupVersion":"networking.k8s.io/v1","resources":[{"name":"ingressclasses","singularName":"","namespaced":false,"kind":"IngressClass","verbs":["create","delete","deletecollection","get","list","patch","update","watch"],"storageVersionHash":"l/iqIbDgFyQ="},{"name":"ingresses","singularName":"","namespaced":true,"kind":"Ingress","verbs":["create","delete","deletecollection","get","list","patch","update","watch"],"shortNames":["ing"],"storageVersionHash":"39NQlfNR+bo="},{"name":"ingresses/status","singularName":"","namespaced":true,"kind":"Ingress","verbs":["get","patch","update"]},{"name":"networkpolicies","singularName":"","namespaced":true,"kind":"NetworkPolicy","verbs":["create","delete","deletecollection","get","list","patch","update","watch"],"shortNames":["netpol"],"storageVersionHash":"YpfwF18m1G8="}]} - - ' + string: "{\n \"major\": \"1\",\n \"minor\": \"23\",\n \"gitVersion\": \"v1.23.12\",\n + \ \"gitCommit\": \"c6939792865ef0f70f92006081690d77411c8ed5\",\n \"gitTreeState\": + \"clean\",\n \"buildDate\": \"2022-09-21T21:46:35Z\",\n \"goVersion\": \"go1.17.13\",\n + \ \"compiler\": \"gc\",\n \"platform\": \"linux/amd64\"\n}" headers: audit-id: - - b884a3c9-5205-4b97-a35c-b3aaef666f4b + - eb29f669-9a3f-4cc9-ac20-e50366659403 cache-control: - no-cache, private content-length: - - '864' + - '265' content-type: - application/json date: - - Mon, 12 Sep 2022 09:38:31 GMT + - Tue, 18 Oct 2022 19:41:08 GMT x-kubernetes-pf-flowschema-uid: - - ffa83d76-a56b-4d0b-bb6f-50d7f09474fb + - 92b2ef8a-fcc8-4f73-a2ca-c1502b7b9250 x-kubernetes-pf-prioritylevel-uid: - - e1255224-bf15-43fd-949c-62117a6d8625 + - a1989c01-0e94-4e3b-a68c-18561b43277e status: code: 200 message: OK @@ -1546,7 +1653,8 @@ interactions: ParameterSetName: - -g -n --kube-config --force -y User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.8.10 (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: DELETE uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/rohanazuregroup/providers/Microsoft.Kubernetes/connectedClusters/cc-000002?api-version=2021-10-01 response: @@ -1554,7 +1662,7 @@ interactions: string: 'null' headers: azure-asyncoperation: - - https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/9892768f-9d3a-46c5-9fd6-ad98147ad95b*3E3E8ED0448D67377841D90676F937ABA2515F3748B0C9B73B4D317F771CEAEB?api-version=2021-10-01 + - https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/4af07712-ea52-4c55-b865-a27d65f9e93d*4A811B08FC5020265FADFA7C448AB4B7508006C59187472731D1CD57E77833C1?api-version=2021-10-01 cache-control: - no-cache content-length: @@ -1562,13 +1670,13 @@ interactions: content-type: 
- application/json; charset=utf-8 date: - - Mon, 12 Sep 2022 09:38:35 GMT + - Tue, 18 Oct 2022 19:41:12 GMT etag: - - '"8c0020cd-0000-0100-0000-631efe1c0000"' + - '"1900490f-0000-0100-0000-634f01580000"' expires: - '-1' location: - - https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/9892768f-9d3a-46c5-9fd6-ad98147ad95b*3E3E8ED0448D67377841D90676F937ABA2515F3748B0C9B73B4D317F771CEAEB?api-version=2021-10-01 + - https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/4af07712-ea52-4c55-b865-a27d65f9e93d*4A811B08FC5020265FADFA7C448AB4B7508006C59187472731D1CD57E77833C1?api-version=2021-10-01 pragma: - no-cache strict-transport-security: @@ -1596,23 +1704,24 @@ interactions: ParameterSetName: - -g -n --kube-config --force -y User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.8.10 (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/9892768f-9d3a-46c5-9fd6-ad98147ad95b*3E3E8ED0448D67377841D90676F937ABA2515F3748B0C9B73B4D317F771CEAEB?api-version=2021-10-01 + uri: https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/4af07712-ea52-4c55-b865-a27d65f9e93d*4A811B08FC5020265FADFA7C448AB4B7508006C59187472731D1CD57E77833C1?api-version=2021-10-01 response: body: - string: '{"id":"/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/9892768f-9d3a-46c5-9fd6-ad98147ad95b*3E3E8ED0448D67377841D90676F937ABA2515F3748B0C9B73B4D317F771CEAEB","name":"9892768f-9d3a-46c5-9fd6-ad98147ad95b*3E3E8ED0448D67377841D90676F937ABA2515F3748B0C9B73B4D317F771CEAEB","resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rohanazuregroup/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","status":"Succeeded","startTime":"2022-09-12T09:38:35.4482822Z","endTime":"2022-09-12T09:38:40.4566608Z","properties":null}' + string: '{"id":"/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/4af07712-ea52-4c55-b865-a27d65f9e93d*4A811B08FC5020265FADFA7C448AB4B7508006C59187472731D1CD57E77833C1","name":"4af07712-ea52-4c55-b865-a27d65f9e93d*4A811B08FC5020265FADFA7C448AB4B7508006C59187472731D1CD57E77833C1","resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rohanazuregroup/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","status":"Succeeded","startTime":"2022-10-18T19:41:12.408117Z","endTime":"2022-10-18T19:41:17.0350374Z","properties":null}' headers: cache-control: - no-cache content-length: - - '568' + - '570' content-type: - application/json; charset=utf-8 date: - - Mon, 12 Sep 2022 09:39:06 GMT + - Tue, 18 Oct 2022 19:41:42 GMT etag: - - '"460063fe-0000-0100-0000-631efe200000"' + - '"3f00e18a-0000-0100-0000-634f015d0000"' expires: - '-1' pragma: @@ -1642,23 +1751,24 @@ interactions: ParameterSetName: - -g -n --kube-config --force -y User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.8.10 (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-mgmt-hybridkubernetes/1.0.0b1 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/9892768f-9d3a-46c5-9fd6-ad98147ad95b*3E3E8ED0448D67377841D90676F937ABA2515F3748B0C9B73B4D317F771CEAEB?api-version=2021-10-01 
+ uri: https://management.azure.com/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/4af07712-ea52-4c55-b865-a27d65f9e93d*4A811B08FC5020265FADFA7C448AB4B7508006C59187472731D1CD57E77833C1?api-version=2021-10-01 response: body: - string: '{"id":"/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/9892768f-9d3a-46c5-9fd6-ad98147ad95b*3E3E8ED0448D67377841D90676F937ABA2515F3748B0C9B73B4D317F771CEAEB","name":"9892768f-9d3a-46c5-9fd6-ad98147ad95b*3E3E8ED0448D67377841D90676F937ABA2515F3748B0C9B73B4D317F771CEAEB","resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rohanazuregroup/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","status":"Succeeded","startTime":"2022-09-12T09:38:35.4482822Z","endTime":"2022-09-12T09:38:40.4566608Z","properties":null}' + string: '{"id":"/providers/Microsoft.Kubernetes/locations/EASTUS/operationStatuses/4af07712-ea52-4c55-b865-a27d65f9e93d*4A811B08FC5020265FADFA7C448AB4B7508006C59187472731D1CD57E77833C1","name":"4af07712-ea52-4c55-b865-a27d65f9e93d*4A811B08FC5020265FADFA7C448AB4B7508006C59187472731D1CD57E77833C1","resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rohanazuregroup/providers/Microsoft.Kubernetes/connectedClusters/cc-000002","status":"Succeeded","startTime":"2022-10-18T19:41:12.408117Z","endTime":"2022-10-18T19:41:17.0350374Z","properties":null}' headers: cache-control: - no-cache content-length: - - '568' + - '570' content-type: - application/json; charset=utf-8 date: - - Mon, 12 Sep 2022 09:39:07 GMT + - Tue, 18 Oct 2022 19:41:42 GMT etag: - - '"460063fe-0000-0100-0000-631efe200000"' + - '"3f00e18a-0000-0100-0000-634f015d0000"' expires: - '-1' pragma: @@ -1682,17 +1792,17 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: https://test-force-rohanazuregroup-1bfbb5-4f0f44fc.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc response: body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"3237"},"items":[{"metadata":{"name":"azure-arc","uid":"59addb9b-15f7-46da-a0ec-4b1860aa2918","resourceVersion":"3231","creationTimestamp":"2022-09-12T09:33:55Z","deletionTimestamp":"2022-09-12T09:39:17Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:33:55Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating"}}]} + string: 
'{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"4349"},"items":[{"metadata":{"name":"azure-arc","uid":"2da97484-c129-468d-ba7e-6ba33eee09bb","resourceVersion":"4341","creationTimestamp":"2022-10-18T19:34:31Z","deletionTimestamp":"2022-10-18T19:42:05Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:34:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating"}}]} ' headers: audit-id: - - 96960c3f-b339-471e-ad2c-05ccc561d272 + - db6081c2-3dca-48b3-b189-db0a6210b4b9 cache-control: - no-cache, private content-length: @@ -1700,11 +1810,11 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:39:18 GMT + - Tue, 18 Oct 2022 19:42:06 GMT x-kubernetes-pf-flowschema-uid: - - ffa83d76-a56b-4d0b-bb6f-50d7f09474fb + - 92b2ef8a-fcc8-4f73-a2ca-c1502b7b9250 x-kubernetes-pf-prioritylevel-uid: - - e1255224-bf15-43fd-949c-62117a6d8625 + - a1989c01-0e94-4e3b-a68c-18561b43277e status: code: 200 message: OK @@ -1716,17 +1826,17 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: https://test-force-rohanazuregroup-1bfbb5-4f0f44fc.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc response: body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"3341"},"items":[{"metadata":{"name":"azure-arc","uid":"59addb9b-15f7-46da-a0ec-4b1860aa2918","resourceVersion":"3231","creationTimestamp":"2022-09-12T09:33:55Z","deletionTimestamp":"2022-09-12T09:39:17Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:33:55Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating"}}]} + string: 
'{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"4496"},"items":[{"metadata":{"name":"azure-arc","uid":"2da97484-c129-468d-ba7e-6ba33eee09bb","resourceVersion":"4341","creationTimestamp":"2022-10-18T19:34:31Z","deletionTimestamp":"2022-10-18T19:42:05Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:34:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating"}}]} ' headers: audit-id: - - fac27b4a-e027-4587-a537-a6c0be65abfb + - 275c1eb9-29b5-4b33-8932-c8c4ae113e62 cache-control: - no-cache, private content-length: @@ -1734,206 +1844,11 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:39:23 GMT - x-kubernetes-pf-flowschema-uid: - - ffa83d76-a56b-4d0b-bb6f-50d7f09474fb - x-kubernetes-pf-prioritylevel-uid: - - e1255224-bf15-43fd-949c-62117a6d8625 - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Content-Type: - - application/json - User-Agent: - - OpenAPI-Generator/11.0.0/python - method: GET - uri: https://test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc - response: - body: - string: 
'{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"3454"},"items":[{"metadata":{"name":"azure-arc","uid":"59addb9b-15f7-46da-a0ec-4b1860aa2918","resourceVersion":"3429","creationTimestamp":"2022-09-12T09:33:55Z","deletionTimestamp":"2022-09-12T09:39:17Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:33:55Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:39:25Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ResourcesDiscovered","message":"All - resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ParsedGroupVersions","message":"All - legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ContentDeleted","message":"All - content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"SomeResourcesRemain","message":"Some - resources are remaining: pods. 
has 10 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ContentHasNoFinalizers","message":"All - content-preserving finalizers finished"}]}}]} - - ' - headers: - audit-id: - - 1774286e-231b-4e64-bc66-ed1a8f100db9 - cache-control: - - no-cache, private - content-type: - - application/json - date: - - Mon, 12 Sep 2022 09:39:28 GMT - transfer-encoding: - - chunked - x-kubernetes-pf-flowschema-uid: - - ffa83d76-a56b-4d0b-bb6f-50d7f09474fb - x-kubernetes-pf-prioritylevel-uid: - - e1255224-bf15-43fd-949c-62117a6d8625 - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Content-Type: - - application/json - User-Agent: - - OpenAPI-Generator/11.0.0/python - method: GET - uri: https://test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc - response: - body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"3472"},"items":[{"metadata":{"name":"azure-arc","uid":"59addb9b-15f7-46da-a0ec-4b1860aa2918","resourceVersion":"3460","creationTimestamp":"2022-09-12T09:33:55Z","deletionTimestamp":"2022-09-12T09:39:17Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:33:55Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:39:25Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ResourcesDiscovered","message":"All - resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ParsedGroupVersions","message":"All - legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ContentDeleted","message":"All - content successfully deleted, may be waiting on 
finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"SomeResourcesRemain","message":"Some - resources are remaining: pods. has 7 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ContentHasNoFinalizers","message":"All - content-preserving finalizers finished"}]}}]} - - ' - headers: - audit-id: - - 98148210-44c5-4154-bab7-24c9cfe3d75e - cache-control: - - no-cache, private - content-type: - - application/json - date: - - Mon, 12 Sep 2022 09:39:34 GMT - transfer-encoding: - - chunked - x-kubernetes-pf-flowschema-uid: - - ffa83d76-a56b-4d0b-bb6f-50d7f09474fb - x-kubernetes-pf-prioritylevel-uid: - - e1255224-bf15-43fd-949c-62117a6d8625 - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Content-Type: - - application/json - User-Agent: - - OpenAPI-Generator/11.0.0/python - method: GET - uri: https://test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc - response: - body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"3491"},"items":[{"metadata":{"name":"azure-arc","uid":"59addb9b-15f7-46da-a0ec-4b1860aa2918","resourceVersion":"3460","creationTimestamp":"2022-09-12T09:33:55Z","deletionTimestamp":"2022-09-12T09:39:17Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:33:55Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:39:25Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ResourcesDiscovered","message":"All - resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ParsedGroupVersions","message":"All - legacy kube types successfully 
parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ContentDeleted","message":"All - content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"SomeResourcesRemain","message":"Some - resources are remaining: pods. has 7 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ContentHasNoFinalizers","message":"All - content-preserving finalizers finished"}]}}]} - - ' - headers: - audit-id: - - fa434da0-0a7a-4cb5-a457-917223148622 - cache-control: - - no-cache, private - content-type: - - application/json - date: - - Mon, 12 Sep 2022 09:39:39 GMT - transfer-encoding: - - chunked - x-kubernetes-pf-flowschema-uid: - - ffa83d76-a56b-4d0b-bb6f-50d7f09474fb - x-kubernetes-pf-prioritylevel-uid: - - e1255224-bf15-43fd-949c-62117a6d8625 - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Content-Type: - - application/json - User-Agent: - - OpenAPI-Generator/11.0.0/python - method: GET - uri: https://test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc - response: - body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"3509"},"items":[{"metadata":{"name":"azure-arc","uid":"59addb9b-15f7-46da-a0ec-4b1860aa2918","resourceVersion":"3460","creationTimestamp":"2022-09-12T09:33:55Z","deletionTimestamp":"2022-09-12T09:39:17Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:33:55Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:39:25Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ResourcesDiscovered","message":"All - resources successfully 
discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ParsedGroupVersions","message":"All - legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ContentDeleted","message":"All - content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"SomeResourcesRemain","message":"Some - resources are remaining: pods. has 7 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ContentHasNoFinalizers","message":"All - content-preserving finalizers finished"}]}}]} - - ' - headers: - audit-id: - - e6b7418c-fbde-421a-9f4d-1c379fa90da8 - cache-control: - - no-cache, private - content-type: - - application/json - date: - - Mon, 12 Sep 2022 09:39:44 GMT - transfer-encoding: - - chunked - x-kubernetes-pf-flowschema-uid: - - ffa83d76-a56b-4d0b-bb6f-50d7f09474fb - x-kubernetes-pf-prioritylevel-uid: - - e1255224-bf15-43fd-949c-62117a6d8625 - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Content-Type: - - application/json - User-Agent: - - OpenAPI-Generator/11.0.0/python - method: GET - uri: https://test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc - response: - body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"3550"},"items":[{"metadata":{"name":"azure-arc","uid":"59addb9b-15f7-46da-a0ec-4b1860aa2918","resourceVersion":"3460","creationTimestamp":"2022-09-12T09:33:55Z","deletionTimestamp":"2022-09-12T09:39:17Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:33:55Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:39:25Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure
","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ResourcesDiscovered","message":"All - resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ParsedGroupVersions","message":"All - legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ContentDeleted","message":"All - content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"SomeResourcesRemain","message":"Some - resources are remaining: pods. has 7 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ContentHasNoFinalizers","message":"All - content-preserving finalizers finished"}]}}]} - - ' - headers: - audit-id: - - 37ae2f3c-cb16-4358-b8ef-bc3f8f5a3947 - cache-control: - - no-cache, private - content-type: - - application/json - date: - - Mon, 12 Sep 2022 09:39:49 GMT - transfer-encoding: - - chunked + - Tue, 18 Oct 2022 19:42:11 GMT x-kubernetes-pf-flowschema-uid: - - ffa83d76-a56b-4d0b-bb6f-50d7f09474fb + - 92b2ef8a-fcc8-4f73-a2ca-c1502b7b9250 x-kubernetes-pf-prioritylevel-uid: - - e1255224-bf15-43fd-949c-62117a6d8625 + - a1989c01-0e94-4e3b-a68c-18561b43277e status: code: 200 message: OK @@ -1945,34 +1860,34 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: https://test-force-rohanazuregroup-1bfbb5-4f0f44fc.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc response: body: - string: 
'{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"3626"},"items":[{"metadata":{"name":"azure-arc","uid":"59addb9b-15f7-46da-a0ec-4b1860aa2918","resourceVersion":"3460","creationTimestamp":"2022-09-12T09:33:55Z","deletionTimestamp":"2022-09-12T09:39:17Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:33:55Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:39:25Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ResourcesDiscovered","message":"All - resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ParsedGroupVersions","message":"All - legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ContentDeleted","message":"All - content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"SomeResourcesRemain","message":"Some - resources are remaining: pods. 
has 7 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ContentHasNoFinalizers","message":"All + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"4568"},"items":[{"metadata":{"name":"azure-arc","uid":"2da97484-c129-468d-ba7e-6ba33eee09bb","resourceVersion":"4542","creationTimestamp":"2022-10-18T19:34:31Z","deletionTimestamp":"2022-10-18T19:42:05Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:34:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:42:12Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"ResourcesDiscovered","message":"All + resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"ParsedGroupVersions","message":"All + legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"ContentDeleted","message":"All + content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"SomeResourcesRemain","message":"Some + resources are remaining: pods. 
has 10 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"ContentHasNoFinalizers","message":"All content-preserving finalizers finished"}]}}]} ' headers: audit-id: - - 7bbea2fe-d131-4b40-af3b-6477007b4b8e + - 6874e8ac-89b5-415c-a50e-087af5743115 cache-control: - no-cache, private content-type: - application/json date: - - Mon, 12 Sep 2022 09:39:54 GMT + - Tue, 18 Oct 2022 19:42:17 GMT transfer-encoding: - chunked x-kubernetes-pf-flowschema-uid: - - ffa83d76-a56b-4d0b-bb6f-50d7f09474fb + - 92b2ef8a-fcc8-4f73-a2ca-c1502b7b9250 x-kubernetes-pf-prioritylevel-uid: - - e1255224-bf15-43fd-949c-62117a6d8625 + - a1989c01-0e94-4e3b-a68c-18561b43277e status: code: 200 message: OK @@ -1984,34 +1899,34 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: https://test-force-rohanazuregroup-1bfbb5-4f0f44fc.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc response: body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"3643"},"items":[{"metadata":{"name":"azure-arc","uid":"59addb9b-15f7-46da-a0ec-4b1860aa2918","resourceVersion":"3460","creationTimestamp":"2022-09-12T09:33:55Z","deletionTimestamp":"2022-09-12T09:39:17Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:33:55Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:39:25Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ResourcesDiscovered","message":"All - resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ParsedGroupVersions","message":"All - legacy kube types successfully 
parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ContentDeleted","message":"All - content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"SomeResourcesRemain","message":"Some - resources are remaining: pods. has 7 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ContentHasNoFinalizers","message":"All + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"4586"},"items":[{"metadata":{"name":"azure-arc","uid":"2da97484-c129-468d-ba7e-6ba33eee09bb","resourceVersion":"4571","creationTimestamp":"2022-10-18T19:34:31Z","deletionTimestamp":"2022-10-18T19:42:05Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:34:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:42:12Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"ResourcesDiscovered","message":"All + resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"ParsedGroupVersions","message":"All + legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"ContentDeleted","message":"All + content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"SomeResourcesRemain","message":"Some + resources are remaining: pods. 
has 7 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"ContentHasNoFinalizers","message":"All content-preserving finalizers finished"}]}}]} ' headers: audit-id: - - e0e460c7-f072-4c8f-a1e4-5532caec6bdd + - 37145431-e4bc-4643-be8b-696d2117bd77 cache-control: - no-cache, private content-type: - application/json date: - - Mon, 12 Sep 2022 09:39:59 GMT + - Tue, 18 Oct 2022 19:42:22 GMT transfer-encoding: - chunked x-kubernetes-pf-flowschema-uid: - - ffa83d76-a56b-4d0b-bb6f-50d7f09474fb + - 92b2ef8a-fcc8-4f73-a2ca-c1502b7b9250 x-kubernetes-pf-prioritylevel-uid: - - e1255224-bf15-43fd-949c-62117a6d8625 + - a1989c01-0e94-4e3b-a68c-18561b43277e status: code: 200 message: OK @@ -2023,34 +1938,34 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: https://test-force-rohanazuregroup-1bfbb5-4f0f44fc.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc response: body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"3659"},"items":[{"metadata":{"name":"azure-arc","uid":"59addb9b-15f7-46da-a0ec-4b1860aa2918","resourceVersion":"3460","creationTimestamp":"2022-09-12T09:33:55Z","deletionTimestamp":"2022-09-12T09:39:17Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:33:55Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:39:25Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ResourcesDiscovered","message":"All - resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ParsedGroupVersions","message":"All - legacy kube types successfully 
parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ContentDeleted","message":"All - content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"SomeResourcesRemain","message":"Some - resources are remaining: pods. has 7 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ContentHasNoFinalizers","message":"All + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"4605"},"items":[{"metadata":{"name":"azure-arc","uid":"2da97484-c129-468d-ba7e-6ba33eee09bb","resourceVersion":"4571","creationTimestamp":"2022-10-18T19:34:31Z","deletionTimestamp":"2022-10-18T19:42:05Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:34:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:42:12Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"ResourcesDiscovered","message":"All + resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"ParsedGroupVersions","message":"All + legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"ContentDeleted","message":"All + content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"SomeResourcesRemain","message":"Some + resources are remaining: pods. 
has 7 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"ContentHasNoFinalizers","message":"All content-preserving finalizers finished"}]}}]} ' headers: audit-id: - - 24701a3f-d8a9-4c5b-962b-836eb49d613a + - f4cea766-6263-4609-967d-12432e7d06b5 cache-control: - no-cache, private content-type: - application/json date: - - Mon, 12 Sep 2022 09:40:05 GMT + - Tue, 18 Oct 2022 19:42:27 GMT transfer-encoding: - chunked x-kubernetes-pf-flowschema-uid: - - ffa83d76-a56b-4d0b-bb6f-50d7f09474fb + - 92b2ef8a-fcc8-4f73-a2ca-c1502b7b9250 x-kubernetes-pf-prioritylevel-uid: - - e1255224-bf15-43fd-949c-62117a6d8625 + - a1989c01-0e94-4e3b-a68c-18561b43277e status: code: 200 message: OK @@ -2062,34 +1977,34 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: https://test-force-rohanazuregroup-1bfbb5-4f0f44fc.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc response: body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"3677"},"items":[{"metadata":{"name":"azure-arc","uid":"59addb9b-15f7-46da-a0ec-4b1860aa2918","resourceVersion":"3460","creationTimestamp":"2022-09-12T09:33:55Z","deletionTimestamp":"2022-09-12T09:39:17Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:33:55Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:39:25Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ResourcesDiscovered","message":"All - resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ParsedGroupVersions","message":"All - legacy kube types successfully 
parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ContentDeleted","message":"All - content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"SomeResourcesRemain","message":"Some - resources are remaining: pods. has 7 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ContentHasNoFinalizers","message":"All + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"4623"},"items":[{"metadata":{"name":"azure-arc","uid":"2da97484-c129-468d-ba7e-6ba33eee09bb","resourceVersion":"4571","creationTimestamp":"2022-10-18T19:34:31Z","deletionTimestamp":"2022-10-18T19:42:05Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:34:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:42:12Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"ResourcesDiscovered","message":"All + resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"ParsedGroupVersions","message":"All + legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"ContentDeleted","message":"All + content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"SomeResourcesRemain","message":"Some + resources are remaining: pods. 
has 7 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"ContentHasNoFinalizers","message":"All content-preserving finalizers finished"}]}}]} ' headers: audit-id: - - c05a8dbf-0f33-4513-8b1c-e0beb5e10a64 + - 654565de-b323-4996-9f18-09745c490733 cache-control: - no-cache, private content-type: - application/json date: - - Mon, 12 Sep 2022 09:40:10 GMT + - Tue, 18 Oct 2022 19:42:32 GMT transfer-encoding: - chunked x-kubernetes-pf-flowschema-uid: - - ffa83d76-a56b-4d0b-bb6f-50d7f09474fb + - 92b2ef8a-fcc8-4f73-a2ca-c1502b7b9250 x-kubernetes-pf-prioritylevel-uid: - - e1255224-bf15-43fd-949c-62117a6d8625 + - a1989c01-0e94-4e3b-a68c-18561b43277e status: code: 200 message: OK @@ -2101,34 +2016,34 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: https://test-force-rohanazuregroup-1bfbb5-4f0f44fc.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc response: body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"3793"},"items":[{"metadata":{"name":"azure-arc","uid":"59addb9b-15f7-46da-a0ec-4b1860aa2918","resourceVersion":"3460","creationTimestamp":"2022-09-12T09:33:55Z","deletionTimestamp":"2022-09-12T09:39:17Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:33:55Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-09-12T09:39:25Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ResourcesDiscovered","message":"All - resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ParsedGroupVersions","message":"All - legacy kube types successfully 
parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ContentDeleted","message":"All - content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"SomeResourcesRemain","message":"Some - resources are remaining: pods. has 7 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-09-12T09:39:25Z","reason":"ContentHasNoFinalizers","message":"All + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"4656"},"items":[{"metadata":{"name":"azure-arc","uid":"2da97484-c129-468d-ba7e-6ba33eee09bb","resourceVersion":"4571","creationTimestamp":"2022-10-18T19:34:31Z","deletionTimestamp":"2022-10-18T19:42:05Z","labels":{"admission.policy.azure.com/ignore":"true","app.kubernetes.io/managed-by":"Helm","control-plane":"true","kubernetes.io/metadata.name":"azure-arc"},"annotations":{"meta.helm.sh/release-name":"azure-arc","meta.helm.sh/release-namespace":"default"},"managedFields":[{"manager":"helm","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:34:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:meta.helm.sh/release-name":{},"f:meta.helm.sh/release-namespace":{}},"f:labels":{".":{},"f:admission.policy.azure.com/ignore":{},"f:app.kubernetes.io/managed-by":{},"f:control-plane":{},"f:kubernetes.io/metadata.name":{}}}}},{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2022-10-18T19:42:12Z","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:conditions":{".":{},"k:{\"type\":\"NamespaceContentRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionContentFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionDiscoveryFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceDeletionGroupVersionParsingFailure\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}},"k:{\"type\":\"NamespaceFinalizersRemaining\"}":{".":{},"f:lastTransitionTime":{},"f:message":{},"f:reason":{},"f:status":{},"f:type":{}}}}},"subresource":"status"}]},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Terminating","conditions":[{"type":"NamespaceDeletionDiscoveryFailure","status":"False","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"ResourcesDiscovered","message":"All + resources successfully discovered"},{"type":"NamespaceDeletionGroupVersionParsingFailure","status":"False","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"ParsedGroupVersions","message":"All + legacy kube types successfully parsed"},{"type":"NamespaceDeletionContentFailure","status":"False","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"ContentDeleted","message":"All + content successfully deleted, may be waiting on finalization"},{"type":"NamespaceContentRemaining","status":"True","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"SomeResourcesRemain","message":"Some + resources are remaining: pods. 
has 7 resource instances"},{"type":"NamespaceFinalizersRemaining","status":"False","lastTransitionTime":"2022-10-18T19:42:12Z","reason":"ContentHasNoFinalizers","message":"All content-preserving finalizers finished"}]}}]} ' headers: audit-id: - - 8508ab37-7a6c-4010-83e6-110fc86d905f + - f2e212d3-6f2b-44b2-b5c4-7f919ac081f4 cache-control: - no-cache, private content-type: - application/json date: - - Mon, 12 Sep 2022 09:40:15 GMT + - Tue, 18 Oct 2022 19:42:37 GMT transfer-encoding: - chunked x-kubernetes-pf-flowschema-uid: - - ffa83d76-a56b-4d0b-bb6f-50d7f09474fb + - 92b2ef8a-fcc8-4f73-a2ca-c1502b7b9250 x-kubernetes-pf-prioritylevel-uid: - - e1255224-bf15-43fd-949c-62117a6d8625 + - a1989c01-0e94-4e3b-a68c-18561b43277e status: code: 200 message: OK @@ -2140,17 +2055,17 @@ interactions: Content-Type: - application/json User-Agent: - - OpenAPI-Generator/11.0.0/python + - OpenAPI-Generator/24.2.0/python method: GET - uri: https://test-force-rohanazuregroup-1bfbb5-6069f968.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc + uri: https://test-force-rohanazuregroup-1bfbb5-4f0f44fc.hcp.westeurope.azmk8s.io/api/v1/namespaces?fieldSelector=metadata.name%3Dazure-arc response: body: - string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"3824"},"items":[]} + string: '{"kind":"NamespaceList","apiVersion":"v1","metadata":{"resourceVersion":"4683"},"items":[]} ' headers: audit-id: - - 941faeae-ec63-4c35-9362-a743f47c7fe6 + - 543c4065-f830-4674-9e0e-44b0b06c24dc cache-control: - no-cache, private content-length: @@ -2158,11 +2073,11 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:40:20 GMT + - Tue, 18 Oct 2022 19:42:43 GMT x-kubernetes-pf-flowschema-uid: - - ffa83d76-a56b-4d0b-bb6f-50d7f09474fb + - 92b2ef8a-fcc8-4f73-a2ca-c1502b7b9250 x-kubernetes-pf-prioritylevel-uid: - - e1255224-bf15-43fd-949c-62117a6d8625 + - a1989c01-0e94-4e3b-a68c-18561b43277e status: code: 200 message: OK @@ -2182,26 +2097,26 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rohanazuregroup/providers/Microsoft.ContainerService/managedClusters/test-force-delete000001?api-version=2022-04-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rohanazuregroup/providers/Microsoft.ContainerService/managedClusters/test-force-delete000001?api-version=2022-07-01 response: body: string: '' headers: azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/f873e9d6-b0af-478a-a486-4dc23b7d4f70?api-version=2017-08-31 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/24629577-7711-4ef3-857f-1b28868ca8c2?api-version=2017-08-31 cache-control: - no-cache content-length: - '0' date: - - Mon, 12 Sep 2022 09:40:22 GMT + - Tue, 18 Oct 2022 19:42:47 GMT expires: - '-1' location: - - 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operationresults/f873e9d6-b0af-478a-a486-4dc23b7d4f70?api-version=2017-08-31 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operationresults/24629577-7711-4ef3-857f-1b28868ca8c2?api-version=2017-08-31 pragma: - no-cache server: @@ -2229,14 +2144,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/f873e9d6-b0af-478a-a486-4dc23b7d4f70?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/24629577-7711-4ef3-857f-1b28868ca8c2?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"d6e973f8-afb0-8a47-a486-4dc23b7d4f70\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:40:22.4582213Z\"\n }" + string: "{\n \"name\": \"77956224-1177-f34e-857f-1b28868ca8c2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:42:47.4537864Z\"\n }" headers: cache-control: - no-cache @@ -2245,7 +2160,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:40:52 GMT + - Tue, 18 Oct 2022 19:43:17 GMT expires: - '-1' pragma: @@ -2277,14 +2192,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/f873e9d6-b0af-478a-a486-4dc23b7d4f70?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/24629577-7711-4ef3-857f-1b28868ca8c2?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"d6e973f8-afb0-8a47-a486-4dc23b7d4f70\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:40:22.4582213Z\"\n }" + string: "{\n \"name\": \"77956224-1177-f34e-857f-1b28868ca8c2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:42:47.4537864Z\"\n }" headers: cache-control: - no-cache @@ -2293,7 +2208,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:41:22 GMT + - Tue, 18 Oct 2022 19:43:47 GMT expires: - '-1' pragma: @@ -2325,14 +2240,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/f873e9d6-b0af-478a-a486-4dc23b7d4f70?api-version=2017-08-31 + 
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/24629577-7711-4ef3-857f-1b28868ca8c2?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"d6e973f8-afb0-8a47-a486-4dc23b7d4f70\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:40:22.4582213Z\"\n }" + string: "{\n \"name\": \"77956224-1177-f34e-857f-1b28868ca8c2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:42:47.4537864Z\"\n }" headers: cache-control: - no-cache @@ -2341,7 +2256,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:41:53 GMT + - Tue, 18 Oct 2022 19:44:18 GMT expires: - '-1' pragma: @@ -2373,14 +2288,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/f873e9d6-b0af-478a-a486-4dc23b7d4f70?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/24629577-7711-4ef3-857f-1b28868ca8c2?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"d6e973f8-afb0-8a47-a486-4dc23b7d4f70\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:40:22.4582213Z\"\n }" + string: "{\n \"name\": \"77956224-1177-f34e-857f-1b28868ca8c2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:42:47.4537864Z\"\n }" headers: cache-control: - no-cache @@ -2389,7 +2304,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:42:23 GMT + - Tue, 18 Oct 2022 19:44:47 GMT expires: - '-1' pragma: @@ -2421,14 +2336,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/f873e9d6-b0af-478a-a486-4dc23b7d4f70?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/24629577-7711-4ef3-857f-1b28868ca8c2?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"d6e973f8-afb0-8a47-a486-4dc23b7d4f70\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:40:22.4582213Z\"\n }" + string: "{\n \"name\": \"77956224-1177-f34e-857f-1b28868ca8c2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:42:47.4537864Z\"\n }" headers: cache-control: - no-cache @@ -2437,7 +2352,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:42:54 GMT + - Tue, 18 Oct 2022 19:45:18 GMT expires: - '-1' pragma: @@ -2469,14 +2384,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) 
azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/f873e9d6-b0af-478a-a486-4dc23b7d4f70?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/24629577-7711-4ef3-857f-1b28868ca8c2?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"d6e973f8-afb0-8a47-a486-4dc23b7d4f70\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:40:22.4582213Z\"\n }" + string: "{\n \"name\": \"77956224-1177-f34e-857f-1b28868ca8c2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:42:47.4537864Z\"\n }" headers: cache-control: - no-cache @@ -2485,7 +2400,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:43:24 GMT + - Tue, 18 Oct 2022 19:45:49 GMT expires: - '-1' pragma: @@ -2517,14 +2432,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/f873e9d6-b0af-478a-a486-4dc23b7d4f70?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/24629577-7711-4ef3-857f-1b28868ca8c2?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"d6e973f8-afb0-8a47-a486-4dc23b7d4f70\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:40:22.4582213Z\"\n }" + string: "{\n \"name\": \"77956224-1177-f34e-857f-1b28868ca8c2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:42:47.4537864Z\"\n }" headers: cache-control: - no-cache @@ -2533,7 +2448,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:43:55 GMT + - Tue, 18 Oct 2022 19:46:20 GMT expires: - '-1' pragma: @@ -2565,14 +2480,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/f873e9d6-b0af-478a-a486-4dc23b7d4f70?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/24629577-7711-4ef3-857f-1b28868ca8c2?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"d6e973f8-afb0-8a47-a486-4dc23b7d4f70\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:40:22.4582213Z\"\n }" + string: "{\n \"name\": \"77956224-1177-f34e-857f-1b28868ca8c2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:42:47.4537864Z\"\n }" headers: cache-control: - no-cache @@ -2581,7 +2496,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:44:25 
GMT + - Tue, 18 Oct 2022 19:46:49 GMT expires: - '-1' pragma: @@ -2613,14 +2528,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/f873e9d6-b0af-478a-a486-4dc23b7d4f70?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/24629577-7711-4ef3-857f-1b28868ca8c2?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"d6e973f8-afb0-8a47-a486-4dc23b7d4f70\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:40:22.4582213Z\"\n }" + string: "{\n \"name\": \"77956224-1177-f34e-857f-1b28868ca8c2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:42:47.4537864Z\"\n }" headers: cache-control: - no-cache @@ -2629,7 +2544,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:44:56 GMT + - Tue, 18 Oct 2022 19:47:20 GMT expires: - '-1' pragma: @@ -2661,14 +2576,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/f873e9d6-b0af-478a-a486-4dc23b7d4f70?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/24629577-7711-4ef3-857f-1b28868ca8c2?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"d6e973f8-afb0-8a47-a486-4dc23b7d4f70\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:40:22.4582213Z\"\n }" + string: "{\n \"name\": \"77956224-1177-f34e-857f-1b28868ca8c2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:42:47.4537864Z\"\n }" headers: cache-control: - no-cache @@ -2677,7 +2592,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:45:26 GMT + - Tue, 18 Oct 2022 19:47:50 GMT expires: - '-1' pragma: @@ -2709,14 +2624,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/f873e9d6-b0af-478a-a486-4dc23b7d4f70?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/24629577-7711-4ef3-857f-1b28868ca8c2?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"d6e973f8-afb0-8a47-a486-4dc23b7d4f70\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:40:22.4582213Z\"\n }" + string: "{\n 
\"name\": \"77956224-1177-f34e-857f-1b28868ca8c2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:42:47.4537864Z\"\n }" headers: cache-control: - no-cache @@ -2725,7 +2640,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:45:57 GMT + - Tue, 18 Oct 2022 19:48:20 GMT expires: - '-1' pragma: @@ -2757,14 +2672,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/f873e9d6-b0af-478a-a486-4dc23b7d4f70?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/24629577-7711-4ef3-857f-1b28868ca8c2?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"d6e973f8-afb0-8a47-a486-4dc23b7d4f70\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:40:22.4582213Z\"\n }" + string: "{\n \"name\": \"77956224-1177-f34e-857f-1b28868ca8c2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:42:47.4537864Z\"\n }" headers: cache-control: - no-cache @@ -2773,7 +2688,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:46:26 GMT + - Tue, 18 Oct 2022 19:48:51 GMT expires: - '-1' pragma: @@ -2805,14 +2720,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/f873e9d6-b0af-478a-a486-4dc23b7d4f70?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/24629577-7711-4ef3-857f-1b28868ca8c2?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"d6e973f8-afb0-8a47-a486-4dc23b7d4f70\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:40:22.4582213Z\"\n }" + string: "{\n \"name\": \"77956224-1177-f34e-857f-1b28868ca8c2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:42:47.4537864Z\"\n }" headers: cache-control: - no-cache @@ -2821,7 +2736,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:46:57 GMT + - Tue, 18 Oct 2022 19:49:22 GMT expires: - '-1' pragma: @@ -2853,14 +2768,14 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/f873e9d6-b0af-478a-a486-4dc23b7d4f70?api-version=2017-08-31 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/24629577-7711-4ef3-857f-1b28868ca8c2?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"d6e973f8-afb0-8a47-a486-4dc23b7d4f70\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-09-12T09:40:22.4582213Z\"\n }" + string: "{\n \"name\": \"77956224-1177-f34e-857f-1b28868ca8c2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-18T19:42:47.4537864Z\"\n }" headers: cache-control: - no-cache @@ -2869,7 +2784,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:47:27 GMT + - Tue, 18 Oct 2022 19:49:52 GMT expires: - '-1' pragma: @@ -2901,15 +2816,15 @@ interactions: ParameterSetName: - -g -n -y User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-containerservice/19.1.0 Python/3.8.10 - (Windows-10-10.0.19044-SP0) + - AZURECLI/2.41.0 (MSI) azsdk-python-azure-mgmt-containerservice/20.3.0 Python/3.7.7 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/f873e9d6-b0af-478a-a486-4dc23b7d4f70?api-version=2017-08-31 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westeurope/operations/24629577-7711-4ef3-857f-1b28868ca8c2?api-version=2017-08-31 response: body: - string: "{\n \"name\": \"d6e973f8-afb0-8a47-a486-4dc23b7d4f70\",\n \"status\": - \"Succeeded\",\n \"startTime\": \"2022-09-12T09:40:22.4582213Z\",\n \"endTime\": - \"2022-09-12T09:47:37.0879282Z\"\n }" + string: "{\n \"name\": \"77956224-1177-f34e-857f-1b28868ca8c2\",\n \"status\": + \"Succeeded\",\n \"startTime\": \"2022-10-18T19:42:47.4537864Z\",\n \"endTime\": + \"2022-10-18T19:50:04.8689661Z\"\n }" headers: cache-control: - no-cache @@ -2918,7 +2833,7 @@ interactions: content-type: - application/json date: - - Mon, 12 Sep 2022 09:47:57 GMT + - Tue, 18 Oct 2022 19:50:22 GMT expires: - '-1' pragma: diff --git a/src/connectedk8s/azext_connectedk8s/tests/latest/test_connectedk8s_scenario.py b/src/connectedk8s/azext_connectedk8s/tests/latest/test_connectedk8s_scenario.py index 54011a26b76..3b19a318433 100644 --- a/src/connectedk8s/azext_connectedk8s/tests/latest/test_connectedk8s_scenario.py +++ b/src/connectedk8s/azext_connectedk8s/tests/latest/test_connectedk8s_scenario.py @@ -29,7 +29,7 @@ def test_connectedk8s(self): 'kubeconfigpls': "%s" % (_get_test_data_file('pls-config.yaml')), 'managed_cluster_name': managed_cluster_name }) - self.cmd('aks create -g akkeshar -n {} -s Standard_B2s -l westeurope -c 1 --generate-ssh-keys'.format(managed_cluster_name)) + self.cmd('aks create -g akkeshar -n {} -s Standard_B4ms -l westeurope -c 1 --generate-ssh-keys'.format(managed_cluster_name)) self.cmd('aks get-credentials -g akkeshar -n {managed_cluster_name} -f {kubeconfig}') self.cmd('connectedk8s connect -g akkeshar -n {name} -l eastus --tags foo=doo --kube-config {kubeconfig}', checks=[ self.check('tags.foo', 'doo'), @@ -41,14 +41,25 @@ def test_connectedk8s(self): self.check('tags.foo', 'doo') ]) self.cmd('connectedk8s delete -g akkeshar -n {name} --kube-config {kubeconfig} -y') + + # Test 2022-10-01-preview api properties + self.cmd('connectedk8s connect -g akkeshar -n {name} -l eastus --distribution aks_management --infrastructure azure_stack_hci --distribution-version 1.0 --tags foo=doo --kube-config {kubeconfig}', 
checks=[ + self.check('distributionVersion', '1.0'), + self.check('name', '{name}') + ]) + self.cmd('connectedk8s update -g akkeshar -n {name} --azure-hybrid-benefit true --kube-config {kubeconfig} --yes', checks=[ + self.check('azureHybridBenefit', 'True'), + self.check('name', '{name}') + ]) + self.cmd('aks delete -g akkeshar -n {} -y'.format(managed_cluster_name)) # delete the kube config os.remove("%s" % (_get_test_data_file(managed_cluster_name + '-config.yaml'))) # Private link test - self.cmd('aks get-credentials -g akkeshar -n akkeshar -f {kubeconfigpls}') - self.cmd('connectedk8s connect -g akkeshar -n cliplscc -l eastus --tags foo=doo --kube-config {kubeconfigpls} --enable-private-link true --pls-arm-id /subscriptions/1bfbb5d0-917e-4346-9026-1d3b344417f5/resourceGroups/akkeshar-pls/providers/Microsoft.HybridCompute/privateLinkScopes/testpls', checks=[ + self.cmd('aks get-credentials -g akkeshar -n tempaks -f {kubeconfigpls}') + self.cmd('connectedk8s connect -g akkeshar -n cliplscc -l eastus2euap --tags foo=doo --kube-config {kubeconfigpls} --enable-private-link true --pls-arm-id /subscriptions/1bfbb5d0-917e-4346-9026-1d3b344417f5/resourceGroups/akkeshar/providers/Microsoft.HybridCompute/privateLinkScopes/temppls --yes', checks=[ self.check('name', 'cliplscc') ]) self.cmd('connectedk8s delete -g akkeshar -n cliplscc --kube-config {kubeconfigpls} -y') @@ -66,7 +77,7 @@ def test_forcedelete(self): 'managed_cluster_name': managed_cluster_name }) - self.cmd('aks create -g rohanazuregroup -n {} -s Standard_B2s -l westeurope -c 1 --generate-ssh-keys'.format(managed_cluster_name)) + self.cmd('aks create -g rohanazuregroup -n {} -s Standard_B4ms -l westeurope -c 1 --generate-ssh-keys'.format(managed_cluster_name)) self.cmd('aks get-credentials -g rohanazuregroup -n {managed_cluster_name} -f {kubeconfig}') self.cmd('connectedk8s connect -g rohanazuregroup -n {name} -l eastus --tags foo=doo --kube-config {kubeconfig}', checks=[ self.check('tags.foo', 'doo'), diff --git a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/_patch.py b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/_patch.py deleted file mode 100644 index 74e48ecd07c..00000000000 --- a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/_patch.py +++ /dev/null @@ -1,31 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# -# Copyright (c) Microsoft Corporation. All rights reserved. -# -# The MIT License (MIT) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the ""Software""), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -# IN THE SOFTWARE. -# -# -------------------------------------------------------------------------- - -# This file is used for handwritten extensions to the generated code. Example: -# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md -def patch_sdk(): - pass \ No newline at end of file diff --git a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/models/__init__.py b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/models/__init__.py deleted file mode 100644 index 73c1f72a0a5..00000000000 --- a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/models/__init__.py +++ /dev/null @@ -1,63 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from ._models_py3 import ConnectedCluster -from ._models_py3 import ConnectedClusterIdentity -from ._models_py3 import ConnectedClusterList -from ._models_py3 import ConnectedClusterPatch -from ._models_py3 import CredentialResult -from ._models_py3 import CredentialResults -from ._models_py3 import ErrorAdditionalInfo -from ._models_py3 import ErrorDetail -from ._models_py3 import ErrorResponse -from ._models_py3 import HybridConnectionConfig -from ._models_py3 import ListClusterUserCredentialProperties -from ._models_py3 import Operation -from ._models_py3 import OperationDisplay -from ._models_py3 import OperationList -from ._models_py3 import Resource -from ._models_py3 import SystemData -from ._models_py3 import TrackedResource - - -from ._connected_kubernetes_client_enums import ( - AuthenticationMethod, - ConnectivityStatus, - CreatedByType, - LastModifiedByType, - PrivateLinkState, - ProvisioningState, - ResourceIdentityType, -) - -__all__ = [ - 'ConnectedCluster', - 'ConnectedClusterIdentity', - 'ConnectedClusterList', - 'ConnectedClusterPatch', - 'CredentialResult', - 'CredentialResults', - 'ErrorAdditionalInfo', - 'ErrorDetail', - 'ErrorResponse', - 'HybridConnectionConfig', - 'ListClusterUserCredentialProperties', - 'Operation', - 'OperationDisplay', - 'OperationList', - 'Resource', - 'SystemData', - 'TrackedResource', - 'AuthenticationMethod', - 'ConnectivityStatus', - 'CreatedByType', - 'LastModifiedByType', - 'PrivateLinkState', - 'ProvisioningState', - 'ResourceIdentityType', -] diff --git a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/models/_models.py b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/models/_models.py deleted file mode 100644 index cbd55c35e2d..00000000000 --- a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/models/_models.py +++ /dev/null @@ -1,671 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.core.exceptions import HttpResponseError -import msrest.serialization - - -class Resource(msrest.serialization.Model): - """Common fields that are returned in the response for all Azure Resource Manager resources. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - - -class TrackedResource(Resource): - """The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :param tags: A set of tags. Resource tags. - :type tags: dict[str, str] - :param location: Required. The geo-location where the resource lives. - :type location: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(TrackedResource, self).__init__(**kwargs) - self.tags = kwargs.get('tags', None) - self.location = kwargs['location'] - - -class ConnectedCluster(TrackedResource): - """Represents a connected cluster. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. 
"Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :param tags: A set of tags. Resource tags. - :type tags: dict[str, str] - :param location: Required. The geo-location where the resource lives. - :type location: str - :param identity: Required. The identity of the connected cluster. - :type identity: ~connected_kubernetes_client.models.ConnectedClusterIdentity - :ivar system_data: Metadata pertaining to creation and last modification of the resource. - :vartype system_data: ~connected_kubernetes_client.models.SystemData - :param agent_public_key_certificate: Required. Base64 encoded public certificate used by the - agent to do the initial handshake to the backend services in Azure. - :type agent_public_key_certificate: str - :ivar kubernetes_version: The Kubernetes version of the connected cluster resource. - :vartype kubernetes_version: str - :ivar total_node_count: Number of nodes present in the connected cluster resource. - :vartype total_node_count: int - :ivar total_core_count: Number of CPU cores present in the connected cluster resource. - :vartype total_core_count: int - :ivar agent_version: Version of the agent running on the connected cluster resource. - :vartype agent_version: str - :param provisioning_state: Provisioning state of the connected cluster resource. Possible - values include: "Succeeded", "Failed", "Canceled", "Provisioning", "Updating", "Deleting", - "Accepted". - :type provisioning_state: str or ~connected_kubernetes_client.models.ProvisioningState - :param distribution: The Kubernetes distribution running on this connected cluster. - :type distribution: str - :param infrastructure: The infrastructure on which the Kubernetes cluster represented by this - connected cluster is running on. - :type infrastructure: str - :ivar offering: Connected cluster offering. - :vartype offering: str - :ivar managed_identity_certificate_expiration_time: Expiration time of the managed identity - certificate. - :vartype managed_identity_certificate_expiration_time: ~datetime.datetime - :ivar last_connectivity_time: Time representing the last instance when heart beat was received - from the cluster. - :vartype last_connectivity_time: ~datetime.datetime - :ivar connectivity_status: Represents the connectivity status of the connected cluster. - Possible values include: "Connecting", "Connected", "Offline", "Expired". - :vartype connectivity_status: str or ~connected_kubernetes_client.models.ConnectivityStatus - :param private_link_state: Property which describes the state of private link on a connected - cluster resource. Possible values include: "Enabled", "Disabled". - :type private_link_state: str or ~connected_kubernetes_client.models.PrivateLinkState - :param private_link_scope_resource_id: The resource id of the private link scope this connected - cluster is assigned to, if any. 
- :type private_link_scope_resource_id: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'location': {'required': True}, - 'identity': {'required': True}, - 'system_data': {'readonly': True}, - 'agent_public_key_certificate': {'required': True}, - 'kubernetes_version': {'readonly': True}, - 'total_node_count': {'readonly': True}, - 'total_core_count': {'readonly': True}, - 'agent_version': {'readonly': True}, - 'offering': {'readonly': True}, - 'managed_identity_certificate_expiration_time': {'readonly': True}, - 'last_connectivity_time': {'readonly': True}, - 'connectivity_status': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ConnectedClusterIdentity'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'agent_public_key_certificate': {'key': 'properties.agentPublicKeyCertificate', 'type': 'str'}, - 'kubernetes_version': {'key': 'properties.kubernetesVersion', 'type': 'str'}, - 'total_node_count': {'key': 'properties.totalNodeCount', 'type': 'int'}, - 'total_core_count': {'key': 'properties.totalCoreCount', 'type': 'int'}, - 'agent_version': {'key': 'properties.agentVersion', 'type': 'str'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'distribution': {'key': 'properties.distribution', 'type': 'str'}, - 'infrastructure': {'key': 'properties.infrastructure', 'type': 'str'}, - 'offering': {'key': 'properties.offering', 'type': 'str'}, - 'managed_identity_certificate_expiration_time': {'key': 'properties.managedIdentityCertificateExpirationTime', 'type': 'iso-8601'}, - 'last_connectivity_time': {'key': 'properties.lastConnectivityTime', 'type': 'iso-8601'}, - 'connectivity_status': {'key': 'properties.connectivityStatus', 'type': 'str'}, - 'private_link_state': {'key': 'properties.privateLinkState', 'type': 'str'}, - 'private_link_scope_resource_id': {'key': 'properties.privateLinkScopeResourceId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ConnectedCluster, self).__init__(**kwargs) - self.identity = kwargs['identity'] - self.system_data = None - self.agent_public_key_certificate = kwargs['agent_public_key_certificate'] - self.kubernetes_version = None - self.total_node_count = None - self.total_core_count = None - self.agent_version = None - self.provisioning_state = kwargs.get('provisioning_state', None) - self.distribution = kwargs.get('distribution', None) - self.infrastructure = kwargs.get('infrastructure', None) - self.offering = None - self.managed_identity_certificate_expiration_time = None - self.last_connectivity_time = None - self.connectivity_status = None - self.private_link_state = kwargs.get('private_link_state', None) - self.private_link_scope_resource_id = kwargs.get('private_link_scope_resource_id', None) - - -class ConnectedClusterIdentity(msrest.serialization.Model): - """Identity for the connected cluster. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar principal_id: The principal id of connected cluster identity. This property will only be - provided for a system assigned identity. 
- :vartype principal_id: str - :ivar tenant_id: The tenant id associated with the connected cluster. This property will only - be provided for a system assigned identity. - :vartype tenant_id: str - :param type: Required. The type of identity used for the connected cluster. The type - 'SystemAssigned, includes a system created identity. The type 'None' means no identity is - assigned to the connected cluster. Possible values include: "None", "SystemAssigned". Default - value: "SystemAssigned". - :type type: str or ~connected_kubernetes_client.models.ResourceIdentityType - """ - - _validation = { - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ConnectedClusterIdentity, self).__init__(**kwargs) - self.principal_id = None - self.tenant_id = None - self.type = kwargs.get('type', "SystemAssigned") - - -class ConnectedClusterList(msrest.serialization.Model): - """The paginated list of connected Clusters. - - :param value: The list of connected clusters. - :type value: list[~connected_kubernetes_client.models.ConnectedCluster] - :param next_link: The link to fetch the next page of connected cluster. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[ConnectedCluster]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ConnectedClusterList, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) - - -class ConnectedClusterPatch(msrest.serialization.Model): - """Object containing updates for patch operations. - - :param tags: A set of tags. Resource tags. - :type tags: dict[str, str] - :param private_link_state: Property which describes the state of private link on a connected - cluster resource. Possible values include: "Enabled", "Disabled". - :type private_link_state: str or ~connected_kubernetes_client.models.PrivateLinkState - :param private_link_scope_resource_id: The resource id of the private link scope this connected - cluster is assigned to, if any. - :type private_link_scope_resource_id: str - """ - - _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'private_link_state': {'key': 'properties.privateLinkState', 'type': 'str'}, - 'private_link_scope_resource_id': {'key': 'properties.privateLinkScopeResourceId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ConnectedClusterPatch, self).__init__(**kwargs) - self.tags = kwargs.get('tags', None) - self.private_link_state = kwargs.get('private_link_state', None) - self.private_link_scope_resource_id = kwargs.get('private_link_scope_resource_id', None) - - -class CredentialResult(msrest.serialization.Model): - """The credential result response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar name: The name of the credential. - :vartype name: str - :ivar value: Base64-encoded Kubernetes configuration file. 
- :vartype value: bytearray - """ - - _validation = { - 'name': {'readonly': True}, - 'value': {'readonly': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'bytearray'}, - } - - def __init__( - self, - **kwargs - ): - super(CredentialResult, self).__init__(**kwargs) - self.name = None - self.value = None - - -class CredentialResults(msrest.serialization.Model): - """The list of credential result response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar hybrid_connection_config: Contains the REP (rendezvous endpoint) and “Sender” access - token. - :vartype hybrid_connection_config: ~connected_kubernetes_client.models.HybridConnectionConfig - :ivar kubeconfigs: Base64-encoded Kubernetes configuration file. - :vartype kubeconfigs: list[~connected_kubernetes_client.models.CredentialResult] - """ - - _validation = { - 'hybrid_connection_config': {'readonly': True}, - 'kubeconfigs': {'readonly': True}, - } - - _attribute_map = { - 'hybrid_connection_config': {'key': 'hybridConnectionConfig', 'type': 'HybridConnectionConfig'}, - 'kubeconfigs': {'key': 'kubeconfigs', 'type': '[CredentialResult]'}, - } - - def __init__( - self, - **kwargs - ): - super(CredentialResults, self).__init__(**kwargs) - self.hybrid_connection_config = None - self.kubeconfigs = None - - -class ErrorAdditionalInfo(msrest.serialization.Model): - """The resource management error additional info. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar type: The additional info type. - :vartype type: str - :ivar info: The additional info. - :vartype info: any - """ - - _validation = { - 'type': {'readonly': True}, - 'info': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'info': {'key': 'info', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ErrorAdditionalInfo, self).__init__(**kwargs) - self.type = None - self.info = None - - -class ErrorDetail(msrest.serialization.Model): - """The error detail. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar code: The error code. - :vartype code: str - :ivar message: The error message. - :vartype message: str - :ivar target: The error target. - :vartype target: str - :ivar details: The error details. - :vartype details: list[~connected_kubernetes_client.models.ErrorDetail] - :ivar additional_info: The error additional info. - :vartype additional_info: list[~connected_kubernetes_client.models.ErrorAdditionalInfo] - """ - - _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, - 'target': {'readonly': True}, - 'details': {'readonly': True}, - 'additional_info': {'readonly': True}, - } - - _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'target': {'key': 'target', 'type': 'str'}, - 'details': {'key': 'details', 'type': '[ErrorDetail]'}, - 'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'}, - } - - def __init__( - self, - **kwargs - ): - super(ErrorDetail, self).__init__(**kwargs) - self.code = None - self.message = None - self.target = None - self.details = None - self.additional_info = None - - -class ErrorResponse(msrest.serialization.Model): - """Common error response for all Azure Resource Manager APIs to return error details for failed operations. 
(This also follows the OData error response format.). - - :param error: The error object. - :type error: ~connected_kubernetes_client.models.ErrorDetail - """ - - _attribute_map = { - 'error': {'key': 'error', 'type': 'ErrorDetail'}, - } - - def __init__( - self, - **kwargs - ): - super(ErrorResponse, self).__init__(**kwargs) - self.error = kwargs.get('error', None) - - -class HybridConnectionConfig(msrest.serialization.Model): - """Contains the REP (rendezvous endpoint) and “Sender” access token. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar expiration_time: Timestamp when this token will be expired. - :vartype expiration_time: long - :ivar hybrid_connection_name: Name of the connection. - :vartype hybrid_connection_name: str - :ivar relay: Name of the relay. - :vartype relay: str - :ivar token: Sender access token. - :vartype token: str - """ - - _validation = { - 'expiration_time': {'readonly': True}, - 'hybrid_connection_name': {'readonly': True}, - 'relay': {'readonly': True}, - 'token': {'readonly': True}, - } - - _attribute_map = { - 'expiration_time': {'key': 'expirationTime', 'type': 'long'}, - 'hybrid_connection_name': {'key': 'hybridConnectionName', 'type': 'str'}, - 'relay': {'key': 'relay', 'type': 'str'}, - 'token': {'key': 'token', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(HybridConnectionConfig, self).__init__(**kwargs) - self.expiration_time = None - self.hybrid_connection_name = None - self.relay = None - self.token = None - - -class ListClusterUserCredentialsProperties(msrest.serialization.Model): - """ListClusterUserCredentialsProperties. - - All required parameters must be populated in order to send to Azure. - - :param authentication_method: Required. The mode of client authentication. Possible values - include: "Token", "AAD". - :type authentication_method: str or ~connected_kubernetes_client.models.AuthenticationMethod - :param client_proxy: Required. Boolean value to indicate whether the request is for client side - proxy or not. - :type client_proxy: bool - """ - - _validation = { - 'authentication_method': {'required': True}, - 'client_proxy': {'required': True}, - } - - _attribute_map = { - 'authentication_method': {'key': 'authenticationMethod', 'type': 'str'}, - 'client_proxy': {'key': 'clientProxy', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - super(ListClusterUserCredentialsProperties, self).__init__(**kwargs) - self.authentication_method = kwargs['authentication_method'] - self.client_proxy = kwargs['client_proxy'] - - -class Operation(msrest.serialization.Model): - """The Connected cluster API operation. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar name: Operation name: {Microsoft.Kubernetes}/{resource}/{operation}. - :vartype name: str - :ivar display: The object that represents the operation. - :vartype display: ~connected_kubernetes_client.models.OperationDisplay - """ - - _validation = { - 'name': {'readonly': True}, - 'display': {'readonly': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display': {'key': 'display', 'type': 'OperationDisplay'}, - } - - def __init__( - self, - **kwargs - ): - super(Operation, self).__init__(**kwargs) - self.name = None - self.display = None - - -class OperationDisplay(msrest.serialization.Model): - """The object that represents the operation. - - :param provider: Service provider: Microsoft.connectedClusters. 
- :type provider: str - :param resource: Connected Cluster Resource on which the operation is performed. - :type resource: str - :param operation: Operation type: Read, write, delete, etc. - :type operation: str - :param description: Description of the operation. - :type description: str - """ - - _attribute_map = { - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(OperationDisplay, self).__init__(**kwargs) - self.provider = kwargs.get('provider', None) - self.resource = kwargs.get('resource', None) - self.operation = kwargs.get('operation', None) - self.description = kwargs.get('description', None) - - -class OperationList(msrest.serialization.Model): - """The paginated list of connected cluster API operations. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The list of connected cluster API operations. - :vartype value: list[~connected_kubernetes_client.models.Operation] - :param next_link: The link to fetch the next page of connected cluster API operations. - :type next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[Operation]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(OperationList, self).__init__(**kwargs) - self.value = None - self.next_link = kwargs.get('next_link', None) - - -class SystemData(msrest.serialization.Model): - """Metadata pertaining to creation and last modification of the resource. - - :param created_by: The identity that created the resource. - :type created_by: str - :param created_by_type: The type of identity that created the resource. Possible values - include: "User", "Application", "ManagedIdentity", "Key". - :type created_by_type: str or ~connected_kubernetes_client.models.CreatedByType - :param created_at: The timestamp of resource creation (UTC). - :type created_at: ~datetime.datetime - :param last_modified_by: The identity that last modified the resource. - :type last_modified_by: str - :param last_modified_by_type: The type of identity that last modified the resource. Possible - values include: "User", "Application", "ManagedIdentity", "Key". - :type last_modified_by_type: str or ~connected_kubernetes_client.models.LastModifiedByType - :param last_modified_at: The timestamp of resource modification (UTC). 
- :type last_modified_at: ~datetime.datetime - """ - - _attribute_map = { - 'created_by': {'key': 'createdBy', 'type': 'str'}, - 'created_by_type': {'key': 'createdByType', 'type': 'str'}, - 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, - 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, - 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, - 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, - } - - def __init__( - self, - **kwargs - ): - super(SystemData, self).__init__(**kwargs) - self.created_by = kwargs.get('created_by', None) - self.created_by_type = kwargs.get('created_by_type', None) - self.created_at = kwargs.get('created_at', None) - self.last_modified_by = kwargs.get('last_modified_by', None) - self.last_modified_by_type = kwargs.get('last_modified_by_type', None) - self.last_modified_at = kwargs.get('last_modified_at', None) diff --git a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/operations/_connected_cluster_operations.py b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/operations/_connected_cluster_operations.py deleted file mode 100644 index 324b381cafb..00000000000 --- a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/operations/_connected_cluster_operations.py +++ /dev/null @@ -1,911 +0,0 @@ -# pylint: disable=too-many-lines -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.arm_polling import ARMPolling - -from .. 
import models as _models -from .._vendor import _convert_request, _format_url_section -T = TypeVar('T') -JSONType = Any -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - -def build_create_request_initial( - subscription_id: str, - resource_group_name: str, - cluster_name: str, - *, - json: JSONType = None, - content: Any = None, - **kwargs: Any -) -> HttpRequest: - api_version = kwargs.pop('api_version', "2022-05-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" - # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}") # pylint: disable=line-too-long - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - json=json, - content=content, - **kwargs - ) - - -def build_update_request( - subscription_id: str, - resource_group_name: str, - cluster_name: str, - *, - json: JSONType = None, - content: Any = None, - **kwargs: Any -) -> HttpRequest: - api_version = kwargs.pop('api_version', "2022-05-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" - # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}") # pylint: disable=line-too-long - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PATCH", - url=_url, - params=_query_parameters, - headers=_header_parameters, - json=json, - content=content, - **kwargs - ) - - -def 
build_get_request( - subscription_id: str, - resource_group_name: str, - cluster_name: str, - **kwargs: Any -) -> HttpRequest: - api_version = kwargs.pop('api_version', "2022-05-01-preview") # type: str - - accept = "application/json" - # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}") # pylint: disable=line-too-long - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id: str, - resource_group_name: str, - cluster_name: str, - **kwargs: Any -) -> HttpRequest: - api_version = kwargs.pop('api_version', "2022-05-01-preview") # type: str - - accept = "application/json" - # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}") # pylint: disable=line-too-long - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_list_cluster_user_credential_request( - subscription_id: str, - resource_group_name: str, - cluster_name: str, - *, - json: JSONType = None, - content: Any = None, - **kwargs: Any -) -> HttpRequest: - api_version = kwargs.pop('api_version', "2022-05-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" - # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}/listClusterUserCredential") # pylint: disable=line-too-long - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - 
"clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - json=json, - content=content, - **kwargs - ) - - -def build_list_by_resource_group_request( - subscription_id: str, - resource_group_name: str, - **kwargs: Any -) -> HttpRequest: - api_version = kwargs.pop('api_version', "2022-05-01-preview") # type: str - - accept = "application/json" - # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters") # pylint: disable=line-too-long - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_list_by_subscription_request( - subscription_id: str, - **kwargs: Any -) -> HttpRequest: - api_version = kwargs.pop('api_version', "2022-05-01-preview") # type: str - - accept = "application/json" - # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.Kubernetes/connectedClusters") - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -class ConnectedClusterOperations(object): - """ConnectedClusterOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.hybridkubernetes.models - :param client: Client for service requests. - :param config: Configuration of service client. 
- :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = _models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def _create_initial( - self, - resource_group_name: str, - cluster_name: str, - connected_cluster: "_models.ConnectedCluster", - **kwargs: Any - ) -> "_models.ConnectedCluster": - cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectedCluster"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - - api_version = kwargs.pop('api_version', "2022-05-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - - _json = self._serialize.body(connected_cluster, 'ConnectedCluster') - - request = build_create_request_initial( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - cluster_name=cluster_name, - api_version=api_version, - content_type=content_type, - json=_json, - template_url=self._create_initial.metadata['url'], - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200, 201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if response.status_code == 200: - deserialized = self._deserialize('ConnectedCluster', pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize('ConnectedCluster', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - _create_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}"} # type: ignore - - - @distributed_trace - def begin_create( - self, - resource_group_name: str, - cluster_name: str, - connected_cluster: "_models.ConnectedCluster", - **kwargs: Any - ) -> LROPoller["_models.ConnectedCluster"]: - """Register a new Kubernetes cluster with Azure Resource Manager. - - API to register a new Kubernetes cluster and create a tracked resource in Azure Resource - Manager (ARM). - - :param resource_group_name: The name of the resource group. The name is case insensitive. - :type resource_group_name: str - :param cluster_name: The name of the Kubernetes cluster on which get is called. - :type cluster_name: str - :param connected_cluster: Parameters supplied to Create a Connected Cluster. - :type connected_cluster: ~azure.mgmt.hybridkubernetes.models.ConnectedCluster - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. 
- :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of LROPoller that returns either ConnectedCluster or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.hybridkubernetes.models.ConnectedCluster] - :raises: ~azure.core.exceptions.HttpResponseError - """ - api_version = kwargs.pop('api_version', "2022-05-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectedCluster"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._create_initial( - resource_group_name=resource_group_name, - cluster_name=cluster_name, - connected_cluster=connected_cluster, - api_version=api_version, - content_type=content_type, - cls=lambda x,y,z: x, - **kwargs - ) - kwargs.pop('error_map', None) - - def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('ConnectedCluster', pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - - begin_create.metadata = {'url': "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}"} # type: ignore - - @distributed_trace - def update( - self, - resource_group_name: str, - cluster_name: str, - connected_cluster_patch: "_models.ConnectedClusterPatch", - **kwargs: Any - ) -> "_models.ConnectedCluster": - """Updates a connected cluster. - - API to update certain properties of the connected cluster resource. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - :type resource_group_name: str - :param cluster_name: The name of the Kubernetes cluster on which get is called. - :type cluster_name: str - :param connected_cluster_patch: Parameters supplied to update Connected Cluster. 
- :type connected_cluster_patch: ~azure.mgmt.hybridkubernetes.models.ConnectedClusterPatch - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ConnectedCluster, or the result of cls(response) - :rtype: ~azure.mgmt.hybridkubernetes.models.ConnectedCluster - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectedCluster"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - - api_version = kwargs.pop('api_version', "2022-05-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - - _json = self._serialize.body(connected_cluster_patch, 'ConnectedClusterPatch') - - request = build_update_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - cluster_name=cluster_name, - api_version=api_version, - content_type=content_type, - json=_json, - template_url=self.update.metadata['url'], - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ConnectedCluster', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - update.metadata = {'url': "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}"} # type: ignore - - - @distributed_trace - def get( - self, - resource_group_name: str, - cluster_name: str, - **kwargs: Any - ) -> "_models.ConnectedCluster": - """Get the properties of the specified connected cluster. - - Returns the properties of the specified connected cluster, including name, identity, - properties, and additional cluster details. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - :type resource_group_name: str - :param cluster_name: The name of the Kubernetes cluster on which get is called. 
- :type cluster_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ConnectedCluster, or the result of cls(response) - :rtype: ~azure.mgmt.hybridkubernetes.models.ConnectedCluster - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectedCluster"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - - api_version = kwargs.pop('api_version', "2022-05-01-preview") # type: str - - - request = build_get_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - cluster_name=cluster_name, - api_version=api_version, - template_url=self.get.metadata['url'], - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ConnectedCluster', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}"} # type: ignore - - - def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - cluster_name: str, - **kwargs: Any - ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - - api_version = kwargs.pop('api_version', "2022-05-01-preview") # type: str - - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - cluster_name=cluster_name, - api_version=api_version, - template_url=self._delete_initial.metadata['url'], - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200, 202, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}"} # type: ignore - - - @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - cluster_name: str, - **kwargs: Any - ) -> LROPoller[None]: - """Delete a connected cluster. - - Delete a connected cluster, removing the tracked resource in Azure Resource Manager (ARM). - - :param resource_group_name: The name of the resource group. 
The name is case insensitive. - :type resource_group_name: str - :param cluster_name: The name of the Kubernetes cluster on which get is called. - :type cluster_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError - """ - api_version = kwargs.pop('api_version', "2022-05-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._delete_initial( - resource_group_name=resource_group_name, - cluster_name=cluster_name, - api_version=api_version, - cls=lambda x,y,z: x, - **kwargs - ) - kwargs.pop('error_map', None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}"} # type: ignore - - @distributed_trace - def list_cluster_user_credential( - self, - resource_group_name: str, - cluster_name: str, - properties: "_models.ListClusterUserCredentialProperties", - **kwargs: Any - ) -> "_models.CredentialResults": - """Gets cluster user credentials of a connected cluster. - - Gets cluster user credentials of the connected cluster with a specified resource group and - name. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - :type resource_group_name: str - :param cluster_name: The name of the Kubernetes cluster on which get is called. - :type cluster_name: str - :param properties: ListClusterUserCredential properties. 
- :type properties: ~azure.mgmt.hybridkubernetes.models.ListClusterUserCredentialProperties - :keyword callable cls: A custom type or function that will be passed the direct response - :return: CredentialResults, or the result of cls(response) - :rtype: ~azure.mgmt.hybridkubernetes.models.CredentialResults - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.CredentialResults"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - - api_version = kwargs.pop('api_version', "2022-05-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - - _json = self._serialize.body(properties, 'ListClusterUserCredentialProperties') - - request = build_list_cluster_user_credential_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - cluster_name=cluster_name, - api_version=api_version, - content_type=content_type, - json=_json, - template_url=self.list_cluster_user_credential.metadata['url'], - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize('CredentialResults', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - list_cluster_user_credential.metadata = {'url': "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}/listClusterUserCredential"} # type: ignore - - - @distributed_trace - def list_by_resource_group( - self, - resource_group_name: str, - **kwargs: Any - ) -> Iterable["_models.ConnectedClusterList"]: - """Lists all connected clusters. - - API to enumerate registered connected K8s clusters under a Resource Group. - - :param resource_group_name: The name of the resource group. The name is case insensitive. 
- :type resource_group_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ConnectedClusterList or the result of - cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.hybridkubernetes.models.ConnectedClusterList] - :raises: ~azure.core.exceptions.HttpResponseError - """ - api_version = kwargs.pop('api_version', "2022-05-01-preview") # type: str - - cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectedClusterList"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - def prepare_request(next_link=None): - if not next_link: - - request = build_list_by_resource_group_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - api_version=api_version, - template_url=self.list_by_resource_group.metadata['url'], - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - else: - - request = build_list_by_resource_group_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - api_version=api_version, - template_url=next_link, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize("ConnectedClusterList", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - - return ItemPaged( - get_next, extract_data - ) - list_by_resource_group.metadata = {'url': "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters"} # type: ignore - - @distributed_trace - def list_by_subscription( - self, - **kwargs: Any - ) -> Iterable["_models.ConnectedClusterList"]: - """Lists all connected clusters. - - API to enumerate registered connected K8s clusters under a Subscription. 
- - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ConnectedClusterList or the result of - cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.hybridkubernetes.models.ConnectedClusterList] - :raises: ~azure.core.exceptions.HttpResponseError - """ - api_version = kwargs.pop('api_version', "2022-05-01-preview") # type: str - - cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectedClusterList"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - def prepare_request(next_link=None): - if not next_link: - - request = build_list_by_subscription_request( - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=self.list_by_subscription.metadata['url'], - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - else: - - request = build_list_by_subscription_request( - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=next_link, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize("ConnectedClusterList", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - - return ItemPaged( - get_next, extract_data - ) - list_by_subscription.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.Kubernetes/connectedClusters"} # type: ignore diff --git a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/operations/_operations.py b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/operations/_operations.py deleted file mode 100644 index ba86790a390..00000000000 --- a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/operations/_operations.py +++ /dev/null @@ -1,144 +0,0 @@ -# pylint: disable=too-many-lines -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models as _models -from .._vendor import _convert_request -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - -def build_get_request( - **kwargs: Any -) -> HttpRequest: - api_version = kwargs.pop('api_version', "2022-05-01-preview") # type: str - - accept = "application/json" - # Construct URL - _url = kwargs.pop("template_url", "/providers/Microsoft.Kubernetes/operations") - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -class Operations(object): - """Operations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.hybridkubernetes.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = _models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - @distributed_trace - def get( - self, - **kwargs: Any - ) -> Iterable["_models.OperationList"]: - """Lists all of the available API operations for Connected Cluster resource. 
- - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OperationList or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.hybridkubernetes.models.OperationList] - :raises: ~azure.core.exceptions.HttpResponseError - """ - api_version = kwargs.pop('api_version', "2022-05-01-preview") # type: str - - cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationList"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - def prepare_request(next_link=None): - if not next_link: - - request = build_get_request( - api_version=api_version, - template_url=self.get.metadata['url'], - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - else: - - request = build_get_request( - api_version=api_version, - template_url=next_link, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize("OperationList", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - - return ItemPaged( - get_next, extract_data - ) - get.metadata = {'url': "/providers/Microsoft.Kubernetes/operations"} # type: ignore diff --git a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/__init__.py b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/__init__.py similarity index 63% rename from src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/__init__.py rename to src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/__init__.py index f5d7896c3ca..99d5c7c90f2 100644 --- a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/__init__.py +++ b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/__init__.py @@ -10,9 +10,15 @@ from ._version import VERSION __version__ = VERSION -__all__ = ['ConnectedKubernetesClient'] -# `._patch.py` is used for handwritten extensions to the generated code -# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md -from ._patch import patch_sdk -patch_sdk() +try: + from ._patch import __all__ as _patch_all + from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = ["ConnectedKubernetesClient"] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/_configuration.py 
b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/_configuration.py similarity index 60% rename from src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/_configuration.py rename to src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/_configuration.py index 61599a0c3ac..1bcd2bfbe5e 100644 --- a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/_configuration.py +++ b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/_configuration.py @@ -25,23 +25,18 @@ class ConnectedKubernetesClientConfiguration(Configuration): # pylint: disable= Note that all parameters used to create this instance are saved as instance attributes. - :param credential: Credential needed for the client to connect to Azure. + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials.TokenCredential - :param subscription_id: The ID of the target subscription. + :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str - :keyword api_version: Api Version. Default value is "2021-10-01". Note that overriding this - default value may result in unsupported behavior. + :keyword api_version: Api Version. Default value is "2022-10-01-preview". Note that overriding + this default value may result in unsupported behavior. :paramtype api_version: str """ - def __init__( - self, - credential: "TokenCredential", - subscription_id: str, - **kwargs: Any - ) -> None: + def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None: super(ConnectedKubernetesClientConfiguration, self).__init__(**kwargs) - api_version = kwargs.pop('api_version', "2021-10-01") # type: str + api_version = kwargs.pop("api_version", "2022-10-01-preview") # type: str if credential is None: raise ValueError("Parameter 'credential' must not be None.") @@ -51,23 +46,24 @@ def __init__( self.credential = credential self.subscription_id = subscription_id self.api_version = api_version - self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) - kwargs.setdefault('sdk_moniker', 'mgmt-hybridkubernetes/{}'.format(VERSION)) + self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "mgmt-hybridkubernetes/{}".format(VERSION)) self._configure(**kwargs) def _configure( - self, - **kwargs # type: Any + self, **kwargs # type: Any ): # type: (...) 
-> None - self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) - self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) - self.authentication_policy = kwargs.get('authentication_policy') + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") if self.credential and not self.authentication_policy: - self.authentication_policy = ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs) + self.authentication_policy = ARMChallengeAuthenticationPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/_connected_kubernetes_client.py b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/_connected_kubernetes_client.py similarity index 81% rename from src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/_connected_kubernetes_client.py rename to src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/_connected_kubernetes_client.py index 68195eea625..d69464bba2c 100644 --- a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/_connected_kubernetes_client.py +++ b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/_connected_kubernetes_client.py @@ -9,34 +9,34 @@ from copy import deepcopy from typing import Any, TYPE_CHECKING -from msrest import Deserializer, Serializer - from azure.core.rest import HttpRequest, HttpResponse from azure.mgmt.core import ARMPipelineClient from . import models from ._configuration import ConnectedKubernetesClientConfiguration +from ._serialization import Deserializer, Serializer from .operations import ConnectedClusterOperations, Operations if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials import TokenCredential -class ConnectedKubernetesClient: + +class ConnectedKubernetesClient: # pylint: disable=client-accepts-api-version-keyword """Azure Connected Cluster Resource Provider API for adopting any Kubernetes Cluster. 
:ivar connected_cluster: ConnectedClusterOperations operations :vartype connected_cluster: azure.mgmt.hybridkubernetes.operations.ConnectedClusterOperations :ivar operations: Operations operations :vartype operations: azure.mgmt.hybridkubernetes.operations.Operations - :param credential: Credential needed for the client to connect to Azure. + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials.TokenCredential - :param subscription_id: The ID of the target subscription. + :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str :param base_url: Service URL. Default value is "https://management.azure.com". :type base_url: str - :keyword api_version: Api Version. Default value is "2021-10-01". Note that overriding this - default value may result in unsupported behavior. + :keyword api_version: Api Version. Default value is "2022-10-01-preview". Note that overriding + this default value may result in unsupported behavior. :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. @@ -49,22 +49,21 @@ def __init__( base_url: str = "https://management.azure.com", **kwargs: Any ) -> None: - self._config = ConnectedKubernetesClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs) + self._config = ConnectedKubernetesClientConfiguration( + credential=credential, subscription_id=subscription_id, **kwargs + ) self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) self._serialize.client_side_validation = False - self.connected_cluster = ConnectedClusterOperations(self._client, self._config, self._serialize, self._deserialize) + self.connected_cluster = ConnectedClusterOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) - - def _send_request( - self, - request: HttpRequest, - **kwargs: Any - ) -> HttpResponse: + def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse: """Runs the network request through the client's chained policies. >>> from azure.core.rest import HttpRequest @@ -73,7 +72,7 @@ def _send_request( >>> response = client._send_request(request) - For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request :param request: The network request you want to make. Required. :type request: ~azure.core.rest.HttpRequest diff --git a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/_patch.py b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/_patch.py new file mode 100644 index 00000000000..f7dd3251033 --- /dev/null +++ b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. 
+ +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/_serialization.py b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/_serialization.py new file mode 100644 index 00000000000..7c1dedb5133 --- /dev/null +++ b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/_serialization.py @@ -0,0 +1,1970 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +# pylint: skip-file + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +import codecs + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote # type: ignore +import xml.etree.ElementTree as ET + +import isodate + +from typing import Dict, Any, cast, TYPE_CHECKING + +from azure.core.exceptions import DeserializationError, SerializationError, raise_with_traceback + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +if TYPE_CHECKING: + from typing import Optional, Union, AnyStr, IO, Mapping + + +class RawDeserializer: + + # Accept "text" because we're open minded people... + JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text(cls, data, content_type=None): + # type: (Optional[Union[AnyStr, IO]], Optional[str]) -> Any + """Decode data according to content-type. + + Accept a stream of data as well, but will be load at once in memory for now. 
+ + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + """ + if hasattr(data, "read"): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding="utf-8-sig") + else: + # Explain to mypy the correct type. + data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) # nosec + except ET.ParseError: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... + # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. + _LOGGER.critical("Wasn't XML not JSON, failing") + raise_with_traceback(DeserializationError, "XML is invalid") + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) + + @classmethod + def deserialize_from_http_generics(cls, body_bytes, headers): + # type: (Optional[Union[AnyStr, IO]], Mapping) -> Any + """Deserialize from HTTP response. + + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + """ + # Try to use content-type from headers if available + content_type = None + if "content-type" in headers: + content_type = headers["content-type"].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... 
+        else:
+            content_type = "application/json"
+
+        if body_bytes:
+            return cls.deserialize_from_text(body_bytes, content_type)
+        return None
+
+
+try:
+    basestring  # type: ignore
+    unicode_str = unicode  # type: ignore
+except NameError:
+    basestring = str  # type: ignore
+    unicode_str = str  # type: ignore
+
+_LOGGER = logging.getLogger(__name__)
+
+try:
+    _long_type = long  # type: ignore
+except NameError:
+    _long_type = int
+
+
+class UTC(datetime.tzinfo):
+    """Time Zone info for handling UTC"""
+
+    def utcoffset(self, dt):
+        """UTF offset for UTC is 0."""
+        return datetime.timedelta(0)
+
+    def tzname(self, dt):
+        """Timestamp representation."""
+        return "Z"
+
+    def dst(self, dt):
+        """No daylight saving for UTC."""
+        return datetime.timedelta(hours=1)
+
+
+try:
+    from datetime import timezone as _FixedOffset
+except ImportError:  # Python 2.7
+
+    class _FixedOffset(datetime.tzinfo):  # type: ignore
+        """Fixed offset in minutes east from UTC.
+        Copy/pasted from Python doc
+        :param datetime.timedelta offset: offset in timedelta format
+        """
+
+        def __init__(self, offset):
+            self.__offset = offset
+
+        def utcoffset(self, dt):
+            return self.__offset
+
+        def tzname(self, dt):
+            return str(self.__offset.total_seconds() / 3600)
+
+        def __repr__(self):
+            return "<FixedOffset {}>".format(self.tzname(None))
+
+        def dst(self, dt):
+            return datetime.timedelta(0)
+
+        def __getinitargs__(self):
+            return (self.__offset,)
+
+
+try:
+    from datetime import timezone
+
+    TZ_UTC = timezone.utc  # type: ignore
+except ImportError:
+    TZ_UTC = UTC()  # type: ignore
+
+_FLATTEN = re.compile(r"(?<!\\)\.")
+
+
+class Serializer(object):
+    """Request object model serializer."""
+
+    basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
+
+    _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()}
+    days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"}
+    months = {
+        1: "Jan",
+        2: "Feb",
+        3: "Mar",
+        4: "Apr",
+        5: "May",
+        6: "Jun",
+        7: "Jul",
+        8: "Aug",
+        9: "Sep",
+        10: "Oct",
+        11: "Nov",
+        12: "Dec",
+    }
+    validation = {
+        "required": lambda x, y: x is None,
+        "min_length": lambda x, y: len(x) < y,
+        "max_length": lambda x, y: len(x) > y,
+        "minimum": lambda x, y: x < y,
+        "maximum": lambda x, y: x > y,
+        "minimum_ex": lambda x, y: x <= y,
+        "maximum_ex": lambda x, y: x >= y,
+        "min_items": lambda x, y: len(x) < y,
+        "max_items": lambda x, y: len(x) > y,
+        "pattern": lambda x, y: not re.match(y, x, re.UNICODE),
+        "unique": lambda x, y: len(x) != len(set(x)),
+        "multiple": lambda x, y: x % y != 0,
+    }
+
+    def __init__(self, classes=None):
+        self.serialize_type = {
+            "iso-8601": Serializer.serialize_iso,
+            "rfc-1123": Serializer.serialize_rfc,
+            "unix-time": Serializer.serialize_unix,
+            "duration": Serializer.serialize_duration,
+            "date": Serializer.serialize_date,
+            "time": Serializer.serialize_time,
+            "decimal": Serializer.serialize_decimal,
+            "long": Serializer.serialize_long,
+            "bytearray": Serializer.serialize_bytearray,
+            "base64": Serializer.serialize_base64,
+            "object": self.serialize_object,
+            "[]": self.serialize_iter,
+            "{}": self.serialize_dict,
+        }
+        self.dependencies = dict(classes) if classes else {}
+        self.key_transformer = full_restapi_key_transformer
+        self.client_side_validation = True
+
+    def _serialize(self, target_obj, data_type=None, **kwargs):
+        """Serialize data into a string according to type.
+
+        :param target_obj: The data to be serialized.
+        :param str data_type: The type to be serialized from.
+        :rtype: str, dict
+        :raises: SerializationError if serialization fails.
+ """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() + try: + attributes = target_obj._attribute_map + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized.update(target_obj.additional_properties) + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) + continue + if xml_desc.get("text", False): + serialized.text = new_attr + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces. + if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = unicode_str(new_attr) + serialized.append(local_node) + else: # JSON + for k in reversed(keys): + unflattened = {k: new_attr} + new_attr = unflattened + + _new_attr = new_attr + _serialized = serialized + for k in keys: + if k not in _serialized: + _serialized.update(_new_attr) + _new_attr = _new_attr[k] + _serialized = _serialized[k] + except ValueError: + continue + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise_with_traceback(SerializationError, msg, err) + else: + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. 
+ :rtype: dict + :raises: SerializationError if serialization fails. + :raises: ValueError if data is None + """ + + # Just in case this is a dict + internal_data_type = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) + except DeserializationError as err: + raise_with_traceback(SerializationError, "Unable to build a model: " + str(err), err) + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + data = [self.serialize_data(d, internal_data_type, **kwargs) if d is not None else "" for d in data] + if not kwargs.get("skip_quote", False): + data = [quote(str(d), safe="") for d in data] + return str(self.serialize_iter(data, internal_data_type, **kwargs)) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. 
+ :raises: ValueError if data is None + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :param bool required: Whether it's essential that the data not be + empty or None + :raises: AttributeError if required data is None. + :raises: ValueError if data is None + :raises: SerializationError if serialization fails. + """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + elif data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, data.__class__) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." + raise_with_traceback(SerializationError, msg.format(data, data_type), err) + else: + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param data: Object to be serialized. + :param str data_type: Type of object in the iterable. + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + return eval(data_type)(data) # nosec + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param data: Object to be serialized. + :rtype: str + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + else: + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. + + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. 
+ - is_xml bool : If set, serialize as XML + + :param list attr: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param bool required: Whether the objects in the iterable must + not be None or empty. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. + :rtype: list, str + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError: + serialized.append(None) + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :param bool required: Whether the objects in the dictionary must + not be None or empty. + :rtype: dict + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is unicode_str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + elif obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) + return result + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) + + @staticmethod + def serialize_bytearray(attr, **kwargs): + """Serialize bytearray into base-64 string. + + :param attr: Object to be serialized. + :rtype: str + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): + """Serialize str into base-64 string. + + :param attr: Object to be serialized. + :rtype: str + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): + """Serialize Decimal object to float. + + :param attr: Object to be serialized. + :rtype: float + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): + """Serialize long (Py2) or int (Py3). + + :param attr: Object to be serialized. + :rtype: int/long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. + :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): + """Serialize TimeDelta object into ISO-8601 formatted string. + + :param TimeDelta attr: Object to be serialized. 
+ :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises: TypeError if format invalid. + """ + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError: + raise TypeError("RFC1123 object must be valid Datetime object.") + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], + utc.tm_mday, + Serializer.months[utc.tm_mon], + utc.tm_year, + utc.tm_hour, + utc.tm_min, + utc.tm_sec, + ) + + @staticmethod + def serialize_iso(attr, **kwargs): + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises: SerializationError if format invalid. + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") + if microseconds: + microseconds = "." + microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) + return date + microseconds + "Z" + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." + raise_with_traceback(SerializationError, msg, err) + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise_with_traceback(TypeError, msg, err) + + @staticmethod + def serialize_unix(attr, **kwargs): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises: SerializationError if format invalid + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError: + raise TypeError("Unix time object must be valid Datetime object.") + + +def rest_key_extractor(attr, attr_desc, data): + key = attr_desc["key"] + working_data = data + + while "." in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + # https://github.com/Azure/msrest-for-python/issues/197 + return None + key = ".".join(dict_keys[1:]) + + return working_data.get(key) + + +def rest_key_case_insensitive_extractor(attr, attr_desc, data): + key = attr_desc["key"] + working_data = data + + while "." 
in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + # https://github.com/Azure/msrest-for-python/issues/197 + return None + key = ".".join(dict_keys[1:]) + + if working_data: + return attribute_key_case_insensitive_extractor(key, None, working_data) + + +def last_rest_key_extractor(attr, attr_desc, data): + """Extract the attribute in "data" based on the last part of the JSON path key.""" + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_extractor(dict_keys[-1], None, data) + + +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): + """Extract the attribute in "data" based on the last part of the JSON path key. + + This is the case insensitive version of "last_rest_key_extractor" + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) + + +def attribute_key_extractor(attr, _, data): + return data.get(attr) + + +def attribute_key_case_insensitive_extractor(attr, _, data): + found_key = None + lower_attr = attr.lower() + for key in data: + if lower_attr == key.lower(): + found_key = key + break + + return data.get(found_key) + + +def _extract_name_from_internal_type(internal_type): + """Given an internal type XML description, extract correct XML name with namespace. + + :param dict internal_type: An model type + :rtype: tuple + :returns: A tuple XML name + namespace dict + """ + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + xml_name = internal_type_xml_map.get("name", internal_type.__name__) + xml_ns = internal_type_xml_map.get("ns", None) + if xml_ns: + xml_name = "{}{}".format(xml_ns, xml_name) + return xml_name + + +def xml_key_extractor(attr, attr_desc, data): + if isinstance(data, dict): + return None + + # Test if this model is XML ready first + if not isinstance(data, ET.Element): + return None + + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + + # Look for a children + is_iter_type = attr_desc["type"].startswith("[") + is_wrapped = xml_desc.get("wrapped", False) + internal_type = attr_desc.get("internalType", None) + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + + # Integrate namespace if necessary + xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) + if xml_ns: + xml_name = "{}{}".format(xml_ns, xml_name) + + # If it's an attribute, that's simple + if xml_desc.get("attr", False): + return data.get(xml_name) + + # If it's x-ms-text, that's simple too + if xml_desc.get("text", False): + return data.text + + # Scenario where I take the local name: + # - Wrapped node + # - Internal type is an enum (considered basic types) + # - Internal type has no XML/Name node + if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): + children = data.findall(xml_name) + # If internal type has a local name and it's not a list, I use that name + elif not is_iter_type and internal_type and "name" in internal_type_xml_map: + xml_name = _extract_name_from_internal_type(internal_type) + children = data.findall(xml_name) + # That's an array + else: + if 
internal_type: # Complex type, ignore itemsName and use the complex type name + items_name = _extract_name_from_internal_type(internal_type) + else: + items_name = xml_desc.get("itemsName", xml_name) + children = data.findall(items_name) + + if len(children) == 0: + if is_iter_type: + if is_wrapped: + return None # is_wrapped no node, we want None + else: + return [] # not wrapped, assume empty list + return None # Assume it's not there, maybe an optional node. + + # If is_iter_type and not wrapped, return all found children + if is_iter_type: + if not is_wrapped: + return children + else: # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name + ) + ) + return list(children[0]) # Might be empty list and that's ok. + + # Here it's not a itertype, we should have found one element only or empty + if len(children) > 1: + raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) + return children[0] + + +class Deserializer(object): + """Response object model deserializer. + + :param dict classes: Class type dictionary for deserializing complex types. + :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. + """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes=None): + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. + :param str content_type: Swagger "produces" if available. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): + """Call the deserializer on a model. 
+ + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, basestring): + return self.deserialize_data(data, response) + elif isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None: + return data + try: + attributes = response._attribute_map + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... + if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name + raise_with_traceback(DeserializationError, msg, err) + else: + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. 
+ + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deseralize. + """ + if target is None: + return None, None + + if isinstance(target, basestring): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deseralize. + :param str content_type: Swagger "produces" if available. + """ + try: + return self(target_obj, data, content_type=content_type) + except: + _LOGGER.debug( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param raw_data: Data to be processed. + :param content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param response: The response model class. + :param d_attrs: The deserialized response attributes. 
+ """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [k for k, v in response._validation.items() if v.get("readonly")] + const = [k for k, v in response._validation.items() if v.get("constant")] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) + raise DeserializationError(msg + str(err)) + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) + + def deserialize_data(self, data, data_type): + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise_with_traceback(DeserializationError, msg, err) + else: + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. + :param str dict_type: The object type of the items in the dictionary. 
+ :rtype: dict + """ + if isinstance(attr, list): + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :rtype: dict + :raises: TypeError if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, basestring): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + else: + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. + + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :rtype: str, int, float or bool + :raises: TypeError if string format is not valid. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + else: + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + elif isinstance(attr, basestring): + if attr.lower() in ["true", "1"]: + return True + elif attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + return eval(data_type)(attr) # nosec + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): + return data + except NameError: + return str(data) + else: + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. 
If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + # https://github.com/Azure/azure-rest-api-specs/issues/141 + try: + return list(enum_obj.__members__.values())[data] + except IndexError: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) + attr = attr + padding + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :rtype: Decimal + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(attr) + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise_with_traceback(DeserializationError, msg, err) + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :rtype: long or int + :raises: ValueError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. + :rtype: TimeDelta + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + duration = isodate.parse_duration(attr) + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize duration object." + raise_with_traceback(DeserializationError, msg, err) + else: + return duration + + @staticmethod + def deserialize_date(attr): + """Deserialize ISO-8601 formatted string into Date object. + + :param str attr: response string to be deserialized. + :rtype: Date + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + # This must NOT use defaultmonth/defaultday. 
Using None ensure this raises an exception. + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) + + @staticmethod + def deserialize_time(attr): + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :rtype: datetime.time + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + return isodate.parse_time(attr) + + @staticmethod + def deserialize_rfc(attr): + """Deserialize RFC-1123 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: Datetime + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + parsed_date = email.utils.parsedate_tz(attr) + date_obj = datetime.datetime( + *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + ) + if not date_obj.tzinfo: + date_obj = date_obj.astimezone(tz=TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to rfc datetime object." + raise_with_traceback(DeserializationError, msg, err) + else: + return date_obj + + @staticmethod + def deserialize_iso(attr): + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: Datetime + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + attr = attr.upper() + match = Deserializer.valid_date.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize datetime object." + raise_with_traceback(DeserializationError, msg, err) + else: + return date_obj + + @staticmethod + def deserialize_unix(attr): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param int attr: Object to be serialized. + :rtype: Datetime + :raises: DeserializationError if format invalid + """ + if isinstance(attr, ET.Element): + attr = int(attr.text) + try: + date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to unix datetime object." 
+ raise_with_traceback(DeserializationError, msg, err) + else: + return date_obj diff --git a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/_vendor.py b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/_vendor.py similarity index 89% rename from src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/_vendor.py rename to src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/_vendor.py index 138f663c53a..9aad73fc743 100644 --- a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/_vendor.py +++ b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/_vendor.py @@ -7,6 +7,7 @@ from azure.core.pipeline.transport import HttpRequest + def _convert_request(request, files=None): data = request.content if not files else None request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) @@ -14,6 +15,7 @@ def _convert_request(request, files=None): request.set_formdata_body(files) return request + def _format_url_section(template, **kwargs): components = template.split("/") while components: @@ -21,7 +23,5 @@ def _format_url_section(template, **kwargs): return template.format(**kwargs) except KeyError as key: formatted_components = template.split("/") - components = [ - c for c in formatted_components if "{}".format(key.args[0]) not in c - ] + components = [c for c in formatted_components if "{}".format(key.args[0]) not in c] template = "/".join(components) diff --git a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/_version.py b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/_version.py similarity index 100% rename from src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/_version.py rename to src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/_version.py diff --git a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/models/__init__.py b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/models/__init__.py new file mode 100644 index 00000000000..a409e5ff06a --- /dev/null +++ b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/models/__init__.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from ._models_py3 import ConnectedCluster +from ._models_py3 import ConnectedClusterIdentity +from ._models_py3 import ConnectedClusterList +from ._models_py3 import ConnectedClusterPatch +from ._models_py3 import CredentialResult +from ._models_py3 import CredentialResults +from ._models_py3 import ErrorAdditionalInfo +from ._models_py3 import ErrorDetail +from ._models_py3 import ErrorResponse +from ._models_py3 import HybridConnectionConfig +from ._models_py3 import ListClusterUserCredentialProperties +from ._models_py3 import Operation +from ._models_py3 import OperationDisplay +from ._models_py3 import OperationList +from ._models_py3 import Resource +from ._models_py3 import SystemData +from ._models_py3 import TrackedResource + +from ._connected_kubernetes_client_enums import AuthenticationMethod +from ._connected_kubernetes_client_enums import AzureHybridBenefit +from ._connected_kubernetes_client_enums import ConnectivityStatus +from ._connected_kubernetes_client_enums import CreatedByType +from ._connected_kubernetes_client_enums import LastModifiedByType +from ._connected_kubernetes_client_enums import PrivateLinkState +from ._connected_kubernetes_client_enums import ProvisioningState +from ._connected_kubernetes_client_enums import ResourceIdentityType +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "ConnectedCluster", + "ConnectedClusterIdentity", + "ConnectedClusterList", + "ConnectedClusterPatch", + "CredentialResult", + "CredentialResults", + "ErrorAdditionalInfo", + "ErrorDetail", + "ErrorResponse", + "HybridConnectionConfig", + "ListClusterUserCredentialProperties", + "Operation", + "OperationDisplay", + "OperationList", + "Resource", + "SystemData", + "TrackedResource", + "AuthenticationMethod", + "AzureHybridBenefit", + "ConnectivityStatus", + "CreatedByType", + "LastModifiedByType", + "PrivateLinkState", + "ProvisioningState", + "ResourceIdentityType", +] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/models/_connected_kubernetes_client_enums.py b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/models/_connected_kubernetes_client_enums.py similarity index 61% rename from src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/models/_connected_kubernetes_client_enums.py rename to src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/models/_connected_kubernetes_client_enums.py index 143c472449b..795716da16d 100644 --- a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/models/_connected_kubernetes_client_enums.py +++ b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/models/_connected_kubernetes_client_enums.py @@ -7,54 +7,60 @@ # -------------------------------------------------------------------------- from enum import Enum -from six import with_metaclass from azure.core import CaseInsensitiveEnumMeta -class AuthenticationMethod(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The mode of client authentication. 
- """ +class AuthenticationMethod(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The mode of client authentication.""" TOKEN = "Token" AAD = "AAD" -class ConnectivityStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Represents the connectivity status of the connected cluster. - """ + +class AzureHybridBenefit(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Indicates whether Azure Hybrid Benefit is opted in.""" + + TRUE = "True" + FALSE = "False" + NOT_APPLICABLE = "NotApplicable" + + +class ConnectivityStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Represents the connectivity status of the connected cluster.""" CONNECTING = "Connecting" CONNECTED = "Connected" OFFLINE = "Offline" EXPIRED = "Expired" -class CreatedByType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The type of identity that created the resource. - """ + +class CreatedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of identity that created the resource.""" USER = "User" APPLICATION = "Application" MANAGED_IDENTITY = "ManagedIdentity" KEY = "Key" -class LastModifiedByType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The type of identity that last modified the resource. - """ + +class LastModifiedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of identity that last modified the resource.""" USER = "User" APPLICATION = "Application" MANAGED_IDENTITY = "ManagedIdentity" KEY = "Key" -class PrivateLinkState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Property which describes the state of private link on a connected cluster resource. - """ + +class PrivateLinkState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Property which describes the state of private link on a connected cluster resource.""" ENABLED = "Enabled" DISABLED = "Disabled" -class ProvisioningState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The current deployment state of connectedClusters. - """ + +class ProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The current deployment state of connectedClusters.""" SUCCEEDED = "Succeeded" FAILED = "Failed" @@ -64,7 +70,8 @@ class ProvisioningState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): DELETING = "Deleting" ACCEPTED = "Accepted" -class ResourceIdentityType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + +class ResourceIdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The type of identity used for the connected cluster. The type 'SystemAssigned, includes a system created identity. The type 'None' means no identity is assigned to the connected cluster. diff --git a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/models/_models_py3.py b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/models/_models_py3.py similarity index 53% rename from src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/models/_models_py3.py rename to src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/models/_models_py3.py index 359ec398bf6..c6bc8f72b1c 100644 --- a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/models/_models_py3.py +++ b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/models/_models_py3.py @@ -1,4 +1,5 @@ # coding=utf-8 +# pylint: disable=too-many-lines # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
See License.txt in the project root for license information. @@ -7,15 +8,16 @@ # -------------------------------------------------------------------------- import datetime -from typing import Any, Dict, List, Optional, Union +from typing import Dict, List, Optional, TYPE_CHECKING, Union -from azure.core.exceptions import HttpResponseError -import msrest.serialization +from .. import _serialization -from ._connected_kubernetes_client_enums import * +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from .. import models as _models -class Resource(msrest.serialization.Model): +class Resource(_serialization.Model): """Common fields that are returned in the response for all Azure Resource Manager resources. Variables are only populated by the server, and will be ignored when sending a request. @@ -31,24 +33,20 @@ class Resource(msrest.serialization.Model): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(Resource, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.id = None self.name = None self.type = None @@ -69,46 +67,40 @@ class TrackedResource(Resource): :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts". :vartype type: str - :ivar tags: A set of tags. Resource tags. + :ivar tags: Resource tags. :vartype tags: dict[str, str] - :ivar location: Required. The geo-location where the resource lives. + :ivar location: The geo-location where the resource lives. Required. :vartype location: str """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'location': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "location": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, } - def __init__( - self, - *, - location: str, - tags: Optional[Dict[str, str]] = None, - **kwargs - ): + def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs): """ - :keyword tags: A set of tags. Resource tags. + :keyword tags: Resource tags. :paramtype tags: dict[str, str] - :keyword location: Required. The geo-location where the resource lives. + :keyword location: The geo-location where the resource lives. Required. :paramtype location: str """ - super(TrackedResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.tags = tags self.location = location -class ConnectedCluster(TrackedResource): +class ConnectedCluster(TrackedResource): # pylint: disable=too-many-instance-attributes """Represents a connected cluster. 
Variables are only populated by the server, and will be ignored when sending a request. @@ -123,16 +115,16 @@ class ConnectedCluster(TrackedResource): :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts". :vartype type: str - :ivar tags: A set of tags. Resource tags. + :ivar tags: Resource tags. :vartype tags: dict[str, str] - :ivar location: Required. The geo-location where the resource lives. + :ivar location: The geo-location where the resource lives. Required. :vartype location: str - :ivar identity: Required. The identity of the connected cluster. + :ivar identity: The identity of the connected cluster. Required. :vartype identity: ~azure.mgmt.hybridkubernetes.models.ConnectedClusterIdentity :ivar system_data: Metadata pertaining to creation and last modification of the resource. :vartype system_data: ~azure.mgmt.hybridkubernetes.models.SystemData - :ivar agent_public_key_certificate: Required. Base64 encoded public certificate used by the - agent to do the initial handshake to the backend services in Azure. + :ivar agent_public_key_certificate: Base64 encoded public certificate used by the agent to do + the initial handshake to the backend services in Azure. Required. :vartype agent_public_key_certificate: str :ivar kubernetes_version: The Kubernetes version of the connected cluster resource. :vartype kubernetes_version: str @@ -142,11 +134,13 @@ class ConnectedCluster(TrackedResource): :vartype total_core_count: int :ivar agent_version: Version of the agent running on the connected cluster resource. :vartype agent_version: str - :ivar provisioning_state: Provisioning state of the connected cluster resource. Possible values - include: "Succeeded", "Failed", "Canceled", "Provisioning", "Updating", "Deleting", "Accepted". + :ivar provisioning_state: Provisioning state of the connected cluster resource. Known values + are: "Succeeded", "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". :vartype provisioning_state: str or ~azure.mgmt.hybridkubernetes.models.ProvisioningState :ivar distribution: The Kubernetes distribution running on this connected cluster. :vartype distribution: str + :ivar distribution_version: The Kubernetes distribution version on this connected cluster. + :vartype distribution_version: str :ivar infrastructure: The infrastructure on which the Kubernetes cluster represented by this connected cluster is running on. :vartype infrastructure: str @@ -158,100 +152,118 @@ class ConnectedCluster(TrackedResource): :ivar last_connectivity_time: Time representing the last instance when heart beat was received from the cluster. :vartype last_connectivity_time: ~datetime.datetime - :ivar connectivity_status: Represents the connectivity status of the connected cluster. - Possible values include: "Connecting", "Connected", "Offline", "Expired". + :ivar connectivity_status: Represents the connectivity status of the connected cluster. Known + values are: "Connecting", "Connected", "Offline", and "Expired". :vartype connectivity_status: str or ~azure.mgmt.hybridkubernetes.models.ConnectivityStatus :ivar private_link_state: Property which describes the state of private link on a connected - cluster resource. Possible values include: "Enabled", "Disabled". Default value: "Disabled". + cluster resource. Known values are: "Enabled" and "Disabled". 
:vartype private_link_state: str or ~azure.mgmt.hybridkubernetes.models.PrivateLinkState :ivar private_link_scope_resource_id: The resource id of the private link scope this connected cluster is assigned to, if any. :vartype private_link_scope_resource_id: str + :ivar azure_hybrid_benefit: Indicates whether Azure Hybrid Benefit is opted in. Known values + are: "True", "False", and "NotApplicable". + :vartype azure_hybrid_benefit: str or ~azure.mgmt.hybridkubernetes.models.AzureHybridBenefit + :ivar miscellaneous_properties: More properties related to the Connected Cluster. + :vartype miscellaneous_properties: dict[str, str] """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'location': {'required': True}, - 'identity': {'required': True}, - 'system_data': {'readonly': True}, - 'agent_public_key_certificate': {'required': True}, - 'kubernetes_version': {'readonly': True}, - 'total_node_count': {'readonly': True}, - 'total_core_count': {'readonly': True}, - 'agent_version': {'readonly': True}, - 'offering': {'readonly': True}, - 'managed_identity_certificate_expiration_time': {'readonly': True}, - 'last_connectivity_time': {'readonly': True}, - 'connectivity_status': {'readonly': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "location": {"required": True}, + "identity": {"required": True}, + "system_data": {"readonly": True}, + "agent_public_key_certificate": {"required": True}, + "kubernetes_version": {"readonly": True}, + "total_node_count": {"readonly": True}, + "total_core_count": {"readonly": True}, + "agent_version": {"readonly": True}, + "offering": {"readonly": True}, + "managed_identity_certificate_expiration_time": {"readonly": True}, + "last_connectivity_time": {"readonly": True}, + "connectivity_status": {"readonly": True}, + "miscellaneous_properties": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ConnectedClusterIdentity'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'agent_public_key_certificate': {'key': 'properties.agentPublicKeyCertificate', 'type': 'str'}, - 'kubernetes_version': {'key': 'properties.kubernetesVersion', 'type': 'str'}, - 'total_node_count': {'key': 'properties.totalNodeCount', 'type': 'int'}, - 'total_core_count': {'key': 'properties.totalCoreCount', 'type': 'int'}, - 'agent_version': {'key': 'properties.agentVersion', 'type': 'str'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'distribution': {'key': 'properties.distribution', 'type': 'str'}, - 'infrastructure': {'key': 'properties.infrastructure', 'type': 'str'}, - 'offering': {'key': 'properties.offering', 'type': 'str'}, - 'managed_identity_certificate_expiration_time': {'key': 'properties.managedIdentityCertificateExpirationTime', 'type': 'iso-8601'}, - 'last_connectivity_time': {'key': 'properties.lastConnectivityTime', 'type': 'iso-8601'}, - 'connectivity_status': {'key': 'properties.connectivityStatus', 'type': 'str'}, - 'private_link_state': {'key': 'properties.privateLinkState', 'type': 'str'}, - 'private_link_scope_resource_id': {'key': 'properties.privateLinkScopeResourceId', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": 
{"key": "type", "type": "str"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ConnectedClusterIdentity"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "agent_public_key_certificate": {"key": "properties.agentPublicKeyCertificate", "type": "str"}, + "kubernetes_version": {"key": "properties.kubernetesVersion", "type": "str"}, + "total_node_count": {"key": "properties.totalNodeCount", "type": "int"}, + "total_core_count": {"key": "properties.totalCoreCount", "type": "int"}, + "agent_version": {"key": "properties.agentVersion", "type": "str"}, + "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, + "distribution": {"key": "properties.distribution", "type": "str"}, + "distribution_version": {"key": "properties.distributionVersion", "type": "str"}, + "infrastructure": {"key": "properties.infrastructure", "type": "str"}, + "offering": {"key": "properties.offering", "type": "str"}, + "managed_identity_certificate_expiration_time": { + "key": "properties.managedIdentityCertificateExpirationTime", + "type": "iso-8601", + }, + "last_connectivity_time": {"key": "properties.lastConnectivityTime", "type": "iso-8601"}, + "connectivity_status": {"key": "properties.connectivityStatus", "type": "str"}, + "private_link_state": {"key": "properties.privateLinkState", "type": "str"}, + "private_link_scope_resource_id": {"key": "properties.privateLinkScopeResourceId", "type": "str"}, + "azure_hybrid_benefit": {"key": "properties.azureHybridBenefit", "type": "str"}, + "miscellaneous_properties": {"key": "properties.miscellaneousProperties", "type": "{str}"}, } - def __init__( + def __init__( # pylint: disable=too-many-locals self, *, location: str, - identity: "ConnectedClusterIdentity", + identity: "_models.ConnectedClusterIdentity", agent_public_key_certificate: str, tags: Optional[Dict[str, str]] = None, - provisioning_state: Optional[Union[str, "ProvisioningState"]] = None, + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = None, distribution: Optional[str] = None, + distribution_version: Optional[str] = None, infrastructure: Optional[str] = None, - private_link_state: Optional[Union[str, "PrivateLinkState"]] = "Disabled", + private_link_state: Union[str, "_models.PrivateLinkState"] = "Disabled", private_link_scope_resource_id: Optional[str] = None, + azure_hybrid_benefit: Optional[Union[str, "_models.AzureHybridBenefit"]] = None, **kwargs ): """ - :keyword tags: A set of tags. Resource tags. + :keyword tags: Resource tags. :paramtype tags: dict[str, str] - :keyword location: Required. The geo-location where the resource lives. + :keyword location: The geo-location where the resource lives. Required. :paramtype location: str - :keyword identity: Required. The identity of the connected cluster. + :keyword identity: The identity of the connected cluster. Required. :paramtype identity: ~azure.mgmt.hybridkubernetes.models.ConnectedClusterIdentity - :keyword agent_public_key_certificate: Required. Base64 encoded public certificate used by the - agent to do the initial handshake to the backend services in Azure. + :keyword agent_public_key_certificate: Base64 encoded public certificate used by the agent to + do the initial handshake to the backend services in Azure. Required. :paramtype agent_public_key_certificate: str - :keyword provisioning_state: Provisioning state of the connected cluster resource. 
Possible - values include: "Succeeded", "Failed", "Canceled", "Provisioning", "Updating", "Deleting", - "Accepted". + :keyword provisioning_state: Provisioning state of the connected cluster resource. Known values + are: "Succeeded", "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". :paramtype provisioning_state: str or ~azure.mgmt.hybridkubernetes.models.ProvisioningState :keyword distribution: The Kubernetes distribution running on this connected cluster. :paramtype distribution: str + :keyword distribution_version: The Kubernetes distribution version on this connected cluster. + :paramtype distribution_version: str :keyword infrastructure: The infrastructure on which the Kubernetes cluster represented by this connected cluster is running on. :paramtype infrastructure: str :keyword private_link_state: Property which describes the state of private link on a connected - cluster resource. Possible values include: "Enabled", "Disabled". Default value: "Disabled". + cluster resource. Known values are: "Enabled" and "Disabled". :paramtype private_link_state: str or ~azure.mgmt.hybridkubernetes.models.PrivateLinkState :keyword private_link_scope_resource_id: The resource id of the private link scope this connected cluster is assigned to, if any. :paramtype private_link_scope_resource_id: str + :keyword azure_hybrid_benefit: Indicates whether Azure Hybrid Benefit is opted in. Known values + are: "True", "False", and "NotApplicable". + :paramtype azure_hybrid_benefit: str or ~azure.mgmt.hybridkubernetes.models.AzureHybridBenefit """ - super(ConnectedCluster, self).__init__(tags=tags, location=location, **kwargs) + super().__init__(tags=tags, location=location, **kwargs) self.identity = identity self.system_data = None self.agent_public_key_certificate = agent_public_key_certificate @@ -261,6 +273,7 @@ def __init__( self.agent_version = None self.provisioning_state = provisioning_state self.distribution = distribution + self.distribution_version = distribution_version self.infrastructure = infrastructure self.offering = None self.managed_identity_certificate_expiration_time = None @@ -268,9 +281,11 @@ def __init__( self.connectivity_status = None self.private_link_state = private_link_state self.private_link_scope_resource_id = private_link_scope_resource_id + self.azure_hybrid_benefit = azure_hybrid_benefit + self.miscellaneous_properties = None -class ConnectedClusterIdentity(msrest.serialization.Model): +class ConnectedClusterIdentity(_serialization.Model): """Identity for the connected cluster. Variables are only populated by the server, and will be ignored when sending a request. @@ -283,45 +298,38 @@ class ConnectedClusterIdentity(msrest.serialization.Model): :ivar tenant_id: The tenant id associated with the connected cluster. This property will only be provided for a system assigned identity. :vartype tenant_id: str - :ivar type: Required. The type of identity used for the connected cluster. The type - 'SystemAssigned, includes a system created identity. The type 'None' means no identity is - assigned to the connected cluster. Possible values include: "None", "SystemAssigned". Default - value: "SystemAssigned". + :ivar type: The type of identity used for the connected cluster. The type 'SystemAssigned, + includes a system created identity. The type 'None' means no identity is assigned to the + connected cluster. Known values are: "None" and "SystemAssigned". 
:vartype type: str or ~azure.mgmt.hybridkubernetes.models.ResourceIdentityType """ _validation = { - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, - 'type': {'required': True}, + "principal_id": {"readonly": True}, + "tenant_id": {"readonly": True}, + "type": {"required": True}, } _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, + "principal_id": {"key": "principalId", "type": "str"}, + "tenant_id": {"key": "tenantId", "type": "str"}, + "type": {"key": "type", "type": "str"}, } - def __init__( - self, - *, - type: Union[str, "ResourceIdentityType"] = "SystemAssigned", - **kwargs - ): + def __init__(self, *, type: Union[str, "_models.ResourceIdentityType"] = "SystemAssigned", **kwargs): """ - :keyword type: Required. The type of identity used for the connected cluster. The type - 'SystemAssigned, includes a system created identity. The type 'None' means no identity is - assigned to the connected cluster. Possible values include: "None", "SystemAssigned". Default - value: "SystemAssigned". + :keyword type: The type of identity used for the connected cluster. The type 'SystemAssigned, + includes a system created identity. The type 'None' means no identity is assigned to the + connected cluster. Known values are: "None" and "SystemAssigned". :paramtype type: str or ~azure.mgmt.hybridkubernetes.models.ResourceIdentityType """ - super(ConnectedClusterIdentity, self).__init__(**kwargs) + super().__init__(**kwargs) self.principal_id = None self.tenant_id = None self.type = type -class ConnectedClusterList(msrest.serialization.Model): +class ConnectedClusterList(_serialization.Model): """The paginated list of connected Clusters. :ivar value: The list of connected clusters. @@ -331,16 +339,12 @@ class ConnectedClusterList(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': '[ConnectedCluster]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[ConnectedCluster]"}, + "next_link": {"key": "nextLink", "type": "str"}, } def __init__( - self, - *, - value: Optional[List["ConnectedCluster"]] = None, - next_link: Optional[str] = None, - **kwargs + self, *, value: Optional[List["_models.ConnectedCluster"]] = None, next_link: Optional[str] = None, **kwargs ): """ :keyword value: The list of connected clusters. @@ -348,46 +352,62 @@ def __init__( :keyword next_link: The link to fetch the next page of connected cluster. :paramtype next_link: str """ - super(ConnectedClusterList, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.next_link = next_link -class ConnectedClusterPatch(msrest.serialization.Model): +class ConnectedClusterPatch(_serialization.Model): """Object containing updates for patch operations. - :ivar tags: A set of tags. Resource tags. + :ivar tags: Resource tags. :vartype tags: dict[str, str] - :ivar properties: Describes the connected cluster resource properties that can be updated - during PATCH operation. - :vartype properties: any + :ivar distribution: Represents the distribution of the connected cluster. + :vartype distribution: str + :ivar distribution_version: Represents the Kubernetes distribution version on this connected + cluster. + :vartype distribution_version: str + :ivar azure_hybrid_benefit: Indicates whether Azure Hybrid Benefit is opted in. Known values + are: "True", "False", and "NotApplicable". 
+ :vartype azure_hybrid_benefit: str or ~azure.mgmt.hybridkubernetes.models.AzureHybridBenefit """ _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'properties': {'key': 'properties', 'type': 'object'}, + "tags": {"key": "tags", "type": "{str}"}, + "distribution": {"key": "properties.distribution", "type": "str"}, + "distribution_version": {"key": "properties.distributionVersion", "type": "str"}, + "azure_hybrid_benefit": {"key": "properties.azureHybridBenefit", "type": "str"}, } def __init__( self, *, tags: Optional[Dict[str, str]] = None, - properties: Optional[Any] = None, + distribution: Optional[str] = None, + distribution_version: Optional[str] = None, + azure_hybrid_benefit: Optional[Union[str, "_models.AzureHybridBenefit"]] = None, **kwargs ): """ - :keyword tags: A set of tags. Resource tags. + :keyword tags: Resource tags. :paramtype tags: dict[str, str] - :keyword properties: Describes the connected cluster resource properties that can be updated - during PATCH operation. - :paramtype properties: any - """ - super(ConnectedClusterPatch, self).__init__(**kwargs) + :keyword distribution: Represents the distribution of the connected cluster. + :paramtype distribution: str + :keyword distribution_version: Represents the Kubernetes distribution version on this connected + cluster. + :paramtype distribution_version: str + :keyword azure_hybrid_benefit: Indicates whether Azure Hybrid Benefit is opted in. Known values + are: "True", "False", and "NotApplicable". + :paramtype azure_hybrid_benefit: str or ~azure.mgmt.hybridkubernetes.models.AzureHybridBenefit + """ + super().__init__(**kwargs) self.tags = tags - self.properties = properties + self.distribution = distribution + self.distribution_version = distribution_version + self.azure_hybrid_benefit = azure_hybrid_benefit -class CredentialResult(msrest.serialization.Model): +class CredentialResult(_serialization.Model): """The credential result response. Variables are only populated by the server, and will be ignored when sending a request. @@ -395,31 +415,27 @@ class CredentialResult(msrest.serialization.Model): :ivar name: The name of the credential. :vartype name: str :ivar value: Base64-encoded Kubernetes configuration file. - :vartype value: bytearray + :vartype value: bytes """ _validation = { - 'name': {'readonly': True}, - 'value': {'readonly': True}, + "name": {"readonly": True}, + "value": {"readonly": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'bytearray'}, + "name": {"key": "name", "type": "str"}, + "value": {"key": "value", "type": "bytearray"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(CredentialResult, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.name = None self.value = None -class CredentialResults(msrest.serialization.Model): +class CredentialResults(_serialization.Model): """The list of credential result response. Variables are only populated by the server, and will be ignored when sending a request. 
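The reworked ConnectedClusterPatch above replaces the old untyped `properties` bag with explicit fields. A minimal sketch of how a patch body might now be built (the import path is taken from the docstrings above; the concrete values are purely illustrative):

    from azure.mgmt.hybridkubernetes.models import ConnectedClusterPatch

    # Only the fields being changed need to be supplied; everything else is omitted.
    patch = ConnectedClusterPatch(
        tags={"env": "dev"},
        distribution="k3s",                    # illustrative distribution name
        distribution_version="1.27",           # illustrative version
        azure_hybrid_benefit="NotApplicable",  # one of the AzureHybridBenefit values above
    )
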
@@ -432,27 +448,23 @@ class CredentialResults(msrest.serialization.Model): """ _validation = { - 'hybrid_connection_config': {'readonly': True}, - 'kubeconfigs': {'readonly': True}, + "hybrid_connection_config": {"readonly": True}, + "kubeconfigs": {"readonly": True}, } _attribute_map = { - 'hybrid_connection_config': {'key': 'hybridConnectionConfig', 'type': 'HybridConnectionConfig'}, - 'kubeconfigs': {'key': 'kubeconfigs', 'type': '[CredentialResult]'}, + "hybrid_connection_config": {"key": "hybridConnectionConfig", "type": "HybridConnectionConfig"}, + "kubeconfigs": {"key": "kubeconfigs", "type": "[CredentialResult]"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(CredentialResults, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.hybrid_connection_config = None self.kubeconfigs = None -class ErrorAdditionalInfo(msrest.serialization.Model): +class ErrorAdditionalInfo(_serialization.Model): """The resource management error additional info. Variables are only populated by the server, and will be ignored when sending a request. @@ -460,31 +472,27 @@ class ErrorAdditionalInfo(msrest.serialization.Model): :ivar type: The additional info type. :vartype type: str :ivar info: The additional info. - :vartype info: any + :vartype info: JSON """ _validation = { - 'type': {'readonly': True}, - 'info': {'readonly': True}, + "type": {"readonly": True}, + "info": {"readonly": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'info': {'key': 'info', 'type': 'object'}, + "type": {"key": "type", "type": "str"}, + "info": {"key": "info", "type": "object"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(ErrorAdditionalInfo, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.type = None self.info = None -class ErrorDetail(msrest.serialization.Model): +class ErrorDetail(_serialization.Model): """The error detail. Variables are only populated by the server, and will be ignored when sending a request. 
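CredentialResult.value is documented above as the kubeconfig content; the wire value is base64, but the deserializer hands it back as decoded bytes. Assuming a CredentialResults object obtained from the cluster-user-credential operation (whose request builder appears later in this patch), it could be unpacked roughly like this; all variable names here are placeholders:

    # credential_results is assumed to be a CredentialResults instance.
    for cred in credential_results.kubeconfigs or []:
        with open(f"kubeconfig-{cred.name}", "wb") as handle:
            handle.write(cred.value)  # already-decoded bytes/bytearray, not base64 text
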
@@ -502,28 +510,24 @@ class ErrorDetail(msrest.serialization.Model): """ _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, - 'target': {'readonly': True}, - 'details': {'readonly': True}, - 'additional_info': {'readonly': True}, + "code": {"readonly": True}, + "message": {"readonly": True}, + "target": {"readonly": True}, + "details": {"readonly": True}, + "additional_info": {"readonly": True}, } _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'target': {'key': 'target', 'type': 'str'}, - 'details': {'key': 'details', 'type': '[ErrorDetail]'}, - 'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'}, + "code": {"key": "code", "type": "str"}, + "message": {"key": "message", "type": "str"}, + "target": {"key": "target", "type": "str"}, + "details": {"key": "details", "type": "[ErrorDetail]"}, + "additional_info": {"key": "additionalInfo", "type": "[ErrorAdditionalInfo]"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(ErrorDetail, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.code = None self.message = None self.target = None @@ -531,7 +535,7 @@ def __init__( self.additional_info = None -class ErrorResponse(msrest.serialization.Model): +class ErrorResponse(_serialization.Model): """Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.). :ivar error: The error object. @@ -539,30 +543,25 @@ class ErrorResponse(msrest.serialization.Model): """ _attribute_map = { - 'error': {'key': 'error', 'type': 'ErrorDetail'}, + "error": {"key": "error", "type": "ErrorDetail"}, } - def __init__( - self, - *, - error: Optional["ErrorDetail"] = None, - **kwargs - ): + def __init__(self, *, error: Optional["_models.ErrorDetail"] = None, **kwargs): """ :keyword error: The error object. :paramtype error: ~azure.mgmt.hybridkubernetes.models.ErrorDetail """ - super(ErrorResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.error = error -class HybridConnectionConfig(msrest.serialization.Model): +class HybridConnectionConfig(_serialization.Model): """Contains the REP (rendezvous endpoint) and “Sender” access token. Variables are only populated by the server, and will be ignored when sending a request. :ivar expiration_time: Timestamp when this token will be expired. - :vartype expiration_time: long + :vartype expiration_time: int :ivar hybrid_connection_name: Name of the connection. :vartype hybrid_connection_name: str :ivar relay: Name of the relay. 
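The ErrorResponse/ErrorDetail pair above follows the common ARM error contract; the generated operations later in this patch deserialize it and attach it to the raised HttpResponseError. A rough, non-authoritative sketch of inspecting a failure (assumes azure-core exposes the attached model as `exc.model`; `resource_group_name`, `cluster_name`, and `connected_cluster` are placeholders):

    from azure.core.exceptions import HttpResponseError

    try:
        poller = client.connected_cluster.begin_create(resource_group_name, cluster_name, connected_cluster)
        poller.result()
    except HttpResponseError as exc:
        detail = exc.model.error if exc.model else None  # assumed: exc.model is the deserialized ErrorResponse
        if detail:
            print(f"{detail.code}: {detail.message}")
            for sub in detail.details or []:
                print(f"  {sub.code}: {sub.message}")
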
@@ -572,77 +571,69 @@ class HybridConnectionConfig(msrest.serialization.Model): """ _validation = { - 'expiration_time': {'readonly': True}, - 'hybrid_connection_name': {'readonly': True}, - 'relay': {'readonly': True}, - 'token': {'readonly': True}, + "expiration_time": {"readonly": True}, + "hybrid_connection_name": {"readonly": True}, + "relay": {"readonly": True}, + "token": {"readonly": True}, } _attribute_map = { - 'expiration_time': {'key': 'expirationTime', 'type': 'long'}, - 'hybrid_connection_name': {'key': 'hybridConnectionName', 'type': 'str'}, - 'relay': {'key': 'relay', 'type': 'str'}, - 'token': {'key': 'token', 'type': 'str'}, + "expiration_time": {"key": "expirationTime", "type": "int"}, + "hybrid_connection_name": {"key": "hybridConnectionName", "type": "str"}, + "relay": {"key": "relay", "type": "str"}, + "token": {"key": "token", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(HybridConnectionConfig, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.expiration_time = None self.hybrid_connection_name = None self.relay = None self.token = None -class ListClusterUserCredentialProperties(msrest.serialization.Model): +class ListClusterUserCredentialProperties(_serialization.Model): """ListClusterUserCredentialProperties. All required parameters must be populated in order to send to Azure. - :ivar authentication_method: Required. The mode of client authentication. Possible values - include: "Token", "AAD". + :ivar authentication_method: The mode of client authentication. Required. Known values are: + "Token" and "AAD". :vartype authentication_method: str or ~azure.mgmt.hybridkubernetes.models.AuthenticationMethod - :ivar client_proxy: Required. Boolean value to indicate whether the request is for client side - proxy or not. + :ivar client_proxy: Boolean value to indicate whether the request is for client side proxy or + not. Required. :vartype client_proxy: bool """ _validation = { - 'authentication_method': {'required': True}, - 'client_proxy': {'required': True}, + "authentication_method": {"required": True}, + "client_proxy": {"required": True}, } _attribute_map = { - 'authentication_method': {'key': 'authenticationMethod', 'type': 'str'}, - 'client_proxy': {'key': 'clientProxy', 'type': 'bool'}, + "authentication_method": {"key": "authenticationMethod", "type": "str"}, + "client_proxy": {"key": "clientProxy", "type": "bool"}, } def __init__( - self, - *, - authentication_method: Union[str, "AuthenticationMethod"], - client_proxy: bool, - **kwargs + self, *, authentication_method: Union[str, "_models.AuthenticationMethod"], client_proxy: bool, **kwargs ): """ - :keyword authentication_method: Required. The mode of client authentication. Possible values - include: "Token", "AAD". + :keyword authentication_method: The mode of client authentication. Required. Known values are: + "Token" and "AAD". :paramtype authentication_method: str or ~azure.mgmt.hybridkubernetes.models.AuthenticationMethod - :keyword client_proxy: Required. Boolean value to indicate whether the request is for client - side proxy or not. + :keyword client_proxy: Boolean value to indicate whether the request is for client side proxy + or not. Required. 
:paramtype client_proxy: bool """ - super(ListClusterUserCredentialProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.authentication_method = authentication_method self.client_proxy = client_proxy -class Operation(msrest.serialization.Model): +class Operation(_serialization.Model): """The Connected cluster API operation. Variables are only populated by the server, and will be ignored when sending a request. @@ -654,27 +645,23 @@ class Operation(msrest.serialization.Model): """ _validation = { - 'name': {'readonly': True}, - 'display': {'readonly': True}, + "name": {"readonly": True}, + "display": {"readonly": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display': {'key': 'display', 'type': 'OperationDisplay'}, + "name": {"key": "name", "type": "str"}, + "display": {"key": "display", "type": "OperationDisplay"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(Operation, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.name = None self.display = None -class OperationDisplay(msrest.serialization.Model): +class OperationDisplay(_serialization.Model): """The object that represents the operation. :ivar provider: Service provider: Microsoft.connectedClusters. @@ -688,10 +675,10 @@ class OperationDisplay(msrest.serialization.Model): """ _attribute_map = { - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, + "provider": {"key": "provider", "type": "str"}, + "resource": {"key": "resource", "type": "str"}, + "operation": {"key": "operation", "type": "str"}, + "description": {"key": "description", "type": "str"}, } def __init__( @@ -713,14 +700,14 @@ def __init__( :keyword description: Description of the operation. :paramtype description: str """ - super(OperationDisplay, self).__init__(**kwargs) + super().__init__(**kwargs) self.provider = provider self.resource = resource self.operation = operation self.description = description -class OperationList(msrest.serialization.Model): +class OperationList(_serialization.Model): """The paginated list of connected cluster API operations. Variables are only populated by the server, and will be ignored when sending a request. @@ -732,85 +719,80 @@ class OperationList(msrest.serialization.Model): """ _validation = { - 'value': {'readonly': True}, + "value": {"readonly": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[Operation]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[Operation]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - *, - next_link: Optional[str] = None, - **kwargs - ): + def __init__(self, *, next_link: Optional[str] = None, **kwargs): """ :keyword next_link: The link to fetch the next page of connected cluster API operations. :paramtype next_link: str """ - super(OperationList, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = None self.next_link = next_link -class SystemData(msrest.serialization.Model): +class SystemData(_serialization.Model): """Metadata pertaining to creation and last modification of the resource. :ivar created_by: The identity that created the resource. :vartype created_by: str - :ivar created_by_type: The type of identity that created the resource. Possible values include: - "User", "Application", "ManagedIdentity", "Key". 
+ :ivar created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", and "Key". :vartype created_by_type: str or ~azure.mgmt.hybridkubernetes.models.CreatedByType :ivar created_at: The timestamp of resource creation (UTC). :vartype created_at: ~datetime.datetime :ivar last_modified_by: The identity that last modified the resource. :vartype last_modified_by: str - :ivar last_modified_by_type: The type of identity that last modified the resource. Possible - values include: "User", "Application", "ManagedIdentity", "Key". + :ivar last_modified_by_type: The type of identity that last modified the resource. Known values + are: "User", "Application", "ManagedIdentity", and "Key". :vartype last_modified_by_type: str or ~azure.mgmt.hybridkubernetes.models.LastModifiedByType :ivar last_modified_at: The timestamp of resource modification (UTC). :vartype last_modified_at: ~datetime.datetime """ _attribute_map = { - 'created_by': {'key': 'createdBy', 'type': 'str'}, - 'created_by_type': {'key': 'createdByType', 'type': 'str'}, - 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, - 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, - 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, - 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + "created_by": {"key": "createdBy", "type": "str"}, + "created_by_type": {"key": "createdByType", "type": "str"}, + "created_at": {"key": "createdAt", "type": "iso-8601"}, + "last_modified_by": {"key": "lastModifiedBy", "type": "str"}, + "last_modified_by_type": {"key": "lastModifiedByType", "type": "str"}, + "last_modified_at": {"key": "lastModifiedAt", "type": "iso-8601"}, } def __init__( self, *, created_by: Optional[str] = None, - created_by_type: Optional[Union[str, "CreatedByType"]] = None, + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, created_at: Optional[datetime.datetime] = None, last_modified_by: Optional[str] = None, - last_modified_by_type: Optional[Union[str, "LastModifiedByType"]] = None, + last_modified_by_type: Optional[Union[str, "_models.LastModifiedByType"]] = None, last_modified_at: Optional[datetime.datetime] = None, **kwargs ): """ :keyword created_by: The identity that created the resource. :paramtype created_by: str - :keyword created_by_type: The type of identity that created the resource. Possible values - include: "User", "Application", "ManagedIdentity", "Key". + :keyword created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", and "Key". :paramtype created_by_type: str or ~azure.mgmt.hybridkubernetes.models.CreatedByType :keyword created_at: The timestamp of resource creation (UTC). :paramtype created_at: ~datetime.datetime :keyword last_modified_by: The identity that last modified the resource. :paramtype last_modified_by: str - :keyword last_modified_by_type: The type of identity that last modified the resource. Possible - values include: "User", "Application", "ManagedIdentity", "Key". + :keyword last_modified_by_type: The type of identity that last modified the resource. Known + values are: "User", "Application", "ManagedIdentity", and "Key". :paramtype last_modified_by_type: str or ~azure.mgmt.hybridkubernetes.models.LastModifiedByType :keyword last_modified_at: The timestamp of resource modification (UTC). 
:paramtype last_modified_at: ~datetime.datetime """ - super(SystemData, self).__init__(**kwargs) + super().__init__(**kwargs) self.created_by = created_by self.created_by_type = created_by_type self.created_at = created_at diff --git a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/models/_patch.py b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/models/_patch.py new file mode 100644 index 00000000000..f7dd3251033 --- /dev/null +++ b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/models/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/operations/__init__.py b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/operations/__init__.py similarity index 66% rename from src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/operations/__init__.py rename to src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/operations/__init__.py index 4bf80ae81ea..795e9a8dbdf 100644 --- a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_05_01/operations/__init__.py +++ b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/operations/__init__.py @@ -9,7 +9,13 @@ from ._connected_cluster_operations import ConnectedClusterOperations from ._operations import Operations +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk + __all__ = [ - 'ConnectedClusterOperations', - 'Operations', + "ConnectedClusterOperations", + "Operations", ] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/operations/_connected_cluster_operations.py b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/operations/_connected_cluster_operations.py new file mode 100644 index 00000000000..eecc6791c2f --- /dev/null +++ b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/operations/_connected_cluster_operations.py @@ -0,0 +1,1141 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_create_request( + resource_group_name: str, cluster_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, cluster_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}", + ) # pylint: disable=line-too-long + 
path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request(resource_group_name: str, cluster_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, cluster_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_cluster_user_credential_request( + resource_group_name: str, cluster_name: str, 
subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}/listClusterUserCredential", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_by_resource_group_request(resource_group_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.Kubernetes/connectedClusters" + ) + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", 
api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class ConnectedClusterOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.hybridkubernetes.ConnectedKubernetesClient`'s + :attr:`connected_cluster` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + def _create_initial( + self, + resource_group_name: str, + cluster_name: str, + connected_cluster: Union[_models.ConnectedCluster, IO], + **kwargs: Any + ) -> _models.ConnectedCluster: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConnectedCluster] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(connected_cluster, (IO, bytes)): + _content = connected_cluster + else: + _json = self._serialize.body(connected_cluster, "ConnectedCluster") + + request = build_create_request( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ConnectedCluster", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ConnectedCluster", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}"} # type: ignore + + @overload + def begin_create( + self, + resource_group_name: str, + cluster_name: str, + connected_cluster: _models.ConnectedCluster, + *, + content_type: str = 
"application/json", + **kwargs: Any + ) -> LROPoller[_models.ConnectedCluster]: + """Register a new Kubernetes cluster with Azure Resource Manager. + + API to register a new Kubernetes cluster and create a tracked resource in Azure Resource + Manager (ARM). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param cluster_name: The name of the Kubernetes cluster on which get is called. Required. + :type cluster_name: str + :param connected_cluster: Parameters supplied to Create a Connected Cluster. Required. + :type connected_cluster: ~azure.mgmt.hybridkubernetes.models.ConnectedCluster + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ConnectedCluster or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.hybridkubernetes.models.ConnectedCluster] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create( + self, + resource_group_name: str, + cluster_name: str, + connected_cluster: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ConnectedCluster]: + """Register a new Kubernetes cluster with Azure Resource Manager. + + API to register a new Kubernetes cluster and create a tracked resource in Azure Resource + Manager (ARM). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param cluster_name: The name of the Kubernetes cluster on which get is called. Required. + :type cluster_name: str + :param connected_cluster: Parameters supplied to Create a Connected Cluster. Required. + :type connected_cluster: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ConnectedCluster or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.hybridkubernetes.models.ConnectedCluster] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create( + self, + resource_group_name: str, + cluster_name: str, + connected_cluster: Union[_models.ConnectedCluster, IO], + **kwargs: Any + ) -> LROPoller[_models.ConnectedCluster]: + """Register a new Kubernetes cluster with Azure Resource Manager. + + API to register a new Kubernetes cluster and create a tracked resource in Azure Resource + Manager (ARM). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param cluster_name: The name of the Kubernetes cluster on which get is called. Required. + :type cluster_name: str + :param connected_cluster: Parameters supplied to Create a Connected Cluster. Is either a model + type or a IO type. Required. + :type connected_cluster: ~azure.mgmt.hybridkubernetes.models.ConnectedCluster or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ConnectedCluster or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.hybridkubernetes.models.ConnectedCluster] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConnectedCluster] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_initial( # type: ignore + resource_group_name=resource_group_name, + cluster_name=cluster_name, + connected_cluster=connected_cluster, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ConnectedCluster", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}"} # type: ignore + + @overload + def update( + self, + resource_group_name: str, + cluster_name: str, + connected_cluster_patch: _models.ConnectedClusterPatch, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ConnectedCluster: + """Updates a connected cluster. + + API to update certain properties of the connected cluster resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param cluster_name: The name of the Kubernetes cluster on which get is called. Required. + :type cluster_name: str + :param connected_cluster_patch: Parameters supplied to update Connected Cluster. Required. + :type connected_cluster_patch: ~azure.mgmt.hybridkubernetes.models.ConnectedClusterPatch + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ConnectedCluster or the result of cls(response) + :rtype: ~azure.mgmt.hybridkubernetes.models.ConnectedCluster + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update( + self, + resource_group_name: str, + cluster_name: str, + connected_cluster_patch: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ConnectedCluster: + """Updates a connected cluster. + + API to update certain properties of the connected cluster resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param cluster_name: The name of the Kubernetes cluster on which get is called. Required. + :type cluster_name: str + :param connected_cluster_patch: Parameters supplied to update Connected Cluster. Required. + :type connected_cluster_patch: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ConnectedCluster or the result of cls(response) + :rtype: ~azure.mgmt.hybridkubernetes.models.ConnectedCluster + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def update( + self, + resource_group_name: str, + cluster_name: str, + connected_cluster_patch: Union[_models.ConnectedClusterPatch, IO], + **kwargs: Any + ) -> _models.ConnectedCluster: + """Updates a connected cluster. + + API to update certain properties of the connected cluster resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param cluster_name: The name of the Kubernetes cluster on which get is called. Required. + :type cluster_name: str + :param connected_cluster_patch: Parameters supplied to update Connected Cluster. Is either a + model type or a IO type. Required. + :type connected_cluster_patch: ~azure.mgmt.hybridkubernetes.models.ConnectedClusterPatch or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ConnectedCluster or the result of cls(response) + :rtype: ~azure.mgmt.hybridkubernetes.models.ConnectedCluster + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConnectedCluster] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(connected_cluster_patch, (IO, bytes)): + _content = connected_cluster_patch + else: + _json = self._serialize.body(connected_cluster_patch, "ConnectedClusterPatch") + + request = build_update_request( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ConnectedCluster", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + update.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}"} # type: ignore + + @distributed_trace + def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> _models.ConnectedCluster: + """Get the properties of the specified connected cluster. + + Returns the properties of the specified connected cluster, including name, identity, + properties, and additional cluster details. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param cluster_name: The name of the Kubernetes cluster on which get is called. Required. 
+ :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ConnectedCluster or the result of cls(response) + :rtype: ~azure.mgmt.hybridkubernetes.models.ConnectedCluster + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConnectedCluster] + + request = build_get_request( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ConnectedCluster", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, cluster_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, 
None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}"} # type: ignore + + @distributed_trace + def begin_delete(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> LROPoller[None]: + """Delete a connected cluster. + + Delete a connected cluster, removing the tracked resource in Azure Resource Manager (ARM). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param cluster_name: The name of the Kubernetes cluster on which get is called. Required. + :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + cluster_name=cluster_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}"} # type: ignore + + @overload + def list_cluster_user_credential( + self, + resource_group_name: str, + cluster_name: str, + properties: _models.ListClusterUserCredentialProperties, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CredentialResults: + """Gets cluster user credentials of a connected cluster. 
+ + Gets cluster user credentials of the connected cluster with a specified resource group and + name. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param cluster_name: The name of the Kubernetes cluster on which get is called. Required. + :type cluster_name: str + :param properties: ListClusterUserCredential properties. Required. + :type properties: ~azure.mgmt.hybridkubernetes.models.ListClusterUserCredentialProperties + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CredentialResults or the result of cls(response) + :rtype: ~azure.mgmt.hybridkubernetes.models.CredentialResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def list_cluster_user_credential( + self, + resource_group_name: str, + cluster_name: str, + properties: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CredentialResults: + """Gets cluster user credentials of a connected cluster. + + Gets cluster user credentials of the connected cluster with a specified resource group and + name. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param cluster_name: The name of the Kubernetes cluster on which get is called. Required. + :type cluster_name: str + :param properties: ListClusterUserCredential properties. Required. + :type properties: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CredentialResults or the result of cls(response) + :rtype: ~azure.mgmt.hybridkubernetes.models.CredentialResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def list_cluster_user_credential( + self, + resource_group_name: str, + cluster_name: str, + properties: Union[_models.ListClusterUserCredentialProperties, IO], + **kwargs: Any + ) -> _models.CredentialResults: + """Gets cluster user credentials of a connected cluster. + + Gets cluster user credentials of the connected cluster with a specified resource group and + name. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param cluster_name: The name of the Kubernetes cluster on which get is called. Required. + :type cluster_name: str + :param properties: ListClusterUserCredential properties. Is either a model type or a IO type. + Required. + :type properties: ~azure.mgmt.hybridkubernetes.models.ListClusterUserCredentialProperties or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CredentialResults or the result of cls(response) + :rtype: ~azure.mgmt.hybridkubernetes.models.CredentialResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CredentialResults] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(properties, (IO, bytes)): + _content = properties + else: + _json = self._serialize.body(properties, "ListClusterUserCredentialProperties") + + request = build_list_cluster_user_credential_request( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.list_cluster_user_credential.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("CredentialResults", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_cluster_user_credential.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters/{clusterName}/listClusterUserCredential"} # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.ConnectedCluster"]: + """Lists all connected clusters. + + API to enumerate registered connected K8s clusters under a Resource Group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ConnectedCluster or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.hybridkubernetes.models.ConnectedCluster] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConnectedClusterList] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_by_resource_group.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ConnectedClusterList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list_by_resource_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Kubernetes/connectedClusters"} # type: ignore + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.ConnectedCluster"]: + """Lists all connected clusters. + + API to enumerate registered connected K8s clusters under a Subscription. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ConnectedCluster or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.hybridkubernetes.models.ConnectedCluster] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConnectedClusterList] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_by_subscription.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ConnectedClusterList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list_by_subscription.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Kubernetes/connectedClusters"} # type: ignore diff --git a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/operations/_operations.py b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/operations/_operations.py new file mode 100644 index 00000000000..cd2b9cf9257 --- /dev/null +++ b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/operations/_operations.py @@ -0,0 +1,147 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request(**kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/providers/Microsoft.Kubernetes/operations") + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class Operations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.hybridkubernetes.ConnectedKubernetesClient`'s + :attr:`operations` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, **kwargs: Any) -> Iterable["_models.Operation"]: + """Lists all of the available API operations for Connected Cluster resource. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either Operation or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.hybridkubernetes.models.Operation] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.OperationList] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_get_request( + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("OperationList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + get.metadata = {"url": "/providers/Microsoft.Kubernetes/operations"} # type: ignore diff --git a/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/operations/_patch.py b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/operations/_patch.py new file mode 100644 index 00000000000..f7dd3251033 --- /dev/null +++ b/src/connectedk8s/azext_connectedk8s/vendored_sdks/preview_2022_10_01/operations/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/src/connectedk8s/setup.py b/src/connectedk8s/setup.py index 961b610dddf..ca2f52cd937 100644 --- a/src/connectedk8s/setup.py +++ b/src/connectedk8s/setup.py @@ -17,7 +17,7 @@ # TODO: Confirm this is the right version number you want and it matches your # HISTORY.rst entry. -VERSION = '1.3.4' +VERSION = '1.3.5' # The full list of classifiers is available at # https://pypi.python.org/pypi?%3Aaction=list_classifiers @@ -35,7 +35,7 @@ # TODO: Add any additional SDK dependencies here DEPENDENCIES = [ - 'kubernetes==11.0.0', + 'kubernetes==24.2.0', 'pycryptodome==3.14.1', 'azure-mgmt-hybridcompute==7.0.0' ] From d1775732e6a9abbfbed835378455b1d121b13e2a Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Thu, 20 Oct 2022 07:10:49 +0000 Subject: [PATCH 09/85] [Release] Update index.json for extension [ connectedk8s ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=10119&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/0a308b9a3ab581f2e4a2d8b70f233adee11f1afe --- src/index.json | 53 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 53 insertions(+) diff --git a/src/index.json b/src/index.json index a07b0e1ab39..3eca230f455 100644 --- a/src/index.json +++ b/src/index.json @@ -16803,6 +16803,59 @@ "version": "1.3.4" }, "sha256Digest": "83ed63bb821ae47b944b6d2e4894229bfc76e9b0cefec8b73a0c74f9ea44e833" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/connectedk8s-1.3.5-py2.py3-none-any.whl", + "filename": "connectedk8s-1.3.5-py2.py3-none-any.whl", + "metadata": { + "azext.minCliCoreVersion": "2.38.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License" + ], + "description_content_type": "text/markdown", + "extensions": { + "python.details": { + "contacts": [ + { + "email": "k8connect@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/connectedk8s" + } + } + }, + "extras": [], + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "connectedk8s", + "run_requires": [ + { + "requires": [ + "azure-mgmt-hybridcompute (==7.0.0)", + "kubernetes (==24.2.0)", + "pycryptodome (==3.14.1)" + ] + } + ], + "summary": "Microsoft Azure Command-Line Tools Connectedk8s Extension", + "version": "1.3.5" + }, + "sha256Digest": "17ba7dd032c87e7ff4b9cce298dd81171e6e75bcfe2912f7c2f3cd1f55c00d11" } ], "connectedmachine": [ From d1a9d16cc487dcf4b2f96954a2a291cd5ee65e03 Mon Sep 17 00:00:00 2001 From: Bin Xia Date: Thu, 20 Oct 2022 16:11:21 +0800 Subject: [PATCH 10/85] Support updating SSH public key with `az aks update --ssh-key-value` (#5464) --- src/aks-preview/HISTORY.rst | 5 + src/aks-preview/azext_aks_preview/_help.py | 4 + src/aks-preview/azext_aks_preview/_params.py | 2 + 
.../azext_aks_preview/_validators.py | 14 + src/aks-preview/azext_aks_preview/custom.py | 1 + .../managed_cluster_decorator.py | 42 + ..._aks_create_and_update_ssh_public_key.yaml | 2604 +++++++++++++++++ .../tests/latest/test_aks_commands.py | 35 + src/aks-preview/setup.py | 2 +- 9 files changed, 2708 insertions(+), 1 deletion(-) create mode 100644 src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_create_and_update_ssh_public_key.yaml diff --git a/src/aks-preview/HISTORY.rst b/src/aks-preview/HISTORY.rst index 090ed5b1e25..699f2169e02 100644 --- a/src/aks-preview/HISTORY.rst +++ b/src/aks-preview/HISTORY.rst @@ -12,6 +12,11 @@ To release a new version, please select a new version number (usually plus 1 to Pending +++++++ +0.5.111 ++++++++ + +* Support updating SSH public key with `az aks update --ssh-key-value`. + 0.5.110 +++++++ diff --git a/src/aks-preview/azext_aks_preview/_help.py b/src/aks-preview/azext_aks_preview/_help.py index c95e7be4b13..2de4e84b396 100644 --- a/src/aks-preview/azext_aks_preview/_help.py +++ b/src/aks-preview/azext_aks_preview/_help.py @@ -894,6 +894,10 @@ - name: --cluster-snapshot-id type: string short-summary: The source cluster snapshot id is used to update existing cluster. + - name: --ssh-key-value + type: string + short-summary: Public key path or key contents to install on node VMs for SSH access. For example, + 'ssh-rsa AAAAB...snip...UcyupgH azureuser@linuxvm'. examples: - name: Reconcile the cluster back to its current state. text: az aks update -g MyResourceGroup -n MyManagedCluster diff --git a/src/aks-preview/azext_aks_preview/_params.py b/src/aks-preview/azext_aks_preview/_params.py index 6848b044604..fde0f75cdfc 100644 --- a/src/aks-preview/azext_aks_preview/_params.py +++ b/src/aks-preview/azext_aks_preview/_params.py @@ -116,6 +116,7 @@ validate_snapshot_name, validate_spot_max_price, validate_ssh_key, + validate_ssh_key_for_update, validate_taints, validate_user, validate_vm_set_type, @@ -388,6 +389,7 @@ def load_arguments(self, _): c.argument('disable_defender', action='store_true', validator=validate_defender_disable_and_enable_parameters) c.argument('enable_defender', action='store_true') c.argument('defender_config', validator=validate_defender_config_parameter) + c.argument('ssh_key_value', type=file_type, completer=FilesCompleter(), validator=validate_ssh_key_for_update) # addons c.argument('enable_secret_rotation', action='store_true') c.argument('disable_secret_rotation', action='store_true') diff --git a/src/aks-preview/azext_aks_preview/_validators.py b/src/aks-preview/azext_aks_preview/_validators.py index 0ee667e8bb9..7f4291aff12 100644 --- a/src/aks-preview/azext_aks_preview/_validators.py +++ b/src/aks-preview/azext_aks_preview/_validators.py @@ -64,6 +64,20 @@ def validate_ssh_key(namespace): namespace.ssh_key_value = content +def validate_ssh_key_for_update(namespace): + string_or_file = namespace.ssh_key_value + if not string_or_file: + return + content = string_or_file + if os.path.exists(string_or_file): + logger.info('Use existing SSH public key file: %s', string_or_file) + with open(string_or_file, 'r') as f: + content = f.read() + elif not keys.is_valid_ssh_rsa_public_key(content): + raise InvalidArgumentValueError('An RSA key file or key value must be supplied to SSH Key Value') + namespace.ssh_key_value = content + + def validate_create_parameters(namespace): if not namespace.name: raise CLIError('--name has no value') diff --git a/src/aks-preview/azext_aks_preview/custom.py 
b/src/aks-preview/azext_aks_preview/custom.py index 2868a7e8a62..44d34267d0b 100644 --- a/src/aks-preview/azext_aks_preview/custom.py +++ b/src/aks-preview/azext_aks_preview/custom.py @@ -810,6 +810,7 @@ def aks_update( enable_vpa=False, disable_vpa=False, cluster_snapshot_id=None, + ssh_key_value=None, ): # DO NOT MOVE: get all the original parameters and save them as a dictionary raw_parameters = locals() diff --git a/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py b/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py index 7cd115ed38d..7d8decd9425 100644 --- a/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py +++ b/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py @@ -14,6 +14,7 @@ from azure.cli.command_modules.acs._helpers import ( check_is_msi_cluster, format_parameter_name_to_option_name, + safe_list_get, safe_lower, ) from azure.cli.command_modules.acs._validators import ( @@ -2080,6 +2081,24 @@ def get_disable_vpa(self) -> bool: """ return self._get_disable_vpa(enable_validation=True) + def get_ssh_key_value_for_update(self) -> Tuple[str, bool]: + """Obtain the value of ssh_key_value for "az aks update". + + Note: no_ssh_key will not be decorated into the `mc` object. + + If the user provides a string-like input for --ssh-key-value, the validator function "validate_ssh_key_for_update" will + check whether it is a file path, if so, read its content and return; if it is a valid public key, return it. + Otherwise, raise error. + + :return: ssh_key_value of string type + """ + # read the original value passed by the command + ssh_key_value = self.raw_param.get("ssh_key_value") + + # this parameter does not need dynamic completion + # this parameter does not need validation + return ssh_key_value + class AKSPreviewManagedClusterCreateDecorator(AKSManagedClusterCreateDecorator): def __init__( @@ -2988,6 +3007,27 @@ def update_creation_data(self, mc: ManagedCluster) -> ManagedCluster: source_resource_id=snapshot_id ) mc.creation_data = creation_data + + return mc + + def update_linux_profile(self, mc: ManagedCluster) -> ManagedCluster: + """Update Linux profile for the ManagedCluster object. 
+ + :return: the ManagedCluster object + """ + self._ensure_mc(mc) + + ssh_key_value = self.context.get_ssh_key_value_for_update() + + if ssh_key_value: + mc.linux_profile.ssh = self.models.ContainerServiceSshConfiguration( + public_keys=[ + self.models.ContainerServiceSshPublicKey( + key_data=ssh_key_value + ) + ] + ) + return mc def update_mc_profile_preview(self) -> ManagedCluster: @@ -3032,5 +3072,7 @@ def update_mc_profile_preview(self) -> ManagedCluster: mc = self.update_vpa(mc) # update creation data mc = self.update_creation_data(mc) + # update linux profile + mc = self.update_linux_profile(mc) return mc diff --git a/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_create_and_update_ssh_public_key.yaml b/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_create_and_update_ssh_public_key.yaml new file mode 100644 index 00000000000..600048e15c3 --- /dev/null +++ b/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_create_and_update_ssh_public_key.yaml @@ -0,0 +1,2604 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name -c --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.10 (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001?api-version=2021-04-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001","name":"clitest000001","type":"Microsoft.Resources/resourceGroups","location":"westus2","tags":{"product":"azurecli","cause":"automation","date":"2022-10-19T07:30:54Z"},"properties":{"provisioningState":"Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '305' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 07:30:54 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"location": "westus2", "identity": {"type": "SystemAssigned"}, "properties": + {"kubernetesVersion": "", "dnsPrefix": "cliakstest-clitestcvkxrczgf-79a739", + "agentPoolProfiles": [{"count": 1, "vmSize": "Standard_DS2_v2", "osDiskSizeGB": + 0, "workloadRuntime": "OCIContainer", "osType": "Linux", "enableAutoScaling": + false, "type": "VirtualMachineScaleSets", "mode": "System", "orchestratorVersion": + "", "upgradeSettings": {}, "enableNodePublicIP": false, "enableCustomCATrust": + false, "scaleSetPriority": "Regular", "scaleSetEvictionPolicy": "Delete", "spotMaxPrice": + -1.0, "nodeTaints": [], "enableEncryptionAtHost": false, "enableUltraSSD": false, + "enableFIPS": false, "name": "nodepool1"}], "linuxProfile": {"adminUsername": + "azureuser", "ssh": {"publicKeys": [{"keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDjBMbLg2FYbO869FuegeCaldfK8p28BOV+8ERSs/zC2Tu6F9nwn7JAudD+cqyw7P3fS+RWJA8oz9q31px8IY0jotmYZw2IqEr9COCtE5NYm/NG6BqjpV7vU5T4S6P522X8wQdtIc/I/U4Q+HisEOJ0NTaR5IhimKi2ql/m2n7LfsxBHe+WMXEKf/NcHvoANgee/oJ11Mi0SWTKoQLKKhmiT2xpBiA3nZkFkB29TPBQcCbZReC4s3VcWmUNCxqzBncIm2KfFe2Zp/Ikgop102uy2jBwgtlIEcNjUFR1erP76pjyZ/pCcbEu/QW1lZPMj3+q7CxQj5QrGArV8fh/b2Q/ + azcli_aks_live_test@example.com\n"}]}}, "addonProfiles": {}, 
"enableRBAC": true, + "enablePodSecurityPolicy": false, "networkProfile": {"networkPlugin": "kubenet", + "podCidr": "10.244.0.0/16", "serviceCidr": "10.0.0.0/16", "dnsServiceIP": "10.0.0.10", + "dockerBridgeCidr": "172.17.0.1/16", "outboundType": "loadBalancer", "loadBalancerSku": + "standard"}, "disableLocalAccounts": false, "storageProfile": {}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + Content-Length: + - '1558' + Content-Type: + - application/json + ParameterSetName: + - --resource-group --name -c --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002\",\n + \ \"location\": \"westus2\",\n \"name\": \"cliakstest000002\",\n \"type\": + \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \"provisioningState\": + \"Creating\",\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"kubernetesVersion\": + \"1.23.12\",\n \"currentKubernetesVersion\": \"1.23.12\",\n \"dnsPrefix\": + \"cliakstest-clitestcvkxrczgf-79a739\",\n \"fqdn\": \"cliakstest-clitestcvkxrczgf-79a739-b811a83a.hcp.westus2.azmk8s.io\",\n + \ \"azurePortalFQDN\": \"cliakstest-clitestcvkxrczgf-79a739-b811a83a.portal.hcp.westus2.azmk8s.io\",\n + \ \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \"count\": + 1,\n \"vmSize\": \"Standard_DS2_v2\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": + \"Managed\",\n \"kubeletDiskType\": \"OS\",\n \"workloadRuntime\": + \"OCIContainer\",\n \"maxPods\": 110,\n \"type\": \"VirtualMachineScaleSets\",\n + \ \"enableAutoScaling\": false,\n \"provisioningState\": \"Creating\",\n + \ \"powerState\": {\n \"code\": \"Running\"\n },\n \"orchestratorVersion\": + \"1.23.12\",\n \"currentOrchestratorVersion\": \"1.23.12\",\n \"enableNodePublicIP\": + false,\n \"enableCustomCATrust\": false,\n \"mode\": \"System\",\n + \ \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n + \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\": + \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\": {},\n + \ \"enableFIPS\": false\n }\n ],\n \"linuxProfile\": {\n \"adminUsername\": + \"azureuser\",\n \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDjBMbLg2FYbO869FuegeCaldfK8p28BOV+8ERSs/zC2Tu6F9nwn7JAudD+cqyw7P3fS+RWJA8oz9q31px8IY0jotmYZw2IqEr9COCtE5NYm/NG6BqjpV7vU5T4S6P522X8wQdtIc/I/U4Q+HisEOJ0NTaR5IhimKi2ql/m2n7LfsxBHe+WMXEKf/NcHvoANgee/oJ11Mi0SWTKoQLKKhmiT2xpBiA3nZkFkB29TPBQcCbZReC4s3VcWmUNCxqzBncIm2KfFe2Zp/Ikgop102uy2jBwgtlIEcNjUFR1erP76pjyZ/pCcbEu/QW1lZPMj3+q7CxQj5QrGArV8fh/b2Q/ + azcli_aks_live_test@example.com\\n\"\n }\n ]\n }\n },\n \"servicePrincipalProfile\": + {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n },\n \"nodeResourceGroup\": + \"MC_clitest000001_cliakstest000002_westus2\",\n \"enableRBAC\": true,\n + \ \"enablePodSecurityPolicy\": false,\n \"networkProfile\": {\n \"networkPlugin\": + \"kubenet\",\n \"loadBalancerSku\": \"standard\",\n 
\"loadBalancerProfile\": + {\n \"managedOutboundIPs\": {\n \"count\": 1\n },\n \"backendPoolType\": + \"nodeIPConfiguration\"\n },\n \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": + \"10.0.0.0/16\",\n \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": + \"172.17.0.1/16\",\n \"outboundType\": \"loadBalancer\",\n \"podCidrs\": + [\n \"10.244.0.0/16\"\n ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n + \ ],\n \"ipFamilies\": [\n \"IPv4\"\n ]\n },\n \"maxAgentPools\": + 100,\n \"disableLocalAccounts\": false,\n \"securityProfile\": {},\n \"storageProfile\": + {\n \"diskCSIDriver\": {\n \"enabled\": true,\n \"version\": \"v1\"\n + \ },\n \"fileCSIDriver\": {\n \"enabled\": true\n },\n \"snapshotController\": + {\n \"enabled\": true\n }\n },\n \"oidcIssuerProfile\": {\n \"enabled\": + false\n },\n \"workloadAutoScalerProfile\": {}\n },\n \"identity\": + {\n \"type\": \"SystemAssigned\",\n \"principalId\":\"00000000-0000-0000-0000-000000000001\",\n + \ \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\": + {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" + headers: + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b9acfce5-9ff2-44a7-8ada-1259af6109e2?api-version=2016-03-30 + cache-control: + - no-cache + content-length: + - '3453' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:30:59 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name -c --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b9acfce5-9ff2-44a7-8ada-1259af6109e2?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"e5fcacb9-f29f-a744-8ada-1259af6109e2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:30:59.5281387Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:31:29 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name -c --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b9acfce5-9ff2-44a7-8ada-1259af6109e2?api-version=2016-03-30 + response: + body: + 
string: "{\n \"name\": \"e5fcacb9-f29f-a744-8ada-1259af6109e2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:30:59.5281387Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:31:59 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name -c --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b9acfce5-9ff2-44a7-8ada-1259af6109e2?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"e5fcacb9-f29f-a744-8ada-1259af6109e2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:30:59.5281387Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:32:29 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name -c --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b9acfce5-9ff2-44a7-8ada-1259af6109e2?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"e5fcacb9-f29f-a744-8ada-1259af6109e2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:30:59.5281387Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:33:00 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name -c --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b9acfce5-9ff2-44a7-8ada-1259af6109e2?api-version=2016-03-30 + response: + body: + 
string: "{\n \"name\": \"e5fcacb9-f29f-a744-8ada-1259af6109e2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:30:59.5281387Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:33:30 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name -c --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b9acfce5-9ff2-44a7-8ada-1259af6109e2?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"e5fcacb9-f29f-a744-8ada-1259af6109e2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:30:59.5281387Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:34:00 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name -c --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b9acfce5-9ff2-44a7-8ada-1259af6109e2?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"e5fcacb9-f29f-a744-8ada-1259af6109e2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:30:59.5281387Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:34:30 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name -c --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b9acfce5-9ff2-44a7-8ada-1259af6109e2?api-version=2016-03-30 + response: + body: + 
string: "{\n \"name\": \"e5fcacb9-f29f-a744-8ada-1259af6109e2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:30:59.5281387Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:34:59 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name -c --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b9acfce5-9ff2-44a7-8ada-1259af6109e2?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"e5fcacb9-f29f-a744-8ada-1259af6109e2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:30:59.5281387Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:35:30 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name -c --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b9acfce5-9ff2-44a7-8ada-1259af6109e2?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"e5fcacb9-f29f-a744-8ada-1259af6109e2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:30:59.5281387Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:36:00 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name -c --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b9acfce5-9ff2-44a7-8ada-1259af6109e2?api-version=2016-03-30 + response: + body: + 
string: "{\n \"name\": \"e5fcacb9-f29f-a744-8ada-1259af6109e2\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:30:59.5281387Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:36:30 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name -c --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b9acfce5-9ff2-44a7-8ada-1259af6109e2?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"e5fcacb9-f29f-a744-8ada-1259af6109e2\",\n \"status\": + \"Succeeded\",\n \"startTime\": \"2022-10-19T07:30:59.5281387Z\",\n \"endTime\": + \"2022-10-19T07:36:48.2270126Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '170' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:37:00 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name -c --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002\",\n + \ \"location\": \"westus2\",\n \"name\": \"cliakstest000002\",\n \"type\": + \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \"provisioningState\": + \"Succeeded\",\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"kubernetesVersion\": + \"1.23.12\",\n \"currentKubernetesVersion\": \"1.23.12\",\n \"dnsPrefix\": + \"cliakstest-clitestcvkxrczgf-79a739\",\n \"fqdn\": \"cliakstest-clitestcvkxrczgf-79a739-b811a83a.hcp.westus2.azmk8s.io\",\n + \ \"azurePortalFQDN\": \"cliakstest-clitestcvkxrczgf-79a739-b811a83a.portal.hcp.westus2.azmk8s.io\",\n + \ \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \"count\": + 1,\n \"vmSize\": \"Standard_DS2_v2\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": + \"Managed\",\n \"kubeletDiskType\": \"OS\",\n \"workloadRuntime\": + \"OCIContainer\",\n \"maxPods\": 110,\n \"type\": \"VirtualMachineScaleSets\",\n + \ \"enableAutoScaling\": false,\n \"provisioningState\": 
\"Succeeded\",\n + \ \"powerState\": {\n \"code\": \"Running\"\n },\n \"orchestratorVersion\": + \"1.23.12\",\n \"currentOrchestratorVersion\": \"1.23.12\",\n \"enableNodePublicIP\": + false,\n \"enableCustomCATrust\": false,\n \"mode\": \"System\",\n + \ \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n + \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\": + \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\": {},\n + \ \"enableFIPS\": false\n }\n ],\n \"linuxProfile\": {\n \"adminUsername\": + \"azureuser\",\n \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDjBMbLg2FYbO869FuegeCaldfK8p28BOV+8ERSs/zC2Tu6F9nwn7JAudD+cqyw7P3fS+RWJA8oz9q31px8IY0jotmYZw2IqEr9COCtE5NYm/NG6BqjpV7vU5T4S6P522X8wQdtIc/I/U4Q+HisEOJ0NTaR5IhimKi2ql/m2n7LfsxBHe+WMXEKf/NcHvoANgee/oJ11Mi0SWTKoQLKKhmiT2xpBiA3nZkFkB29TPBQcCbZReC4s3VcWmUNCxqzBncIm2KfFe2Zp/Ikgop102uy2jBwgtlIEcNjUFR1erP76pjyZ/pCcbEu/QW1lZPMj3+q7CxQj5QrGArV8fh/b2Q/ + azcli_aks_live_test@example.com\\n\"\n }\n ]\n }\n },\n \"servicePrincipalProfile\": + {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n },\n \"nodeResourceGroup\": + \"MC_clitest000001_cliakstest000002_westus2\",\n \"enableRBAC\": true,\n + \ \"enablePodSecurityPolicy\": false,\n \"networkProfile\": {\n \"networkPlugin\": + \"kubenet\",\n \"loadBalancerSku\": \"Standard\",\n \"loadBalancerProfile\": + {\n \"managedOutboundIPs\": {\n \"count\": 1\n },\n \"effectiveOutboundIPs\": + [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/ee46931e-116f-4edb-94e5-772300f92532\"\n + \ }\n ],\n \"backendPoolType\": \"nodeIPConfiguration\"\n },\n + \ \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\",\n + \ \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\",\n + \ \"outboundType\": \"loadBalancer\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\n + \ ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\": + [\n \"IPv4\"\n ]\n },\n \"maxAgentPools\": 100,\n \"identityProfile\": + {\n \"kubeletidentity\": {\n \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/cliakstest000002-agentpool\",\n + \ \"clientId\":\"00000000-0000-0000-0000-000000000001\",\n \"objectId\":\"00000000-0000-0000-0000-000000000001\"\n + \ }\n },\n \"disableLocalAccounts\": false,\n \"securityProfile\": + {},\n \"storageProfile\": {\n \"diskCSIDriver\": {\n \"enabled\": + true,\n \"version\": \"v1\"\n },\n \"fileCSIDriver\": {\n \"enabled\": + true\n },\n \"snapshotController\": {\n \"enabled\": true\n }\n + \ },\n \"oidcIssuerProfile\": {\n \"enabled\": false\n },\n \"workloadAutoScalerProfile\": + {}\n },\n \"identity\": {\n \"type\": \"SystemAssigned\",\n \"principalId\":\"00000000-0000-0000-0000-000000000001\",\n + \ \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\": + {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" + headers: + cache-control: + - no-cache + content-length: + - '4106' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:37:00 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + 
message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002\",\n + \ \"location\": \"westus2\",\n \"name\": \"cliakstest000002\",\n \"type\": + \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \"provisioningState\": + \"Succeeded\",\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"kubernetesVersion\": + \"1.23.12\",\n \"currentKubernetesVersion\": \"1.23.12\",\n \"dnsPrefix\": + \"cliakstest-clitestcvkxrczgf-79a739\",\n \"fqdn\": \"cliakstest-clitestcvkxrczgf-79a739-b811a83a.hcp.westus2.azmk8s.io\",\n + \ \"azurePortalFQDN\": \"cliakstest-clitestcvkxrczgf-79a739-b811a83a.portal.hcp.westus2.azmk8s.io\",\n + \ \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \"count\": + 1,\n \"vmSize\": \"Standard_DS2_v2\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": + \"Managed\",\n \"kubeletDiskType\": \"OS\",\n \"workloadRuntime\": + \"OCIContainer\",\n \"maxPods\": 110,\n \"type\": \"VirtualMachineScaleSets\",\n + \ \"enableAutoScaling\": false,\n \"provisioningState\": \"Succeeded\",\n + \ \"powerState\": {\n \"code\": \"Running\"\n },\n \"orchestratorVersion\": + \"1.23.12\",\n \"currentOrchestratorVersion\": \"1.23.12\",\n \"enableNodePublicIP\": + false,\n \"enableCustomCATrust\": false,\n \"mode\": \"System\",\n + \ \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n + \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\": + \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\": {},\n + \ \"enableFIPS\": false\n }\n ],\n \"linuxProfile\": {\n \"adminUsername\": + \"azureuser\",\n \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDjBMbLg2FYbO869FuegeCaldfK8p28BOV+8ERSs/zC2Tu6F9nwn7JAudD+cqyw7P3fS+RWJA8oz9q31px8IY0jotmYZw2IqEr9COCtE5NYm/NG6BqjpV7vU5T4S6P522X8wQdtIc/I/U4Q+HisEOJ0NTaR5IhimKi2ql/m2n7LfsxBHe+WMXEKf/NcHvoANgee/oJ11Mi0SWTKoQLKKhmiT2xpBiA3nZkFkB29TPBQcCbZReC4s3VcWmUNCxqzBncIm2KfFe2Zp/Ikgop102uy2jBwgtlIEcNjUFR1erP76pjyZ/pCcbEu/QW1lZPMj3+q7CxQj5QrGArV8fh/b2Q/ + azcli_aks_live_test@example.com\\n\"\n }\n ]\n }\n },\n \"servicePrincipalProfile\": + {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n },\n \"nodeResourceGroup\": + \"MC_clitest000001_cliakstest000002_westus2\",\n \"enableRBAC\": true,\n + \ \"enablePodSecurityPolicy\": false,\n \"networkProfile\": {\n \"networkPlugin\": + \"kubenet\",\n \"loadBalancerSku\": \"Standard\",\n \"loadBalancerProfile\": + {\n \"managedOutboundIPs\": {\n \"count\": 1\n },\n \"effectiveOutboundIPs\": + [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/ee46931e-116f-4edb-94e5-772300f92532\"\n + \ }\n ],\n \"backendPoolType\": \"nodeIPConfiguration\"\n },\n + \ 
\"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\",\n + \ \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\",\n + \ \"outboundType\": \"loadBalancer\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\n + \ ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\": + [\n \"IPv4\"\n ]\n },\n \"maxAgentPools\": 100,\n \"identityProfile\": + {\n \"kubeletidentity\": {\n \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/cliakstest000002-agentpool\",\n + \ \"clientId\":\"00000000-0000-0000-0000-000000000001\",\n \"objectId\":\"00000000-0000-0000-0000-000000000001\"\n + \ }\n },\n \"disableLocalAccounts\": false,\n \"securityProfile\": + {},\n \"storageProfile\": {\n \"diskCSIDriver\": {\n \"enabled\": + true,\n \"version\": \"v1\"\n },\n \"fileCSIDriver\": {\n \"enabled\": + true\n },\n \"snapshotController\": {\n \"enabled\": true\n }\n + \ },\n \"oidcIssuerProfile\": {\n \"enabled\": false\n },\n \"workloadAutoScalerProfile\": + {}\n },\n \"identity\": {\n \"type\": \"SystemAssigned\",\n \"principalId\":\"00000000-0000-0000-0000-000000000001\",\n + \ \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\": + {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" + headers: + cache-control: + - no-cache + content-length: + - '4106' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:37:01 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"location": "westus2", "sku": {"name": "Basic", "tier": "Free"}, "identity": + {"type": "SystemAssigned"}, "properties": {"kubernetesVersion": "1.23.12", "dnsPrefix": + "cliakstest-clitestcvkxrczgf-79a739", "agentPoolProfiles": [{"count": 1, "vmSize": + "Standard_DS2_v2", "osDiskSizeGB": 128, "osDiskType": "Managed", "kubeletDiskType": + "OS", "workloadRuntime": "OCIContainer", "maxPods": 110, "osType": "Linux", + "osSKU": "Ubuntu", "enableAutoScaling": false, "type": "VirtualMachineScaleSets", + "mode": "System", "orchestratorVersion": "1.23.12", "upgradeSettings": {}, "powerState": + {"code": "Running"}, "enableNodePublicIP": false, "enableCustomCATrust": false, + "enableEncryptionAtHost": false, "enableUltraSSD": false, "enableFIPS": false, + "name": "nodepool1"}], "linuxProfile": {"adminUsername": "azureuser", "ssh": + {"publicKeys": [{"keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCYpZoWGqsIbCKOvcrtPi5PpgoaP24pKJ8yk80qBYbqIjyVngCfM8rbgQCZKx4D8emmN7UxjiSt+c4WtV1aUfbT7VA5r4neuhPVgkqgp7CmkKdf0beV/0i5K28J7RojDTktllY9EYRYK6A4olLplaHJiuqbsMYa8amv43ol6IxgM3eE2BiEYm0/uvNKDmZ8AN4w07fFKjz1+wfdkluxC73qhijMY6FCgw+xEvvS1kd2Se6L/M/qV+VVnxW+S/bBT4Yew2dR6KWnauJvxXzdM8WQHyJy52jQ1n5PHxVRMgjRLhWvbcNNgPseFpULxe3a4ATS8kKO2Z9pzpSOgEpW7LVz"}]}}, + "servicePrincipalProfile": {"clientId":"00000000-0000-0000-0000-000000000001"}, + "nodeResourceGroup": "MC_clitest000001_cliakstest000002_westus2", "enableRBAC": + true, "enablePodSecurityPolicy": false, "networkProfile": {"networkPlugin": + "kubenet", "podCidr": "10.244.0.0/16", "serviceCidr": "10.0.0.0/16", "dnsServiceIP": + "10.0.0.10", "dockerBridgeCidr": "172.17.0.1/16", "outboundType": "loadBalancer", + "loadBalancerSku": "Standard", "loadBalancerProfile": {"managedOutboundIPs": + 
{"count": 1, "countIPv6": 0}, "effectiveOutboundIPs": [{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/ee46931e-116f-4edb-94e5-772300f92532"}], + "backendPoolType": "nodeIPConfiguration"}, "podCidrs": ["10.244.0.0/16"], "serviceCidrs": + ["10.0.0.0/16"], "ipFamilies": ["IPv4"]}, "identityProfile": {"kubeletidentity": + {"resourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/cliakstest000002-agentpool", + "clientId":"00000000-0000-0000-0000-000000000001", "objectId":"00000000-0000-0000-0000-000000000001"}}, + "disableLocalAccounts": false, "securityProfile": {}, "storageProfile": {}, + "workloadAutoScalerProfile": {}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + Content-Length: + - '2568' + Content-Type: + - application/json + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002\",\n + \ \"location\": \"westus2\",\n \"name\": \"cliakstest000002\",\n \"type\": + \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \"provisioningState\": + \"Updating\",\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"kubernetesVersion\": + \"1.23.12\",\n \"currentKubernetesVersion\": \"1.23.12\",\n \"dnsPrefix\": + \"cliakstest-clitestcvkxrczgf-79a739\",\n \"fqdn\": \"cliakstest-clitestcvkxrczgf-79a739-b811a83a.hcp.westus2.azmk8s.io\",\n + \ \"azurePortalFQDN\": \"cliakstest-clitestcvkxrczgf-79a739-b811a83a.portal.hcp.westus2.azmk8s.io\",\n + \ \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \"count\": + 1,\n \"vmSize\": \"Standard_DS2_v2\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": + \"Managed\",\n \"kubeletDiskType\": \"OS\",\n \"workloadRuntime\": + \"OCIContainer\",\n \"maxPods\": 110,\n \"type\": \"VirtualMachineScaleSets\",\n + \ \"enableAutoScaling\": false,\n \"provisioningState\": \"Updating\",\n + \ \"powerState\": {\n \"code\": \"Running\"\n },\n \"orchestratorVersion\": + \"1.23.12\",\n \"currentOrchestratorVersion\": \"1.23.12\",\n \"enableNodePublicIP\": + false,\n \"enableCustomCATrust\": false,\n \"mode\": \"System\",\n + \ \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n + \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\": + \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\": {},\n + \ \"enableFIPS\": false\n }\n ],\n \"linuxProfile\": {\n \"adminUsername\": + \"azureuser\",\n \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": + \"ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABAQCYpZoWGqsIbCKOvcrtPi5PpgoaP24pKJ8yk80qBYbqIjyVngCfM8rbgQCZKx4D8emmN7UxjiSt+c4WtV1aUfbT7VA5r4neuhPVgkqgp7CmkKdf0beV/0i5K28J7RojDTktllY9EYRYK6A4olLplaHJiuqbsMYa8amv43ol6IxgM3eE2BiEYm0/uvNKDmZ8AN4w07fFKjz1+wfdkluxC73qhijMY6FCgw+xEvvS1kd2Se6L/M/qV+VVnxW+S/bBT4Yew2dR6KWnauJvxXzdM8WQHyJy52jQ1n5PHxVRMgjRLhWvbcNNgPseFpULxe3a4ATS8kKO2Z9pzpSOgEpW7LVz\"\n + \ }\n ]\n }\n },\n \"servicePrincipalProfile\": {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n + \ },\n \"nodeResourceGroup\": \"MC_clitest000001_cliakstest000002_westus2\",\n + \ \"enableRBAC\": true,\n \"enablePodSecurityPolicy\": false,\n \"networkProfile\": + {\n \"networkPlugin\": \"kubenet\",\n \"loadBalancerSku\": \"Standard\",\n + \ \"loadBalancerProfile\": {\n \"managedOutboundIPs\": {\n \"count\": + 1\n },\n \"effectiveOutboundIPs\": [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/ee46931e-116f-4edb-94e5-772300f92532\"\n + \ }\n ],\n \"backendPoolType\": \"nodeIPConfiguration\"\n },\n + \ \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\",\n + \ \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\",\n + \ \"outboundType\": \"loadBalancer\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\n + \ ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\": + [\n \"IPv4\"\n ]\n },\n \"maxAgentPools\": 100,\n \"identityProfile\": + {\n \"kubeletidentity\": {\n \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/cliakstest000002-agentpool\",\n + \ \"clientId\":\"00000000-0000-0000-0000-000000000001\",\n \"objectId\":\"00000000-0000-0000-0000-000000000001\"\n + \ }\n },\n \"disableLocalAccounts\": false,\n \"securityProfile\": + {},\n \"storageProfile\": {\n \"diskCSIDriver\": {\n \"enabled\": + true,\n \"version\": \"v1\"\n },\n \"fileCSIDriver\": {\n \"enabled\": + true\n },\n \"snapshotController\": {\n \"enabled\": true\n }\n + \ },\n \"oidcIssuerProfile\": {\n \"enabled\": false\n },\n \"workloadAutoScalerProfile\": + {}\n },\n \"identity\": {\n \"type\": \"SystemAssigned\",\n \"principalId\":\"00000000-0000-0000-0000-000000000001\",\n + \ \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\": + {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" + headers: + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + cache-control: + - no-cache + content-length: + - '4070' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:37:05 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + 
(Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:37:34 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:38:04 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:38:34 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + 
(Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:39:04 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:39:35 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:40:05 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + 
(Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:40:35 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:41:05 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:41:35 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + 
(Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:42:05 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:42:35 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:43:05 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + 
(Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:43:36 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:44:06 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:44:36 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + 
(Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:45:06 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:45:36 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:46:06 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + 
(Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:46:37 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:47:07 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:47:37 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + 
(Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:48:07 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:48:37 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:49:07 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + 
(Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:49:37 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:50:07 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:50:37 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + 
(Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:51:08 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:51:37 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b8a16daf-0a06-4e04-b55b-b2144a5538b8?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"af6da1b8-060a-044e-b55b-b2144a5538b8\",\n \"status\": + \"Succeeded\",\n \"startTime\": \"2022-10-19T07:37:05.2708709Z\",\n \"endTime\": + \"2022-10-19T07:51:47.897658Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '169' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:52:07 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --ssh-key-value -o + User-Agent: + - AZURECLI/2.41.0 
azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002\",\n + \ \"location\": \"westus2\",\n \"name\": \"cliakstest000002\",\n \"type\": + \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \"provisioningState\": + \"Succeeded\",\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"kubernetesVersion\": + \"1.23.12\",\n \"currentKubernetesVersion\": \"1.23.12\",\n \"dnsPrefix\": + \"cliakstest-clitestcvkxrczgf-79a739\",\n \"fqdn\": \"cliakstest-clitestcvkxrczgf-79a739-b811a83a.hcp.westus2.azmk8s.io\",\n + \ \"azurePortalFQDN\": \"cliakstest-clitestcvkxrczgf-79a739-b811a83a.portal.hcp.westus2.azmk8s.io\",\n + \ \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \"count\": + 1,\n \"vmSize\": \"Standard_DS2_v2\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": + \"Managed\",\n \"kubeletDiskType\": \"OS\",\n \"workloadRuntime\": + \"OCIContainer\",\n \"maxPods\": 110,\n \"type\": \"VirtualMachineScaleSets\",\n + \ \"enableAutoScaling\": false,\n \"provisioningState\": \"Succeeded\",\n + \ \"powerState\": {\n \"code\": \"Running\"\n },\n \"orchestratorVersion\": + \"1.23.12\",\n \"currentOrchestratorVersion\": \"1.23.12\",\n \"enableNodePublicIP\": + false,\n \"enableCustomCATrust\": false,\n \"mode\": \"System\",\n + \ \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n + \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\": + \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\": {},\n + \ \"enableFIPS\": false\n }\n ],\n \"linuxProfile\": {\n \"adminUsername\": + \"azureuser\",\n \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCYpZoWGqsIbCKOvcrtPi5PpgoaP24pKJ8yk80qBYbqIjyVngCfM8rbgQCZKx4D8emmN7UxjiSt+c4WtV1aUfbT7VA5r4neuhPVgkqgp7CmkKdf0beV/0i5K28J7RojDTktllY9EYRYK6A4olLplaHJiuqbsMYa8amv43ol6IxgM3eE2BiEYm0/uvNKDmZ8AN4w07fFKjz1+wfdkluxC73qhijMY6FCgw+xEvvS1kd2Se6L/M/qV+VVnxW+S/bBT4Yew2dR6KWnauJvxXzdM8WQHyJy52jQ1n5PHxVRMgjRLhWvbcNNgPseFpULxe3a4ATS8kKO2Z9pzpSOgEpW7LVz\"\n + \ }\n ]\n }\n },\n \"servicePrincipalProfile\": {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n + \ },\n \"nodeResourceGroup\": \"MC_clitest000001_cliakstest000002_westus2\",\n + \ \"enableRBAC\": true,\n \"enablePodSecurityPolicy\": false,\n \"networkProfile\": + {\n \"networkPlugin\": \"kubenet\",\n \"loadBalancerSku\": \"Standard\",\n + \ \"loadBalancerProfile\": {\n \"managedOutboundIPs\": {\n \"count\": + 1\n },\n \"effectiveOutboundIPs\": [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/ee46931e-116f-4edb-94e5-772300f92532\"\n + \ }\n ],\n \"backendPoolType\": \"nodeIPConfiguration\"\n },\n + \ \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\",\n + \ \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\",\n + \ \"outboundType\": \"loadBalancer\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\n + \ ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\": + [\n 
\"IPv4\"\n ]\n },\n \"maxAgentPools\": 100,\n \"identityProfile\": + {\n \"kubeletidentity\": {\n \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/cliakstest000002-agentpool\",\n + \ \"clientId\":\"00000000-0000-0000-0000-000000000001\",\n \"objectId\":\"00000000-0000-0000-0000-000000000001\"\n + \ }\n },\n \"disableLocalAccounts\": false,\n \"securityProfile\": + {},\n \"storageProfile\": {\n \"diskCSIDriver\": {\n \"enabled\": + true,\n \"version\": \"v1\"\n },\n \"fileCSIDriver\": {\n \"enabled\": + true\n },\n \"snapshotController\": {\n \"enabled\": true\n }\n + \ },\n \"oidcIssuerProfile\": {\n \"enabled\": false\n },\n \"workloadAutoScalerProfile\": + {}\n },\n \"identity\": {\n \"type\": \"SystemAssigned\",\n \"principalId\":\"00000000-0000-0000-0000-000000000001\",\n + \ \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\": + {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" + headers: + cache-control: + - no-cache + content-length: + - '4072' + content-type: + - application/json + date: + - Wed, 19 Oct 2022 07:52:08 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - aks delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -g -n --yes --no-wait + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1020-azure-x86_64-with-glibc2.29) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview + response: + body: + string: '' + headers: + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/4e48894b-5cd4-4665-a6f9-c35045f3f345?api-version=2016-03-30 + cache-control: + - no-cache + content-length: + - '0' + date: + - Wed, 19 Oct 2022 07:52:09 GMT + expires: + - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operationresults/4e48894b-5cd4-4665-a6f9-c35045f3f345?api-version=2016-03-30 + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + status: + code: 202 + message: Accepted +version: 1 diff --git a/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py b/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py index e8f5e58c17e..e49e03996d0 100644 --- a/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py +++ b/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py @@ -6071,3 +6071,38 @@ def test_aks_nodepool_update_with_nsg_control(self, resource_group, resource_gro self.cmd(cmd, checks=[ self.is_empty(), ]) + + @AllowLargeResponse() + @AKSCustomResourceGroupPreparer(random_name_length=17, 
name_prefix='clitest', location='westus2') + def test_aks_create_and_update_ssh_public_key(self, resource_group, resource_group_location): + aks_name = self.create_random_name('cliakstest', 16) + self.kwargs.update({ + 'resource_group': resource_group, + 'name': aks_name, + 'ssh_key_value': self.generate_ssh_keys() + }) + + create_cmd = 'aks create --resource-group={resource_group} --name={name} ' \ + '-c 1 --ssh-key-value={ssh_key_value} -o json' + self.cmd(create_cmd, checks=[ + self.check('provisioningState', 'Succeeded'), + ]) + + TEST_SSH_KEY_PUB = 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCYpZoWGqsIbCKOvcrtPi5PpgoaP24pKJ8yk80qBYbqIjyVngCfM8rbgQCZKx4D8emmN7UxjiSt+c4WtV1aUfbT7VA5r4neuhPVgkqgp7CmkKdf0beV/0i5K28J7RojDTktllY9EYRYK6A4olLplaHJiuqbsMYa8amv43ol6IxgM3eE2BiEYm0/uvNKDmZ8AN4w07fFKjz1+wfdkluxC73qhijMY6FCgw+xEvvS1kd2Se6L/M/qV+VVnxW+S/bBT4Yew2dR6KWnauJvxXzdM8WQHyJy52jQ1n5PHxVRMgjRLhWvbcNNgPseFpULxe3a4ATS8kKO2Z9pzpSOgEpW7LVz' # pylint: disable=line-too-long + _, pathname = tempfile.mkstemp() + with open(pathname, 'w') as key_file: + key_file.write(TEST_SSH_KEY_PUB) + self.kwargs.update({ + 'ssh_key_value': pathname.replace('\\', '\\\\') + }) + + update_cmd = 'aks update --resource-group={resource_group} --name={name} ' \ + '--ssh-key-value={ssh_key_value} -o json' + self.cmd(update_cmd, checks=[ + self.check('provisioningState', 'Succeeded'), + self.check('linuxProfile.ssh.publicKeys[0].keyData', TEST_SSH_KEY_PUB) + ]) + + # delete + self.cmd( + 'aks delete -g {resource_group} -n {name} --yes --no-wait', checks=[self.is_empty()]) diff --git a/src/aks-preview/setup.py b/src/aks-preview/setup.py index 043401d1547..42914e2ffba 100644 --- a/src/aks-preview/setup.py +++ b/src/aks-preview/setup.py @@ -9,7 +9,7 @@ from setuptools import setup, find_packages -VERSION = "0.5.110" +VERSION = "0.5.111" CLASSIFIERS = [ "Development Status :: 4 - Beta", From 7679096416413cf429438bbdd8d5f7ccc21d5010 Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Thu, 20 Oct 2022 08:17:25 +0000 Subject: [PATCH 11/85] [Release] Update index.json for extension [ aks-preview ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=10137&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/d1a9d16cc487dcf4b2f96954a2a291cd5ee65e03 --- src/index.json | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/src/index.json b/src/index.json index 3eca230f455..93d67fe5f89 100644 --- a/src/index.json +++ b/src/index.json @@ -6911,6 +6911,49 @@ "version": "0.5.110" }, "sha256Digest": "eb0922cd4f404db8fc064c315ee86bdb750b9389e0a2513d04fa535855b410bb" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/aks_preview-0.5.111-py2.py3-none-any.whl", + "filename": "aks_preview-0.5.111-py2.py3-none-any.whl", + "metadata": { + "azext.isPreview": true, + "azext.minCliCoreVersion": "2.38.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + 
"description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/aks-preview" + } + } + }, + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "aks-preview", + "summary": "Provides a preview for upcoming AKS features", + "version": "0.5.111" + }, + "sha256Digest": "7da4a7815d1f192f1e5c1172e732956ddfaa9d218856188b9ce5906040b21fbe" } ], "alertsmanagement": [ From d78a840c3cfa60371e6323454c535ec6093fd3dc Mon Sep 17 00:00:00 2001 From: Bavneet Singh <33008256+bavneetsingh16@users.noreply.github.com> Date: Thu, 20 Oct 2022 02:10:52 -0700 Subject: [PATCH 12/85] [k8s-configuration] Update configuration CLI to v1.7.0 to support Azure Blob Storage (#5472) * Create pull.yml * Update pull.yml * Update azure-pipelines.yml * Initial commit of k8s-extension * Update CODEOWNERS * Update azure-pipelines.yml * Create pull.yml * Update pull.yml * Update pull.yml * Update pipelines file * Update k8s-configuration name * Update test script params * Update pipeline file * Remove codeowners * Update pipelines file * Update CODEOWNERS * Update private preview pipelines * Remove open service mesh from public release * Update pipeline files * Update custom pipelines files * Add publish step to k8s-configuration * Update pipeline to publish extension * Update public extension pipeline * Change condition variable * Update pipeline naming * Add version to public preview/private preview * Update pipelines * Add different testing based on private branch * Add annotations to extension model * Update k8s-custom-pipelines.yml * Update SDKs with Updated Swagger Spec for 2020-07-01-preview (#13) * Update sdks with updated swagger spec * Update version and history rst * Reorder release history timeline * Fix ExtensionInstanceForCreate for import * remove py2 bdist support * Add custom table formatting * Remove unnecessary files * Fix style issues * Fix branch based on comments * Update identity piece manually * Don't handle defaults at the CLI level * Remove defaults from CLI client * Check null target namespace with namespace scope * Update style * Add cassandra operator and location to model * Stage Public Version of k8s-extension 0.2.0 for official release (#15) * Create pull.yml * Update pull.yml * Update azure-pipelines.yml * Initial commit of k8s-extension * Update pipelines file * Update CODEOWNERS * Update private preview pipelines * Remove open service mesh from public release * Update pipeline files * Update public extension pipeline * Change condition variable * Add version to public preview/private preview * Update pipelines * Add different testing based on private branch * Add annotations to extension model * Update k8s-custom-pipelines.yml * Update SDKs with Updated Swagger Spec for 2020-07-01-preview (#13) * Update sdks with updated swagger spec * Update version and history rst * Reorder release history timeline * Fix ExtensionInstanceForCreate for import * remove py2 bdist support * Add custom table formatting * Remove unnecessary files * Fix style issues * Fix branch based on comments * Update identity piece manually * Don't handle defaults at the CLI level * Remove defaults from CLI client * Check null target namespace with namespace scope * Update style * Add cassandra operator and location to model Co-authored-by: action@github.com * Remove custom pipelines file * Update extension description, remove private const * Update pipeline file * Disable check ref docs * Disable refs docs 
* Update to include better create warning logs and remove update context (#20) * Update to include better create warning logs and remove update context * Remove help text for update * Fix spelling error * Update message * Fix k8s-extension conflict with private version * Fix style errors * Fix filename * add customization for microsoft.azureml.kubernetes (#23) * add customization for microsoft.azureml.kubernetes * Update release history Co-authored-by: Yue Yu Co-authored-by: jonathan-innis * Add E2E Testing from Separate branch into internal code (#26) * Add internal e2e testing * Change to testing folder * Inference CLI validation for Scoring FE (#24) * cli validation starter * added the call to the fe validation function * nodeport validation not required * test fix Co-authored-by: Jonathan Innis * legal warning added (#27) * Remove deprecated method logger.warn * Update k8s-custom-pipelines.yml for Azure Pipelines * Update k8s-custom-pipelines.yml for Azure Pipelines * Add Azure Defender to E2E testing (#28) * Add azure defender testing to e2e * Remove the debug flag * Add configuration testing * Fix pipeline failures * Make test script more intuitive * Remove parameter from testing * Add some debug * Fix wrong location for k8s config whl * Fix pip install upgrade issue * Fix pip install upgrade issue * Add Check for Provider Registration and Refactor (#19) * Add check for provider registration and refactor * Fix bug in checking registration * Add license header to utils * Update private key check and error messaging * Update based on refactoring * Fix failing tests * Add provider registration check * Create a test for uppercase url, address comments * Add blank line to fix style check * Testing increase to ubuntu-latest * Update k8s-configuration Models to Track2 (#63) * Update models to track2 * Increase k8s-configuration version number * Update kind version * Change error to warning because of DSA failure * Upgrade helm operator chart version (#75) * Pin helm version * Bump version * Migrate pipeline (#90) * Disable updates on configuration tests (#89) * Release k8s-configuration v1.2.0 for Flux v2 Public Preview (#86) * Scaffold out the k8s-config package * Base implementation of CLI commands * Add create scenario and cleanup in consts * Add help text to commands, params * Add other clients to client factory * Automatically installing the flux extension * Move flux and extension into modules * Updated the versioned sdks * Push working command for testing * Update to multi api versioned sdk * Support other extension methods * Fix nullity check * Add source control provider * Add scc commands * Add defer logic for create with cache * Use default extension with identity * Fix identity creation * Add kustomization caching * Add formatters * Add scc provider * Add help text for k8s-config fluxv1 * Add help text for extension * Allow force delete of extension and fluxconfiguration * Add location to the extension model * Update with latest from k8s-extension * Add k8s-config testing * Add license header * Fix all style issues * Update codeowners file * Validate data before checking cluster compliance * No kustomizations warning * Fix identity issue in 2020-07-01 * Fix k8s regex * Fix configuration name regex validation * Fix name length validation * Adding some validation warnings * Add protected settings to request * Exclude private test path * Add suspend functionality * Add correct values to build Kustomization * Add no_wait * Fix style issues * Use base64 encoded httpsUser * Fix 
formatting error and base64 encoding error * Fix style issues * Fix force * Updated help text * Style fixes * Increase namespace maximum len * Add managed cluster support to k8s-config * Custom confirmation when prune is enabled * Add flux commands to existing k8s-configuration * Remove extension provider from CLI * Fix style issues * Override extension variables * Strip newlines from known_hosts file * Update help text and validators * Strip newlines from known hosts * Add provisioning state check for flux extension * Pin helm version * Remove validation from create command * Add patch support with new SDK * Add implementation for CRUD of source and kustomization * Fix errors on patch * Fix some bugs in patching properties * Add fixes for patch in k8s-configuraiton * Change duration formatting in table output * Add validation and conversion for durations * Bump verison and fix typo * Fix bug with dependencies * Fix linter and style issues * Fix delete prune check * Add flux testing * Create separate jobs for scenarios * Update error text * Fix filepath suggestion from CLI team * Fix unneeded file edit * Add a Deployed Object List to the Flux CLI (#91) * Enable a deployed object list on the CLI * Show detail when extension install fails * Bump version * Update deployed object format (#93) * Fix help text for consistency * Enable Bucket Support in the CLI (#92) * Update vendored_sdks * Enable source kind generation factories and use kwargs to pass to provider * Add better validation logic to source generator * Move away from classes in the provider directory * Fix style issues using black auto-formatter * Fix linter failures * Update identity with api version and rp in same function * Bucket Testing for E2E Testing (#96) * Bucket testing * Add switching kind test to update CLI * Bump version * Fix help text and parameter naming for bucket (#100) * Fix default help text for parameters (#101) * Edit history with breaking change * Prepare GA FluxConfiguration 2022-03-01 (#103) * Add vendored sdks for new api-version * Update the new depends on definition * Update table formatting * BucketDefinition to BucketPatchDefinition * Remove http url warning * Update vendored_sdks * Update action file to add depenencies * Remove preview from command groups * Update changelog * Make dependencies none when not specified * Use KustomizationPatch instead of Kustomization for internal rep (#117) * Only test k8s-configuration in azdev test (#140) * modify codeowners for k8s-configuration (#157) Co-authored-by: Bavneet Singh * [k8s-configuration] add support for provisionedClusters (#146) * [k8s-configuration] add support for provisionedClusters * resolve cli errors * remove sourceControlConfiguration support for provisionedClusters * code cleanup * updates hybridcontainerservice api version * change description for cluster_type Co-authored-by: Bavneet Singh * bump k8s-configuration version to 1.6.0 * CI fix[k8s-configuration]: deprecate python version 3.6 use in testing (#174) * CI fix[k8s-configuration]: deprecate python version 3.6 use in testing * correct the python version for package install Co-authored-by: Bavneet Singh * Support Azure blob as source (#156) * [k8s-configuration] Update configuration CLI to v1.7.0 to support Azure Blob Storage * change help message Co-authored-by: Jonathan Innis Co-authored-by: action@github.com Co-authored-by: yuyue9284 <15863499+yuyue9284@users.noreply.github.com> Co-authored-by: Yue Yu Co-authored-by: Lia Kazakova <58274127+liakaz@users.noreply.github.com> 
Co-authored-by: Bavneet Singh Co-authored-by: Summer Hasama <69527370+summerhasama@users.noreply.github.com> --- src/k8s-configuration/HISTORY.rst | 4 + .../azext_k8s_configuration/_help.py | 15 +- .../azext_k8s_configuration/_params.py | 58 +- .../azext_k8s_configuration/consts.py | 51 +- .../providers/FluxConfigurationProvider.py | 177 +- .../tests/latest/test_validators.py | 72 +- .../azext_k8s_configuration/validators.py | 38 + .../_source_control_configuration_client.py | 24 +- .../vendored_sdks/models.py | 1 + .../vendored_sdks/v2022_07_01/__init__.py | 23 + .../v2022_07_01/_configuration.py | 73 + .../vendored_sdks/v2022_07_01/_patch.py | 19 + .../_source_control_configuration_client.py | 129 + .../vendored_sdks/v2022_07_01/_vendor.py | 27 + .../vendored_sdks/v2022_07_01/_version.py | 9 + .../vendored_sdks/v2022_07_01/aio/__init__.py | 20 + .../v2022_07_01/aio/_configuration.py | 72 + .../vendored_sdks/v2022_07_01/aio/_patch.py | 19 + .../_source_control_configuration_client.py | 126 + .../v2022_07_01/aio/operations/__init__.py | 28 + .../aio/operations/_extensions_operations.py | 701 +++++ ...flux_config_operation_status_operations.py | 125 + .../_flux_configurations_operations.py | 704 +++++ .../_operation_status_operations.py | 229 ++ .../v2022_07_01/aio/operations/_operations.py | 123 + .../v2022_07_01/aio/operations/_patch.py | 19 + ...ource_control_configurations_operations.py | 459 +++ .../v2022_07_01/models/__init__.py | 133 + .../v2022_07_01/models/_models_py3.py | 2656 +++++++++++++++++ .../v2022_07_01/models/_patch.py | 19 + ...urce_control_configuration_client_enums.py | 121 + .../v2022_07_01/operations/__init__.py | 28 + .../operations/_extensions_operations.py | 934 ++++++ ...flux_config_operation_status_operations.py | 173 ++ .../_flux_configurations_operations.py | 939 ++++++ .../_operation_status_operations.py | 317 ++ .../v2022_07_01/operations/_operations.py | 153 + .../v2022_07_01/operations/_patch.py | 19 + ...ource_control_configurations_operations.py | 639 ++++ src/k8s-configuration/setup.py | 2 +- 40 files changed, 9459 insertions(+), 19 deletions(-) create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/__init__.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/_configuration.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/_patch.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/_source_control_configuration_client.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/_vendor.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/_version.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/__init__.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/_configuration.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/_patch.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/_source_control_configuration_client.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/__init__.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_extensions_operations.py create mode 100644 
src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_flux_config_operation_status_operations.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_flux_configurations_operations.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_operation_status_operations.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_operations.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_patch.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_source_control_configurations_operations.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/models/__init__.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/models/_models_py3.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/models/_patch.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/models/_source_control_configuration_client_enums.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/__init__.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_extensions_operations.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_flux_config_operation_status_operations.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_flux_configurations_operations.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_operation_status_operations.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_operations.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_patch.py create mode 100644 src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_source_control_configurations_operations.py diff --git a/src/k8s-configuration/HISTORY.rst b/src/k8s-configuration/HISTORY.rst index 9282f9828a6..db7b1e4c8ac 100644 --- a/src/k8s-configuration/HISTORY.rst +++ b/src/k8s-configuration/HISTORY.rst @@ -3,6 +3,10 @@ Release History =============== +1.7.0 +++++++++++++++++++ +* Add support for Azure Blob Storage + 1.6.0 ++++++++++++++++++ * Add support for provisionedClusters diff --git a/src/k8s-configuration/azext_k8s_configuration/_help.py b/src/k8s-configuration/azext_k8s_configuration/_help.py index 8b6c6dc04f8..5d1062f7330 100644 --- a/src/k8s-configuration/azext_k8s_configuration/_help.py +++ b/src/k8s-configuration/azext_k8s_configuration/_help.py @@ -94,6 +94,14 @@ --kind bucket --url https://bucket-provider.minio.io \\ --bucket-name my-bucket --kustomization name=my-kustomization \\ --bucket-access-key my-access-key --bucket-secret-key my-secret-key + - name: Create a Kubernetes v2 Flux Configuration with Azure Blob Source Kind + text: |- + az k8s-configuration flux create --resource-group my-resource-group \\ + --cluster-name mycluster --cluster-type connectedClusters \\ + --name myconfig --scope cluster --namespace my-namespace \\ + --kind azblob --url https://mystorageaccount.blob.core.windows.net \\ + 
--container-name my-container --kustomization name=my-kustomization \\ + --account-key my-account-key """ helps[ @@ -108,11 +116,16 @@ --cluster-name mycluster --cluster-type connectedClusters --name myconfig \\ --url https://github.com/Azure/arc-k8s-demo --branch main \\ --kustomization name=my-kustomization path=./my/new-path - - name: Update a Flux v2 Kubernetse configuration with Bucket Source Kind to connect insecurely + - name: Update a Flux v2 Kubernetes configuration with Bucket Source Kind to connect insecurely text: |- az k8s-configuration flux update --resource-group my-resource-group \\ --cluster-name mycluster --cluster-type connectedClusters --name myconfig \\ --bucket-insecure + - name: Update a Flux v2 Kubernetes configuration with Azure Blob Source Kind with another container name + text: |- + az k8s-configuration flux update --resource-group my-resource-group \\ + --cluster-name mycluster --cluster-type connectedClusters --name myconfig \\ + --container-name other-container """ helps[ diff --git a/src/k8s-configuration/azext_k8s_configuration/_params.py b/src/k8s-configuration/azext_k8s_configuration/_params.py index 7ef8abe1ad0..e93db8d8b50 100644 --- a/src/k8s-configuration/azext_k8s_configuration/_params.py +++ b/src/k8s-configuration/azext_k8s_configuration/_params.py @@ -67,7 +67,7 @@ def load_arguments(self, _): ) c.argument( "kind", - arg_type=get_enum_type([consts.GIT, consts.BUCKET]), + arg_type=get_enum_type([consts.GIT, consts.BUCKET, consts.AZBLOB]), help="Source kind to reconcile", ) c.argument( @@ -178,6 +178,62 @@ def load_arguments(self, _): help="Define kustomizations to sync sources with parameters ['name', 'path', 'depends_on', 'timeout', 'sync_interval', 'retry_interval', 'prune', 'force']", nargs="+", ) + c.argument( + "container_name", + help="Name of the Azure Blob Storage container to sync", + ) + c.argument( + "sp_client_id", + arg_group="Azure Blob Auth", + options_list=["--sp-client-id", "--service-principal-client-id"], + help="The client ID for authenticating a service principal with Azure Blob, required for this authentication method", + ) + c.argument( + "sp_tenant_id", + arg_group="Azure Blob Auth", + options_list=["--sp-tenant-id", "--service-principal-tenant-id"], + help="The tenant ID for authenticating a service principal with Azure Blob, required for this authentication method", + ) + c.argument( + "sp_client_secret", + arg_group="Azure Blob Auth", + options_list=["--sp-client-secret", "--service-principal-client-secret"], + help="The client secret for authenticating a service principal with Azure Blob", + ) + c.argument( + "sp_client_cert", + arg_group="Azure Blob Auth", + options_list=["--sp-client-cert", "--service-principal-client-certificate"], + help="The Base64 encoded client certificate for authenticating a service principal with Azure Blob", + ) + c.argument( + "sp_client_cert_password", + arg_group="Azure Blob Auth", + options_list=["--sp-cert-password", "--service-principal-client-certificate-password"], + help="The password for the client certificate used to authenticate a service principal with Azure Blob", + ) + c.argument( + "sp_client_cert_send_chain", + arg_group="Azure Blob Auth", + options_list=["--sp-cert-send-chain", "--service-principal-client-certificate-send-chain"], + help="Specify whether to include x5c header in client claims when acquiring a token to enable subject name / issuer based authentication for the client certificate", + ) + c.argument( + "account_key", + arg_group="Azure Blob Auth", + 
help="The Azure Blob Shared Key for authentication ", + ) + c.argument( + "sas_token", + arg_group="Azure Blob Auth", + help="The Azure Blob SAS Token for authentication ", + ) + c.argument( + "mi_client_id", + arg_group="Azure Blob Auth", + options_list=["--mi-client-id", "--managed-identity-client-id"], + help="The client ID of the managed identity for authentication with Azure Blob", + ) with self.argument_context("k8s-configuration flux update") as c: c.argument( diff --git a/src/k8s-configuration/azext_k8s_configuration/consts.py b/src/k8s-configuration/azext_k8s_configuration/consts.py index cf022906c5b..0da0dc5c430 100644 --- a/src/k8s-configuration/azext_k8s_configuration/consts.py +++ b/src/k8s-configuration/azext_k8s_configuration/consts.py @@ -8,8 +8,8 @@ # API VERSIONS ----------------------------------------- SOURCE_CONTROL_API_VERSION = "2022-03-01" -FLUXCONFIG_API_VERSION = "2022-03-01" -EXTENSION_API_VERSION = "2022-03-01" +FLUXCONFIG_API_VERSION = "2022-07-01" +EXTENSION_API_VERSION = "2022-07-01" # ERROR/HELP TEXT DEFINITIONS ----------------------------------------- @@ -41,7 +41,30 @@ REQUIRED_BUCKET_VALUES_MISSING_HELP = ( "Provide either both of '--secret-key' and '--access-key' or '--local-auth-ref'" ) - +REQUIRED_AZURE_BLOB_SERVICE_PRINCIPAL_VALUES_MISSING_ERROR = ( + "Error! Service principal is invalid because it is missing value(s)" +) +REQUIRED_AZURE_BLOB_SERVICE_PRINCIPAL_VALUES_MISSING_HELP = ( + "Provide '--sp-client-id', '--sp-tenant-id', and either '--sp-client-secret' or '--sp-client-cert'" +) +REQUIRED_AZURE_BLOB_SERVICE_PRINCIPAL_AUTH_ERROR = ( + "Error! Too many authentication methods provided for service principal" +) +REQUIRED_AZURE_BLOB_SERVICE_PRINCIPAL_AUTH_HELP = ( + "Provide either '--sp-client-secret' or '--sp-client-cert'" +) +REQUIRED_AZURE_BLOB_SERVICE_PRINCIPAL_CERT_VALUES_MISSING_ERROR = ( + "Error! Service principal certificate password is invalid" +) +REQUIRED_AZURE_BLOB_SERVICE_PRINCIPAL_CERT_VALUES_MISSING_HELP = ( + "Provide '--sp-client-id', '--sp-tenant-id', and '--sp-client-cert' with your '--sp-cert-password" +) +REQUIRED_AZURE_BLOB_AUTH_ERROR = ( + "Error! Too many authentication methods provided for Azure Blob" +) +REQUIRED_AZURE_BLOB_AUTH_HELP = ( + "Specify one of the available authentication methods from the list: '--local-auth-ref', '--account-key', '--sas-token', '--mi-client-id', or service principal with '--sp-client-id', '--sp-tenant-id', and either '--sp-client-secret' or '--sp-client-cert'" +) EXTRA_VALUES_PROVIDED_ERROR = ( "Error! Invalid properties [{}] were specified for kind '{}'" ) @@ -213,6 +236,24 @@ "local_auth_ref", } +AZUREBLOB_REQUIRED_PARAMS = {"url", "container_name"} +AZUREBLOB_VALID_PARAMS = { + "url", + "container_name", + "sync_interval", + "timeout", + "account_key", + "local_auth_ref", + "sp_tenant_id", + "sp_client_id", + "sp_client_cert", + "sp_client_cert_password", + "sp_client_secret", + "sp_client_cert_send_chain", + "sas_token", + "mi_client_id", +} + DEPENDENCY_KEYS = ["dependencies", "depends_on", "dependsOn", "depends"] SYNC_INTERVAL_KEYS = ["interval", "sync_interval", "syncInterval"] RETRY_INTERVAL_KEYS = ["retryInterval", "retry_interval"] @@ -222,12 +263,16 @@ VALID_DURATION_REGEX = r"((?P\d+?)h)?((?P\d+?)m)?((?P\d+?)s)?" 
VALID_GIT_URL_REGEX = r"^(((http|https|ssh)://)|(git@))" VALID_BUCKET_URL_REGEX = r"^(((http|https)://))" +VALID_AZUREBLOB_URL_REGEX = r"^(((http|https)://))" VALID_KUBERNETES_DNS_SUBDOMAIN_NAME_REGEX = r"^[a-z0-9]([\.\-a-z0-9]*[a-z0-9])?$" VALID_KUBERNETES_DNS_NAME_REGEX = r"^[a-z0-9]([\-a-z0-9]*[a-z0-9])?$" GIT = "git" BUCKET = "bucket" +BUCKET_CAPS = "Bucket" +AZBLOB = "azblob" +AZURE_BLOB = "AzureBlob" GIT_REPOSITORY = "GitRepository" CONNECTED_CLUSTER_TYPE = "connectedclusters" diff --git a/src/k8s-configuration/azext_k8s_configuration/providers/FluxConfigurationProvider.py b/src/k8s-configuration/azext_k8s_configuration/providers/FluxConfigurationProvider.py index 8324cbc350f..089dbc61547 100644 --- a/src/k8s-configuration/azext_k8s_configuration/providers/FluxConfigurationProvider.py +++ b/src/k8s-configuration/azext_k8s_configuration/providers/FluxConfigurationProvider.py @@ -41,24 +41,29 @@ validate_git_url, validate_known_hosts, validate_repository_ref, + validate_azure_blob_auth, validate_duration, validate_private_key, validate_url_with_params, ) from .. import consts -from ..vendored_sdks.v2022_03_01.models import ( +from ..vendored_sdks.v2022_07_01.models import ( FluxConfiguration, FluxConfigurationPatch, GitRepositoryDefinition, GitRepositoryPatchDefinition, BucketDefinition, BucketPatchDefinition, + AzureBlobDefinition, + AzureBlobPatchDefinition, + ServicePrincipalDefinition, + ManagedIdentityDefinition, RepositoryRefDefinition, KustomizationDefinition, KustomizationPatchDefinition, SourceKindType, ) -from ..vendored_sdks.v2022_03_01.models import Extension, Identity +from ..vendored_sdks.v2022_07_01.models import Extension, Identity logger = get_logger(__name__) @@ -150,6 +155,16 @@ def create_config( suspend=False, kustomization=None, no_wait=False, + container_name=None, + sp_tenant_id=None, + sp_client_id=None, + sp_client_cert=None, + sp_client_cert_password=None, + sp_client_secret=None, + sp_client_cert_send_chain=False, + account_key=None, + sas_token=None, + mi_client_id=None, cluster_resource_provider=None, ): @@ -179,6 +194,16 @@ def create_config( bucket_access_key=bucket_access_key, bucket_secret_key=bucket_secret_key, bucket_insecure=bucket_insecure, + container_name=container_name, + account_key=account_key, + sas_token=sas_token, + sp_tenant_id=sp_tenant_id, + sp_client_id=sp_client_id, + sp_client_cert=sp_client_cert, + sp_client_cert_password=sp_client_cert_password, + sp_client_secret=sp_client_secret, + sp_client_cert_send_chain=sp_client_cert_send_chain, + mi_client_id=mi_client_id, ) # This update func is a generated update function that modifies @@ -264,6 +289,16 @@ def update_config( kustomization=None, no_wait=False, yes=False, + container_name=None, + sp_tenant_id=None, + sp_client_id=None, + sp_client_cert=None, + sp_client_cert_password=None, + sp_client_secret=None, + sp_client_cert_send_chain=False, + account_key=None, + sas_token=None, + mi_client_id=None, cluster_resource_provider=None, ): @@ -298,6 +333,16 @@ def update_config( bucket_access_key=bucket_access_key, bucket_secret_key=bucket_secret_key, bucket_insecure=bucket_insecure, + container_name=container_name, + account_key=account_key, + sas_token=sas_token, + sp_tenant_id=sp_tenant_id, + sp_client_id=sp_client_id, + sp_client_cert=sp_client_cert, + sp_client_cert_password=sp_client_cert_password, + sp_client_secret=sp_client_secret, + sp_client_cert_send_chain=sp_client_cert_send_chain, + mi_client_id=mi_client_id, ) # This update func is a generated update function that modifies @@ 
-772,13 +817,17 @@ def __add_identity( def source_kind_generator_factory(kind=consts.GIT, **kwargs): if kind == consts.GIT: return GitRepositoryGenerator(**kwargs) - return BucketGenerator(**kwargs) + if kind == consts.BUCKET: + return BucketGenerator(**kwargs) + return AzureBlobGenerator(**kwargs) def convert_to_cli_source_kind(rp_source_kind): if rp_source_kind == consts.GIT_REPOSITORY: return consts.GIT - return consts.BUCKET + elif rp_source_kind == consts.BUCKET_CAPS: + return consts.BUCKET + return consts.AZBLOB class SourceKindGenerator: @@ -936,13 +985,8 @@ def git_repository_updater(config): self.validate() config.source_kind = SourceKindType.GIT_REPOSITORY - # Have to set these things to none otherwise the patch will fail - # due to default values - config.bucket = BucketDefinition( - insecure=None, - timeout_in_seconds=None, - sync_interval_in_seconds=None, - ) + config.bucket = BucketPatchDefinition() + config.azure_blob = AzureBlobPatchDefinition() return config return git_repository_updater @@ -1025,11 +1069,122 @@ def bucket_patch_updater(config): self.validate() config.source_kind = SourceKindType.BUCKET config.git_repository = GitRepositoryPatchDefinition() + config.azure_blob = AzureBlobPatchDefinition() return config return bucket_patch_updater +class AzureBlobGenerator(SourceKindGenerator): + def __init__(self, **kwargs): + # Common Pre-Validation + super().__init__( + consts.AZBLOB, consts.AZUREBLOB_REQUIRED_PARAMS, consts.AZUREBLOB_VALID_PARAMS + ) + super().validate_params(**kwargs) + + # Pre-Validations + validate_duration("--timeout", kwargs.get("timeout")) + validate_duration("--sync-interval", kwargs.get("sync_interval")) + + self.kwargs = kwargs + self.url = kwargs.get("url") + self.container_name = kwargs.get("container_name") + self.timeout = kwargs.get("timeout") + self.sync_interval = kwargs.get("sync_interval") + self.account_key = kwargs.get("account_key") + self.sas_token = kwargs.get("sas_token") + self.local_auth_ref = kwargs.get("local_auth_ref") + + self.service_principal = None + if any( + [ + kwargs.get("sp_client_id"), + kwargs.get("sp_tenant_id"), + kwargs.get("sp_client_secret"), + kwargs.get("sp_client_cert"), + kwargs.get("sp_client_cert_password"), + kwargs.get("sp_client_cert_send_chain") + ] + ): + self.service_principal = ServicePrincipalDefinition( + client_id=kwargs.get("sp_client_id"), + tenant_id=kwargs.get("sp_tenant_id"), + client_secret=kwargs.get("sp_client_secret"), + client_certificate=kwargs.get("sp_client_cert"), + client_certificate_password=kwargs.get("sp_client_cert_password"), + client_certificate_send_chain=kwargs.get("sp_client_cert_send_chain") + ) + + self.managed_identity = None + if any( + [ + kwargs.get("mi_client_id"), + ] + ): + self.managed_identity = ManagedIdentityDefinition( + client_id=kwargs.get("mi_client_id"), + ) + + def validate(self): + super().validate_required_params(**self.kwargs) + validate_bucket_url(self.url) + validate_azure_blob_auth(self) + + def generate_update_func(self): + """ + generate_update_func(self) generates a function to add a Azure Blob + object to the flux configuration for the PUT case + """ + self.validate() + + def azure_blob_updater(config): + config.azure_blob = AzureBlobDefinition( + url=self.url, + container_name=self.container_name, + timeout_in_seconds=parse_duration(self.timeout), + sync_interval_in_seconds=parse_duration(self.sync_interval), + account_key=self.account_key, + sas_token=self.sas_token, + service_principal=self.service_principal, + 
managed_identity=self.managed_identity, + local_auth_ref=self.local_auth_ref, + ) + config.source_kind = SourceKindType.AZURE_BLOB + return config + + return azure_blob_updater + + def generate_patch_update_func(self, swapped_kind): + """ + generate_patch_update_func(self) generates a function update the AzureBlob + object to the flux configuration for the PATCH case. + If the source kind has been changed, we also set the GitRepository and Bucket to null + """ + + def azure_blob_patch_updater(config): + if any(kwarg is not None for kwarg in self.kwargs.values()): + config.azure_blob = AzureBlobPatchDefinition( + url=self.url, + container_name=self.container_name, + timeout_in_seconds=parse_duration(self.timeout), + sync_interval_in_seconds=parse_duration(self.sync_interval), + account_key=self.account_key, + sas_token=self.sas_token, + local_auth_ref=self.local_auth_ref, + service_principal=self.service_principal, + managed_identity=self.managed_identity, + ) + if swapped_kind: + self.validate() + config.source_kind = SourceKindType.AZURE_BLOB + config.bucket = BucketPatchDefinition() + config.git_repository = GitRepositoryPatchDefinition() + return config + + return azure_blob_patch_updater + + def get_protected_settings( ssh_private_key, ssh_private_key_file, https_key, bucket_secret_key ): diff --git a/src/k8s-configuration/azext_k8s_configuration/tests/latest/test_validators.py b/src/k8s-configuration/azext_k8s_configuration/tests/latest/test_validators.py index 03113f3ae17..ff333df6990 100644 --- a/src/k8s-configuration/azext_k8s_configuration/tests/latest/test_validators.py +++ b/src/k8s-configuration/azext_k8s_configuration/tests/latest/test_validators.py @@ -3,15 +3,17 @@ # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- +from typing import Any import unittest import base64 from azext_k8s_configuration.providers.SourceControlConfigurationProvider import get_protected_settings -from azure.cli.core.azclierror import InvalidArgumentValueError, MutuallyExclusiveArgumentError +from azure.cli.core.azclierror import InvalidArgumentValueError, MutuallyExclusiveArgumentError, RequiredArgumentMissingError from azext_k8s_configuration.validators import ( validate_configuration_name, validate_known_hosts, validate_operator_instance_name, validate_operator_namespace, + validate_azure_blob_auth, validate_private_key, validate_url_with_params, ) @@ -114,6 +116,56 @@ def test_end_hyphen_config_name(self): self.assertEqual(str(cm.exception), err) +class TestValidateAzureBlobAuth(unittest.TestCase): + def test_valid_service_principal(self): + sp = ServicePrincipal("tenantid","clientid","mysecret") + azblob = AzureBlob(sp) + validate_azure_blob_auth(azblob) + + def test_missing_client_id_service_principal(self): + sp = ServicePrincipal("tenantid",None,"mysecret") + azblob = AzureBlob(sp) + err = 'Error! Service principal is invalid because it is missing value(s)' + with self.assertRaises(RequiredArgumentMissingError) as cm: + validate_azure_blob_auth(azblob) + self.assertEqual(str(cm.exception), err) + + def test_missing_secret_service_principal(self): + sp = ServicePrincipal("tenantid","clientid") + azblob = AzureBlob(sp) + err = 'Error! 
Service principal is invalid because it is missing value(s)' + with self.assertRaises(RequiredArgumentMissingError) as cm: + validate_azure_blob_auth(azblob) + self.assertEqual(str(cm.exception), err) + + def test_too_many_auth_service_principal(self): + sp = ServicePrincipal("tenantid","clientid","mysecret","mycert") + azblob = AzureBlob(sp) + err = 'Error! Too many authentication methods provided for service principal' + with self.assertRaises(MutuallyExclusiveArgumentError) as cm: + validate_azure_blob_auth(azblob) + self.assertEqual(str(cm.exception), err) + + def test_missing_cert_service_principal(self): + sp = ServicePrincipal("tenantid","clientid","mysecret",None,"mycertpass") + azblob = AzureBlob(sp) + err = 'Error! Service principal certificate password is invalid' + with self.assertRaises(RequiredArgumentMissingError) as cm: + validate_azure_blob_auth(azblob) + self.assertEqual(str(cm.exception), err) + + def test_too_many_auth_azure_blob(self): + azblob = AzureBlob(None,"myaccountkey", "mylocalauthref") + err = 'Error! Too many authentication methods provided for Azure Blob' + with self.assertRaises(MutuallyExclusiveArgumentError) as cm: + validate_azure_blob_auth(azblob) + self.assertEqual(str(cm.exception), err) + + def test_valid_one_auth_azure_blob(self): + azblob = AzureBlob(None,"myaccountkey", None) + validate_azure_blob_auth(azblob) + + class TestValidateURLWithParams(unittest.TestCase): def test_ssh_private_key_with_ssh_url(self): validate_url_with_params('git@github.com:jonathan-innis/helm-operator-get-started-private.git', True, False, False, False, False, False) @@ -173,6 +225,24 @@ def __init__(self, operator_namespace): self.operator_namespace = operator_namespace +class ServicePrincipal: + def __init__(self, tenant_id = None, client_id = None, client_secret = None, client_certificate = None, client_certificate_password = None): + self.tenant_id = tenant_id + self.client_id = client_id + self.client_secret = client_secret + self.client_certificate = client_certificate + self.client_certificate_password = client_certificate_password + + +class AzureBlob: + def __init__(self, service_principal = None, account_key = None, local_auth_ref = None): + self.service_principal = service_principal + self.account_key = account_key + self.local_auth_ref = local_auth_ref + self.sas_token = None + self.managed_identity = None + + class OperatorInstanceName: def __init__(self, operator_instance_name): self.operator_instance_name = operator_instance_name diff --git a/src/k8s-configuration/azext_k8s_configuration/validators.py b/src/k8s-configuration/azext_k8s_configuration/validators.py index ddf7228eefa..bed3b5de216 100644 --- a/src/k8s-configuration/azext_k8s_configuration/validators.py +++ b/src/k8s-configuration/azext_k8s_configuration/validators.py @@ -95,6 +95,44 @@ def validate_repository_ref(repository_ref): ) +def validate_azure_blob_auth(azure_blob): + if azure_blob.service_principal: + sp = azure_blob.service_principal + if not ((sp.client_id and sp.tenant_id) and (sp.client_secret or sp.client_certificate)): + raise RequiredArgumentMissingError( + consts.REQUIRED_AZURE_BLOB_SERVICE_PRINCIPAL_VALUES_MISSING_ERROR, + consts.REQUIRED_AZURE_BLOB_SERVICE_PRINCIPAL_VALUES_MISSING_HELP, + ) + + if sp.client_secret and sp.client_certificate: + raise MutuallyExclusiveArgumentError( + consts.REQUIRED_AZURE_BLOB_SERVICE_PRINCIPAL_AUTH_ERROR, + consts.REQUIRED_AZURE_BLOB_SERVICE_PRINCIPAL_AUTH_HELP, + ) + + if sp.client_certificate_password and not sp.client_certificate: + 
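+            # A client certificate password on its own is invalid; it is only
+            # accepted together with a client certificate.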
raise RequiredArgumentMissingError( + consts.REQUIRED_AZURE_BLOB_SERVICE_PRINCIPAL_CERT_VALUES_MISSING_ERROR, + consts.REQUIRED_AZURE_BLOB_SERVICE_PRINCIPAL_CERT_VALUES_MISSING_HELP, + ) + + auth_count = 0 + for auth in [ + azure_blob.service_principal, + azure_blob.account_key, + azure_blob.sas_token, + azure_blob.local_auth_ref, + azure_blob.managed_identity + ]: + if auth: + auth_count += 1 + if auth_count > 1: + raise MutuallyExclusiveArgumentError( + consts.REQUIRED_AZURE_BLOB_AUTH_ERROR, + consts.REQUIRED_AZURE_BLOB_AUTH_HELP, + ) + + def validate_duration(arg_name: str, duration: str): if not duration: return diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/_source_control_configuration_client.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/_source_control_configuration_client.py index a75870fe68b..683e7580c2c 100644 --- a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/_source_control_configuration_client.py +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/_source_control_configuration_client.py @@ -55,7 +55,7 @@ class SourceControlConfigurationClient(MultiApiClientMixin, _SDKClient): :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ - DEFAULT_API_VERSION = '2022-03-01' + DEFAULT_API_VERSION = '2022-07-01' _PROFILE_TAG = "azure.mgmt.kubernetesconfiguration.SourceControlConfigurationClient" LATEST_PROFILE = ProfileDefinition({ _PROFILE_TAG: { @@ -100,6 +100,7 @@ def models(cls, api_version=DEFAULT_API_VERSION): * 2021-11-01-preview: :mod:`v2021_11_01_preview.models` * 2022-01-01-preview: :mod:`v2022_01_01_preview.models` * 2022-03-01: :mod:`v2022_03_01.models` + * 2022-07-01: :mod:`v2022_07_01.models` """ if api_version == '2020-07-01-preview': from .v2020_07_01_preview import models @@ -125,6 +126,9 @@ def models(cls, api_version=DEFAULT_API_VERSION): elif api_version == '2022-03-01': from .v2022_03_01 import models return models + elif api_version == '2022-07-01': + from .v2022_07_01 import models + return models raise ValueError("API version {} is not available".format(api_version)) @property @@ -194,6 +198,7 @@ def extensions(self): * 2021-11-01-preview: :class:`ExtensionsOperations` * 2022-01-01-preview: :class:`ExtensionsOperations` * 2022-03-01: :class:`ExtensionsOperations` + * 2022-07-01: :class:`ExtensionsOperations` """ api_version = self._get_api_version('extensions') if api_version == '2020-07-01-preview': @@ -208,6 +213,8 @@ def extensions(self): from .v2022_01_01_preview.operations import ExtensionsOperations as OperationClass elif api_version == '2022-03-01': from .v2022_03_01.operations import ExtensionsOperations as OperationClass + elif api_version == '2022-07-01': + from .v2022_07_01.operations import ExtensionsOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'extensions'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -219,6 +226,7 @@ def flux_config_operation_status(self): * 2021-11-01-preview: :class:`FluxConfigOperationStatusOperations` * 2022-01-01-preview: :class:`FluxConfigOperationStatusOperations` * 2022-03-01: :class:`FluxConfigOperationStatusOperations` + * 2022-07-01: :class:`FluxConfigOperationStatusOperations` """ api_version = self._get_api_version('flux_config_operation_status') if api_version == '2021-11-01-preview': @@ -227,6 +235,8 @@ 
def flux_config_operation_status(self): from .v2022_01_01_preview.operations import FluxConfigOperationStatusOperations as OperationClass elif api_version == '2022-03-01': from .v2022_03_01.operations import FluxConfigOperationStatusOperations as OperationClass + elif api_version == '2022-07-01': + from .v2022_07_01.operations import FluxConfigOperationStatusOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'flux_config_operation_status'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -238,6 +248,7 @@ def flux_configurations(self): * 2021-11-01-preview: :class:`FluxConfigurationsOperations` * 2022-01-01-preview: :class:`FluxConfigurationsOperations` * 2022-03-01: :class:`FluxConfigurationsOperations` + * 2022-07-01: :class:`FluxConfigurationsOperations` """ api_version = self._get_api_version('flux_configurations') if api_version == '2021-11-01-preview': @@ -246,6 +257,8 @@ def flux_configurations(self): from .v2022_01_01_preview.operations import FluxConfigurationsOperations as OperationClass elif api_version == '2022-03-01': from .v2022_03_01.operations import FluxConfigurationsOperations as OperationClass + elif api_version == '2022-07-01': + from .v2022_07_01.operations import FluxConfigurationsOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'flux_configurations'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -278,6 +291,7 @@ def operation_status(self): * 2021-11-01-preview: :class:`OperationStatusOperations` * 2022-01-01-preview: :class:`OperationStatusOperations` * 2022-03-01: :class:`OperationStatusOperations` + * 2022-07-01: :class:`OperationStatusOperations` """ api_version = self._get_api_version('operation_status') if api_version == '2021-05-01-preview': @@ -290,6 +304,8 @@ def operation_status(self): from .v2022_01_01_preview.operations import OperationStatusOperations as OperationClass elif api_version == '2022-03-01': from .v2022_03_01.operations import OperationStatusOperations as OperationClass + elif api_version == '2022-07-01': + from .v2022_07_01.operations import OperationStatusOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'operation_status'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -306,6 +322,7 @@ def operations(self): * 2021-11-01-preview: :class:`Operations` * 2022-01-01-preview: :class:`Operations` * 2022-03-01: :class:`Operations` + * 2022-07-01: :class:`Operations` """ api_version = self._get_api_version('operations') if api_version == '2020-07-01-preview': @@ -324,6 +341,8 @@ def operations(self): from .v2022_01_01_preview.operations import Operations as OperationClass elif api_version == '2022-03-01': from .v2022_03_01.operations import Operations as OperationClass + elif api_version == '2022-07-01': + from .v2022_07_01.operations import Operations as OperationClass else: raise ValueError("API version {} does not have operation group 'operations'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -339,6 +358,7 @@ def 
source_control_configurations(self): * 2021-11-01-preview: :class:`SourceControlConfigurationsOperations` * 2022-01-01-preview: :class:`SourceControlConfigurationsOperations` * 2022-03-01: :class:`SourceControlConfigurationsOperations` + * 2022-07-01: :class:`SourceControlConfigurationsOperations` """ api_version = self._get_api_version('source_control_configurations') if api_version == '2020-07-01-preview': @@ -355,6 +375,8 @@ def source_control_configurations(self): from .v2022_01_01_preview.operations import SourceControlConfigurationsOperations as OperationClass elif api_version == '2022-03-01': from .v2022_03_01.operations import SourceControlConfigurationsOperations as OperationClass + elif api_version == '2022-07-01': + from .v2022_07_01.operations import SourceControlConfigurationsOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'source_control_configurations'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/models.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/models.py index c9c8d2ae160..95e025d8c5d 100644 --- a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/models.py +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/models.py @@ -6,3 +6,4 @@ # -------------------------------------------------------------------------- from .v2022_01_01_preview.models import * from .v2022_03_01.models import * +from .v2022_07_01.models import * diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/__init__.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/__init__.py new file mode 100644 index 00000000000..ebc5a7b13bb --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/__init__.py @@ -0,0 +1,23 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._source_control_configuration_client import SourceControlConfigurationClient +from ._version import VERSION + +__version__ = VERSION + +try: + from ._patch import __all__ as _patch_all + from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk +__all__ = ['SourceControlConfigurationClient'] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/_configuration.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/_configuration.py new file mode 100644 index 00000000000..eced29bdbb7 --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/_configuration.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy + +from ._version import VERSION + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials import TokenCredential + + +class SourceControlConfigurationClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes + """Configuration for SourceControlConfigurationClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: The ID of the target subscription. + :type subscription_id: str + :keyword api_version: Api Version. Default value is "2022-07-01". Note that overriding this + default value may result in unsupported behavior. + :paramtype api_version: str + """ + + def __init__( + self, + credential: "TokenCredential", + subscription_id: str, + **kwargs: Any + ) -> None: + super(SourceControlConfigurationClientConfiguration, self).__init__(**kwargs) + api_version = kwargs.pop('api_version', "2022-07-01") # type: str + + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + + self.credential = credential + self.subscription_id = subscription_id + self.api_version = api_version + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'mgmt-kubernetesconfiguration/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs # type: Any + ): + # type: (...) 
-> None + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/_patch.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/_patch.py new file mode 100644 index 00000000000..0ad201a8c58 --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/_patch.py @@ -0,0 +1,19 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/_source_control_configuration_client.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/_source_control_configuration_client.py new file mode 100644 index 00000000000..b6a35ed8e82 --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/_source_control_configuration_client.py @@ -0,0 +1,129 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, TYPE_CHECKING + +from msrest import Deserializer, Serializer + +from azure.core.rest import HttpRequest, HttpResponse +from azure.mgmt.core import ARMPipelineClient + +from . 
import models +from ._configuration import SourceControlConfigurationClientConfiguration +from .operations import ExtensionsOperations, FluxConfigOperationStatusOperations, FluxConfigurationsOperations, OperationStatusOperations, Operations, SourceControlConfigurationsOperations + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials import TokenCredential + +class SourceControlConfigurationClient: + """KubernetesConfiguration Client. + + :ivar extensions: ExtensionsOperations operations + :vartype extensions: + azure.mgmt.kubernetesconfiguration.v2022_07_01.operations.ExtensionsOperations + :ivar operation_status: OperationStatusOperations operations + :vartype operation_status: + azure.mgmt.kubernetesconfiguration.v2022_07_01.operations.OperationStatusOperations + :ivar flux_configurations: FluxConfigurationsOperations operations + :vartype flux_configurations: + azure.mgmt.kubernetesconfiguration.v2022_07_01.operations.FluxConfigurationsOperations + :ivar flux_config_operation_status: FluxConfigOperationStatusOperations operations + :vartype flux_config_operation_status: + azure.mgmt.kubernetesconfiguration.v2022_07_01.operations.FluxConfigOperationStatusOperations + :ivar source_control_configurations: SourceControlConfigurationsOperations operations + :vartype source_control_configurations: + azure.mgmt.kubernetesconfiguration.v2022_07_01.operations.SourceControlConfigurationsOperations + :ivar operations: Operations operations + :vartype operations: azure.mgmt.kubernetesconfiguration.v2022_07_01.operations.Operations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: The ID of the target subscription. + :type subscription_id: str + :param base_url: Service URL. Default value is "https://management.azure.com". + :type base_url: str + :keyword api_version: Api Version. Default value is "2022-07-01". Note that overriding this + default value may result in unsupported behavior. + :paramtype api_version: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ """ + + def __init__( + self, + credential: "TokenCredential", + subscription_id: str, + base_url: str = "https://management.azure.com", + **kwargs: Any + ) -> None: + self._config = SourceControlConfigurationClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs) + self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + self._serialize.client_side_validation = False + self.extensions = ExtensionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.operation_status = OperationStatusOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.flux_configurations = FluxConfigurationsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.flux_config_operation_status = FluxConfigOperationStatusOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.source_control_configurations = SourceControlConfigurationsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize + ) + + + def _send_request( + self, + request: HttpRequest, + **kwargs: Any + ) -> HttpResponse: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.HttpResponse + """ + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, **kwargs) + + def close(self): + # type: () -> None + self._client.close() + + def __enter__(self): + # type: () -> SourceControlConfigurationClient + self._client.__enter__() + return self + + def __exit__(self, *exc_details): + # type: (Any) -> None + self._client.__exit__(*exc_details) diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/_vendor.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/_vendor.py new file mode 100644 index 00000000000..138f663c53a --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/_vendor.py @@ -0,0 +1,27 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.core.pipeline.transport import HttpRequest + +def _convert_request(request, files=None): + data = request.content if not files else None + request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) + if files: + request.set_formdata_body(files) + return request + +def _format_url_section(template, **kwargs): + components = template.split("/") + while components: + try: + return template.format(**kwargs) + except KeyError as key: + formatted_components = template.split("/") + components = [ + c for c in formatted_components if "{}".format(key.args[0]) not in c + ] + template = "/".join(components) diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/_version.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/_version.py new file mode 100644 index 00000000000..59deb8c7263 --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +VERSION = "1.1.0" diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/__init__.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/__init__.py new file mode 100644 index 00000000000..1efc0259c5a --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/__init__.py @@ -0,0 +1,20 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._source_control_configuration_client import SourceControlConfigurationClient + +try: + from ._patch import __all__ as _patch_all + from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk +__all__ = ['SourceControlConfigurationClient'] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/_configuration.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/_configuration.py new file mode 100644 index 00000000000..c77eb44d89a --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/_configuration.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy + +from .._version import VERSION + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + + +class SourceControlConfigurationClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes + """Configuration for SourceControlConfigurationClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The ID of the target subscription. + :type subscription_id: str + :keyword api_version: Api Version. Default value is "2022-07-01". Note that overriding this + default value may result in unsupported behavior. + :paramtype api_version: str + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + **kwargs: Any + ) -> None: + super(SourceControlConfigurationClientConfiguration, self).__init__(**kwargs) + api_version = kwargs.pop('api_version', "2022-07-01") # type: str + + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + + self.credential = credential + self.subscription_id = subscription_id + self.api_version = api_version + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'mgmt-kubernetesconfiguration/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs: Any + ) -> None: + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/_patch.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/_patch.py new file mode 100644 index 00000000000..0ad201a8c58 --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/_patch.py @@ -0,0 +1,19 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. 
+# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/_source_control_configuration_client.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/_source_control_configuration_client.py new file mode 100644 index 00000000000..e3b227f1dc8 --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/_source_control_configuration_client.py @@ -0,0 +1,126 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, Awaitable, TYPE_CHECKING + +from msrest import Deserializer, Serializer + +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.mgmt.core import AsyncARMPipelineClient + +from .. import models +from ._configuration import SourceControlConfigurationClientConfiguration +from .operations import ExtensionsOperations, FluxConfigOperationStatusOperations, FluxConfigurationsOperations, OperationStatusOperations, Operations, SourceControlConfigurationsOperations + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + +class SourceControlConfigurationClient: + """KubernetesConfiguration Client. + + :ivar extensions: ExtensionsOperations operations + :vartype extensions: + azure.mgmt.kubernetesconfiguration.v2022_07_01.aio.operations.ExtensionsOperations + :ivar operation_status: OperationStatusOperations operations + :vartype operation_status: + azure.mgmt.kubernetesconfiguration.v2022_07_01.aio.operations.OperationStatusOperations + :ivar flux_configurations: FluxConfigurationsOperations operations + :vartype flux_configurations: + azure.mgmt.kubernetesconfiguration.v2022_07_01.aio.operations.FluxConfigurationsOperations + :ivar flux_config_operation_status: FluxConfigOperationStatusOperations operations + :vartype flux_config_operation_status: + azure.mgmt.kubernetesconfiguration.v2022_07_01.aio.operations.FluxConfigOperationStatusOperations + :ivar source_control_configurations: SourceControlConfigurationsOperations operations + :vartype source_control_configurations: + azure.mgmt.kubernetesconfiguration.v2022_07_01.aio.operations.SourceControlConfigurationsOperations + :ivar operations: Operations operations + :vartype operations: azure.mgmt.kubernetesconfiguration.v2022_07_01.aio.operations.Operations + :param credential: Credential needed for the client to connect to Azure. 
+ :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The ID of the target subscription. + :type subscription_id: str + :param base_url: Service URL. Default value is "https://management.azure.com". + :type base_url: str + :keyword api_version: Api Version. Default value is "2022-07-01". Note that overriding this + default value may result in unsupported behavior. + :paramtype api_version: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + base_url: str = "https://management.azure.com", + **kwargs: Any + ) -> None: + self._config = SourceControlConfigurationClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs) + self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + self._serialize.client_side_validation = False + self.extensions = ExtensionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.operation_status = OperationStatusOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.flux_configurations = FluxConfigurationsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.flux_config_operation_status = FluxConfigOperationStatusOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.source_control_configurations = SourceControlConfigurationsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize + ) + + + def _send_request( + self, + request: HttpRequest, + **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, **kwargs) + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> "SourceControlConfigurationClient": + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details) -> None: + await self._client.__aexit__(*exc_details) diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/__init__.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/__init__.py new file mode 100644 index 00000000000..02567809480 --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/__init__.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._extensions_operations import ExtensionsOperations +from ._operation_status_operations import OperationStatusOperations +from ._flux_configurations_operations import FluxConfigurationsOperations +from ._flux_config_operation_status_operations import FluxConfigOperationStatusOperations +from ._source_control_configurations_operations import SourceControlConfigurationsOperations +from ._operations import Operations + +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk +__all__ = [ + 'ExtensionsOperations', + 'OperationStatusOperations', + 'FluxConfigurationsOperations', + 'FluxConfigOperationStatusOperations', + 'SourceControlConfigurationsOperations', + 'Operations', +] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() \ No newline at end of file diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_extensions_operations.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_extensions_operations.py new file mode 100644 index 00000000000..b5ac3ba997b --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_extensions_operations.py @@ -0,0 +1,701 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._extensions_operations import build_create_request_initial, build_delete_request_initial, build_get_request, build_list_request, build_update_request_initial +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ExtensionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.kubernetesconfiguration.v2022_07_01.aio.SourceControlConfigurationClient`'s + :attr:`extensions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + async def _create_initial( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + extension_name: str, + extension: _models.Extension, + **kwargs: Any + ) -> _models.Extension: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.Extension] + + _json = self._serialize.body(extension, 'Extension') + + request = build_create_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + extension_name=extension_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._create_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + 
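+            # stream=False buffers the full response body so it can be
+            # deserialized into an Extension model below.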
**kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('Extension', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('Extension', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}"} # type: ignore + + + @distributed_trace_async + async def begin_create( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + extension_name: str, + extension: _models.Extension, + **kwargs: Any + ) -> AsyncLROPoller[_models.Extension]: + """Create a new Kubernetes Cluster Extension. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param extension_name: Name of the Extension. + :type extension_name: str + :param extension: Properties necessary to Create an Extension. + :type extension: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.Extension + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either Extension or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.Extension] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.Extension] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_initial( # type: ignore + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + extension_name=extension_name, + extension=extension, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Extension', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + lro_options={'final-state-via': 'azure-async-operation'}, + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}"} # type: ignore + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + extension_name: str, + **kwargs: Any + ) -> _models.Extension: + """Gets Kubernetes Cluster Extension. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param extension_name: Name of the Extension. 
+ :type extension_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Extension, or the result of cls(response) + :rtype: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.Extension + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.Extension] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + extension_name=extension_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Extension', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}"} # type: ignore + + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + extension_name: str, + force_delete: Optional[bool] = None, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + extension_name=extension_name, + api_version=api_version, + force_delete=force_delete, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if 
response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}"} # type: ignore + + + @distributed_trace_async + async def begin_delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + extension_name: str, + force_delete: Optional[bool] = None, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete a Kubernetes Cluster Extension. This will cause the Agent to Uninstall the extension + from the cluster. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param extension_name: Name of the Extension. + :type extension_name: str + :param force_delete: Delete the extension resource in Azure - not the normal asynchronous + delete. Default value is None. + :type force_delete: bool + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + extension_name=extension_name, + force_delete=force_delete, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + lro_options={'final-state-via': 'azure-async-operation'}, + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}"} # type: ignore + + async def _update_initial( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + extension_name: str, + patch_extension: _models.PatchExtension, + **kwargs: Any + ) -> _models.Extension: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.Extension] + + _json = self._serialize.body(patch_extension, 'PatchExtension') + + request = build_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + extension_name=extension_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + 
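+        # Send the PATCH request through the client's pipeline; LRO polling is
+        # handled by begin_update(), which wraps this initial call in an AsyncLROPoller.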
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('Extension', pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize('Extension', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}"} # type: ignore + + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + extension_name: str, + patch_extension: _models.PatchExtension, + **kwargs: Any + ) -> AsyncLROPoller[_models.Extension]: + """Patch an existing Kubernetes Cluster Extension. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param extension_name: Name of the Extension. + :type extension_name: str + :param patch_extension: Properties to Patch in an existing Extension. + :type patch_extension: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.PatchExtension + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either Extension or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.Extension] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.Extension] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( # type: ignore + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + extension_name=extension_name, + patch_extension=patch_extension, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Extension', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + lro_options={'final-state-via': 'azure-async-operation'}, + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}"} # type: ignore + + @distributed_trace + def list( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + **kwargs: Any + ) -> AsyncIterable[_models.ExtensionsList]: + """List all Extensions in the cluster. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. 
+ :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ExtensionsList or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ExtensionsList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ExtensionsList] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ExtensionsList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions"} # type: ignore diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_flux_config_operation_status_operations.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_flux_config_operation_status_operations.py new file mode 100644 index 00000000000..cfd2966b5c8 --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_flux_config_operation_status_operations.py @@ -0,0 +1,125 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Optional, TypeVar + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._flux_config_operation_status_operations import build_get_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class FluxConfigOperationStatusOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.kubernetesconfiguration.v2022_07_01.aio.SourceControlConfigurationClient`'s + :attr:`flux_config_operation_status` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + flux_configuration_name: str, + operation_id: str, + **kwargs: Any + ) -> _models.OperationStatusResult: + """Get Async Operation status. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param flux_configuration_name: Name of the Flux Configuration. + :type flux_configuration_name: str + :param operation_id: operation Id. 
+ :type operation_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: OperationStatusResult, or the result of cls(response) + :rtype: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.OperationStatusResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.OperationStatusResult] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + flux_configuration_name=flux_configuration_name, + operation_id=operation_id, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('OperationStatusResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}/operations/{operationId}"} # type: ignore + diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_flux_configurations_operations.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_flux_configurations_operations.py new file mode 100644 index 00000000000..f263b7dde29 --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_flux_configurations_operations.py @@ -0,0 +1,704 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._flux_configurations_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_request, build_update_request_initial +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class FluxConfigurationsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.kubernetesconfiguration.v2022_07_01.aio.SourceControlConfigurationClient`'s + :attr:`flux_configurations` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + flux_configuration_name: str, + **kwargs: Any + ) -> _models.FluxConfiguration: + """Gets details of the Flux Configuration. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param flux_configuration_name: Name of the Flux Configuration. 
+ :type flux_configuration_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FluxConfiguration, or the result of cls(response) + :rtype: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.FluxConfiguration + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.FluxConfiguration] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + flux_configuration_name=flux_configuration_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('FluxConfiguration', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}"} # type: ignore + + + async def _create_or_update_initial( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + flux_configuration_name: str, + flux_configuration: _models.FluxConfiguration, + **kwargs: Any + ) -> _models.FluxConfiguration: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.FluxConfiguration] + + _json = self._serialize.body(flux_configuration, 'FluxConfiguration') + + request = build_create_or_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + flux_configuration_name=flux_configuration_name, + api_version=api_version, + content_type=content_type, + json=_json, + 
template_url=self._create_or_update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('FluxConfiguration', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('FluxConfiguration', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}"} # type: ignore + + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + flux_configuration_name: str, + flux_configuration: _models.FluxConfiguration, + **kwargs: Any + ) -> AsyncLROPoller[_models.FluxConfiguration]: + """Create a new Kubernetes Flux Configuration. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param flux_configuration_name: Name of the Flux Configuration. + :type flux_configuration_name: str + :param flux_configuration: Properties necessary to Create a FluxConfiguration. + :type flux_configuration: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.FluxConfiguration + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either FluxConfiguration or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.FluxConfiguration] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.FluxConfiguration] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + flux_configuration_name=flux_configuration_name, + flux_configuration=flux_configuration, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('FluxConfiguration', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + lro_options={'final-state-via': 'azure-async-operation'}, + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}"} # type: ignore + + async def _update_initial( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + flux_configuration_name: str, + flux_configuration_patch: _models.FluxConfigurationPatch, + **kwargs: Any + ) -> _models.FluxConfiguration: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.FluxConfiguration] + + _json = self._serialize.body(flux_configuration_patch, 'FluxConfigurationPatch') + + request = build_update_request_initial( + 
subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + flux_configuration_name=flux_configuration_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('FluxConfiguration', pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize('FluxConfiguration', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}"} # type: ignore + + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + flux_configuration_name: str, + flux_configuration_patch: _models.FluxConfigurationPatch, + **kwargs: Any + ) -> AsyncLROPoller[_models.FluxConfiguration]: + """Update an existing Kubernetes Flux Configuration. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param flux_configuration_name: Name of the Flux Configuration. + :type flux_configuration_name: str + :param flux_configuration_patch: Properties to Patch in an existing Flux Configuration. + :type flux_configuration_patch: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.FluxConfigurationPatch + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either FluxConfiguration or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.FluxConfiguration] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.FluxConfiguration] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( # type: ignore + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + flux_configuration_name=flux_configuration_name, + flux_configuration_patch=flux_configuration_patch, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('FluxConfiguration', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + lro_options={'final-state-via': 'azure-async-operation'}, + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + flux_configuration_name: str, + force_delete: Optional[bool] = None, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + flux_configuration_name=flux_configuration_name, + 
api_version=api_version, + force_delete=force_delete, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}"} # type: ignore + + + @distributed_trace_async + async def begin_delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + flux_configuration_name: str, + force_delete: Optional[bool] = None, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """This will delete the YAML file used to set up the Flux Configuration, thus stopping future sync + from the source repo. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param flux_configuration_name: Name of the Flux Configuration. + :type flux_configuration_name: str + :param force_delete: Delete the extension resource in Azure - not the normal asynchronous + delete. Default value is None. + :type force_delete: bool + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + flux_configuration_name=flux_configuration_name, + force_delete=force_delete, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + lro_options={'final-state-via': 'azure-async-operation'}, + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}"} # type: ignore + + @distributed_trace + def list( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + **kwargs: Any + ) -> AsyncIterable[_models.FluxConfigurationsList]: + """List all Flux Configurations. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. 
+ :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either FluxConfigurationsList or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.FluxConfigurationsList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.FluxConfigurationsList] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("FluxConfigurationsList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations"} # type: ignore diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_operation_status_operations.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_operation_status_operations.py new file mode 100644 index 00000000000..d33baa4af98 --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_operation_status_operations.py @@ -0,0 +1,229 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._operation_status_operations import build_get_request, build_list_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class OperationStatusOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.kubernetesconfiguration.v2022_07_01.aio.SourceControlConfigurationClient`'s + :attr:`operation_status` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + extension_name: str, + operation_id: str, + **kwargs: Any + ) -> _models.OperationStatusResult: + """Get Async Operation status. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param extension_name: Name of the Extension. + :type extension_name: str + :param operation_id: operation Id. 
+ :type operation_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: OperationStatusResult, or the result of cls(response) + :rtype: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.OperationStatusResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.OperationStatusResult] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + extension_name=extension_name, + operation_id=operation_id, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('OperationStatusResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}/operations/{operationId}"} # type: ignore + + + @distributed_trace + def list( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + **kwargs: Any + ) -> AsyncIterable[_models.OperationStatusList]: + """List Async Operations, currently in progress, in a cluster. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. 
+ :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OperationStatusList or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.OperationStatusList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.OperationStatusList] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("OperationStatusList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/operations"} # type: ignore diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_operations.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_operations.py new file mode 100644 index 00000000000..1d1135a03bd --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_operations.py @@ -0,0 +1,123 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft 
Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._operations import build_list_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class Operations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.kubernetesconfiguration.v2022_07_01.aio.SourceControlConfigurationClient`'s + :attr:`operations` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + **kwargs: Any + ) -> AsyncIterable[_models.ResourceProviderOperationList]: + """List all the available operations the KubernetesConfiguration resource provider supports. 
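+
+        A minimal usage sketch (illustrative only, not part of the generated client;
+        it assumes an already-authenticated ``SourceControlConfigurationClient``
+        instance named ``client`` and is run from within a coroutine)::
+
+            async for operation in client.operations.list():
+                print(operation.name)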
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ResourceProviderOperationList or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ResourceProviderOperationList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ResourceProviderOperationList] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ResourceProviderOperationList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/providers/Microsoft.KubernetesConfiguration/operations"} # type: ignore diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_patch.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_patch.py new file mode 100644 index 00000000000..0ad201a8c58 --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_patch.py @@ -0,0 +1,19 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_source_control_configurations_operations.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_source_control_configurations_operations.py new file mode 100644 index 00000000000..5aee90e1bba --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/aio/operations/_source_control_configurations_operations.py @@ -0,0 +1,459 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._source_control_configurations_operations import build_create_or_update_request, build_delete_request_initial, build_get_request, build_list_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class SourceControlConfigurationsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.kubernetesconfiguration.v2022_07_01.aio.SourceControlConfigurationClient`'s + :attr:`source_control_configurations` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + source_control_configuration_name: str, + **kwargs: Any + ) -> _models.SourceControlConfiguration: + """Gets details of the Source Control Configuration. + + :param resource_group_name: The name of the resource group. 
The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param source_control_configuration_name: Name of the Source Control Configuration. + :type source_control_configuration_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SourceControlConfiguration, or the result of cls(response) + :rtype: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.SourceControlConfiguration + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.SourceControlConfiguration] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + source_control_configuration_name=source_control_configuration_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SourceControlConfiguration', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/sourceControlConfigurations/{sourceControlConfigurationName}"} # type: ignore + + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + source_control_configuration_name: str, + source_control_configuration: _models.SourceControlConfiguration, + **kwargs: Any + ) -> _models.SourceControlConfiguration: + """Create a new Kubernetes Source Control Configuration. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. 
+ :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param source_control_configuration_name: Name of the Source Control Configuration. + :type source_control_configuration_name: str + :param source_control_configuration: Properties necessary to Create KubernetesConfiguration. + :type source_control_configuration: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.SourceControlConfiguration + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SourceControlConfiguration, or the result of cls(response) + :rtype: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.SourceControlConfiguration + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.SourceControlConfiguration] + + _json = self._serialize.body(source_control_configuration, 'SourceControlConfiguration') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + source_control_configuration_name=source_control_configuration_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('SourceControlConfiguration', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('SourceControlConfiguration', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/sourceControlConfigurations/{sourceControlConfigurationName}"} # type: ignore + + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + source_control_configuration_name: 
str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + source_control_configuration_name=source_control_configuration_name, + api_version=api_version, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/sourceControlConfigurations/{sourceControlConfigurationName}"} # type: ignore + + + @distributed_trace_async + async def begin_delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + source_control_configuration_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """This will delete the YAML file used to set up the Source control configuration, thus stopping + future sync from the source repo. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param source_control_configuration_name: Name of the Source Control Configuration. + :type source_control_configuration_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + source_control_configuration_name=source_control_configuration_name, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/sourceControlConfigurations/{sourceControlConfigurationName}"} # type: ignore + + @distributed_trace + def list( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + **kwargs: Any + ) -> AsyncIterable[_models.SourceControlConfigurationList]: + """List all Source Control Configurations. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. 
+ :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either SourceControlConfigurationList or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.SourceControlConfigurationList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.SourceControlConfigurationList] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("SourceControlConfigurationList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/sourceControlConfigurations"} # type: ignore diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/models/__init__.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/models/__init__.py new file mode 100644 index 00000000000..cfb79872060 --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/models/__init__.py @@ -0,0 +1,133 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._models_py3 import AzureBlobDefinition +from ._models_py3 import AzureBlobPatchDefinition +from ._models_py3 import BucketDefinition +from ._models_py3 import BucketPatchDefinition +from ._models_py3 import ComplianceStatus +from ._models_py3 import ErrorAdditionalInfo +from ._models_py3 import ErrorDetail +from ._models_py3 import ErrorResponse +from ._models_py3 import Extension +from ._models_py3 import ExtensionPropertiesAksAssignedIdentity +from ._models_py3 import ExtensionStatus +from ._models_py3 import ExtensionsList +from ._models_py3 import FluxConfiguration +from ._models_py3 import FluxConfigurationPatch +from ._models_py3 import FluxConfigurationsList +from ._models_py3 import GitRepositoryDefinition +from ._models_py3 import GitRepositoryPatchDefinition +from ._models_py3 import HelmOperatorProperties +from ._models_py3 import HelmReleasePropertiesDefinition +from ._models_py3 import Identity +from ._models_py3 import KustomizationDefinition +from ._models_py3 import KustomizationPatchDefinition +from ._models_py3 import ManagedIdentityDefinition +from ._models_py3 import ManagedIdentityPatchDefinition +from ._models_py3 import ObjectReferenceDefinition +from ._models_py3 import ObjectStatusConditionDefinition +from ._models_py3 import ObjectStatusDefinition +from ._models_py3 import OperationStatusList +from ._models_py3 import OperationStatusResult +from ._models_py3 import PatchExtension +from ._models_py3 import ProxyResource +from ._models_py3 import RepositoryRefDefinition +from ._models_py3 import Resource +from ._models_py3 import ResourceProviderOperation +from ._models_py3 import ResourceProviderOperationDisplay +from ._models_py3 import ResourceProviderOperationList +from ._models_py3 import Scope +from ._models_py3 import ScopeCluster +from ._models_py3 import ScopeNamespace +from ._models_py3 import ServicePrincipalDefinition +from ._models_py3 import ServicePrincipalPatchDefinition +from ._models_py3 import SourceControlConfiguration +from ._models_py3 import SourceControlConfigurationList +from ._models_py3 import SystemData + + +from ._source_control_configuration_client_enums import ( + AKSIdentityType, + ComplianceStateType, + CreatedByType, + FluxComplianceState, + KustomizationValidationType, + LevelType, + MessageLevelType, + OperatorScopeType, + OperatorType, + ProvisioningState, + ProvisioningStateType, + ScopeType, + SourceKindType, +) +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk +__all__ = [ + 'AzureBlobDefinition', + 'AzureBlobPatchDefinition', + 'BucketDefinition', + 'BucketPatchDefinition', + 'ComplianceStatus', + 'ErrorAdditionalInfo', + 'ErrorDetail', + 'ErrorResponse', + 'Extension', + 'ExtensionPropertiesAksAssignedIdentity', + 'ExtensionStatus', + 'ExtensionsList', + 'FluxConfiguration', + 'FluxConfigurationPatch', + 'FluxConfigurationsList', + 'GitRepositoryDefinition', + 'GitRepositoryPatchDefinition', + 'HelmOperatorProperties', + 'HelmReleasePropertiesDefinition', + 'Identity', + 'KustomizationDefinition', + 'KustomizationPatchDefinition', + 'ManagedIdentityDefinition', + 
'ManagedIdentityPatchDefinition', + 'ObjectReferenceDefinition', + 'ObjectStatusConditionDefinition', + 'ObjectStatusDefinition', + 'OperationStatusList', + 'OperationStatusResult', + 'PatchExtension', + 'ProxyResource', + 'RepositoryRefDefinition', + 'Resource', + 'ResourceProviderOperation', + 'ResourceProviderOperationDisplay', + 'ResourceProviderOperationList', + 'Scope', + 'ScopeCluster', + 'ScopeNamespace', + 'ServicePrincipalDefinition', + 'ServicePrincipalPatchDefinition', + 'SourceControlConfiguration', + 'SourceControlConfigurationList', + 'SystemData', + 'AKSIdentityType', + 'ComplianceStateType', + 'CreatedByType', + 'FluxComplianceState', + 'KustomizationValidationType', + 'LevelType', + 'MessageLevelType', + 'OperatorScopeType', + 'OperatorType', + 'ProvisioningState', + 'ProvisioningStateType', + 'ScopeType', + 'SourceKindType', +] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() \ No newline at end of file diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/models/_models_py3.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/models/_models_py3.py new file mode 100644 index 00000000000..ef6bea40689 --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/models/_models_py3.py @@ -0,0 +1,2656 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +import datetime +from typing import Dict, List, Optional, TYPE_CHECKING, Union + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + import __init__ as _models + + +class AzureBlobDefinition(msrest.serialization.Model): + """Parameters to reconcile to the AzureBlob source kind type. + + :ivar url: The URL to sync for the flux configuration Azure Blob storage account. + :vartype url: str + :ivar container_name: The Azure Blob container name to sync from the url endpoint for the flux + configuration. + :vartype container_name: str + :ivar timeout_in_seconds: The maximum time to attempt to reconcile the cluster Azure Blob + source with the remote. + :vartype timeout_in_seconds: long + :ivar sync_interval_in_seconds: The interval at which to re-reconcile the cluster Azure Blob + source with the remote. + :vartype sync_interval_in_seconds: long + :ivar service_principal: Parameters to authenticate using Service Principal. + :vartype service_principal: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ServicePrincipalDefinition + :ivar account_key: The account key (shared key) to access the storage account. + :vartype account_key: str + :ivar sas_token: The Shared Access token to access the storage container. + :vartype sas_token: str + :ivar managed_identity: Parameters to authenticate using a Managed Identity. + :vartype managed_identity: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ManagedIdentityDefinition + :ivar local_auth_ref: Name of a local secret on the Kubernetes cluster to use as the + authentication secret rather than the managed or user-provided configuration secrets. 
+ :vartype local_auth_ref: str + """ + + _attribute_map = { + 'url': {'key': 'url', 'type': 'str'}, + 'container_name': {'key': 'containerName', 'type': 'str'}, + 'timeout_in_seconds': {'key': 'timeoutInSeconds', 'type': 'long'}, + 'sync_interval_in_seconds': {'key': 'syncIntervalInSeconds', 'type': 'long'}, + 'service_principal': {'key': 'servicePrincipal', 'type': 'ServicePrincipalDefinition'}, + 'account_key': {'key': 'accountKey', 'type': 'str'}, + 'sas_token': {'key': 'sasToken', 'type': 'str'}, + 'managed_identity': {'key': 'managedIdentity', 'type': 'ManagedIdentityDefinition'}, + 'local_auth_ref': {'key': 'localAuthRef', 'type': 'str'}, + } + + def __init__( + self, + *, + url: Optional[str] = None, + container_name: Optional[str] = None, + timeout_in_seconds: Optional[int] = 600, + sync_interval_in_seconds: Optional[int] = 600, + service_principal: Optional["_models.ServicePrincipalDefinition"] = None, + account_key: Optional[str] = None, + sas_token: Optional[str] = None, + managed_identity: Optional["_models.ManagedIdentityDefinition"] = None, + local_auth_ref: Optional[str] = None, + **kwargs + ): + """ + :keyword url: The URL to sync for the flux configuration Azure Blob storage account. + :paramtype url: str + :keyword container_name: The Azure Blob container name to sync from the url endpoint for the + flux configuration. + :paramtype container_name: str + :keyword timeout_in_seconds: The maximum time to attempt to reconcile the cluster Azure Blob + source with the remote. + :paramtype timeout_in_seconds: long + :keyword sync_interval_in_seconds: The interval at which to re-reconcile the cluster Azure Blob + source with the remote. + :paramtype sync_interval_in_seconds: long + :keyword service_principal: Parameters to authenticate using Service Principal. + :paramtype service_principal: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ServicePrincipalDefinition + :keyword account_key: The account key (shared key) to access the storage account. + :paramtype account_key: str + :keyword sas_token: The Shared Access token to access the storage container. + :paramtype sas_token: str + :keyword managed_identity: Parameters to authenticate using a Managed Identity. + :paramtype managed_identity: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ManagedIdentityDefinition + :keyword local_auth_ref: Name of a local secret on the Kubernetes cluster to use as the + authentication secret rather than the managed or user-provided configuration secrets. + :paramtype local_auth_ref: str + """ + super(AzureBlobDefinition, self).__init__(**kwargs) + self.url = url + self.container_name = container_name + self.timeout_in_seconds = timeout_in_seconds + self.sync_interval_in_seconds = sync_interval_in_seconds + self.service_principal = service_principal + self.account_key = account_key + self.sas_token = sas_token + self.managed_identity = managed_identity + self.local_auth_ref = local_auth_ref + + +class AzureBlobPatchDefinition(msrest.serialization.Model): + """Parameters to reconcile to the AzureBlob source kind type. + + :ivar url: The URL to sync for the flux configuration Azure Blob storage account. + :vartype url: str + :ivar container_name: The Azure Blob container name to sync from the url endpoint for the flux + configuration. + :vartype container_name: str + :ivar timeout_in_seconds: The maximum time to attempt to reconcile the cluster Azure Blob + source with the remote. 
+ :vartype timeout_in_seconds: long + :ivar sync_interval_in_seconds: The interval at which to re-reconcile the cluster Azure Blob + source with the remote. + :vartype sync_interval_in_seconds: long + :ivar service_principal: Parameters to authenticate using Service Principal. + :vartype service_principal: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ServicePrincipalPatchDefinition + :ivar account_key: The account key (shared key) to access the storage account. + :vartype account_key: str + :ivar sas_token: The Shared Access token to access the storage container. + :vartype sas_token: str + :ivar managed_identity: Parameters to authenticate using a Managed Identity. + :vartype managed_identity: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ManagedIdentityPatchDefinition + :ivar local_auth_ref: Name of a local secret on the Kubernetes cluster to use as the + authentication secret rather than the managed or user-provided configuration secrets. + :vartype local_auth_ref: str + """ + + _attribute_map = { + 'url': {'key': 'url', 'type': 'str'}, + 'container_name': {'key': 'containerName', 'type': 'str'}, + 'timeout_in_seconds': {'key': 'timeoutInSeconds', 'type': 'long'}, + 'sync_interval_in_seconds': {'key': 'syncIntervalInSeconds', 'type': 'long'}, + 'service_principal': {'key': 'servicePrincipal', 'type': 'ServicePrincipalPatchDefinition'}, + 'account_key': {'key': 'accountKey', 'type': 'str'}, + 'sas_token': {'key': 'sasToken', 'type': 'str'}, + 'managed_identity': {'key': 'managedIdentity', 'type': 'ManagedIdentityPatchDefinition'}, + 'local_auth_ref': {'key': 'localAuthRef', 'type': 'str'}, + } + + def __init__( + self, + *, + url: Optional[str] = None, + container_name: Optional[str] = None, + timeout_in_seconds: Optional[int] = None, + sync_interval_in_seconds: Optional[int] = None, + service_principal: Optional["_models.ServicePrincipalPatchDefinition"] = None, + account_key: Optional[str] = None, + sas_token: Optional[str] = None, + managed_identity: Optional["_models.ManagedIdentityPatchDefinition"] = None, + local_auth_ref: Optional[str] = None, + **kwargs + ): + """ + :keyword url: The URL to sync for the flux configuration Azure Blob storage account. + :paramtype url: str + :keyword container_name: The Azure Blob container name to sync from the url endpoint for the + flux configuration. + :paramtype container_name: str + :keyword timeout_in_seconds: The maximum time to attempt to reconcile the cluster Azure Blob + source with the remote. + :paramtype timeout_in_seconds: long + :keyword sync_interval_in_seconds: The interval at which to re-reconcile the cluster Azure Blob + source with the remote. + :paramtype sync_interval_in_seconds: long + :keyword service_principal: Parameters to authenticate using Service Principal. + :paramtype service_principal: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ServicePrincipalPatchDefinition + :keyword account_key: The account key (shared key) to access the storage account. + :paramtype account_key: str + :keyword sas_token: The Shared Access token to access the storage container. + :paramtype sas_token: str + :keyword managed_identity: Parameters to authenticate using a Managed Identity. + :paramtype managed_identity: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ManagedIdentityPatchDefinition + :keyword local_auth_ref: Name of a local secret on the Kubernetes cluster to use as the + authentication secret rather than the managed or user-provided configuration secrets. 
+ :paramtype local_auth_ref: str + """ + super(AzureBlobPatchDefinition, self).__init__(**kwargs) + self.url = url + self.container_name = container_name + self.timeout_in_seconds = timeout_in_seconds + self.sync_interval_in_seconds = sync_interval_in_seconds + self.service_principal = service_principal + self.account_key = account_key + self.sas_token = sas_token + self.managed_identity = managed_identity + self.local_auth_ref = local_auth_ref + + +class BucketDefinition(msrest.serialization.Model): + """Parameters to reconcile to the Bucket source kind type. + + :ivar url: The URL to sync for the flux configuration S3 bucket. + :vartype url: str + :ivar bucket_name: The bucket name to sync from the url endpoint for the flux configuration. + :vartype bucket_name: str + :ivar insecure: Specify whether to use insecure communication when pulling data from the S3 + bucket. + :vartype insecure: bool + :ivar timeout_in_seconds: The maximum time to attempt to reconcile the cluster bucket source + with the remote. + :vartype timeout_in_seconds: long + :ivar sync_interval_in_seconds: The interval at which to re-reconcile the cluster bucket source + with the remote. + :vartype sync_interval_in_seconds: long + :ivar access_key: Plaintext access key used to securely access the S3 bucket. + :vartype access_key: str + :ivar local_auth_ref: Name of a local secret on the Kubernetes cluster to use as the + authentication secret rather than the managed or user-provided configuration secrets. + :vartype local_auth_ref: str + """ + + _attribute_map = { + 'url': {'key': 'url', 'type': 'str'}, + 'bucket_name': {'key': 'bucketName', 'type': 'str'}, + 'insecure': {'key': 'insecure', 'type': 'bool'}, + 'timeout_in_seconds': {'key': 'timeoutInSeconds', 'type': 'long'}, + 'sync_interval_in_seconds': {'key': 'syncIntervalInSeconds', 'type': 'long'}, + 'access_key': {'key': 'accessKey', 'type': 'str'}, + 'local_auth_ref': {'key': 'localAuthRef', 'type': 'str'}, + } + + def __init__( + self, + *, + url: Optional[str] = None, + bucket_name: Optional[str] = None, + insecure: Optional[bool] = True, + timeout_in_seconds: Optional[int] = 600, + sync_interval_in_seconds: Optional[int] = 600, + access_key: Optional[str] = None, + local_auth_ref: Optional[str] = None, + **kwargs + ): + """ + :keyword url: The URL to sync for the flux configuration S3 bucket. + :paramtype url: str + :keyword bucket_name: The bucket name to sync from the url endpoint for the flux configuration. + :paramtype bucket_name: str + :keyword insecure: Specify whether to use insecure communication when pulling data from the S3 + bucket. + :paramtype insecure: bool + :keyword timeout_in_seconds: The maximum time to attempt to reconcile the cluster bucket source + with the remote. + :paramtype timeout_in_seconds: long + :keyword sync_interval_in_seconds: The interval at which to re-reconcile the cluster bucket + source with the remote. + :paramtype sync_interval_in_seconds: long + :keyword access_key: Plaintext access key used to securely access the S3 bucket. + :paramtype access_key: str + :keyword local_auth_ref: Name of a local secret on the Kubernetes cluster to use as the + authentication secret rather than the managed or user-provided configuration secrets.
+ :paramtype local_auth_ref: str + """ + super(BucketDefinition, self).__init__(**kwargs) + self.url = url + self.bucket_name = bucket_name + self.insecure = insecure + self.timeout_in_seconds = timeout_in_seconds + self.sync_interval_in_seconds = sync_interval_in_seconds + self.access_key = access_key + self.local_auth_ref = local_auth_ref + + +class BucketPatchDefinition(msrest.serialization.Model): + """Parameters to reconcile to the Bucket source kind type. + + :ivar url: The URL to sync for the flux configuration S3 bucket. + :vartype url: str + :ivar bucket_name: The bucket name to sync from the url endpoint for the flux configuration. + :vartype bucket_name: str + :ivar insecure: Specify whether to use insecure communication when pulling data from the S3 + bucket. + :vartype insecure: bool + :ivar timeout_in_seconds: The maximum time to attempt to reconcile the cluster bucket source + with the remote. + :vartype timeout_in_seconds: long + :ivar sync_interval_in_seconds: The interval at which to re-reconcile the cluster bucket source + with the remote. + :vartype sync_interval_in_seconds: long + :ivar access_key: Plaintext access key used to securely access the S3 bucket. + :vartype access_key: str + :ivar local_auth_ref: Name of a local secret on the Kubernetes cluster to use as the + authentication secret rather than the managed or user-provided configuration secrets. + :vartype local_auth_ref: str + """ + + _attribute_map = { + 'url': {'key': 'url', 'type': 'str'}, + 'bucket_name': {'key': 'bucketName', 'type': 'str'}, + 'insecure': {'key': 'insecure', 'type': 'bool'}, + 'timeout_in_seconds': {'key': 'timeoutInSeconds', 'type': 'long'}, + 'sync_interval_in_seconds': {'key': 'syncIntervalInSeconds', 'type': 'long'}, + 'access_key': {'key': 'accessKey', 'type': 'str'}, + 'local_auth_ref': {'key': 'localAuthRef', 'type': 'str'}, + } + + def __init__( + self, + *, + url: Optional[str] = None, + bucket_name: Optional[str] = None, + insecure: Optional[bool] = None, + timeout_in_seconds: Optional[int] = None, + sync_interval_in_seconds: Optional[int] = None, + access_key: Optional[str] = None, + local_auth_ref: Optional[str] = None, + **kwargs + ): + """ + :keyword url: The URL to sync for the flux configuration S3 bucket. + :paramtype url: str + :keyword bucket_name: The bucket name to sync from the url endpoint for the flux configuration. + :paramtype bucket_name: str + :keyword insecure: Specify whether to use insecure communication when pulling data from the S3 + bucket. + :paramtype insecure: bool + :keyword timeout_in_seconds: The maximum time to attempt to reconcile the cluster bucket source + with the remote. + :paramtype timeout_in_seconds: long + :keyword sync_interval_in_seconds: The interval at which to re-reconcile the cluster bucket + source with the remote. + :paramtype sync_interval_in_seconds: long + :keyword access_key: Plaintext access key used to securely access the S3 bucket. + :paramtype access_key: str + :keyword local_auth_ref: Name of a local secret on the Kubernetes cluster to use as the + authentication secret rather than the managed or user-provided configuration secrets.
+ :paramtype local_auth_ref: str + """ + super(BucketPatchDefinition, self).__init__(**kwargs) + self.url = url + self.bucket_name = bucket_name + self.insecure = insecure + self.timeout_in_seconds = timeout_in_seconds + self.sync_interval_in_seconds = sync_interval_in_seconds + self.access_key = access_key + self.local_auth_ref = local_auth_ref + + +class ComplianceStatus(msrest.serialization.Model): + """Compliance Status details. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar compliance_state: The compliance state of the configuration. Known values are: "Pending", + "Compliant", "Noncompliant", "Installed", "Failed". + :vartype compliance_state: str or + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ComplianceStateType + :ivar last_config_applied: Datetime the configuration was last applied. + :vartype last_config_applied: ~datetime.datetime + :ivar message: Message from when the configuration was applied. + :vartype message: str + :ivar message_level: Level of the message. Known values are: "Error", "Warning", "Information". + :vartype message_level: str or + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.MessageLevelType + """ + + _validation = { + 'compliance_state': {'readonly': True}, + } + + _attribute_map = { + 'compliance_state': {'key': 'complianceState', 'type': 'str'}, + 'last_config_applied': {'key': 'lastConfigApplied', 'type': 'iso-8601'}, + 'message': {'key': 'message', 'type': 'str'}, + 'message_level': {'key': 'messageLevel', 'type': 'str'}, + } + + def __init__( + self, + *, + last_config_applied: Optional[datetime.datetime] = None, + message: Optional[str] = None, + message_level: Optional[Union[str, "_models.MessageLevelType"]] = None, + **kwargs + ): + """ + :keyword last_config_applied: Datetime the configuration was last applied. + :paramtype last_config_applied: ~datetime.datetime + :keyword message: Message from when the configuration was applied. + :paramtype message: str + :keyword message_level: Level of the message. Known values are: "Error", "Warning", + "Information". + :paramtype message_level: str or + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.MessageLevelType + """ + super(ComplianceStatus, self).__init__(**kwargs) + self.compliance_state = None + self.last_config_applied = last_config_applied + self.message = message + self.message_level = message_level + + +class ErrorAdditionalInfo(msrest.serialization.Model): + """The resource management error additional info. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar type: The additional info type. + :vartype type: str + :ivar info: The additional info. + :vartype info: any + """ + + _validation = { + 'type': {'readonly': True}, + 'info': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'info': {'key': 'info', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ErrorAdditionalInfo, self).__init__(**kwargs) + self.type = None + self.info = None + + +class ErrorDetail(msrest.serialization.Model): + """The error detail. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: The error code. + :vartype code: str + :ivar message: The error message. + :vartype message: str + :ivar target: The error target. + :vartype target: str + :ivar details: The error details. 
+ :vartype details: list[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ErrorDetail] + :ivar additional_info: The error additional info. + :vartype additional_info: + list[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ErrorAdditionalInfo] + """ + + _validation = { + 'code': {'readonly': True}, + 'message': {'readonly': True}, + 'target': {'readonly': True}, + 'details': {'readonly': True}, + 'additional_info': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorDetail]'}, + 'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ErrorDetail, self).__init__(**kwargs) + self.code = None + self.message = None + self.target = None + self.details = None + self.additional_info = None + + +class ErrorResponse(msrest.serialization.Model): + """Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.). + + :ivar error: The error object. + :vartype error: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ErrorDetail + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorDetail'}, + } + + def __init__( + self, + *, + error: Optional["_models.ErrorDetail"] = None, + **kwargs + ): + """ + :keyword error: The error object. + :paramtype error: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ErrorDetail + """ + super(ErrorResponse, self).__init__(**kwargs) + self.error = error + + +class Resource(msrest.serialization.Model): + """Common fields that are returned in the response for all Azure Resource Manager resources. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + + +class ProxyResource(Resource): + """The resource model definition for a Azure Resource Manager proxy resource. It will not have tags and a location. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". 
+ :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ProxyResource, self).__init__(**kwargs) + + +class Extension(ProxyResource): + """The Extension object. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar identity: Identity of the Extension resource. + :vartype identity: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.Identity + :ivar system_data: Top level metadata + https://github.com/Azure/azure-resource-manager-rpc/blob/master/v1.0/common-api-contracts.md#system-metadata-for-all-azure-resources. + :vartype system_data: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.SystemData + :ivar extension_type: Type of the Extension, of which this resource is an instance of. It must + be one of the Extension Types registered with Microsoft.KubernetesConfiguration by the + Extension publisher. + :vartype extension_type: str + :ivar auto_upgrade_minor_version: Flag to note if this extension participates in auto upgrade + of minor version, or not. + :vartype auto_upgrade_minor_version: bool + :ivar release_train: ReleaseTrain this extension participates in for auto-upgrade (e.g. Stable, + Preview, etc.) - only if autoUpgradeMinorVersion is 'true'. + :vartype release_train: str + :ivar version: User-specified version of the extension for this extension to 'pin'. To use + 'version', autoUpgradeMinorVersion must be 'false'. + :vartype version: str + :ivar scope: Scope at which the extension is installed. + :vartype scope: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.Scope + :ivar configuration_settings: Configuration settings, as name-value pairs for configuring this + extension. + :vartype configuration_settings: dict[str, str] + :ivar configuration_protected_settings: Configuration settings that are sensitive, as + name-value pairs for configuring this extension. + :vartype configuration_protected_settings: dict[str, str] + :ivar installed_version: Installed version of the extension. + :vartype installed_version: str + :ivar provisioning_state: Status of installation of this extension. Known values are: + "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ProvisioningState + :ivar statuses: Status from this extension. + :vartype statuses: list[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ExtensionStatus] + :ivar error_info: Error information from the Agent - e.g. errors during installation. + :vartype error_info: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ErrorDetail + :ivar custom_location_settings: Custom Location settings properties. + :vartype custom_location_settings: dict[str, str] + :ivar package_uri: Uri of the Helm package. 
+ :vartype package_uri: str + :ivar aks_assigned_identity: Identity of the Extension resource in an AKS cluster. + :vartype aks_assigned_identity: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ExtensionPropertiesAksAssignedIdentity + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'installed_version': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'error_info': {'readonly': True}, + 'custom_location_settings': {'readonly': True}, + 'package_uri': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'extension_type': {'key': 'properties.extensionType', 'type': 'str'}, + 'auto_upgrade_minor_version': {'key': 'properties.autoUpgradeMinorVersion', 'type': 'bool'}, + 'release_train': {'key': 'properties.releaseTrain', 'type': 'str'}, + 'version': {'key': 'properties.version', 'type': 'str'}, + 'scope': {'key': 'properties.scope', 'type': 'Scope'}, + 'configuration_settings': {'key': 'properties.configurationSettings', 'type': '{str}'}, + 'configuration_protected_settings': {'key': 'properties.configurationProtectedSettings', 'type': '{str}'}, + 'installed_version': {'key': 'properties.installedVersion', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'statuses': {'key': 'properties.statuses', 'type': '[ExtensionStatus]'}, + 'error_info': {'key': 'properties.errorInfo', 'type': 'ErrorDetail'}, + 'custom_location_settings': {'key': 'properties.customLocationSettings', 'type': '{str}'}, + 'package_uri': {'key': 'properties.packageUri', 'type': 'str'}, + 'aks_assigned_identity': {'key': 'properties.aksAssignedIdentity', 'type': 'ExtensionPropertiesAksAssignedIdentity'}, + } + + def __init__( + self, + *, + identity: Optional["_models.Identity"] = None, + extension_type: Optional[str] = None, + auto_upgrade_minor_version: Optional[bool] = True, + release_train: Optional[str] = "Stable", + version: Optional[str] = None, + scope: Optional["_models.Scope"] = None, + configuration_settings: Optional[Dict[str, str]] = None, + configuration_protected_settings: Optional[Dict[str, str]] = None, + statuses: Optional[List["_models.ExtensionStatus"]] = None, + aks_assigned_identity: Optional["_models.ExtensionPropertiesAksAssignedIdentity"] = None, + **kwargs + ): + """ + :keyword identity: Identity of the Extension resource. + :paramtype identity: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.Identity + :keyword extension_type: Type of the Extension, of which this resource is an instance of. It + must be one of the Extension Types registered with Microsoft.KubernetesConfiguration by the + Extension publisher. + :paramtype extension_type: str + :keyword auto_upgrade_minor_version: Flag to note if this extension participates in auto + upgrade of minor version, or not. + :paramtype auto_upgrade_minor_version: bool + :keyword release_train: ReleaseTrain this extension participates in for auto-upgrade (e.g. + Stable, Preview, etc.) - only if autoUpgradeMinorVersion is 'true'. + :paramtype release_train: str + :keyword version: User-specified version of the extension for this extension to 'pin'. To use + 'version', autoUpgradeMinorVersion must be 'false'. 
+ :paramtype version: str + :keyword scope: Scope at which the extension is installed. + :paramtype scope: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.Scope + :keyword configuration_settings: Configuration settings, as name-value pairs for configuring + this extension. + :paramtype configuration_settings: dict[str, str] + :keyword configuration_protected_settings: Configuration settings that are sensitive, as + name-value pairs for configuring this extension. + :paramtype configuration_protected_settings: dict[str, str] + :keyword statuses: Status from this extension. + :paramtype statuses: + list[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ExtensionStatus] + :keyword aks_assigned_identity: Identity of the Extension resource in an AKS cluster. + :paramtype aks_assigned_identity: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ExtensionPropertiesAksAssignedIdentity + """ + super(Extension, self).__init__(**kwargs) + self.identity = identity + self.system_data = None + self.extension_type = extension_type + self.auto_upgrade_minor_version = auto_upgrade_minor_version + self.release_train = release_train + self.version = version + self.scope = scope + self.configuration_settings = configuration_settings + self.configuration_protected_settings = configuration_protected_settings + self.installed_version = None + self.provisioning_state = None + self.statuses = statuses + self.error_info = None + self.custom_location_settings = None + self.package_uri = None + self.aks_assigned_identity = aks_assigned_identity + + +class ExtensionPropertiesAksAssignedIdentity(msrest.serialization.Model): + """Identity of the Extension resource in an AKS cluster. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: The principal ID of resource identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of resource. + :vartype tenant_id: str + :ivar type: The identity type. Known values are: "SystemAssigned", "UserAssigned". + :vartype type: str or ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.AKSIdentityType + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Optional[Union[str, "_models.AKSIdentityType"]] = None, + **kwargs + ): + """ + :keyword type: The identity type. Known values are: "SystemAssigned", "UserAssigned". + :paramtype type: str or ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.AKSIdentityType + """ + super(ExtensionPropertiesAksAssignedIdentity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = type + + +class ExtensionsList(msrest.serialization.Model): + """Result of the request to list Extensions. It contains a list of Extension objects and a URL link to get the next set of results. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of Extensions within a Kubernetes cluster. + :vartype value: list[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.Extension] + :ivar next_link: URL to get the next set of extension objects, if any. 
+ :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Extension]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ExtensionsList, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class ExtensionStatus(msrest.serialization.Model): + """Status from the extension. + + :ivar code: Status code provided by the Extension. + :vartype code: str + :ivar display_status: Short description of status of the extension. + :vartype display_status: str + :ivar level: Level of the status. Known values are: "Error", "Warning", "Information". Default + value: "Information". + :vartype level: str or ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.LevelType + :ivar message: Detailed message of the status from the Extension. + :vartype message: str + :ivar time: DateLiteral (per ISO8601) noting the time of installation status. + :vartype time: str + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'display_status': {'key': 'displayStatus', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'time': {'key': 'time', 'type': 'str'}, + } + + def __init__( + self, + *, + code: Optional[str] = None, + display_status: Optional[str] = None, + level: Optional[Union[str, "_models.LevelType"]] = "Information", + message: Optional[str] = None, + time: Optional[str] = None, + **kwargs + ): + """ + :keyword code: Status code provided by the Extension. + :paramtype code: str + :keyword display_status: Short description of status of the extension. + :paramtype display_status: str + :keyword level: Level of the status. Known values are: "Error", "Warning", "Information". + Default value: "Information". + :paramtype level: str or ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.LevelType + :keyword message: Detailed message of the status from the Extension. + :paramtype message: str + :keyword time: DateLiteral (per ISO8601) noting the time of installation status. + :paramtype time: str + """ + super(ExtensionStatus, self).__init__(**kwargs) + self.code = code + self.display_status = display_status + self.level = level + self.message = message + self.time = time + + +class FluxConfiguration(ProxyResource): + """The Flux Configuration object returned in Get & Put response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Top level metadata + https://github.com/Azure/azure-resource-manager-rpc/blob/master/v1.0/common-api-contracts.md#system-metadata-for-all-azure-resources. + :vartype system_data: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.SystemData + :ivar scope: Scope at which the operator will be installed. Known values are: "cluster", + "namespace". Default value: "cluster". 
+ :vartype scope: str or ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ScopeType
+ :ivar namespace: The namespace to which this configuration is installed. Maximum of 253
+ lower case alphanumeric characters, hyphen and period only.
+ :vartype namespace: str
+ :ivar source_kind: Source Kind to pull the configuration data from. Known values are:
+ "GitRepository", "Bucket", "AzureBlob".
+ :vartype source_kind: str or
+ ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.SourceKindType
+ :ivar suspend: Whether this configuration should suspend its reconciliation of its
+ kustomizations and sources.
+ :vartype suspend: bool
+ :ivar git_repository: Parameters to reconcile to the GitRepository source kind type.
+ :vartype git_repository:
+ ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.GitRepositoryDefinition
+ :ivar bucket: Parameters to reconcile to the Bucket source kind type.
+ :vartype bucket: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.BucketDefinition
+ :ivar azure_blob: Parameters to reconcile to the AzureBlob source kind type.
+ :vartype azure_blob: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.AzureBlobDefinition
+ :ivar kustomizations: Array of kustomizations used to reconcile the artifact pulled by the
+ source type on the cluster.
+ :vartype kustomizations: dict[str,
+ ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.KustomizationDefinition]
+ :ivar configuration_protected_settings: Key-value pairs of protected configuration settings for
+ the configuration.
+ :vartype configuration_protected_settings: dict[str, str]
+ :ivar statuses: Statuses of the Flux Kubernetes resources created by the fluxConfiguration or
+ created by the managed objects provisioned by the fluxConfiguration.
+ :vartype statuses:
+ list[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ObjectStatusDefinition]
+ :ivar repository_public_key: Public Key associated with this fluxConfiguration (either
+ generated within the cluster or provided by the user).
+ :vartype repository_public_key: str
+ :ivar source_synced_commit_id: Branch and/or SHA of the source commit synced with the cluster.
+ :vartype source_synced_commit_id: str
+ :ivar source_updated_at: Datetime the fluxConfiguration synced its source on the cluster.
+ :vartype source_updated_at: ~datetime.datetime
+ :ivar status_updated_at: Datetime the fluxConfiguration synced its status on the cluster with
+ Azure.
+ :vartype status_updated_at: ~datetime.datetime
+ :ivar compliance_state: Combined status of the Flux Kubernetes resources created by the
+ fluxConfiguration or created by the managed objects. Known values are: "Compliant",
+ "Non-Compliant", "Pending", "Suspended", "Unknown". Default value: "Unknown".
+ :vartype compliance_state: str or
+ ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.FluxComplianceState
+ :ivar provisioning_state: Status of the creation of the fluxConfiguration. Known values are:
+ "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting".
+ :vartype provisioning_state: str or
+ ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ProvisioningState
+ :ivar error_message: Error message returned to the user in the case of provisioning failure.
+ :vartype error_message: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'statuses': {'readonly': True},
+ 'repository_public_key': {'readonly': True},
+ 'source_synced_commit_id': {'readonly': True},
+ 'source_updated_at': {'readonly': True},
+ 'status_updated_at': {'readonly': True},
+ 'compliance_state': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ 'error_message': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'scope': {'key': 'properties.scope', 'type': 'str'},
+ 'namespace': {'key': 'properties.namespace', 'type': 'str'},
+ 'source_kind': {'key': 'properties.sourceKind', 'type': 'str'},
+ 'suspend': {'key': 'properties.suspend', 'type': 'bool'},
+ 'git_repository': {'key': 'properties.gitRepository', 'type': 'GitRepositoryDefinition'},
+ 'bucket': {'key': 'properties.bucket', 'type': 'BucketDefinition'},
+ 'azure_blob': {'key': 'properties.azureBlob', 'type': 'AzureBlobDefinition'},
+ 'kustomizations': {'key': 'properties.kustomizations', 'type': '{KustomizationDefinition}'},
+ 'configuration_protected_settings': {'key': 'properties.configurationProtectedSettings', 'type': '{str}'},
+ 'statuses': {'key': 'properties.statuses', 'type': '[ObjectStatusDefinition]'},
+ 'repository_public_key': {'key': 'properties.repositoryPublicKey', 'type': 'str'},
+ 'source_synced_commit_id': {'key': 'properties.sourceSyncedCommitId', 'type': 'str'},
+ 'source_updated_at': {'key': 'properties.sourceUpdatedAt', 'type': 'iso-8601'},
+ 'status_updated_at': {'key': 'properties.statusUpdatedAt', 'type': 'iso-8601'},
+ 'compliance_state': {'key': 'properties.complianceState', 'type': 'str'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ 'error_message': {'key': 'properties.errorMessage', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ scope: Optional[Union[str, "_models.ScopeType"]] = "cluster",
+ namespace: Optional[str] = "default",
+ source_kind: Optional[Union[str, "_models.SourceKindType"]] = None,
+ suspend: Optional[bool] = False,
+ git_repository: Optional["_models.GitRepositoryDefinition"] = None,
+ bucket: Optional["_models.BucketDefinition"] = None,
+ azure_blob: Optional["_models.AzureBlobDefinition"] = None,
+ kustomizations: Optional[Dict[str, "_models.KustomizationDefinition"]] = None,
+ configuration_protected_settings: Optional[Dict[str, str]] = None,
+ **kwargs
+ ):
+ """
+ :keyword scope: Scope at which the operator will be installed. Known values are: "cluster",
+ "namespace". Default value: "cluster".
+ :paramtype scope: str or ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ScopeType
+ :keyword namespace: The namespace to which this configuration is installed. Maximum of 253
+ lower case alphanumeric characters, hyphen and period only.
+ :paramtype namespace: str
+ :keyword source_kind: Source Kind to pull the configuration data from. Known values are:
+ "GitRepository", "Bucket", "AzureBlob".
+ :paramtype source_kind: str or
+ ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.SourceKindType
+ :keyword suspend: Whether this configuration should suspend its reconciliation of its
+ kustomizations and sources.
+ :paramtype suspend: bool + :keyword git_repository: Parameters to reconcile to the GitRepository source kind type. + :paramtype git_repository: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.GitRepositoryDefinition + :keyword bucket: Parameters to reconcile to the Bucket source kind type. + :paramtype bucket: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.BucketDefinition + :keyword azure_blob: Parameters to reconcile to the AzureBlob source kind type. + :paramtype azure_blob: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.AzureBlobDefinition + :keyword kustomizations: Array of kustomizations used to reconcile the artifact pulled by the + source type on the cluster. + :paramtype kustomizations: dict[str, + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.KustomizationDefinition] + :keyword configuration_protected_settings: Key-value pairs of protected configuration settings + for the configuration. + :paramtype configuration_protected_settings: dict[str, str] + """ + super(FluxConfiguration, self).__init__(**kwargs) + self.system_data = None + self.scope = scope + self.namespace = namespace + self.source_kind = source_kind + self.suspend = suspend + self.git_repository = git_repository + self.bucket = bucket + self.azure_blob = azure_blob + self.kustomizations = kustomizations + self.configuration_protected_settings = configuration_protected_settings + self.statuses = None + self.repository_public_key = None + self.source_synced_commit_id = None + self.source_updated_at = None + self.status_updated_at = None + self.compliance_state = None + self.provisioning_state = None + self.error_message = None + + +class FluxConfigurationPatch(msrest.serialization.Model): + """The Flux Configuration Patch Request object. + + :ivar source_kind: Source Kind to pull the configuration data from. Known values are: + "GitRepository", "Bucket", "AzureBlob". + :vartype source_kind: str or + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.SourceKindType + :ivar suspend: Whether this configuration should suspend its reconciliation of its + kustomizations and sources. + :vartype suspend: bool + :ivar git_repository: Parameters to reconcile to the GitRepository source kind type. + :vartype git_repository: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.GitRepositoryPatchDefinition + :ivar bucket: Parameters to reconcile to the Bucket source kind type. + :vartype bucket: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.BucketPatchDefinition + :ivar azure_blob: Parameters to reconcile to the AzureBlob source kind type. + :vartype azure_blob: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.AzureBlobPatchDefinition + :ivar kustomizations: Array of kustomizations used to reconcile the artifact pulled by the + source type on the cluster. + :vartype kustomizations: dict[str, + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.KustomizationPatchDefinition] + :ivar configuration_protected_settings: Key-value pairs of protected configuration settings for + the configuration. 
+ :vartype configuration_protected_settings: dict[str, str] + """ + + _attribute_map = { + 'source_kind': {'key': 'properties.sourceKind', 'type': 'str'}, + 'suspend': {'key': 'properties.suspend', 'type': 'bool'}, + 'git_repository': {'key': 'properties.gitRepository', 'type': 'GitRepositoryPatchDefinition'}, + 'bucket': {'key': 'properties.bucket', 'type': 'BucketPatchDefinition'}, + 'azure_blob': {'key': 'properties.azureBlob', 'type': 'AzureBlobPatchDefinition'}, + 'kustomizations': {'key': 'properties.kustomizations', 'type': '{KustomizationPatchDefinition}'}, + 'configuration_protected_settings': {'key': 'properties.configurationProtectedSettings', 'type': '{str}'}, + } + + def __init__( + self, + *, + source_kind: Optional[Union[str, "_models.SourceKindType"]] = None, + suspend: Optional[bool] = None, + git_repository: Optional["_models.GitRepositoryPatchDefinition"] = None, + bucket: Optional["_models.BucketPatchDefinition"] = None, + azure_blob: Optional["_models.AzureBlobPatchDefinition"] = None, + kustomizations: Optional[Dict[str, "_models.KustomizationPatchDefinition"]] = None, + configuration_protected_settings: Optional[Dict[str, str]] = None, + **kwargs + ): + """ + :keyword source_kind: Source Kind to pull the configuration data from. Known values are: + "GitRepository", "Bucket", "AzureBlob". + :paramtype source_kind: str or + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.SourceKindType + :keyword suspend: Whether this configuration should suspend its reconciliation of its + kustomizations and sources. + :paramtype suspend: bool + :keyword git_repository: Parameters to reconcile to the GitRepository source kind type. + :paramtype git_repository: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.GitRepositoryPatchDefinition + :keyword bucket: Parameters to reconcile to the Bucket source kind type. + :paramtype bucket: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.BucketPatchDefinition + :keyword azure_blob: Parameters to reconcile to the AzureBlob source kind type. + :paramtype azure_blob: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.AzureBlobPatchDefinition + :keyword kustomizations: Array of kustomizations used to reconcile the artifact pulled by the + source type on the cluster. + :paramtype kustomizations: dict[str, + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.KustomizationPatchDefinition] + :keyword configuration_protected_settings: Key-value pairs of protected configuration settings + for the configuration. + :paramtype configuration_protected_settings: dict[str, str] + """ + super(FluxConfigurationPatch, self).__init__(**kwargs) + self.source_kind = source_kind + self.suspend = suspend + self.git_repository = git_repository + self.bucket = bucket + self.azure_blob = azure_blob + self.kustomizations = kustomizations + self.configuration_protected_settings = configuration_protected_settings + + +class FluxConfigurationsList(msrest.serialization.Model): + """Result of the request to list Flux Configurations. It contains a list of FluxConfiguration objects and a URL link to get the next set of results. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of Flux Configurations within a Kubernetes cluster. + :vartype value: list[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.FluxConfiguration] + :ivar next_link: URL to get the next set of configuration objects, if any. 
+ :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[FluxConfiguration]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(FluxConfigurationsList, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class GitRepositoryDefinition(msrest.serialization.Model): + """Parameters to reconcile to the GitRepository source kind type. + + :ivar url: The URL to sync for the flux configuration git repository. + :vartype url: str + :ivar timeout_in_seconds: The maximum time to attempt to reconcile the cluster git repository + source with the remote. + :vartype timeout_in_seconds: long + :ivar sync_interval_in_seconds: The interval at which to re-reconcile the cluster git + repository source with the remote. + :vartype sync_interval_in_seconds: long + :ivar repository_ref: The source reference for the GitRepository object. + :vartype repository_ref: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.RepositoryRefDefinition + :ivar ssh_known_hosts: Base64-encoded known_hosts value containing public SSH keys required to + access private git repositories over SSH. + :vartype ssh_known_hosts: str + :ivar https_user: Plaintext HTTPS username used to access private git repositories over HTTPS. + :vartype https_user: str + :ivar https_ca_cert: Base64-encoded HTTPS certificate authority contents used to access git + private git repositories over HTTPS. + :vartype https_ca_cert: str + :ivar local_auth_ref: Name of a local secret on the Kubernetes cluster to use as the + authentication secret rather than the managed or user-provided configuration secrets. + :vartype local_auth_ref: str + """ + + _attribute_map = { + 'url': {'key': 'url', 'type': 'str'}, + 'timeout_in_seconds': {'key': 'timeoutInSeconds', 'type': 'long'}, + 'sync_interval_in_seconds': {'key': 'syncIntervalInSeconds', 'type': 'long'}, + 'repository_ref': {'key': 'repositoryRef', 'type': 'RepositoryRefDefinition'}, + 'ssh_known_hosts': {'key': 'sshKnownHosts', 'type': 'str'}, + 'https_user': {'key': 'httpsUser', 'type': 'str'}, + 'https_ca_cert': {'key': 'httpsCACert', 'type': 'str'}, + 'local_auth_ref': {'key': 'localAuthRef', 'type': 'str'}, + } + + def __init__( + self, + *, + url: Optional[str] = None, + timeout_in_seconds: Optional[int] = 600, + sync_interval_in_seconds: Optional[int] = 600, + repository_ref: Optional["_models.RepositoryRefDefinition"] = None, + ssh_known_hosts: Optional[str] = None, + https_user: Optional[str] = None, + https_ca_cert: Optional[str] = None, + local_auth_ref: Optional[str] = None, + **kwargs + ): + """ + :keyword url: The URL to sync for the flux configuration git repository. + :paramtype url: str + :keyword timeout_in_seconds: The maximum time to attempt to reconcile the cluster git + repository source with the remote. + :paramtype timeout_in_seconds: long + :keyword sync_interval_in_seconds: The interval at which to re-reconcile the cluster git + repository source with the remote. + :paramtype sync_interval_in_seconds: long + :keyword repository_ref: The source reference for the GitRepository object. + :paramtype repository_ref: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.RepositoryRefDefinition + :keyword ssh_known_hosts: Base64-encoded known_hosts value containing public SSH keys required + to access private git repositories over SSH. 
+ :paramtype ssh_known_hosts: str + :keyword https_user: Plaintext HTTPS username used to access private git repositories over + HTTPS. + :paramtype https_user: str + :keyword https_ca_cert: Base64-encoded HTTPS certificate authority contents used to access git + private git repositories over HTTPS. + :paramtype https_ca_cert: str + :keyword local_auth_ref: Name of a local secret on the Kubernetes cluster to use as the + authentication secret rather than the managed or user-provided configuration secrets. + :paramtype local_auth_ref: str + """ + super(GitRepositoryDefinition, self).__init__(**kwargs) + self.url = url + self.timeout_in_seconds = timeout_in_seconds + self.sync_interval_in_seconds = sync_interval_in_seconds + self.repository_ref = repository_ref + self.ssh_known_hosts = ssh_known_hosts + self.https_user = https_user + self.https_ca_cert = https_ca_cert + self.local_auth_ref = local_auth_ref + + +class GitRepositoryPatchDefinition(msrest.serialization.Model): + """Parameters to reconcile to the GitRepository source kind type. + + :ivar url: The URL to sync for the flux configuration git repository. + :vartype url: str + :ivar timeout_in_seconds: The maximum time to attempt to reconcile the cluster git repository + source with the remote. + :vartype timeout_in_seconds: long + :ivar sync_interval_in_seconds: The interval at which to re-reconcile the cluster git + repository source with the remote. + :vartype sync_interval_in_seconds: long + :ivar repository_ref: The source reference for the GitRepository object. + :vartype repository_ref: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.RepositoryRefDefinition + :ivar ssh_known_hosts: Base64-encoded known_hosts value containing public SSH keys required to + access private git repositories over SSH. + :vartype ssh_known_hosts: str + :ivar https_user: Plaintext HTTPS username used to access private git repositories over HTTPS. + :vartype https_user: str + :ivar https_ca_cert: Base64-encoded HTTPS certificate authority contents used to access git + private git repositories over HTTPS. + :vartype https_ca_cert: str + :ivar local_auth_ref: Name of a local secret on the Kubernetes cluster to use as the + authentication secret rather than the managed or user-provided configuration secrets. + :vartype local_auth_ref: str + """ + + _attribute_map = { + 'url': {'key': 'url', 'type': 'str'}, + 'timeout_in_seconds': {'key': 'timeoutInSeconds', 'type': 'long'}, + 'sync_interval_in_seconds': {'key': 'syncIntervalInSeconds', 'type': 'long'}, + 'repository_ref': {'key': 'repositoryRef', 'type': 'RepositoryRefDefinition'}, + 'ssh_known_hosts': {'key': 'sshKnownHosts', 'type': 'str'}, + 'https_user': {'key': 'httpsUser', 'type': 'str'}, + 'https_ca_cert': {'key': 'httpsCACert', 'type': 'str'}, + 'local_auth_ref': {'key': 'localAuthRef', 'type': 'str'}, + } + + def __init__( + self, + *, + url: Optional[str] = None, + timeout_in_seconds: Optional[int] = None, + sync_interval_in_seconds: Optional[int] = None, + repository_ref: Optional["_models.RepositoryRefDefinition"] = None, + ssh_known_hosts: Optional[str] = None, + https_user: Optional[str] = None, + https_ca_cert: Optional[str] = None, + local_auth_ref: Optional[str] = None, + **kwargs + ): + """ + :keyword url: The URL to sync for the flux configuration git repository. + :paramtype url: str + :keyword timeout_in_seconds: The maximum time to attempt to reconcile the cluster git + repository source with the remote. 
+ :paramtype timeout_in_seconds: long + :keyword sync_interval_in_seconds: The interval at which to re-reconcile the cluster git + repository source with the remote. + :paramtype sync_interval_in_seconds: long + :keyword repository_ref: The source reference for the GitRepository object. + :paramtype repository_ref: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.RepositoryRefDefinition + :keyword ssh_known_hosts: Base64-encoded known_hosts value containing public SSH keys required + to access private git repositories over SSH. + :paramtype ssh_known_hosts: str + :keyword https_user: Plaintext HTTPS username used to access private git repositories over + HTTPS. + :paramtype https_user: str + :keyword https_ca_cert: Base64-encoded HTTPS certificate authority contents used to access git + private git repositories over HTTPS. + :paramtype https_ca_cert: str + :keyword local_auth_ref: Name of a local secret on the Kubernetes cluster to use as the + authentication secret rather than the managed or user-provided configuration secrets. + :paramtype local_auth_ref: str + """ + super(GitRepositoryPatchDefinition, self).__init__(**kwargs) + self.url = url + self.timeout_in_seconds = timeout_in_seconds + self.sync_interval_in_seconds = sync_interval_in_seconds + self.repository_ref = repository_ref + self.ssh_known_hosts = ssh_known_hosts + self.https_user = https_user + self.https_ca_cert = https_ca_cert + self.local_auth_ref = local_auth_ref + + +class HelmOperatorProperties(msrest.serialization.Model): + """Properties for Helm operator. + + :ivar chart_version: Version of the operator Helm chart. + :vartype chart_version: str + :ivar chart_values: Values override for the operator Helm chart. + :vartype chart_values: str + """ + + _attribute_map = { + 'chart_version': {'key': 'chartVersion', 'type': 'str'}, + 'chart_values': {'key': 'chartValues', 'type': 'str'}, + } + + def __init__( + self, + *, + chart_version: Optional[str] = None, + chart_values: Optional[str] = None, + **kwargs + ): + """ + :keyword chart_version: Version of the operator Helm chart. + :paramtype chart_version: str + :keyword chart_values: Values override for the operator Helm chart. + :paramtype chart_values: str + """ + super(HelmOperatorProperties, self).__init__(**kwargs) + self.chart_version = chart_version + self.chart_values = chart_values + + +class HelmReleasePropertiesDefinition(msrest.serialization.Model): + """Properties for HelmRelease objects. + + :ivar last_revision_applied: The revision number of the last released object change. + :vartype last_revision_applied: long + :ivar helm_chart_ref: The reference to the HelmChart object used as the source to this + HelmRelease. + :vartype helm_chart_ref: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ObjectReferenceDefinition + :ivar failure_count: Total number of times that the HelmRelease failed to install or upgrade. + :vartype failure_count: long + :ivar install_failure_count: Number of times that the HelmRelease failed to install. + :vartype install_failure_count: long + :ivar upgrade_failure_count: Number of times that the HelmRelease failed to upgrade. 
+ :vartype upgrade_failure_count: long + """ + + _attribute_map = { + 'last_revision_applied': {'key': 'lastRevisionApplied', 'type': 'long'}, + 'helm_chart_ref': {'key': 'helmChartRef', 'type': 'ObjectReferenceDefinition'}, + 'failure_count': {'key': 'failureCount', 'type': 'long'}, + 'install_failure_count': {'key': 'installFailureCount', 'type': 'long'}, + 'upgrade_failure_count': {'key': 'upgradeFailureCount', 'type': 'long'}, + } + + def __init__( + self, + *, + last_revision_applied: Optional[int] = None, + helm_chart_ref: Optional["_models.ObjectReferenceDefinition"] = None, + failure_count: Optional[int] = None, + install_failure_count: Optional[int] = None, + upgrade_failure_count: Optional[int] = None, + **kwargs + ): + """ + :keyword last_revision_applied: The revision number of the last released object change. + :paramtype last_revision_applied: long + :keyword helm_chart_ref: The reference to the HelmChart object used as the source to this + HelmRelease. + :paramtype helm_chart_ref: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ObjectReferenceDefinition + :keyword failure_count: Total number of times that the HelmRelease failed to install or + upgrade. + :paramtype failure_count: long + :keyword install_failure_count: Number of times that the HelmRelease failed to install. + :paramtype install_failure_count: long + :keyword upgrade_failure_count: Number of times that the HelmRelease failed to upgrade. + :paramtype upgrade_failure_count: long + """ + super(HelmReleasePropertiesDefinition, self).__init__(**kwargs) + self.last_revision_applied = last_revision_applied + self.helm_chart_ref = helm_chart_ref + self.failure_count = failure_count + self.install_failure_count = install_failure_count + self.upgrade_failure_count = upgrade_failure_count + + +class Identity(msrest.serialization.Model): + """Identity for the resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: The principal ID of resource identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of resource. + :vartype tenant_id: str + :ivar type: The identity type. The only acceptable values to pass in are None and + "SystemAssigned". The default value is None. + :vartype type: str + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Optional[str] = None, + **kwargs + ): + """ + :keyword type: The identity type. The only acceptable values to pass in are None and + "SystemAssigned". The default value is None. + :paramtype type: str + """ + super(Identity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = type + + +class KustomizationDefinition(msrest.serialization.Model): + """The Kustomization defining how to reconcile the artifact pulled by the source type on the cluster. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Name of the Kustomization, matching the key in the Kustomizations object map. + :vartype name: str + :ivar path: The path in the source reference to reconcile on the cluster. + :vartype path: str + :ivar depends_on: Specifies other Kustomizations that this Kustomization depends on. 
This + Kustomization will not reconcile until all dependencies have completed their reconciliation. + :vartype depends_on: list[str] + :ivar timeout_in_seconds: The maximum time to attempt to reconcile the Kustomization on the + cluster. + :vartype timeout_in_seconds: long + :ivar sync_interval_in_seconds: The interval at which to re-reconcile the Kustomization on the + cluster. + :vartype sync_interval_in_seconds: long + :ivar retry_interval_in_seconds: The interval at which to re-reconcile the Kustomization on the + cluster in the event of failure on reconciliation. + :vartype retry_interval_in_seconds: long + :ivar prune: Enable/disable garbage collections of Kubernetes objects created by this + Kustomization. + :vartype prune: bool + :ivar force: Enable/disable re-creating Kubernetes resources on the cluster when patching fails + due to an immutable field change. + :vartype force: bool + """ + + _validation = { + 'name': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'path': {'key': 'path', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[str]'}, + 'timeout_in_seconds': {'key': 'timeoutInSeconds', 'type': 'long'}, + 'sync_interval_in_seconds': {'key': 'syncIntervalInSeconds', 'type': 'long'}, + 'retry_interval_in_seconds': {'key': 'retryIntervalInSeconds', 'type': 'long'}, + 'prune': {'key': 'prune', 'type': 'bool'}, + 'force': {'key': 'force', 'type': 'bool'}, + } + + def __init__( + self, + *, + path: Optional[str] = "", + depends_on: Optional[List[str]] = None, + timeout_in_seconds: Optional[int] = 600, + sync_interval_in_seconds: Optional[int] = 600, + retry_interval_in_seconds: Optional[int] = None, + prune: Optional[bool] = False, + force: Optional[bool] = False, + **kwargs + ): + """ + :keyword path: The path in the source reference to reconcile on the cluster. + :paramtype path: str + :keyword depends_on: Specifies other Kustomizations that this Kustomization depends on. This + Kustomization will not reconcile until all dependencies have completed their reconciliation. + :paramtype depends_on: list[str] + :keyword timeout_in_seconds: The maximum time to attempt to reconcile the Kustomization on the + cluster. + :paramtype timeout_in_seconds: long + :keyword sync_interval_in_seconds: The interval at which to re-reconcile the Kustomization on + the cluster. + :paramtype sync_interval_in_seconds: long + :keyword retry_interval_in_seconds: The interval at which to re-reconcile the Kustomization on + the cluster in the event of failure on reconciliation. + :paramtype retry_interval_in_seconds: long + :keyword prune: Enable/disable garbage collections of Kubernetes objects created by this + Kustomization. + :paramtype prune: bool + :keyword force: Enable/disable re-creating Kubernetes resources on the cluster when patching + fails due to an immutable field change. + :paramtype force: bool + """ + super(KustomizationDefinition, self).__init__(**kwargs) + self.name = None + self.path = path + self.depends_on = depends_on + self.timeout_in_seconds = timeout_in_seconds + self.sync_interval_in_seconds = sync_interval_in_seconds + self.retry_interval_in_seconds = retry_interval_in_seconds + self.prune = prune + self.force = force + + +class KustomizationPatchDefinition(msrest.serialization.Model): + """The Kustomization defining how to reconcile the artifact pulled by the source type on the cluster. + + :ivar path: The path in the source reference to reconcile on the cluster. 
+ :vartype path: str + :ivar depends_on: Specifies other Kustomizations that this Kustomization depends on. This + Kustomization will not reconcile until all dependencies have completed their reconciliation. + :vartype depends_on: list[str] + :ivar timeout_in_seconds: The maximum time to attempt to reconcile the Kustomization on the + cluster. + :vartype timeout_in_seconds: long + :ivar sync_interval_in_seconds: The interval at which to re-reconcile the Kustomization on the + cluster. + :vartype sync_interval_in_seconds: long + :ivar retry_interval_in_seconds: The interval at which to re-reconcile the Kustomization on the + cluster in the event of failure on reconciliation. + :vartype retry_interval_in_seconds: long + :ivar prune: Enable/disable garbage collections of Kubernetes objects created by this + Kustomization. + :vartype prune: bool + :ivar force: Enable/disable re-creating Kubernetes resources on the cluster when patching fails + due to an immutable field change. + :vartype force: bool + """ + + _attribute_map = { + 'path': {'key': 'path', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[str]'}, + 'timeout_in_seconds': {'key': 'timeoutInSeconds', 'type': 'long'}, + 'sync_interval_in_seconds': {'key': 'syncIntervalInSeconds', 'type': 'long'}, + 'retry_interval_in_seconds': {'key': 'retryIntervalInSeconds', 'type': 'long'}, + 'prune': {'key': 'prune', 'type': 'bool'}, + 'force': {'key': 'force', 'type': 'bool'}, + } + + def __init__( + self, + *, + path: Optional[str] = None, + depends_on: Optional[List[str]] = None, + timeout_in_seconds: Optional[int] = None, + sync_interval_in_seconds: Optional[int] = None, + retry_interval_in_seconds: Optional[int] = None, + prune: Optional[bool] = None, + force: Optional[bool] = None, + **kwargs + ): + """ + :keyword path: The path in the source reference to reconcile on the cluster. + :paramtype path: str + :keyword depends_on: Specifies other Kustomizations that this Kustomization depends on. This + Kustomization will not reconcile until all dependencies have completed their reconciliation. + :paramtype depends_on: list[str] + :keyword timeout_in_seconds: The maximum time to attempt to reconcile the Kustomization on the + cluster. + :paramtype timeout_in_seconds: long + :keyword sync_interval_in_seconds: The interval at which to re-reconcile the Kustomization on + the cluster. + :paramtype sync_interval_in_seconds: long + :keyword retry_interval_in_seconds: The interval at which to re-reconcile the Kustomization on + the cluster in the event of failure on reconciliation. + :paramtype retry_interval_in_seconds: long + :keyword prune: Enable/disable garbage collections of Kubernetes objects created by this + Kustomization. + :paramtype prune: bool + :keyword force: Enable/disable re-creating Kubernetes resources on the cluster when patching + fails due to an immutable field change. + :paramtype force: bool + """ + super(KustomizationPatchDefinition, self).__init__(**kwargs) + self.path = path + self.depends_on = depends_on + self.timeout_in_seconds = timeout_in_seconds + self.sync_interval_in_seconds = sync_interval_in_seconds + self.retry_interval_in_seconds = retry_interval_in_seconds + self.prune = prune + self.force = force + + +class ManagedIdentityDefinition(msrest.serialization.Model): + """Parameters to authenticate using a Managed Identity. + + :ivar client_id: The client Id for authenticating a Managed Identity. 
+ :vartype client_id: str + """ + + _attribute_map = { + 'client_id': {'key': 'clientId', 'type': 'str'}, + } + + def __init__( + self, + *, + client_id: Optional[str] = None, + **kwargs + ): + """ + :keyword client_id: The client Id for authenticating a Managed Identity. + :paramtype client_id: str + """ + super(ManagedIdentityDefinition, self).__init__(**kwargs) + self.client_id = client_id + + +class ManagedIdentityPatchDefinition(msrest.serialization.Model): + """Parameters to authenticate using a Managed Identity. + + :ivar client_id: The client Id for authenticating a Managed Identity. + :vartype client_id: str + """ + + _attribute_map = { + 'client_id': {'key': 'clientId', 'type': 'str'}, + } + + def __init__( + self, + *, + client_id: Optional[str] = None, + **kwargs + ): + """ + :keyword client_id: The client Id for authenticating a Managed Identity. + :paramtype client_id: str + """ + super(ManagedIdentityPatchDefinition, self).__init__(**kwargs) + self.client_id = client_id + + +class ObjectReferenceDefinition(msrest.serialization.Model): + """Object reference to a Kubernetes object on a cluster. + + :ivar name: Name of the object. + :vartype name: str + :ivar namespace: Namespace of the object. + :vartype namespace: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'namespace': {'key': 'namespace', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + namespace: Optional[str] = None, + **kwargs + ): + """ + :keyword name: Name of the object. + :paramtype name: str + :keyword namespace: Namespace of the object. + :paramtype namespace: str + """ + super(ObjectReferenceDefinition, self).__init__(**kwargs) + self.name = name + self.namespace = namespace + + +class ObjectStatusConditionDefinition(msrest.serialization.Model): + """Status condition of Kubernetes object. + + :ivar last_transition_time: Last time this status condition has changed. + :vartype last_transition_time: ~datetime.datetime + :ivar message: A more verbose description of the object status condition. + :vartype message: str + :ivar reason: Reason for the specified status condition type status. + :vartype reason: str + :ivar status: Status of the Kubernetes object condition type. + :vartype status: str + :ivar type: Object status condition type for this object. + :vartype type: str + """ + + _attribute_map = { + 'last_transition_time': {'key': 'lastTransitionTime', 'type': 'iso-8601'}, + 'message': {'key': 'message', 'type': 'str'}, + 'reason': {'key': 'reason', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + *, + last_transition_time: Optional[datetime.datetime] = None, + message: Optional[str] = None, + reason: Optional[str] = None, + status: Optional[str] = None, + type: Optional[str] = None, + **kwargs + ): + """ + :keyword last_transition_time: Last time this status condition has changed. + :paramtype last_transition_time: ~datetime.datetime + :keyword message: A more verbose description of the object status condition. + :paramtype message: str + :keyword reason: Reason for the specified status condition type status. + :paramtype reason: str + :keyword status: Status of the Kubernetes object condition type. + :paramtype status: str + :keyword type: Object status condition type for this object. 
+ :paramtype type: str + """ + super(ObjectStatusConditionDefinition, self).__init__(**kwargs) + self.last_transition_time = last_transition_time + self.message = message + self.reason = reason + self.status = status + self.type = type + + +class ObjectStatusDefinition(msrest.serialization.Model): + """Statuses of objects deployed by the user-specified kustomizations from the git repository. + + :ivar name: Name of the applied object. + :vartype name: str + :ivar namespace: Namespace of the applied object. + :vartype namespace: str + :ivar kind: Kind of the applied object. + :vartype kind: str + :ivar compliance_state: Compliance state of the applied object showing whether the applied + object has come into a ready state on the cluster. Known values are: "Compliant", + "Non-Compliant", "Pending", "Suspended", "Unknown". Default value: "Unknown". + :vartype compliance_state: str or + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.FluxComplianceState + :ivar applied_by: Object reference to the Kustomization that applied this object. + :vartype applied_by: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ObjectReferenceDefinition + :ivar status_conditions: List of Kubernetes object status conditions present on the cluster. + :vartype status_conditions: + list[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ObjectStatusConditionDefinition] + :ivar helm_release_properties: Additional properties that are provided from objects of the + HelmRelease kind. + :vartype helm_release_properties: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.HelmReleasePropertiesDefinition + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'namespace': {'key': 'namespace', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'compliance_state': {'key': 'complianceState', 'type': 'str'}, + 'applied_by': {'key': 'appliedBy', 'type': 'ObjectReferenceDefinition'}, + 'status_conditions': {'key': 'statusConditions', 'type': '[ObjectStatusConditionDefinition]'}, + 'helm_release_properties': {'key': 'helmReleaseProperties', 'type': 'HelmReleasePropertiesDefinition'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + namespace: Optional[str] = None, + kind: Optional[str] = None, + compliance_state: Optional[Union[str, "_models.FluxComplianceState"]] = "Unknown", + applied_by: Optional["_models.ObjectReferenceDefinition"] = None, + status_conditions: Optional[List["_models.ObjectStatusConditionDefinition"]] = None, + helm_release_properties: Optional["_models.HelmReleasePropertiesDefinition"] = None, + **kwargs + ): + """ + :keyword name: Name of the applied object. + :paramtype name: str + :keyword namespace: Namespace of the applied object. + :paramtype namespace: str + :keyword kind: Kind of the applied object. + :paramtype kind: str + :keyword compliance_state: Compliance state of the applied object showing whether the applied + object has come into a ready state on the cluster. Known values are: "Compliant", + "Non-Compliant", "Pending", "Suspended", "Unknown". Default value: "Unknown". + :paramtype compliance_state: str or + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.FluxComplianceState + :keyword applied_by: Object reference to the Kustomization that applied this object. + :paramtype applied_by: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ObjectReferenceDefinition + :keyword status_conditions: List of Kubernetes object status conditions present on the cluster. 
+ :paramtype status_conditions: + list[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ObjectStatusConditionDefinition] + :keyword helm_release_properties: Additional properties that are provided from objects of the + HelmRelease kind. + :paramtype helm_release_properties: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.HelmReleasePropertiesDefinition + """ + super(ObjectStatusDefinition, self).__init__(**kwargs) + self.name = name + self.namespace = namespace + self.kind = kind + self.compliance_state = compliance_state + self.applied_by = applied_by + self.status_conditions = status_conditions + self.helm_release_properties = helm_release_properties + + +class OperationStatusList(msrest.serialization.Model): + """The async operations in progress, in the cluster. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of async operations in progress, in the cluster. + :vartype value: + list[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.OperationStatusResult] + :ivar next_link: URL to get the next set of Operation Result objects, if any. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[OperationStatusResult]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(OperationStatusList, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class OperationStatusResult(msrest.serialization.Model): + """The current status of an async operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified ID for the async operation. + :vartype id: str + :ivar name: Name of the async operation. + :vartype name: str + :ivar status: Required. Operation status. + :vartype status: str + :ivar properties: Additional information, if available. + :vartype properties: dict[str, str] + :ivar error: If present, details of the operation error. + :vartype error: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ErrorDetail + """ + + _validation = { + 'status': {'required': True}, + 'error': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'error': {'key': 'error', 'type': 'ErrorDetail'}, + } + + def __init__( + self, + *, + status: str, + id: Optional[str] = None, + name: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + **kwargs + ): + """ + :keyword id: Fully qualified ID for the async operation. + :paramtype id: str + :keyword name: Name of the async operation. + :paramtype name: str + :keyword status: Required. Operation status. + :paramtype status: str + :keyword properties: Additional information, if available. + :paramtype properties: dict[str, str] + """ + super(OperationStatusResult, self).__init__(**kwargs) + self.id = id + self.name = name + self.status = status + self.properties = properties + self.error = None + + +class PatchExtension(msrest.serialization.Model): + """The Extension Patch Request object. + + :ivar auto_upgrade_minor_version: Flag to note if this extension participates in auto upgrade + of minor version, or not. 
+ :vartype auto_upgrade_minor_version: bool + :ivar release_train: ReleaseTrain this extension participates in for auto-upgrade (e.g. Stable, + Preview, etc.) - only if autoUpgradeMinorVersion is 'true'. + :vartype release_train: str + :ivar version: Version of the extension for this extension, if it is 'pinned' to a specific + version. autoUpgradeMinorVersion must be 'false'. + :vartype version: str + :ivar configuration_settings: Configuration settings, as name-value pairs for configuring this + extension. + :vartype configuration_settings: dict[str, str] + :ivar configuration_protected_settings: Configuration settings that are sensitive, as + name-value pairs for configuring this extension. + :vartype configuration_protected_settings: dict[str, str] + """ + + _attribute_map = { + 'auto_upgrade_minor_version': {'key': 'properties.autoUpgradeMinorVersion', 'type': 'bool'}, + 'release_train': {'key': 'properties.releaseTrain', 'type': 'str'}, + 'version': {'key': 'properties.version', 'type': 'str'}, + 'configuration_settings': {'key': 'properties.configurationSettings', 'type': '{str}'}, + 'configuration_protected_settings': {'key': 'properties.configurationProtectedSettings', 'type': '{str}'}, + } + + def __init__( + self, + *, + auto_upgrade_minor_version: Optional[bool] = True, + release_train: Optional[str] = "Stable", + version: Optional[str] = None, + configuration_settings: Optional[Dict[str, str]] = None, + configuration_protected_settings: Optional[Dict[str, str]] = None, + **kwargs + ): + """ + :keyword auto_upgrade_minor_version: Flag to note if this extension participates in auto + upgrade of minor version, or not. + :paramtype auto_upgrade_minor_version: bool + :keyword release_train: ReleaseTrain this extension participates in for auto-upgrade (e.g. + Stable, Preview, etc.) - only if autoUpgradeMinorVersion is 'true'. + :paramtype release_train: str + :keyword version: Version of the extension for this extension, if it is 'pinned' to a specific + version. autoUpgradeMinorVersion must be 'false'. + :paramtype version: str + :keyword configuration_settings: Configuration settings, as name-value pairs for configuring + this extension. + :paramtype configuration_settings: dict[str, str] + :keyword configuration_protected_settings: Configuration settings that are sensitive, as + name-value pairs for configuring this extension. + :paramtype configuration_protected_settings: dict[str, str] + """ + super(PatchExtension, self).__init__(**kwargs) + self.auto_upgrade_minor_version = auto_upgrade_minor_version + self.release_train = release_train + self.version = version + self.configuration_settings = configuration_settings + self.configuration_protected_settings = configuration_protected_settings + + +class RepositoryRefDefinition(msrest.serialization.Model): + """The source reference for the GitRepository object. + + :ivar branch: The git repository branch name to checkout. + :vartype branch: str + :ivar tag: The git repository tag name to checkout. This takes precedence over branch. + :vartype tag: str + :ivar semver: The semver range used to match against git repository tags. This takes precedence + over tag. + :vartype semver: str + :ivar commit: The commit SHA to checkout. This value must be combined with the branch name to + be valid. This takes precedence over semver. 
+ :vartype commit: str + """ + + _attribute_map = { + 'branch': {'key': 'branch', 'type': 'str'}, + 'tag': {'key': 'tag', 'type': 'str'}, + 'semver': {'key': 'semver', 'type': 'str'}, + 'commit': {'key': 'commit', 'type': 'str'}, + } + + def __init__( + self, + *, + branch: Optional[str] = None, + tag: Optional[str] = None, + semver: Optional[str] = None, + commit: Optional[str] = None, + **kwargs + ): + """ + :keyword branch: The git repository branch name to checkout. + :paramtype branch: str + :keyword tag: The git repository tag name to checkout. This takes precedence over branch. + :paramtype tag: str + :keyword semver: The semver range used to match against git repository tags. This takes + precedence over tag. + :paramtype semver: str + :keyword commit: The commit SHA to checkout. This value must be combined with the branch name + to be valid. This takes precedence over semver. + :paramtype commit: str + """ + super(RepositoryRefDefinition, self).__init__(**kwargs) + self.branch = branch + self.tag = tag + self.semver = semver + self.commit = commit + + +class ResourceProviderOperation(msrest.serialization.Model): + """Supported operation of this resource provider. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Operation name, in format of {provider}/{resource}/{operation}. + :vartype name: str + :ivar display: Display metadata associated with the operation. + :vartype display: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ResourceProviderOperationDisplay + :ivar is_data_action: The flag that indicates whether the operation applies to data plane. + :vartype is_data_action: bool + :ivar origin: Origin of the operation. + :vartype origin: str + """ + + _validation = { + 'is_data_action': {'readonly': True}, + 'origin': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'ResourceProviderOperationDisplay'}, + 'is_data_action': {'key': 'isDataAction', 'type': 'bool'}, + 'origin': {'key': 'origin', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + display: Optional["_models.ResourceProviderOperationDisplay"] = None, + **kwargs + ): + """ + :keyword name: Operation name, in format of {provider}/{resource}/{operation}. + :paramtype name: str + :keyword display: Display metadata associated with the operation. + :paramtype display: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ResourceProviderOperationDisplay + """ + super(ResourceProviderOperation, self).__init__(**kwargs) + self.name = name + self.display = display + self.is_data_action = None + self.origin = None + + +class ResourceProviderOperationDisplay(msrest.serialization.Model): + """Display metadata associated with the operation. + + :ivar provider: Resource provider: Microsoft KubernetesConfiguration. + :vartype provider: str + :ivar resource: Resource on which the operation is performed. + :vartype resource: str + :ivar operation: Type of operation: get, read, delete, etc. + :vartype operation: str + :ivar description: Description of this operation. 
+ :vartype description: str + """ + + _attribute_map = { + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + *, + provider: Optional[str] = None, + resource: Optional[str] = None, + operation: Optional[str] = None, + description: Optional[str] = None, + **kwargs + ): + """ + :keyword provider: Resource provider: Microsoft KubernetesConfiguration. + :paramtype provider: str + :keyword resource: Resource on which the operation is performed. + :paramtype resource: str + :keyword operation: Type of operation: get, read, delete, etc. + :paramtype operation: str + :keyword description: Description of this operation. + :paramtype description: str + """ + super(ResourceProviderOperationDisplay, self).__init__(**kwargs) + self.provider = provider + self.resource = resource + self.operation = operation + self.description = description + + +class ResourceProviderOperationList(msrest.serialization.Model): + """Result of the request to list operations. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of operations supported by this resource provider. + :vartype value: + list[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ResourceProviderOperation] + :ivar next_link: URL to the next set of results, if any. + :vartype next_link: str + """ + + _validation = { + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ResourceProviderOperation]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["_models.ResourceProviderOperation"]] = None, + **kwargs + ): + """ + :keyword value: List of operations supported by this resource provider. + :paramtype value: + list[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ResourceProviderOperation] + """ + super(ResourceProviderOperationList, self).__init__(**kwargs) + self.value = value + self.next_link = None + + +class Scope(msrest.serialization.Model): + """Scope of the extension. It can be either Cluster or Namespace; but not both. + + :ivar cluster: Specifies that the scope of the extension is Cluster. + :vartype cluster: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ScopeCluster + :ivar namespace: Specifies that the scope of the extension is Namespace. + :vartype namespace: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ScopeNamespace + """ + + _attribute_map = { + 'cluster': {'key': 'cluster', 'type': 'ScopeCluster'}, + 'namespace': {'key': 'namespace', 'type': 'ScopeNamespace'}, + } + + def __init__( + self, + *, + cluster: Optional["_models.ScopeCluster"] = None, + namespace: Optional["_models.ScopeNamespace"] = None, + **kwargs + ): + """ + :keyword cluster: Specifies that the scope of the extension is Cluster. + :paramtype cluster: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ScopeCluster + :keyword namespace: Specifies that the scope of the extension is Namespace. + :paramtype namespace: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ScopeNamespace + """ + super(Scope, self).__init__(**kwargs) + self.cluster = cluster + self.namespace = namespace + + +class ScopeCluster(msrest.serialization.Model): + """Specifies that the scope of the extension is Cluster. 
+
+    :ivar release_namespace: Namespace where the extension Release must be placed, for a Cluster
+     scoped extension. If this namespace does not exist, it will be created.
+    :vartype release_namespace: str
+    """
+
+    _attribute_map = {
+        'release_namespace': {'key': 'releaseNamespace', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        release_namespace: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword release_namespace: Namespace where the extension Release must be placed, for a Cluster
+         scoped extension. If this namespace does not exist, it will be created.
+        :paramtype release_namespace: str
+        """
+        super(ScopeCluster, self).__init__(**kwargs)
+        self.release_namespace = release_namespace
+
+
+class ScopeNamespace(msrest.serialization.Model):
+    """Specifies that the scope of the extension is Namespace.
+
+    :ivar target_namespace: Namespace where the extension will be created for a Namespace scoped
+     extension. If this namespace does not exist, it will be created.
+    :vartype target_namespace: str
+    """
+
+    _attribute_map = {
+        'target_namespace': {'key': 'targetNamespace', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        target_namespace: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword target_namespace: Namespace where the extension will be created for a Namespace
+         scoped extension. If this namespace does not exist, it will be created.
+        :paramtype target_namespace: str
+        """
+        super(ScopeNamespace, self).__init__(**kwargs)
+        self.target_namespace = target_namespace
+
+
+class ServicePrincipalDefinition(msrest.serialization.Model):
+    """Parameters to authenticate using Service Principal.
+
+    :ivar client_id: The client Id for authenticating a Service Principal.
+    :vartype client_id: str
+    :ivar tenant_id: The tenant Id for authenticating a Service Principal.
+    :vartype tenant_id: str
+    :ivar client_secret: The client secret for authenticating a Service Principal.
+    :vartype client_secret: str
+    :ivar client_certificate: Base64-encoded certificate used to authenticate a Service Principal.
+    :vartype client_certificate: str
+    :ivar client_certificate_password: The password for the certificate used to authenticate a
+     Service Principal.
+    :vartype client_certificate_password: str
+    :ivar client_certificate_send_chain: Specifies whether to include x5c header in client claims
+     when acquiring a token to enable subject name / issuer based authentication for the Client
+     Certificate.
+    :vartype client_certificate_send_chain: bool
+    """
+
+    _attribute_map = {
+        'client_id': {'key': 'clientId', 'type': 'str'},
+        'tenant_id': {'key': 'tenantId', 'type': 'str'},
+        'client_secret': {'key': 'clientSecret', 'type': 'str'},
+        'client_certificate': {'key': 'clientCertificate', 'type': 'str'},
+        'client_certificate_password': {'key': 'clientCertificatePassword', 'type': 'str'},
+        'client_certificate_send_chain': {'key': 'clientCertificateSendChain', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        *,
+        client_id: Optional[str] = None,
+        tenant_id: Optional[str] = None,
+        client_secret: Optional[str] = None,
+        client_certificate: Optional[str] = None,
+        client_certificate_password: Optional[str] = None,
+        client_certificate_send_chain: Optional[bool] = False,
+        **kwargs
+    ):
+        """
+        :keyword client_id: The client Id for authenticating a Service Principal.
+        :paramtype client_id: str
+        :keyword tenant_id: The tenant Id for authenticating a Service Principal.
+        :paramtype tenant_id: str
+        :keyword client_secret: The client secret for authenticating a Service Principal.
+ :paramtype client_secret: str + :keyword client_certificate: Base64-encoded certificate used to authenticate a Service + Principal. + :paramtype client_certificate: str + :keyword client_certificate_password: The password for the certificate used to authenticate a + Service Principal. + :paramtype client_certificate_password: str + :keyword client_certificate_send_chain: Specifies whether to include x5c header in client + claims when acquiring a token to enable subject name / issuer based authentication for the + Client Certificate. + :paramtype client_certificate_send_chain: bool + """ + super(ServicePrincipalDefinition, self).__init__(**kwargs) + self.client_id = client_id + self.tenant_id = tenant_id + self.client_secret = client_secret + self.client_certificate = client_certificate + self.client_certificate_password = client_certificate_password + self.client_certificate_send_chain = client_certificate_send_chain + + +class ServicePrincipalPatchDefinition(msrest.serialization.Model): + """Parameters to authenticate using Service Principal. + + :ivar client_id: The client Id for authenticating a Service Principal. + :vartype client_id: str + :ivar tenant_id: The tenant Id for authenticating a Service Principal. + :vartype tenant_id: str + :ivar client_secret: The client secret for authenticating a Service Principal. + :vartype client_secret: str + :ivar client_certificate: Base64-encoded certificate used to authenticate a Service Principal. + :vartype client_certificate: str + :ivar client_certificate_password: The password for the certificate used to authenticate a + Service Principal. + :vartype client_certificate_password: str + :ivar client_certificate_send_chain: Specifies whether to include x5c header in client claims + when acquiring a token to enable subject name / issuer based authentication for the Client + Certificate. + :vartype client_certificate_send_chain: bool + """ + + _attribute_map = { + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'client_secret': {'key': 'clientSecret', 'type': 'str'}, + 'client_certificate': {'key': 'clientCertificate', 'type': 'str'}, + 'client_certificate_password': {'key': 'clientCertificatePassword', 'type': 'str'}, + 'client_certificate_send_chain': {'key': 'clientCertificateSendChain', 'type': 'bool'}, + } + + def __init__( + self, + *, + client_id: Optional[str] = None, + tenant_id: Optional[str] = None, + client_secret: Optional[str] = None, + client_certificate: Optional[str] = None, + client_certificate_password: Optional[str] = None, + client_certificate_send_chain: Optional[bool] = None, + **kwargs + ): + """ + :keyword client_id: The client Id for authenticating a Service Principal. + :paramtype client_id: str + :keyword tenant_id: The tenant Id for authenticating a Service Principal. + :paramtype tenant_id: str + :keyword client_secret: The client secret for authenticating a Service Principal. + :paramtype client_secret: str + :keyword client_certificate: Base64-encoded certificate used to authenticate a Service + Principal. + :paramtype client_certificate: str + :keyword client_certificate_password: The password for the certificate used to authenticate a + Service Principal. + :paramtype client_certificate_password: str + :keyword client_certificate_send_chain: Specifies whether to include x5c header in client + claims when acquiring a token to enable subject name / issuer based authentication for the + Client Certificate. 
+        :paramtype client_certificate_send_chain: bool
+        """
+        super(ServicePrincipalPatchDefinition, self).__init__(**kwargs)
+        self.client_id = client_id
+        self.tenant_id = tenant_id
+        self.client_secret = client_secret
+        self.client_certificate = client_certificate
+        self.client_certificate_password = client_certificate_password
+        self.client_certificate_send_chain = client_certificate_send_chain
+
+
+class SourceControlConfiguration(ProxyResource):
+    """The SourceControl Configuration object returned in Get & Put response.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: Fully qualified resource ID for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+     "Microsoft.Storage/storageAccounts".
+    :vartype type: str
+    :ivar system_data: Top level metadata
+     https://github.com/Azure/azure-resource-manager-rpc/blob/master/v1.0/common-api-contracts.md#system-metadata-for-all-azure-resources.
+    :vartype system_data: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.SystemData
+    :ivar repository_url: Url of the SourceControl Repository.
+    :vartype repository_url: str
+    :ivar operator_namespace: The namespace to which this operator is installed. Maximum of 253
+     lower case alphanumeric characters, hyphen and period only.
+    :vartype operator_namespace: str
+    :ivar operator_instance_name: Instance name of the operator - identifying the specific
+     configuration.
+    :vartype operator_instance_name: str
+    :ivar operator_type: Type of the operator. Known values are: "Flux".
+    :vartype operator_type: str or
+     ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.OperatorType
+    :ivar operator_params: Any Parameters for the Operator instance in string format.
+    :vartype operator_params: str
+    :ivar configuration_protected_settings: Name-value pairs of protected configuration settings
+     for the configuration.
+    :vartype configuration_protected_settings: dict[str, str]
+    :ivar operator_scope: Scope at which the operator will be installed. Known values are:
+     "cluster", "namespace". Default value: "cluster".
+    :vartype operator_scope: str or
+     ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.OperatorScopeType
+    :ivar repository_public_key: Public Key associated with this SourceControl configuration
+     (either generated within the cluster or provided by the user).
+    :vartype repository_public_key: str
+    :ivar ssh_known_hosts_contents: Base64-encoded known_hosts contents containing public SSH keys
+     required to access private Git instances.
+    :vartype ssh_known_hosts_contents: str
+    :ivar enable_helm_operator: Option to enable Helm Operator for this git configuration.
+    :vartype enable_helm_operator: bool
+    :ivar helm_operator_properties: Properties for Helm operator.
+    :vartype helm_operator_properties:
+     ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.HelmOperatorProperties
+    :ivar provisioning_state: The provisioning state of the resource provider. Known values are:
+     "Accepted", "Deleting", "Running", "Succeeded", "Failed".
+    :vartype provisioning_state: str or
+     ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ProvisioningStateType
+    :ivar compliance_status: Compliance Status of the Configuration.
+    :vartype compliance_status:
+     ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ComplianceStatus
+    """
+
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'system_data': {'readonly': True},
+        'repository_public_key': {'readonly': True},
+        'provisioning_state': {'readonly': True},
+        'compliance_status': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'system_data': {'key': 'systemData', 'type': 'SystemData'},
+        'repository_url': {'key': 'properties.repositoryUrl', 'type': 'str'},
+        'operator_namespace': {'key': 'properties.operatorNamespace', 'type': 'str'},
+        'operator_instance_name': {'key': 'properties.operatorInstanceName', 'type': 'str'},
+        'operator_type': {'key': 'properties.operatorType', 'type': 'str'},
+        'operator_params': {'key': 'properties.operatorParams', 'type': 'str'},
+        'configuration_protected_settings': {'key': 'properties.configurationProtectedSettings', 'type': '{str}'},
+        'operator_scope': {'key': 'properties.operatorScope', 'type': 'str'},
+        'repository_public_key': {'key': 'properties.repositoryPublicKey', 'type': 'str'},
+        'ssh_known_hosts_contents': {'key': 'properties.sshKnownHostsContents', 'type': 'str'},
+        'enable_helm_operator': {'key': 'properties.enableHelmOperator', 'type': 'bool'},
+        'helm_operator_properties': {'key': 'properties.helmOperatorProperties', 'type': 'HelmOperatorProperties'},
+        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+        'compliance_status': {'key': 'properties.complianceStatus', 'type': 'ComplianceStatus'},
+    }
+
+    def __init__(
+        self,
+        *,
+        repository_url: Optional[str] = None,
+        operator_namespace: Optional[str] = "default",
+        operator_instance_name: Optional[str] = None,
+        operator_type: Optional[Union[str, "_models.OperatorType"]] = None,
+        operator_params: Optional[str] = None,
+        configuration_protected_settings: Optional[Dict[str, str]] = None,
+        operator_scope: Optional[Union[str, "_models.OperatorScopeType"]] = "cluster",
+        ssh_known_hosts_contents: Optional[str] = None,
+        enable_helm_operator: Optional[bool] = None,
+        helm_operator_properties: Optional["_models.HelmOperatorProperties"] = None,
+        **kwargs
+    ):
+        """
+        :keyword repository_url: Url of the SourceControl Repository.
+        :paramtype repository_url: str
+        :keyword operator_namespace: The namespace to which this operator is installed. Maximum of
+         253 lower case alphanumeric characters, hyphen and period only.
+        :paramtype operator_namespace: str
+        :keyword operator_instance_name: Instance name of the operator - identifying the specific
+         configuration.
+        :paramtype operator_instance_name: str
+        :keyword operator_type: Type of the operator. Known values are: "Flux".
+        :paramtype operator_type: str or
+         ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.OperatorType
+        :keyword operator_params: Any Parameters for the Operator instance in string format.
+        :paramtype operator_params: str
+        :keyword configuration_protected_settings: Name-value pairs of protected configuration settings
+         for the configuration.
+        :paramtype configuration_protected_settings: dict[str, str]
+        :keyword operator_scope: Scope at which the operator will be installed. Known values are:
+         "cluster", "namespace". Default value: "cluster".
+ :paramtype operator_scope: str or + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.OperatorScopeType + :keyword ssh_known_hosts_contents: Base64-encoded known_hosts contents containing public SSH + keys required to access private Git instances. + :paramtype ssh_known_hosts_contents: str + :keyword enable_helm_operator: Option to enable Helm Operator for this git configuration. + :paramtype enable_helm_operator: bool + :keyword helm_operator_properties: Properties for Helm operator. + :paramtype helm_operator_properties: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.HelmOperatorProperties + """ + super(SourceControlConfiguration, self).__init__(**kwargs) + self.system_data = None + self.repository_url = repository_url + self.operator_namespace = operator_namespace + self.operator_instance_name = operator_instance_name + self.operator_type = operator_type + self.operator_params = operator_params + self.configuration_protected_settings = configuration_protected_settings + self.operator_scope = operator_scope + self.repository_public_key = None + self.ssh_known_hosts_contents = ssh_known_hosts_contents + self.enable_helm_operator = enable_helm_operator + self.helm_operator_properties = helm_operator_properties + self.provisioning_state = None + self.compliance_status = None + + +class SourceControlConfigurationList(msrest.serialization.Model): + """Result of the request to list Source Control Configurations. It contains a list of SourceControlConfiguration objects and a URL link to get the next set of results. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of Source Control Configurations within a Kubernetes cluster. + :vartype value: + list[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.SourceControlConfiguration] + :ivar next_link: URL to get the next set of configuration objects, if any. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SourceControlConfiguration]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(SourceControlConfigurationList, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class SystemData(msrest.serialization.Model): + """Metadata pertaining to creation and last modification of the resource. + + :ivar created_by: The identity that created the resource. + :vartype created_by: str + :ivar created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", "Key". + :vartype created_by_type: str or + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.CreatedByType + :ivar created_at: The timestamp of resource creation (UTC). + :vartype created_at: ~datetime.datetime + :ivar last_modified_by: The identity that last modified the resource. + :vartype last_modified_by: str + :ivar last_modified_by_type: The type of identity that last modified the resource. Known values + are: "User", "Application", "ManagedIdentity", "Key". + :vartype last_modified_by_type: str or + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.CreatedByType + :ivar last_modified_at: The timestamp of resource last modification (UTC). 
+ :vartype last_modified_at: ~datetime.datetime + """ + + _attribute_map = { + 'created_by': {'key': 'createdBy', 'type': 'str'}, + 'created_by_type': {'key': 'createdByType', 'type': 'str'}, + 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, + 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, + 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, + 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + } + + def __init__( + self, + *, + created_by: Optional[str] = None, + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, + created_at: Optional[datetime.datetime] = None, + last_modified_by: Optional[str] = None, + last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, + last_modified_at: Optional[datetime.datetime] = None, + **kwargs + ): + """ + :keyword created_by: The identity that created the resource. + :paramtype created_by: str + :keyword created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", "Key". + :paramtype created_by_type: str or + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.CreatedByType + :keyword created_at: The timestamp of resource creation (UTC). + :paramtype created_at: ~datetime.datetime + :keyword last_modified_by: The identity that last modified the resource. + :paramtype last_modified_by: str + :keyword last_modified_by_type: The type of identity that last modified the resource. Known + values are: "User", "Application", "ManagedIdentity", "Key". + :paramtype last_modified_by_type: str or + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.CreatedByType + :keyword last_modified_at: The timestamp of resource last modification (UTC). + :paramtype last_modified_at: ~datetime.datetime + """ + super(SystemData, self).__init__(**kwargs) + self.created_by = created_by + self.created_by_type = created_by_type + self.created_at = created_at + self.last_modified_by = last_modified_by + self.last_modified_by_type = last_modified_by_type + self.last_modified_at = last_modified_at diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/models/_patch.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/models/_patch.py new file mode 100644 index 00000000000..0ad201a8c58 --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/models/_patch.py @@ -0,0 +1,19 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/models/_source_control_configuration_client_enums.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/models/_source_control_configuration_client_enums.py new file mode 100644 index 00000000000..e1d2c0f7b36 --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/models/_source_control_configuration_client_enums.py @@ -0,0 +1,121 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum +from azure.core import CaseInsensitiveEnumMeta + + +class AKSIdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The identity type. + """ + + SYSTEM_ASSIGNED = "SystemAssigned" + USER_ASSIGNED = "UserAssigned" + +class ComplianceStateType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The compliance state of the configuration. + """ + + PENDING = "Pending" + COMPLIANT = "Compliant" + NONCOMPLIANT = "Noncompliant" + INSTALLED = "Installed" + FAILED = "Failed" + +class CreatedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of identity that created the resource. + """ + + USER = "User" + APPLICATION = "Application" + MANAGED_IDENTITY = "ManagedIdentity" + KEY = "Key" + +class FluxComplianceState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Compliance state of the cluster object. + """ + + COMPLIANT = "Compliant" + NON_COMPLIANT = "Non-Compliant" + PENDING = "Pending" + SUSPENDED = "Suspended" + UNKNOWN = "Unknown" + +class KustomizationValidationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Specify whether to validate the Kubernetes objects referenced in the Kustomization before + applying them to the cluster. + """ + + NONE = "none" + CLIENT = "client" + SERVER = "server" + +class LevelType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Level of the status. + """ + + ERROR = "Error" + WARNING = "Warning" + INFORMATION = "Information" + +class MessageLevelType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Level of the message. + """ + + ERROR = "Error" + WARNING = "Warning" + INFORMATION = "Information" + +class OperatorScopeType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Scope at which the operator will be installed. + """ + + CLUSTER = "cluster" + NAMESPACE = "namespace" + +class OperatorType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the operator + """ + + FLUX = "Flux" + +class ProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The provisioning state of the resource. + """ + + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + CREATING = "Creating" + UPDATING = "Updating" + DELETING = "Deleting" + +class ProvisioningStateType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The provisioning state of the resource provider. 
+ """ + + ACCEPTED = "Accepted" + DELETING = "Deleting" + RUNNING = "Running" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + +class ScopeType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Scope at which the configuration will be installed. + """ + + CLUSTER = "cluster" + NAMESPACE = "namespace" + +class SourceKindType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Source Kind to pull the configuration data from. + """ + + GIT_REPOSITORY = "GitRepository" + BUCKET = "Bucket" + AZURE_BLOB = "AzureBlob" diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/__init__.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/__init__.py new file mode 100644 index 00000000000..02567809480 --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/__init__.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._extensions_operations import ExtensionsOperations +from ._operation_status_operations import OperationStatusOperations +from ._flux_configurations_operations import FluxConfigurationsOperations +from ._flux_config_operation_status_operations import FluxConfigOperationStatusOperations +from ._source_control_configurations_operations import SourceControlConfigurationsOperations +from ._operations import Operations + +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk +__all__ = [ + 'ExtensionsOperations', + 'OperationStatusOperations', + 'FluxConfigurationsOperations', + 'FluxConfigOperationStatusOperations', + 'SourceControlConfigurationsOperations', + 'Operations', +] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() \ No newline at end of file diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_extensions_operations.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_extensions_operations.py new file mode 100644 index 00000000000..c353689861d --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_extensions_operations.py @@ -0,0 +1,934 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_create_request_initial( + subscription_id: str, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + extension_name: str, + *, + json: Optional[_models.Extension] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "clusterRp": _SERIALIZER.url("cluster_rp", cluster_rp, 'str'), + "clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + "extensionName": _SERIALIZER.url("extension_name", extension_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + extension_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', 
"2022-07-01")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "clusterRp": _SERIALIZER.url("cluster_rp", cluster_rp, 'str'), + "clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + "extensionName": _SERIALIZER.url("extension_name", extension_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_delete_request_initial( + subscription_id: str, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + extension_name: str, + *, + force_delete: Optional[bool] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "clusterRp": _SERIALIZER.url("cluster_rp", cluster_rp, 'str'), + "clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + "extensionName": _SERIALIZER.url("extension_name", extension_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if force_delete is not None: + _params['forceDelete'] = _SERIALIZER.query("force_delete", force_delete, 'bool') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_update_request_initial( + subscription_id: str, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + extension_name: str, + *, + json: Optional[_models.PatchExtension] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "clusterRp": _SERIALIZER.url("cluster_rp", cluster_rp, 'str'), + "clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + "extensionName": _SERIALIZER.url("extension_name", extension_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PATCH", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_list_request( + subscription_id: str, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "clusterRp": _SERIALIZER.url("cluster_rp", cluster_rp, 'str'), + "clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class ExtensionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.kubernetesconfiguration.v2022_07_01.SourceControlConfigurationClient`'s + :attr:`extensions` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + def _create_initial( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + extension_name: str, + extension: _models.Extension, + **kwargs: Any + ) -> _models.Extension: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.Extension] + + _json = self._serialize.body(extension, 'Extension') + + request = build_create_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + extension_name=extension_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._create_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('Extension', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('Extension', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}"} # type: ignore + + + @distributed_trace + def begin_create( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + extension_name: str, + extension: _models.Extension, + **kwargs: Any + ) -> LROPoller[_models.Extension]: + """Create a new Kubernetes Cluster Extension. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. 
+ :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param extension_name: Name of the Extension. + :type extension_name: str + :param extension: Properties necessary to Create an Extension. + :type extension: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.Extension + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either Extension or the result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.Extension] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.Extension] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_initial( # type: ignore + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + extension_name=extension_name, + extension=extension, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Extension', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + lro_options={'final-state-via': 'azure-async-operation'}, + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}"} # type: ignore + + @distributed_trace + def get( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + extension_name: str, + **kwargs: Any + ) -> _models.Extension: + """Gets Kubernetes 
Cluster Extension. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param extension_name: Name of the Extension. + :type extension_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Extension, or the result of cls(response) + :rtype: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.Extension + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.Extension] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + extension_name=extension_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Extension', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}"} # type: ignore + + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + extension_name: str, + force_delete: Optional[bool] = None, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + 
resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + extension_name=extension_name, + api_version=api_version, + force_delete=force_delete, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}"} # type: ignore + + + @distributed_trace + def begin_delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + extension_name: str, + force_delete: Optional[bool] = None, + **kwargs: Any + ) -> LROPoller[None]: + """Delete a Kubernetes Cluster Extension. This will cause the Agent to Uninstall the extension + from the cluster. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param extension_name: Name of the Extension. + :type extension_name: str + :param force_delete: Delete the extension resource in Azure - not the normal asynchronous + delete. Default value is None. + :type force_delete: bool + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + extension_name=extension_name, + force_delete=force_delete, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + lro_options={'final-state-via': 'azure-async-operation'}, + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}"} # type: ignore + + def _update_initial( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + extension_name: str, + patch_extension: _models.PatchExtension, + **kwargs: Any + ) -> _models.Extension: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.Extension] + + _json = self._serialize.body(patch_extension, 'PatchExtension') + + request = build_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + extension_name=extension_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # 
pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('Extension', pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize('Extension', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}"} # type: ignore + + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + extension_name: str, + patch_extension: _models.PatchExtension, + **kwargs: Any + ) -> LROPoller[_models.Extension]: + """Patch an existing Kubernetes Cluster Extension. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param extension_name: Name of the Extension. + :type extension_name: str + :param patch_extension: Properties to Patch in an existing Extension. + :type patch_extension: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.PatchExtension + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either Extension or the result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.Extension] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.Extension] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( # type: ignore + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + extension_name=extension_name, + patch_extension=patch_extension, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Extension', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + lro_options={'final-state-via': 'azure-async-operation'}, + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}"} # type: ignore + + @distributed_trace + def list( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + **kwargs: Any + ) -> Iterable[_models.ExtensionsList]: + """List all Extensions in the cluster. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. 
+ :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ExtensionsList or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ExtensionsList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ExtensionsList] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ExtensionsList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions"} # type: ignore diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_flux_config_operation_status_operations.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_flux_config_operation_status_operations.py new file mode 100644 index 00000000000..bd2f78002c2 --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_flux_config_operation_status_operations.py @@ -0,0 +1,173 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) 
Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Optional, TypeVar + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_get_request( + subscription_id: str, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + flux_configuration_name: str, + operation_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}/operations/{operationId}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "clusterRp": _SERIALIZER.url("cluster_rp", cluster_rp, 'str'), + "clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + "fluxConfigurationName": _SERIALIZER.url("flux_configuration_name", flux_configuration_name, 'str'), + "operationId": _SERIALIZER.url("operation_id", operation_id, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class FluxConfigOperationStatusOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.kubernetesconfiguration.v2022_07_01.SourceControlConfigurationClient`'s + :attr:`flux_config_operation_status` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def get( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + flux_configuration_name: str, + operation_id: str, + **kwargs: Any + ) -> _models.OperationStatusResult: + """Get Async Operation status. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param flux_configuration_name: Name of the Flux Configuration. + :type flux_configuration_name: str + :param operation_id: operation Id. + :type operation_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: OperationStatusResult, or the result of cls(response) + :rtype: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.OperationStatusResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.OperationStatusResult] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + flux_configuration_name=flux_configuration_name, + operation_id=operation_id, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('OperationStatusResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}/operations/{operationId}"} # type: ignore + diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_flux_configurations_operations.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_flux_configurations_operations.py new file mode 100644 index 00000000000..8f44f35a64d --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_flux_configurations_operations.py @@ -0,0 +1,939 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_get_request( + subscription_id: str, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + flux_configuration_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "clusterRp": _SERIALIZER.url("cluster_rp", cluster_rp, 'str'), + "clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + "fluxConfigurationName": _SERIALIZER.url("flux_configuration_name", flux_configuration_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_create_or_update_request_initial( + subscription_id: str, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + flux_configuration_name: str, + *, + json: Optional[_models.FluxConfiguration] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "clusterRp": _SERIALIZER.url("cluster_rp", cluster_rp, 'str'), + "clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + "fluxConfigurationName": 
_SERIALIZER.url("flux_configuration_name", flux_configuration_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_update_request_initial( + subscription_id: str, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + flux_configuration_name: str, + *, + json: Optional[_models.FluxConfigurationPatch] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "clusterRp": _SERIALIZER.url("cluster_rp", cluster_rp, 'str'), + "clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + "fluxConfigurationName": _SERIALIZER.url("flux_configuration_name", flux_configuration_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PATCH", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_delete_request_initial( + subscription_id: str, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + flux_configuration_name: str, + *, + force_delete: Optional[bool] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}") # pylint: disable=line-too-long 
+ path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "clusterRp": _SERIALIZER.url("cluster_rp", cluster_rp, 'str'), + "clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + "fluxConfigurationName": _SERIALIZER.url("flux_configuration_name", flux_configuration_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if force_delete is not None: + _params['forceDelete'] = _SERIALIZER.query("force_delete", force_delete, 'bool') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_list_request( + subscription_id: str, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "clusterRp": _SERIALIZER.url("cluster_rp", cluster_rp, 'str'), + "clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class FluxConfigurationsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.kubernetesconfiguration.v2022_07_01.SourceControlConfigurationClient`'s + :attr:`flux_configurations` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def get( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + flux_configuration_name: str, + **kwargs: Any + ) -> _models.FluxConfiguration: + """Gets details of the Flux Configuration. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param flux_configuration_name: Name of the Flux Configuration. + :type flux_configuration_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FluxConfiguration, or the result of cls(response) + :rtype: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.FluxConfiguration + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.FluxConfiguration] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + flux_configuration_name=flux_configuration_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('FluxConfiguration', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}"} # type: ignore + + + def _create_or_update_initial( + self, + resource_group_name: str, + 
cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + flux_configuration_name: str, + flux_configuration: _models.FluxConfiguration, + **kwargs: Any + ) -> _models.FluxConfiguration: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.FluxConfiguration] + + _json = self._serialize.body(flux_configuration, 'FluxConfiguration') + + request = build_create_or_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + flux_configuration_name=flux_configuration_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._create_or_update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('FluxConfiguration', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('FluxConfiguration', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}"} # type: ignore + + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + flux_configuration_name: str, + flux_configuration: _models.FluxConfiguration, + **kwargs: Any + ) -> LROPoller[_models.FluxConfiguration]: + """Create a new Kubernetes Flux Configuration. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param flux_configuration_name: Name of the Flux Configuration. + :type flux_configuration_name: str + :param flux_configuration: Properties necessary to Create a FluxConfiguration. 
+ :type flux_configuration: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.FluxConfiguration + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either FluxConfiguration or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.FluxConfiguration] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.FluxConfiguration] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + flux_configuration_name=flux_configuration_name, + flux_configuration=flux_configuration, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('FluxConfiguration', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + lro_options={'final-state-via': 'azure-async-operation'}, + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}"} # type: ignore + + def _update_initial( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + flux_configuration_name: str, + flux_configuration_patch: _models.FluxConfigurationPatch, + **kwargs: Any + ) -> _models.FluxConfiguration: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: 
ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.FluxConfiguration] + + _json = self._serialize.body(flux_configuration_patch, 'FluxConfigurationPatch') + + request = build_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + flux_configuration_name=flux_configuration_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('FluxConfiguration', pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize('FluxConfiguration', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}"} # type: ignore + + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + flux_configuration_name: str, + flux_configuration_patch: _models.FluxConfigurationPatch, + **kwargs: Any + ) -> LROPoller[_models.FluxConfiguration]: + """Update an existing Kubernetes Flux Configuration. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param flux_configuration_name: Name of the Flux Configuration. + :type flux_configuration_name: str + :param flux_configuration_patch: Properties to Patch in an existing Flux Configuration. + :type flux_configuration_patch: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.FluxConfigurationPatch + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
+ :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either FluxConfiguration or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.FluxConfiguration] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.FluxConfiguration] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( # type: ignore + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + flux_configuration_name=flux_configuration_name, + flux_configuration_patch=flux_configuration_patch, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('FluxConfiguration', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + lro_options={'final-state-via': 'azure-async-operation'}, + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + flux_configuration_name: str, + force_delete: Optional[bool] = None, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = 
kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + flux_configuration_name=flux_configuration_name, + api_version=api_version, + force_delete=force_delete, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}"} # type: ignore + + + @distributed_trace + def begin_delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + flux_configuration_name: str, + force_delete: Optional[bool] = None, + **kwargs: Any + ) -> LROPoller[None]: + """This will delete the YAML file used to set up the Flux Configuration, thus stopping future sync + from the source repo. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param flux_configuration_name: Name of the Flux Configuration. + :type flux_configuration_name: str + :param force_delete: Delete the extension resource in Azure - not the normal asynchronous + delete. Default value is None. + :type force_delete: bool + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + flux_configuration_name=flux_configuration_name, + force_delete=force_delete, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + lro_options={'final-state-via': 'azure-async-operation'}, + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}"} # type: ignore + + @distributed_trace + def list( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + **kwargs: Any + ) -> Iterable[_models.FluxConfigurationsList]: + """List all Flux Configurations. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. 
+ :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either FluxConfigurationsList or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.FluxConfigurationsList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.FluxConfigurationsList] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("FluxConfigurationsList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations"} # type: ignore diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_operation_status_operations.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_operation_status_operations.py new file mode 100644 index 00000000000..b9e94aa7e77 --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_operation_status_operations.py @@ -0,0 +1,317 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) 
Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_get_request( + subscription_id: str, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + extension_name: str, + operation_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}/operations/{operationId}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "clusterRp": _SERIALIZER.url("cluster_rp", cluster_rp, 'str'), + "clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + "extensionName": _SERIALIZER.url("extension_name", extension_name, 'str'), + "operationId": _SERIALIZER.url("operation_id", operation_id, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_list_request( + subscription_id: str, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct 
URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/operations") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "clusterRp": _SERIALIZER.url("cluster_rp", cluster_rp, 'str'), + "clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class OperationStatusOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.kubernetesconfiguration.v2022_07_01.SourceControlConfigurationClient`'s + :attr:`operation_status` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def get( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + extension_name: str, + operation_id: str, + **kwargs: Any + ) -> _models.OperationStatusResult: + """Get Async Operation status. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param extension_name: Name of the Extension. + :type extension_name: str + :param operation_id: operation Id. 
+ :type operation_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: OperationStatusResult, or the result of cls(response) + :rtype: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.OperationStatusResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.OperationStatusResult] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + extension_name=extension_name, + operation_id=operation_id, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('OperationStatusResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}/operations/{operationId}"} # type: ignore + + + @distributed_trace + def list( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + **kwargs: Any + ) -> Iterable[_models.OperationStatusList]: + """List Async Operations, currently in progress, in a cluster. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. 
+ :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OperationStatusList or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.OperationStatusList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.OperationStatusList] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("OperationStatusList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/operations"} # type: ignore diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_operations.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_operations.py new file mode 100644 index 00000000000..e36c3993635 --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_operations.py @@ -0,0 +1,153 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._vendor import _convert_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/providers/Microsoft.KubernetesConfiguration/operations") + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class Operations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.kubernetesconfiguration.v2022_07_01.SourceControlConfigurationClient`'s + :attr:`operations` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + **kwargs: Any + ) -> Iterable[_models.ResourceProviderOperationList]: + """List all the available operations the KubernetesConfiguration resource provider supports. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ResourceProviderOperationList or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.ResourceProviderOperationList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ResourceProviderOperationList] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ResourceProviderOperationList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/providers/Microsoft.KubernetesConfiguration/operations"} # type: ignore diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_patch.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_patch.py new file mode 100644 index 00000000000..0ad201a8c58 --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_patch.py @@ -0,0 +1,19 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_source_control_configurations_operations.py b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_source_control_configurations_operations.py new file mode 100644 index 00000000000..02e697c3235 --- /dev/null +++ b/src/k8s-configuration/azext_k8s_configuration/vendored_sdks/v2022_07_01/operations/_source_control_configurations_operations.py @@ -0,0 +1,639 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_get_request( + subscription_id: str, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + source_control_configuration_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/sourceControlConfigurations/{sourceControlConfigurationName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "clusterRp": _SERIALIZER.url("cluster_rp", cluster_rp, 'str'), + "clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + "sourceControlConfigurationName": _SERIALIZER.url("source_control_configuration_name", source_control_configuration_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id: str, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + source_control_configuration_name: str, + *, + json: Optional[_models.SourceControlConfiguration] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/sourceControlConfigurations/{sourceControlConfigurationName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "clusterRp": _SERIALIZER.url("cluster_rp", cluster_rp, 'str'), + "clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'), + "clusterName": 
_SERIALIZER.url("cluster_name", cluster_name, 'str'), + "sourceControlConfigurationName": _SERIALIZER.url("source_control_configuration_name", source_control_configuration_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_delete_request_initial( + subscription_id: str, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + source_control_configuration_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/sourceControlConfigurations/{sourceControlConfigurationName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "clusterRp": _SERIALIZER.url("cluster_rp", cluster_rp, 'str'), + "clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + "sourceControlConfigurationName": _SERIALIZER.url("source_control_configuration_name", source_control_configuration_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_list_request( + subscription_id: str, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/sourceControlConfigurations") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "clusterRp": 
_SERIALIZER.url("cluster_rp", cluster_rp, 'str'), + "clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class SourceControlConfigurationsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.kubernetesconfiguration.v2022_07_01.SourceControlConfigurationClient`'s + :attr:`source_control_configurations` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def get( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + source_control_configuration_name: str, + **kwargs: Any + ) -> _models.SourceControlConfiguration: + """Gets details of the Source Control Configuration. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param source_control_configuration_name: Name of the Source Control Configuration. 
+ :type source_control_configuration_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SourceControlConfiguration, or the result of cls(response) + :rtype: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.SourceControlConfiguration + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.SourceControlConfiguration] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + source_control_configuration_name=source_control_configuration_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SourceControlConfiguration', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/sourceControlConfigurations/{sourceControlConfigurationName}"} # type: ignore + + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + source_control_configuration_name: str, + source_control_configuration: _models.SourceControlConfiguration, + **kwargs: Any + ) -> _models.SourceControlConfiguration: + """Create a new Kubernetes Source Control Configuration. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param source_control_configuration_name: Name of the Source Control Configuration. + :type source_control_configuration_name: str + :param source_control_configuration: Properties necessary to Create KubernetesConfiguration. 
+ :type source_control_configuration: + ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.SourceControlConfiguration + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SourceControlConfiguration, or the result of cls(response) + :rtype: ~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.SourceControlConfiguration + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.SourceControlConfiguration] + + _json = self._serialize.body(source_control_configuration, 'SourceControlConfiguration') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + source_control_configuration_name=source_control_configuration_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('SourceControlConfiguration', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('SourceControlConfiguration', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/sourceControlConfigurations/{sourceControlConfigurationName}"} # type: ignore + + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + source_control_configuration_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request_initial( + 
subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + source_control_configuration_name=source_control_configuration_name, + api_version=api_version, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/sourceControlConfigurations/{sourceControlConfigurationName}"} # type: ignore + + + @distributed_trace + def begin_delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + source_control_configuration_name: str, + **kwargs: Any + ) -> LROPoller[None]: + """This will delete the YAML file used to set up the Source control configuration, thus stopping + future sync from the source repo. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. + :type cluster_name: str + :param source_control_configuration_name: Name of the Source Control Configuration. + :type source_control_configuration_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + source_control_configuration_name=source_control_configuration_name, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/sourceControlConfigurations/{sourceControlConfigurationName}"} # type: ignore + + @distributed_trace + def list( + self, + resource_group_name: str, + cluster_rp: str, + cluster_resource_name: str, + cluster_name: str, + **kwargs: Any + ) -> Iterable[_models.SourceControlConfigurationList]: + """List all Source Control Configurations. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService, + Microsoft.Kubernetes, Microsoft.HybridContainerService. + :type cluster_rp: str + :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters, + connectedClusters, provisionedClusters. + :type cluster_resource_name: str + :param cluster_name: The name of the kubernetes cluster. 
+ :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either SourceControlConfigurationList or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.kubernetesconfiguration.v2022_07_01.models.SourceControlConfigurationList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.SourceControlConfigurationList] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_rp=cluster_rp, + cluster_resource_name=cluster_resource_name, + cluster_name=cluster_name, + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("SourceControlConfigurationList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/sourceControlConfigurations"} # type: ignore diff --git a/src/k8s-configuration/setup.py b/src/k8s-configuration/setup.py index bde5ba10458..7bb94fff772 100644 --- a/src/k8s-configuration/setup.py +++ b/src/k8s-configuration/setup.py @@ -16,7 +16,7 @@ logger.warn("Wheel is not available, disabling bdist_wheel hook") -VERSION = "1.6.0" +VERSION = "1.7.0" # The full list of classifiers is available at # https://pypi.python.org/pypi?%3Aaction=list_classifiers From 1b72857a11807601cd975d3d575b1d9e7355c9cb Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Thu, 20 Oct 2022 09:21:18 +0000 
Subject: [PATCH 13/85] [Release] Update index.json for extension [ k8s-configuration ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=10180&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/d78a840c3cfa60371e6323454c535ec6093fd3dc --- src/index.json | 52 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/src/index.json b/src/index.json index 93d67fe5f89..34abcb5563d 100644 --- a/src/index.json +++ b/src/index.json @@ -26649,6 +26649,58 @@ "version": "1.6.0" }, "sha256Digest": "9f1e8f692a3a7e53dfc356a5d65593f76485cad43a20098566fee910b13abca2" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/k8s_configuration-1.7.0-py3-none-any.whl", + "filename": "k8s_configuration-1.7.0-py3-none-any.whl", + "metadata": { + "azext.minCliCoreVersion": "2.15.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/k8s-configuration" + } + } + }, + "extras": [], + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "k8s-configuration", + "run_requires": [ + { + "requires": [ + "pycryptodome (~=3.14.1)" + ] + } + ], + "summary": "Microsoft Azure Command-Line Tools K8s-configuration Extension", + "version": "1.7.0" + }, + "sha256Digest": "66c3bda7d25cae39d1d5ab9076f8fa64fdc955910ada7a8032a1756ab1f549e4" } ], "k8s-extension": [ From 759426087619dc3d6fa8cf8a53d89debfaf0659e Mon Sep 17 00:00:00 2001 From: Rajvi Modh <110064827+rmodh@users.noreply.github.com> Date: Thu, 20 Oct 2022 18:09:58 -0700 Subject: [PATCH 14/85] update release & version history file for traffic-collector cmdlets (#5466) --- src/traffic-collector/HISTORY.rst | 4 ++++ src/traffic-collector/setup.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/traffic-collector/HISTORY.rst b/src/traffic-collector/HISTORY.rst index 8c34bccfff8..ccaa3cef572 100644 --- a/src/traffic-collector/HISTORY.rst +++ b/src/traffic-collector/HISTORY.rst @@ -3,6 +3,10 @@ Release History =============== +0.1.1 +++++++++++++++++++ +* Update api-version for Azure Traffic Collector to 2022-11-01-stable + 0.1.0 ++++++ * Initial release. \ No newline at end of file diff --git a/src/traffic-collector/setup.py b/src/traffic-collector/setup.py index 0828292b241..e073fbe14ed 100644 --- a/src/traffic-collector/setup.py +++ b/src/traffic-collector/setup.py @@ -10,7 +10,7 @@ # HISTORY.rst entry. 
-VERSION = '0.1.0' +VERSION = '0.1.1' # The full list of classifiers is available at # https://pypi.python.org/pypi?%3Aaction=list_classifiers From 54b84f2d0c32b339419c8c5a48a62324092f1d84 Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Fri, 21 Oct 2022 01:15:50 +0000 Subject: [PATCH 15/85] [Release] Update index.json for extension [ traffic-collector ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=10342&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/759426087619dc3d6fa8cf8a53d89debfaf0659e --- src/index.json | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/src/index.json b/src/index.json index 34abcb5563d..32071e5a7de 100644 --- a/src/index.json +++ b/src/index.json @@ -40984,6 +40984,49 @@ "version": "0.1.0" }, "sha256Digest": "c148d0db3dc2284f30fe8d9cce4cde9be7f93b18664aae54d70622fd86a09b3a" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/traffic_collector-0.1.1-py3-none-any.whl", + "filename": "traffic_collector-0.1.1-py3-none-any.whl", + "metadata": { + "azext.isPreview": true, + "azext.minCliCoreVersion": "2.40.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/traffic-collector" + } + } + }, + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "traffic-collector", + "summary": "Microsoft Azure Command-Line Tools TrafficCollector Extension.", + "version": "0.1.1" + }, + "sha256Digest": "8fbce712f8edcedf422c463f6b970fa7bdd94a452887ca5ddefb4fb00735acb5" } ], "virtual-network-manager": [ From ec69d7790d605643c251d23733ff24a604105c7a Mon Sep 17 00:00:00 2001 From: rhkodiak Date: Thu, 20 Oct 2022 21:11:41 -0500 Subject: [PATCH 16/85] [Serial-Console]: az serial-console connect: Change to use different region for url calls when custom storage account firewalls are enabled (#5398) * Updated code to enable the cli to connect to serial-console over different regions * Updated arguments that are passed around for the clients * Change the code to resolve the region name at the beginning of the cli command * Moved the _arm_endpoints.py file up two directories to resolve import issues * Fix code issues where the serial-console wasn't using the storage_url correctly * Resolve Pylint issues * Resolve Pylint issues * Resolve Pylint issues * Resolve Pylint issue with to few public methods * Update the version and release notes * Change release note verbiage * Fix live tests * Fix formatting issue * Add new recording files from running live tests * Fix spacing issue and restart failed tests * Change print statement Changed the print statement to use the logger.debug() option to only output the boot_diagnostics section for debugging * Add logger import Added the logger import to correct the build 
failure Co-authored-by: rhoover --- src/serial-console/HISTORY.rst | 4 + .../azext_serialconsole/_arm_endpoints.py | 47 + .../azext_serialconsole/_client_factory.py | 16 +- .../azext_serialconsole/custom.py | 169 +- .../recordings/test_check_resource_VM.yaml | 2666 +++++------ .../recordings/test_check_resource_VMSS.yaml | 3929 ++++++++++------- .../recordings/test_enable_disable.yaml | 16 +- .../_microsoft_serial_console_client.py | 15 +- src/serial-console/setup.py | 2 +- 9 files changed, 4013 insertions(+), 2851 deletions(-) create mode 100644 src/serial-console/azext_serialconsole/_arm_endpoints.py diff --git a/src/serial-console/HISTORY.rst b/src/serial-console/HISTORY.rst index 1b68bd0f626..eb830fa685a 100644 --- a/src/serial-console/HISTORY.rst +++ b/src/serial-console/HISTORY.rst @@ -1,6 +1,10 @@ Release History =============== +0.1.3 +++++++ +* Change to use different region for url calls when custom storage account firewalls are enabled + 0.1.2 ++++++ * Change to make custom boot diagnostics optional diff --git a/src/serial-console/azext_serialconsole/_arm_endpoints.py b/src/serial-console/azext_serialconsole/_arm_endpoints.py new file mode 100644 index 00000000000..1bad0cd2e27 --- /dev/null +++ b/src/serial-console/azext_serialconsole/_arm_endpoints.py @@ -0,0 +1,47 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + + +class ArmEndpoints: # pylint: disable=too-few-public-methods + region_prefix_pairings = {'australiacentral': 'australiaeast', + 'australiaeast': 'australiacentral', + 'brazilsouth': 'brazilsoutheast', + 'brazilsoutheast': 'brazilsouth', + 'canadacentral': 'canadaeast', + 'canadaeast': 'canadacentral', + 'centralindia': 'southindia', + 'centralus': 'westcentralus', + 'centraluseuap': 'eastus2euap', + 'eastasia': 'southeastasia', + 'eastus2': 'westus2', # pairing eastus2 + westus2 ensure that INT works as expected + 'eastus2euap': 'centraluseuap', + 'francecentral': 'francesouth', + 'francesouth': 'francecentral', + 'germanynorth': 'germanywestcentral', + 'germanywestcentral': 'germanynorth', + 'japaneast': 'japanwest', + 'japanwest': 'japaneast', + 'koreacentral': 'koreasouth', + 'koreasouth': 'koreacentral', + 'northeurope': 'westeurope', + 'norwayeast': 'norwaywest', + 'norwaywest': 'norwayeast', + # 'southafricanorth': 'southafricawest' is not yet deployed + 'southeastasia': 'eastasia', + 'southindia': 'centralindia', + 'swedencentral': 'swedensouth', + 'swedensouth': 'swedencentral', + 'switzerlandnorth': 'switzerlandwest', + 'switzerlandwest': 'switzerlandnorth', + 'uaecentral': 'uaenorth', + 'uaenorth': 'uaecentral', + 'uksouth': 'ukwest', + 'ukwest': 'uksouth', + 'westcentralus': 'centralus', + 'westeurope': 'northeurope', + 'westus2': 'eastus2', + 'usgovarizona': 'usgoveast', # usgoveast == usgovvirginia + 'usgovvirginia': 'usgovsw', # usgovsw == usgovarizona + } diff --git a/src/serial-console/azext_serialconsole/_client_factory.py b/src/serial-console/azext_serialconsole/_client_factory.py index fbaed116da0..ccfaf9ec465 100644 --- a/src/serial-console/azext_serialconsole/_client_factory.py +++ b/src/serial-console/azext_serialconsole/_client_factory.py @@ -3,21 +3,27 @@ # Licensed under the MIT License. 
See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- +from azure.cli.core.profiles import ResourceType + def _compute_client_factory(cli_ctx, **kwargs): - from azure.cli.core.profiles import ResourceType from azure.cli.core.commands.client_factory import get_mgmt_service_client return get_mgmt_service_client(cli_ctx, ResourceType.MGMT_COMPUTE, subscription_id=kwargs.get('subscription_id'), aux_subscriptions=kwargs.get('aux_subscriptions')) -def cf_serialconsole(cli_ctx, *_): +def cf_serialconsole(cli_ctx, **kwargs): from azure.cli.core.commands.client_factory import get_mgmt_service_client from azext_serialconsole.vendored_sdks.serialconsole import MicrosoftSerialConsoleClient return get_mgmt_service_client(cli_ctx, - MicrosoftSerialConsoleClient) + MicrosoftSerialConsoleClient, **kwargs) + +def cf_serial_port(cli_ctx, **kwargs): + return cf_serialconsole(cli_ctx, **kwargs).serial_ports -def cf_serial_port(cli_ctx, *_): - return cf_serialconsole(cli_ctx).serial_ports + +def storage_client_factory(cli_ctx, *_): + from azure.cli.core.commands.client_factory import get_mgmt_service_client + return get_mgmt_service_client(cli_ctx, ResourceType.MGMT_STORAGE) diff --git a/src/serial-console/azext_serialconsole/custom.py b/src/serial-console/azext_serialconsole/custom.py index c225b61b2a8..836ca3dec8e 100644 --- a/src/serial-console/azext_serialconsole/custom.py +++ b/src/serial-console/azext_serialconsole/custom.py @@ -132,7 +132,7 @@ def prompt(self, getch, message): c = getch() self.hide_cursor(buffer=False) for _ in range(lines): - self.clear_line(buffer=False) + # self.clear_line(buffer=False) self.cursor_up(buffer=False) self.set_cursor_horizontal_position(col, buffer=False) self.show_cursor(buffer=False) @@ -198,8 +198,8 @@ def _getch_windows(self): class Terminal: ERROR_MESSAGE = "Unable to configure terminal." - RECOMENDATION = ("Make sure that app in running in a terminal on a Windows 10 " - "or Unix based machine. Versions earlier than Windows 10 are not supported.") + RECOMMENDATION = ("Make sure that app in running in a terminal on a Windows 10 " + "or Unix based machine. 
Versions earlier than Windows 10 are not supported.") def __init__(self): self.win_original_out_mode = None @@ -232,7 +232,7 @@ def configure_terminal(self): if (not kernel32.GetConsoleMode(self.win_out, ctypes.byref(dw_original_out_mode)) or not kernel32.GetConsoleMode(self.win_in, ctypes.byref(dw_original_in_mode))): quitapp(error_message=Terminal.ERROR_MESSAGE, - error_recommendation=Terminal.RECOMENDATION, error_func=UnclassifiedUserFault) + error_recommendation=Terminal.RECOMMENDATION, error_func=UnclassifiedUserFault) self.win_original_out_mode = dw_original_out_mode.value self.win_original_in_mode = dw_original_in_mode.value @@ -244,7 +244,7 @@ def configure_terminal(self): if (not kernel32.SetConsoleMode(self.win_out, dw_out_mode) or not kernel32.SetConsoleMode(self.win_in, dw_in_mode)): quitapp(error_message=Terminal.ERROR_MESSAGE, - error_recommendation=Terminal.RECOMENDATION, error_func=UnclassifiedUserFault) + error_recommendation=Terminal.RECOMMENDATION, error_func=UnclassifiedUserFault) else: try: import tty @@ -252,7 +252,7 @@ def configure_terminal(self): fd = sys.stdin.fileno() except (ModuleNotFoundError, ValueError): quitapp(error_message=Terminal.ERROR_MESSAGE, - error_recommendation=Terminal.RECOMENDATION, error_func=UnclassifiedUserFault) + error_recommendation=Terminal.RECOMMENDATION, error_func=UnclassifiedUserFault) self.unix_original_mode = termios.tcgetattr(fd) tty.setraw(fd) @@ -277,7 +277,13 @@ def revert_terminal(self): class SerialConsole: def __init__(self, cmd, resource_group_name, vm_vmss_name, vmss_instanceid): - client = cf_serial_port(cmd.cli_ctx) + result, storage_account_region = get_region_from_storage_account(cmd.cli_ctx, resource_group_name, + vm_vmss_name, vmss_instanceid) + if storage_account_region is not None: + kwargs = {'storage_account_region': storage_account_region} + else: + kwargs = {} + client = cf_serial_port(cmd.cli_ctx, **kwargs) if vmss_instanceid is None: self.connect_func = lambda: client.connect( resource_group_name=resource_group_name, @@ -365,7 +371,7 @@ def connect_loading_message_linux(): chars_copy = chars.copy() chars_copy[indx] = "\u25A0" squares = " ".join(chars_copy) - PC.clear_line() + # PC.clear_line() PC.print("Connecting to console of VM " + squares, color=PrintClass.CYAN) PC.show_cursor() @@ -457,7 +463,7 @@ def connect_thread(): GV.websocket_instance.run_forever(skip_utf8_validation=True) else: GV.loading = False - message = ("\r\nAn unexpected error occured. Could not establish connection to VM or VMSS. " + message = ("\r\nAn unexpected error occurred. Could not establish connection to VM or VMSS. " "Check network connection and press \"Enter\" to try again...") PC.print(message, color=PrintClass.RED) @@ -524,6 +530,7 @@ def connect_and_send_admin_command(self, command, arg_characters=None): elif command == "sysrq" and arg_characters is not None: def wrapper(): return self.send_sys_rq(arg_characters) + func = wrapper success_message = "Successfully sent SysRq command\r\n" failure_message = "Failed to send SysRq command. Make sure the input only contains numbers and letters.\r\n" @@ -563,14 +570,18 @@ def on_message(ws, _): error_message, recommendation=recommendation) else: GV.loading = False - error_message = "An unexpected error occured. Could not establish connection to VM or VMSS." + error_message = "An unexpected error occurred. Could not establish connection to VM or VMSS." recommendation = "Check network connection and try again." 
raise ResourceNotFoundError( error_message, recommendation=recommendation) -def check_serial_console_enabled(cli_ctx): - client = cf_serialconsole(cli_ctx) +def check_serial_console_enabled(cli_ctx, storage_account_region=None): + if storage_account_region is not None: + kwargs = {'storage_account_region': storage_account_region} + else: + kwargs = {} + client = cf_serialconsole(cli_ctx, **kwargs) result = client.get_console_status().additional_properties if ("properties" in result and "disabled" in result["properties"] and not result["properties"]["disabled"]): @@ -581,11 +592,11 @@ def check_serial_console_enabled(cli_ctx): def check_resource(cli_ctx, resource_group_name, vm_vmss_name, vmss_instanceid): - check_serial_console_enabled(cli_ctx) - client = _compute_client_factory(cli_ctx) + result, storage_account_region = get_region_from_storage_account(cli_ctx, resource_group_name, vm_vmss_name, + vmss_instanceid) + check_serial_console_enabled(cli_ctx, storage_account_region) + if vmss_instanceid: - result = client.virtual_machine_scale_set_vms.get_instance_view( - resource_group_name, vm_vmss_name, vmss_instanceid) if 'osName' in result.additional_properties and "windows" in result.additional_properties['osName'].lower(): GV.os_is_windows = True @@ -596,32 +607,7 @@ def check_resource(cli_ctx, resource_group_name, vm_vmss_name, vmss_instanceid): recommendation = 'Use "az vmss start" to start the Virtual Machine.' raise AzureConnectionError( error_message, recommendation=recommendation) - - if result.boot_diagnostics is None: - error_message = ("Azure Serial Console requires boot diagnostics to be enabled.") - recommendation = ('Use "az vmss update --name MyScaleSet --resource-group MyResourceGroup --set ' - 'virtualMachineProfile.diagnosticsProfile="{\\"bootDiagnostics\\": {\\"Enabled\\" : ' - '\\"True\\",\\"StorageUri\\" : null}}"" to enable boot diagnostics. ' - 'You can replace "null" with a custom storage account ' - '\\"https://mystor.blob.windows.net/"\\. Then run "az vmss update-instances -n ' - 'MyScaleSet -g MyResourceGroup --instance-ids *".') - raise AzureConnectionError( - error_message, recommendation=recommendation) else: - try: - result = client.virtual_machines.get( - resource_group_name, vm_vmss_name, expand='instanceView') - except ComputeClientResourceNotFoundError as e: - try: - client.virtual_machine_scale_sets.get( - resource_group_name, vm_vmss_name) - except ComputeClientResourceNotFoundError: - raise e from e - error_message = e.message - recommendation = ("We found that you specified a Virtual Machine Scale Set and not a VM. " - "Use the --instance-id parameter to select the VMSS instance you want to connect to.") - raise ResourceNotFoundError( - error_message, recommendation=recommendation) from e if (result.instance_view is not None and result.instance_view.os_name is not None and "windows" in result.instance_view.os_name.lower()): @@ -640,16 +626,6 @@ def check_resource(cli_ctx, resource_group_name, vm_vmss_name, vmss_instanceid): raise AzureConnectionError( error_message, recommendation=recommendation) - if (result.diagnostics_profile is None or - result.diagnostics_profile.boot_diagnostics is None or - not result.diagnostics_profile.boot_diagnostics.enabled): - error_message = ("Azure Serial Console requires boot diagnostics to be enabled.") - recommendation = ('Use "az vm boot-diagnostics enable --name MyVM --resource-group MyResourceGroup" ' - 'to enable boot diagnostics. 
You can specify a custom storage account with the ' - 'parameter "--storage https://mystor.blob.windows.net/".') - raise AzureConnectionError( - error_message, recommendation=recommendation) - def connect_serialconsole(cmd, resource_group_name, vm_vmss_name, vmss_instanceid=None): check_resource(cmd.cli_ctx, resource_group_name, @@ -695,3 +671,94 @@ def enable_serialconsole(cmd): def disable_serialconsole(cmd): client = cf_serialconsole(cmd.cli_ctx) return client.disable_console() + + +def get_region_from_storage_account(cli_ctx, resource_group_name, vm_vmss_name, vmss_instanceid): + from azext_serialconsole._client_factory import storage_client_factory + from knack.log import get_logger + + logger = get_logger(__name__) + result = None + storage_account_region = None + client = _compute_client_factory(cli_ctx) + scf = storage_client_factory(cli_ctx) + + if vmss_instanceid: + result_data = client.virtual_machine_scale_set_vms.get_instance_view( + resource_group_name, vm_vmss_name, vmss_instanceid) + result = result_data + + if result_data.boot_diagnostics is None: + error_message = "Azure Serial Console requires boot diagnostics to be enabled." + recommendation = ('Use "az vmss update --name MyScaleSet --resource-group MyResourceGroup --set ' + 'virtualMachineProfile.diagnosticsProfile="{\\"bootDiagnostics\\": {\\"Enabled\\" : ' + '\\"True\\",\\"StorageUri\\" : null}}"" to enable boot diagnostics. ' + 'You can replace "null" with a custom storage account ' + '\\"https://mystor.blob.windows.net/"\\. Then run "az vmss update-instances -n ' + 'MyScaleSet -g MyResourceGroup --instance-ids *".') + raise AzureConnectionError( + error_message, recommendation=recommendation) + else: + if result.boot_diagnostics is not None: + logger.debug(result.boot_diagnostics) + if result.boot_diagnostics.console_screenshot_blob_uri is not None: + storage_account_url = result.boot_diagnostics.console_screenshot_blob_uri + storage_account_region = get_storage_account_info(storage_account_url, resource_group_name, scf) + else: + try: + result_data = client.virtual_machines.get( + resource_group_name, vm_vmss_name, expand='instanceView') + result = result_data + except ComputeClientResourceNotFoundError as e: + try: + client.virtual_machine_scale_sets.get(resource_group_name, vm_vmss_name) + except ComputeClientResourceNotFoundError: + raise e from e + error_message = e.message + recommendation = ("We found that you specified a Virtual Machine Scale Set and not a VM. " + "Use the --instance-id parameter to select the VMSS instance you want to connect to.") + raise ResourceNotFoundError( + error_message, recommendation=recommendation) from e + + if (result.diagnostics_profile is None or + result.diagnostics_profile.boot_diagnostics is None or + not result.diagnostics_profile.boot_diagnostics.enabled): + error_message = "Azure Serial Console requires boot diagnostics to be enabled." + recommendation = ('Use "az vm boot-diagnostics enable --name MyVM --resource-group MyResourceGroup" ' + 'to enable boot diagnostics. 
You can specify a custom storage account with the ' + 'parameter "--storage https://mystor.blob.windows.net/".') + raise AzureConnectionError( + error_message, recommendation=recommendation) + else: + if result.diagnostics_profile is not None: + if result.diagnostics_profile.boot_diagnostics is not None: + storage_account_url = result.diagnostics_profile.boot_diagnostics.storage_uri + storage_account_region = get_storage_account_info(storage_account_url, resource_group_name, scf) + + return result, storage_account_region + + +def get_storage_account_info(storage_account_url, resource_group_name, scf): + from azext_serialconsole._arm_endpoints import ArmEndpoints + + if storage_account_url is not None: + storage_account = parse_storage_account_url(storage_account_url) + if storage_account is not None: + sa_result = scf.storage_accounts.get_properties(resource_group_name, storage_account) + if (sa_result is not None and + sa_result.network_rule_set is not None and + len(sa_result.network_rule_set.ip_rules) > 0): + return ArmEndpoints.region_prefix_pairings[sa_result.location] + + return None + + +def parse_storage_account_url(url): + if url is not None: + sa_list = url.split('.') + if len(sa_list) > 0: + sa_url = sa_list[0] + sa_url = sa_url.replace("https://", "") + return sa_url + + return None diff --git a/src/serial-console/azext_serialconsole/tests/latest/recordings/test_check_resource_VM.yaml b/src/serial-console/azext_serialconsole/tests/latest/recordings/test_check_resource_VM.yaml index ae7e7197e16..7291c3c5667 100644 --- a/src/serial-console/azext_serialconsole/tests/latest/recordings/test_check_resource_VM.yaml +++ b/src/serial-console/azext_serialconsole/tests/latest/recordings/test_check_resource_VM.yaml @@ -11,56 +11,9 @@ interactions: Connection: - keep-alive User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 - response: - body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" - headers: - cache-control: - - no-cache - content-length: - - '43' - content-type: - - application/json; charset=UTF-8 - date: - - Thu, 04 Aug 2022 17:10:29 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - nginx - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-frame-options: - - deny - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - unknown - Connection: - - keep-alive - User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-03-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-08-01 response: body: string: '{"error":{"code":"ResourceNotFound","message":"The Resource ''Microsoft.Compute/virtualMachines/cli000003'' @@ -74,7 +27,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:10:30 GMT + - Fri, 14 Oct 2022 15:18:31 GMT expires: - '-1' pragma: @@ -100,9 +53,9 @@ interactions: Connection: - keep-alive User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-08-01 response: body: string: '{"error":{"code":"ResourceNotFound","message":"The Resource ''Microsoft.Compute/virtualMachineScaleSets/cli000003'' @@ -116,7 +69,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:10:30 GMT + - Fri, 14 Oct 2022 15:18:31 GMT expires: - '-1' pragma: @@ -142,56 +95,9 @@ interactions: Connection: - keep-alive User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 - response: - body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" - headers: - cache-control: - - no-cache - content-length: - - '43' - content-type: - - application/json; charset=UTF-8 - date: - - Thu, 04 Aug 2022 17:10:31 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - nginx - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-frame-options: - - deny - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - unknown - Connection: - - keep-alive - User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/0/instanceView?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/0/instanceView?api-version=2022-08-01 response: body: string: '{"error":{"code":"ParentResourceNotFound","message":"Can not 
perform @@ -205,7 +111,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:10:31 GMT + - Fri, 14 Oct 2022 15:18:31 GMT expires: - '-1' pragma: @@ -296,13 +202,13 @@ interactions: content-type: - text/plain; charset=utf-8 date: - - Thu, 04 Aug 2022 17:10:32 GMT + - Fri, 14 Oct 2022 15:18:33 GMT etag: - W/"41b202f4dc5098d126019dc00721a4c5e30df0c5196794514fadc3710ee2a5cb" expires: - - Thu, 04 Aug 2022 17:15:32 GMT + - Fri, 14 Oct 2022 15:23:33 GMT source-age: - - '154' + - '1' strict-transport-security: - max-age=31536000 vary: @@ -316,15 +222,15 @@ interactions: x-content-type-options: - nosniff x-fastly-request-id: - - e094d23543d00b0b2fb9b969ba4aaaf5e3e68b2f + - 2e43cb927278d68606b26e191838c395fee87c9e x-frame-options: - deny x-github-request-id: - - 5064:23C7:122D3E:1DAA5F:62EBFB90 + - 0807:11F5:8F72A:FB4B1:63497DC8 x-served-by: - - cache-pao17467-PAO + - cache-dal2120134-DAL x-timer: - - S1659633032.083799,VS0,VE1 + - S1665760714.830429,VS0,VE1 x-xss-protection: - 1; mode=block status: @@ -344,13 +250,13 @@ interactions: ParameterSetName: - -g -n --image -l --generate-ssh-keys User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/publishers/Canonical/artifacttypes/vmimage/offers/UbuntuServer/skus/18.04-LTS/versions?$top=1&$orderby=name%20desc&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/publishers/Canonical/artifacttypes/vmimage/offers/UbuntuServer/skus/18.04-LTS/versions?$top=1&$orderby=name%20desc&api-version=2022-08-01 response: body: - string: "[\r\n {\r\n \"location\": \"westus2\",\r\n \"name\": \"18.04.202207120\",\r\n - \ \"id\": \"/Subscriptions/00000000-0000-0000-0000-000000000000/Providers/Microsoft.Compute/Locations/westus2/Publishers/Canonical/ArtifactTypes/VMImage/Offers/UbuntuServer/Skus/18.04-LTS/Versions/18.04.202207120\"\r\n + string: "[\r\n {\r\n \"location\": \"westus2\",\r\n \"name\": \"18.04.202209210\",\r\n + \ \"id\": \"/Subscriptions/00000000-0000-0000-0000-000000000000/Providers/Microsoft.Compute/Locations/westus2/Publishers/Canonical/ArtifactTypes/VMImage/Offers/UbuntuServer/Skus/18.04-LTS/Versions/18.04.202209210\"\r\n \ }\r\n]" headers: cache-control: @@ -360,7 +266,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:10:31 GMT + - Fri, 14 Oct 2022 15:18:33 GMT expires: - '-1' pragma: @@ -377,7 +283,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/ListVMImagesVersionsFromLocation3Min;15991,Microsoft.Compute/ListVMImagesVersionsFromLocation30Min;43971 + - Microsoft.Compute/ListVMImagesVersionsFromLocation3Min;15996,Microsoft.Compute/ListVMImagesVersionsFromLocation30Min;43996 status: code: 200 message: OK @@ -395,9 +301,9 @@ interactions: ParameterSetName: - -g -n --image -l --generate-ssh-keys User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/publishers/Canonical/artifacttypes/vmimage/offers/UbuntuServer/skus/18.04-LTS/versions/18.04.202207120?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/publishers/Canonical/artifacttypes/vmimage/offers/UbuntuServer/skus/18.04-LTS/versions/18.04.202209210?api-version=2022-08-01 response: body: string: "{\r\n \"properties\": {\r\n \"hyperVGeneration\": \"V1\",\r\n \"architecture\": @@ -407,20 +313,21 @@ interactions: {\r\n \"imageState\": \"Active\"\r\n },\r\n \"features\": [\r\n \ {\r\n \"name\": \"IsAcceleratedNetworkSupported\",\r\n \"value\": \"True\"\r\n },\r\n {\r\n \"name\": \"DiskControllerTypes\",\r\n - \ \"value\": \"SCSI\"\r\n },\r\n {\r\n \"name\": \"IsHibernateSupported\",\r\n - \ \"value\": \"True\"\r\n }\r\n ],\r\n \"osDiskImage\": {\r\n - \ \"operatingSystem\": \"Linux\",\r\n \"sizeInGb\": 31,\r\n \"sizeInBytes\": - 32213303808\r\n },\r\n \"dataDiskImages\": []\r\n },\r\n \"location\": - \"westus2\",\r\n \"name\": \"18.04.202207120\",\r\n \"id\": \"/Subscriptions/00000000-0000-0000-0000-000000000000/Providers/Microsoft.Compute/Locations/westus2/Publishers/Canonical/ArtifactTypes/VMImage/Offers/UbuntuServer/Skus/18.04-LTS/Versions/18.04.202207120\"\r\n}" + \ \"value\": \"SCSI, NVMe\"\r\n },\r\n {\r\n \"name\": + \"IsHibernateSupported\",\r\n \"value\": \"True\"\r\n }\r\n ],\r\n + \ \"osDiskImage\": {\r\n \"operatingSystem\": \"Linux\",\r\n \"sizeInGb\": + 31,\r\n \"sizeInBytes\": 32213303808\r\n },\r\n \"dataDiskImages\": + []\r\n },\r\n \"location\": \"westus2\",\r\n \"name\": \"18.04.202209210\",\r\n + \ \"id\": \"/Subscriptions/00000000-0000-0000-0000-000000000000/Providers/Microsoft.Compute/Locations/westus2/Publishers/Canonical/ArtifactTypes/VMImage/Offers/UbuntuServer/Skus/18.04-LTS/Versions/18.04.202209210\"\r\n}" headers: cache-control: - no-cache content-length: - - '1044' + - '1050' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:10:32 GMT + - Fri, 14 Oct 2022 15:18:34 GMT expires: - '-1' pragma: @@ -437,7 +344,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMImageFromLocation3Min;12996,Microsoft.Compute/GetVMImageFromLocation30Min;73987 + - Microsoft.Compute/GetVMImageFromLocation3Min;12996,Microsoft.Compute/GetVMImageFromLocation30Min;73996 status: code: 200 message: OK @@ -445,7 +352,7 @@ interactions: body: null headers: Accept: - - application/json, text/json + - application/json Accept-Encoding: - gzip, deflate CommandName: @@ -455,9 +362,9 @@ interactions: ParameterSetName: - -g -n --image -l --generate-ssh-keys User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-network/20.0.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-network/21.0.1 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks?api-version=2018-01-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks?api-version=2022-01-01 response: body: string: '{"value":[]}' @@ -469,7 +376,7 @@ interactions: 
content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:10:32 GMT + - Fri, 14 Oct 2022 15:18:34 GMT expires: - '-1' pragma: @@ -495,7 +402,7 @@ interactions: {"securityRules": [{"name": "default-allow-ssh", "properties": {"protocol": "Tcp", "sourcePortRange": "*", "destinationPortRange": "22", "sourceAddressPrefix": "*", "destinationAddressPrefix": "*", "access": "Allow", "priority": 1000, "direction": - "Inbound"}}]}}, {"apiVersion": "2018-01-01", "type": "Microsoft.Network/publicIPAddresses", + "Inbound"}}]}}, {"apiVersion": "2022-01-01", "type": "Microsoft.Network/publicIPAddresses", "name": "cli000003PublicIP", "location": "westus2", "tags": {}, "dependsOn": [], "properties": {"publicIPAllocationMethod": null}}, {"apiVersion": "2015-06-15", "type": "Microsoft.Network/networkInterfaces", "name": "cli000003VMNic", "location": @@ -505,7 +412,7 @@ interactions: {"privateIPAllocationMethod": "Dynamic", "subnet": {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet"}, "publicIPAddress": {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/publicIPAddresses/cli000003PublicIP"}}}], "networkSecurityGroup": {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkSecurityGroups/cli000003NSG"}}}, - {"apiVersion": "2022-03-01", "type": "Microsoft.Compute/virtualMachines", "name": + {"apiVersion": "2022-08-01", "type": "Microsoft.Compute/virtualMachines", "name": "cli000003", "location": "westus2", "tags": {}, "dependsOn": ["Microsoft.Network/networkInterfaces/cli000003VMNic"], "properties": {"hardwareProfile": {"vmSize": "Standard_DS1_v2"}, "networkProfile": {"networkInterfaces": [{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic", @@ -513,10 +420,10 @@ interactions: "fromImage", "name": null, "caching": "ReadWrite", "managedDisk": {"storageAccountType": null}}, "imageReference": {"publisher": "Canonical", "offer": "UbuntuServer", "sku": "18.04-LTS", "version": "latest"}}, "osProfile": {"computerName": "cli000003", - "adminUsername": "rhl", "linuxConfiguration": {"disablePasswordAuthentication": - true, "ssh": {"publicKeys": [{"keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5", - "path": "/home/rhl/.ssh/authorized_keys"}]}}}}}], "outputs": {}}, "parameters": - {}, "mode": "incremental"}}' + "adminUsername": "rhoover", "linuxConfiguration": {"disablePasswordAuthentication": + true, "ssh": {"publicKeys": [{"keyData": "ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\n", "path": "/home/rhoover/.ssh/authorized_keys"}]}}}}}], + "outputs": {}}, "parameters": {}, "mode": "incremental"}}' headers: Accept: - application/json @@ -527,29 +434,29 @@ interactions: Connection: - keep-alive Content-Length: - - '3604' + - '3808' Content-Type: - application/json ParameterSetName: - -g -n --image -l --generate-ssh-keys User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2021-04-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/vm_deploy_4DzMptweDBWqE0NyUzbtmyKes2ujJpLJ","name":"vm_deploy_4DzMptweDBWqE0NyUzbtmyKes2ujJpLJ","type":"Microsoft.Resources/deployments","properties":{"templateHash":"3174778786938806105","parameters":{},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2022-08-04T17:10:34.1268595Z","duration":"PT0.000621S","correlationId":"a96cd311-4406-46f5-b215-69ad6a4b319d","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"virtualNetworks","locations":["westus2"]},{"resourceType":"networkSecurityGroups","locations":["westus2"]},{"resourceType":"publicIPAddresses","locations":["westus2"]},{"resourceType":"networkInterfaces","locations":["westus2"]}]},{"namespace":"Microsoft.Compute","resourceTypes":[{"resourceType":"virtualMachines","locations":["westus2"]}]}],"dependencies":[{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET","resourceType":"Microsoft.Network/virtualNetworks","resourceName":"cli000003VNET"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkSecurityGroups/cli000003NSG","resourceType":"Microsoft.Network/networkSecurityGroups","resourceName":"cli000003NSG"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/publicIPAddresses/cli000003PublicIP","resourceType":"Microsoft.Network/publicIPAddresses","resourceName":"cli000003PublicIP"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic","resourceType":"Microsoft.Network/networkInterfaces","resourceName":"cli000003VMNic"},{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networ
kInterfaces/cli000003VMNic","resourceType":"Microsoft.Network/networkInterfaces","resourceName":"cli000003VMNic"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003","resourceType":"Microsoft.Compute/virtualMachines","resourceName":"cli000003"}]}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/vm_deploy_UG9cEcn3KjjZRHS8GkwqQMwLb43iYktQ","name":"vm_deploy_UG9cEcn3KjjZRHS8GkwqQMwLb43iYktQ","type":"Microsoft.Resources/deployments","properties":{"templateHash":"7785195787341461596","parameters":{},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2022-10-14T15:18:38.1015064Z","duration":"PT0.0007626S","correlationId":"04b71830-d055-4441-965e-548bdaa012d8","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"virtualNetworks","locations":["westus2"]},{"resourceType":"networkSecurityGroups","locations":["westus2"]},{"resourceType":"publicIPAddresses","locations":["westus2"]},{"resourceType":"networkInterfaces","locations":["westus2"]}]},{"namespace":"Microsoft.Compute","resourceTypes":[{"resourceType":"virtualMachines","locations":["westus2"]}]}],"dependencies":[{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET","resourceType":"Microsoft.Network/virtualNetworks","resourceName":"cli000003VNET"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkSecurityGroups/cli000003NSG","resourceType":"Microsoft.Network/networkSecurityGroups","resourceName":"cli000003NSG"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/publicIPAddresses/cli000003PublicIP","resourceType":"Microsoft.Network/publicIPAddresses","resourceName":"cli000003PublicIP"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic","resourceType":"Microsoft.Network/networkInterfaces","resourceName":"cli000003VMNic"},{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic","resourceType":"Microsoft.Network/networkInterfaces","resourceName":"cli000003VMNic"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003","resourceType":"Microsoft.Compute/virtualMachines","resourceName":"cli000003"}]}}' headers: azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/vm_deploy_4DzMptweDBWqE0NyUzbtmyKes2ujJpLJ/operationStatuses/08585419738520445305?api-version=2021-04-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/vm_deploy_UG9cEcn3KjjZRHS8GkwqQMwLb43iYktQ/operationStatuses/08585358461692167705?api-version=2021-04-01 cache-control: - no-cache content-length: - - '2489' + - '2490' content-type: - application/json; charset=utf-8 
date: - - Thu, 04 Aug 2022 17:10:33 GMT + - Fri, 14 Oct 2022 15:18:38 GMT expires: - '-1' pragma: @@ -559,7 +466,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - - '1198' + - '1199' status: code: 201 message: Created @@ -577,9 +484,9 @@ interactions: ParameterSetName: - -g -n --image -l --generate-ssh-keys User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08585419738520445305?api-version=2021-04-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08585358461692167705?api-version=2021-04-01 response: body: string: '{"status":"Running"}' @@ -591,7 +498,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:11:03 GMT + - Fri, 14 Oct 2022 15:19:08 GMT expires: - '-1' pragma: @@ -619,9 +526,9 @@ interactions: ParameterSetName: - -g -n --image -l --generate-ssh-keys User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08585419738520445305?api-version=2021-04-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08585358461692167705?api-version=2021-04-01 response: body: string: '{"status":"Succeeded"}' @@ -633,7 +540,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:11:34 GMT + - Fri, 14 Oct 2022 15:19:38 GMT expires: - '-1' pragma: @@ -661,12 +568,12 @@ interactions: ParameterSetName: - -g -n --image -l --generate-ssh-keys User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2021-04-01 response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/vm_deploy_4DzMptweDBWqE0NyUzbtmyKes2ujJpLJ","name":"vm_deploy_4DzMptweDBWqE0NyUzbtmyKes2ujJpLJ","type":"Microsoft.Resources/deployments","properties":{"templateHash":"3174778786938806105","parameters":{},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2022-08-04T17:11:17.2771259Z","duration":"PT43.1508874S","correlationId":"a96cd311-4406-46f5-b215-69ad6a4b319d","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"virtualNetworks","locations":["westus2"]},{"resourceType":"networkSecurityGroups","locations":["westus2"]},{"resourceType":"publicIPAddresses","locations":["westus2"]},{"resourceType":"networkInterfaces","locations":["westus2"]}]},{"namespace":"Microsoft.Compute","resourceTypes":[{"resourceType":"virtualMachines","locations":["westus2"]}]}],"dependencies":[{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET","resourceType":"Microsoft.Network/virtualNetworks","resourceName":"cli000003VNET"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkSecurityGroups/cli000003NSG","resourceType":"Microsoft.Network/networkSecurityGroups","resourceName":"cli000003NSG"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/publicIPAddresses/cli000003PublicIP","resourceType":"Microsoft.Network/publicIPAddresses","resourceName":"cli000003PublicIP"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic","resourceType":"Microsoft.Network/networkInterfaces","resourceName":"cli000003VMNic"},{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic","resourceType":"Microsoft.Network/networkInterfaces","resourceName":"cli000003VMNic"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003","resourceType":"Microsoft.Compute/virtualMachines","resourceName":"cli000003"}],"outputs":{},"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkSecurityGroups/cli000003NSG"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/publicIPAddresses/cli000003PublicIP"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET"}]}}' + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/vm_deploy_UG9cEcn3KjjZRHS8GkwqQMwLb43iYktQ","name":"vm_deploy_UG9cEcn3KjjZRHS8GkwqQMwLb43iYktQ","type":"Microsoft.Resources/deployments","properties":{"templateHash":"7785195787341461596","parameters":{},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2022-10-14T15:19:17.1864402Z","duration":"PT39.0856964S","correlationId":"04b71830-d055-4441-965e-548bdaa012d8","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"virtualNetworks","locations":["westus2"]},{"resourceType":"networkSecurityGroups","locations":["westus2"]},{"resourceType":"publicIPAddresses","locations":["westus2"]},{"resourceType":"networkInterfaces","locations":["westus2"]}]},{"namespace":"Microsoft.Compute","resourceTypes":[{"resourceType":"virtualMachines","locations":["westus2"]}]}],"dependencies":[{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET","resourceType":"Microsoft.Network/virtualNetworks","resourceName":"cli000003VNET"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkSecurityGroups/cli000003NSG","resourceType":"Microsoft.Network/networkSecurityGroups","resourceName":"cli000003NSG"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/publicIPAddresses/cli000003PublicIP","resourceType":"Microsoft.Network/publicIPAddresses","resourceName":"cli000003PublicIP"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic","resourceType":"Microsoft.Network/networkInterfaces","resourceName":"cli000003VMNic"},{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic","resourceType":"Microsoft.Network/networkInterfaces","resourceName":"cli000003VMNic"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003","resourceType":"Microsoft.Compute/virtualMachines","resourceName":"cli000003"}],"outputs":{},"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkSecurityGroups/cli000003NSG"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/publicIPAddresses/cli000003PublicIP"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET"}]}}' headers: cache-control: - no-cache @@ -675,7 +582,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:11:34 GMT + - Fri, 14 Oct 2022 15:19:38 GMT expires: - 
'-1' pragma: @@ -703,64 +610,65 @@ interactions: ParameterSetName: - -g -n --image -l --generate-ssh-keys User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n - \ \"tags\": {},\r\n \"properties\": {\r\n \"vmId\": \"e048a2e9-76a7-497c-8ed7-829625b39824\",\r\n + \ \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"properties\": {\r\n \"vmId\": \"eee656e1-0c27-4e10-ba68-5622e742e308\",\r\n \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": - \"18.04.202207120\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": - \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n + \"18.04.202209210\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": + \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\"\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\"\r\n \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": - {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhl\",\r\n + {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhoover\",\r\n \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n - \ \"path\": \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"patchSettings\": {\r\n \"patchMode\": 
\"ImageDefault\",\r\n - \ \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \"enableVMAgentPlatformUpdates\": - false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": - true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"patchSettings\": {\r\n \"patchMode\": + \"ImageDefault\",\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\": + [],\r\n \"allowExtensionOperations\": true,\r\n \"requireGuestProvisionSignal\": + true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n \ \"provisioningState\": \"Succeeded\",\r\n \"instanceView\": {\r\n \"computerName\": \"cli000003\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n - \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.7.3.0\",\r\n \"statuses\": + \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.8.0.11\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \ \"level\": \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \ \"message\": \"Guest Agent is running\",\r\n \"time\": - \"2022-08-04T17:11:30+00:00\"\r\n }\r\n ],\r\n \"extensionHandlers\": - []\r\n },\r\n \"disks\": [\r\n {\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n + \"2022-10-14T15:19:25+00:00\"\r\n }\r\n ],\r\n \"extensionHandlers\": + []\r\n },\r\n \"disks\": [\r\n {\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:10:51.5255088+00:00\"\r\n + succeeded\",\r\n \"time\": \"2022-10-14T15:18:51.4431836+00:00\"\r\n \ }\r\n ]\r\n }\r\n ],\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:11:16.4472219+00:00\"\r\n + succeeded\",\r\n \"time\": \"2022-10-14T15:19:16.0835505+00:00\"\r\n \ },\r\n {\r\n \"code\": \"PowerState/running\",\r\n \ \"level\": \"Info\",\r\n \"displayStatus\": \"VM running\"\r\n - \ }\r\n ]\r\n },\r\n \"timeCreated\": \"2022-08-04T17:10:47.8380764+00:00\"\r\n + \ }\r\n ]\r\n },\r\n \"timeCreated\": \"2022-10-14T15:18:48.6932141+00:00\"\r\n \ }\r\n}" headers: cache-control: - no-cache content-length: - - '3913' + - '4239' content-type: - application/json; 
charset=utf-8 date: - - Thu, 04 Aug 2022 17:11:35 GMT + - Fri, 14 Oct 2022 15:19:39 GMT expires: - '-1' pragma: @@ -777,7 +685,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/LowCostGet3Min;3993,Microsoft.Compute/LowCostGet30Min;31940 + - Microsoft.Compute/LowCostGet3Min;3994,Microsoft.Compute/LowCostGet30Min;31994 status: code: 200 message: OK @@ -785,7 +693,7 @@ interactions: body: null headers: Accept: - - application/json, text/json + - application/json Accept-Encoding: - gzip, deflate CommandName: @@ -795,18 +703,18 @@ interactions: ParameterSetName: - -g -n --image -l --generate-ssh-keys User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-network/20.0.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-network/21.0.1 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic?api-version=2018-01-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic?api-version=2022-01-01 response: body: string: "{\r\n \"name\": \"cli000003VMNic\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\",\r\n - \ \"etag\": \"W/\\\"aa101b49-17b4-47d7-b456-b0440c039d94\\\"\",\r\n \"tags\": + \ \"etag\": \"W/\\\"039aaffd-6916-4a71-a695-0de583ae6055\\\"\",\r\n \"tags\": {},\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\",\r\n - \ \"resourceGuid\": \"601b5ed0-d41f-4643-8e93-f1c73b03c315\",\r\n \"ipConfigurations\": + \ \"resourceGuid\": \"38037776-5efd-4adb-b1cb-bda3ab487392\",\r\n \"ipConfigurations\": [\r\n {\r\n \"name\": \"ipconfigcli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic/ipConfigurations/ipconfigcli000003\",\r\n - \ \"etag\": \"W/\\\"aa101b49-17b4-47d7-b456-b0440c039d94\\\"\",\r\n + \ \"etag\": \"W/\\\"039aaffd-6916-4a71-a695-0de583ae6055\\\"\",\r\n \ \"type\": \"Microsoft.Network/networkInterfaces/ipConfigurations\",\r\n \ \"properties\": {\r\n \"provisioningState\": \"Succeeded\",\r\n \ \"privateIPAddress\": \"10.0.0.4\",\r\n \"privateIPAllocationMethod\": @@ -815,25 +723,27 @@ interactions: \ },\r\n \"primary\": true,\r\n \"privateIPAddressVersion\": \"IPv4\"\r\n }\r\n }\r\n ],\r\n \"dnsSettings\": {\r\n \"dnsServers\": [],\r\n \"appliedDnsServers\": [],\r\n \"internalDomainNameSuffix\": - \"kzhglzq3n3getbkfek1c4nh5uc.xx.internal.cloudapp.net\"\r\n },\r\n \"macAddress\": - \"00-22-48-77-28-51\",\r\n \"enableAcceleratedNetworking\": false,\r\n - \ \"enableIPForwarding\": false,\r\n \"networkSecurityGroup\": {\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkSecurityGroups/cli000003NSG\"\r\n + \"5ubekrqrkhlurf5gahkxelgrta.xx.internal.cloudapp.net\"\r\n },\r\n \"macAddress\": + \"00-0D-3A-FC-65-EC\",\r\n \"enableAcceleratedNetworking\": false,\r\n + \ \"vnetEncryptionSupported\": false,\r\n \"enableIPForwarding\": false,\r\n + \ \"networkSecurityGroup\": {\r\n \"id\": 
\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkSecurityGroups/cli000003NSG\"\r\n \ },\r\n \"primary\": true,\r\n \"virtualMachine\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\"\r\n - \ }\r\n },\r\n \"type\": \"Microsoft.Network/networkInterfaces\",\r\n - \ \"location\": \"westus2\"\r\n}" + \ },\r\n \"hostedWorkloads\": [],\r\n \"tapConfigurations\": [],\r\n + \ \"nicType\": \"Standard\",\r\n \"allowPort25Out\": true\r\n },\r\n + \ \"type\": \"Microsoft.Network/networkInterfaces\",\r\n \"location\": \"westus2\",\r\n + \ \"kind\": \"Regular\"\r\n}" headers: cache-control: - no-cache content-length: - - '2347' + - '2523' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:11:34 GMT + - Fri, 14 Oct 2022 15:19:40 GMT etag: - - W/"aa101b49-17b4-47d7-b456-b0440c039d94" + - W/"039aaffd-6916-4a71-a695-0de583ae6055" expires: - '-1' pragma: @@ -850,7 +760,7 @@ interactions: x-content-type-options: - nosniff x-ms-arm-service-request-id: - - 55f55800-aef1-4d1c-91b7-4e700fa97487 + - ffa84083-0fae-4347-a8e2-20f99e68e498 status: code: 200 message: OK @@ -858,7 +768,7 @@ interactions: body: null headers: Accept: - - application/json, text/json + - application/json Accept-Encoding: - gzip, deflate CommandName: @@ -868,31 +778,32 @@ interactions: ParameterSetName: - -g -n --image -l --generate-ssh-keys User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-network/20.0.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-network/21.0.1 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/publicIPAddresses/cli000003PublicIP?api-version=2018-01-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/publicIPAddresses/cli000003PublicIP?api-version=2022-01-01 response: body: string: "{\r\n \"name\": \"cli000003PublicIP\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/publicIPAddresses/cli000003PublicIP\",\r\n - \ \"etag\": \"W/\\\"0289d247-5917-4b30-b9a3-abdec4758c7f\\\"\",\r\n \"location\": + \ \"etag\": \"W/\\\"7781bd9a-182b-45dd-a5d6-c596b1acfd34\\\"\",\r\n \"location\": \"westus2\",\r\n \"tags\": {},\r\n \"properties\": {\r\n \"provisioningState\": - \"Succeeded\",\r\n \"resourceGuid\": \"379f9ab3-9fbd-43b4-922e-1e62313bcd59\",\r\n - \ \"ipAddress\": \"20.112.39.82\",\r\n \"publicIPAddressVersion\": \"IPv4\",\r\n + \"Succeeded\",\r\n \"resourceGuid\": \"85ec7473-97f8-413f-892d-8b940148cf7c\",\r\n + \ \"ipAddress\": \"20.112.86.217\",\r\n \"publicIPAddressVersion\": \"IPv4\",\r\n \ \"publicIPAllocationMethod\": \"Dynamic\",\r\n \"idleTimeoutInMinutes\": 4,\r\n \"ipTags\": [],\r\n \"ipConfiguration\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic/ipConfigurations/ipconfigcli000003\"\r\n \ }\r\n },\r\n \"type\": \"Microsoft.Network/publicIPAddresses\",\r\n - \ \"sku\": {\r\n \"name\": \"Basic\"\r\n }\r\n}" + \ \"sku\": {\r\n 
\"name\": \"Basic\",\r\n \"tier\": \"Regional\"\r\n + \ }\r\n}" headers: cache-control: - no-cache content-length: - - '927' + - '953' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:11:34 GMT + - Fri, 14 Oct 2022 15:19:40 GMT etag: - - W/"0289d247-5917-4b30-b9a3-abdec4758c7f" + - W/"7781bd9a-182b-45dd-a5d6-c596b1acfd34" expires: - '-1' pragma: @@ -909,56 +820,7 @@ interactions: x-content-type-options: - nosniff x-ms-arm-service-request-id: - - 06230199-dc1b-4e72-9e85-320ed4ee9c4d - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - vm create - Connection: - - keep-alive - ParameterSetName: - - -g -n --image -l --generate-ssh-keys - User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 - response: - body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" - headers: - cache-control: - - no-cache - content-length: - - '43' - content-type: - - application/json; charset=UTF-8 - date: - - Thu, 04 Aug 2022 17:11:35 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - nginx - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-frame-options: - - deny + - 984d82fc-74d9-416f-bb4c-4df013601d5a status: code: 200 message: OK @@ -976,64 +838,65 @@ interactions: ParameterSetName: - -g -n --image -l --generate-ssh-keys User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n - \ \"tags\": {},\r\n \"properties\": {\r\n \"vmId\": \"e048a2e9-76a7-497c-8ed7-829625b39824\",\r\n + \ \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"properties\": {\r\n \"vmId\": \"eee656e1-0c27-4e10-ba68-5622e742e308\",\r\n \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": - \"18.04.202207120\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": - \"Linux\",\r\n \"name\": 
\"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n + \"18.04.202209210\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": + \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\"\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\"\r\n \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": - {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhl\",\r\n + {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhoover\",\r\n \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n - \ \"path\": \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"patchSettings\": {\r\n \"patchMode\": \"ImageDefault\",\r\n - \ \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \"enableVMAgentPlatformUpdates\": - false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": - true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"patchSettings\": {\r\n \"patchMode\": + \"ImageDefault\",\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\": + [],\r\n \"allowExtensionOperations\": true,\r\n \"requireGuestProvisionSignal\": + true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n \ \"provisioningState\": \"Succeeded\",\r\n \"instanceView\": {\r\n \"computerName\": \"cli000003\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n - \ 
\"vmAgent\": {\r\n \"vmAgentVersion\": \"2.7.3.0\",\r\n \"statuses\": + \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.8.0.11\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \ \"level\": \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \ \"message\": \"Guest Agent is running\",\r\n \"time\": - \"2022-08-04T17:11:30+00:00\"\r\n }\r\n ],\r\n \"extensionHandlers\": - []\r\n },\r\n \"disks\": [\r\n {\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n + \"2022-10-14T15:19:25+00:00\"\r\n }\r\n ],\r\n \"extensionHandlers\": + []\r\n },\r\n \"disks\": [\r\n {\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:10:51.5255088+00:00\"\r\n + succeeded\",\r\n \"time\": \"2022-10-14T15:18:51.4431836+00:00\"\r\n \ }\r\n ]\r\n }\r\n ],\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:11:16.4472219+00:00\"\r\n + succeeded\",\r\n \"time\": \"2022-10-14T15:19:16.0835505+00:00\"\r\n \ },\r\n {\r\n \"code\": \"PowerState/running\",\r\n \ \"level\": \"Info\",\r\n \"displayStatus\": \"VM running\"\r\n - \ }\r\n ]\r\n },\r\n \"timeCreated\": \"2022-08-04T17:10:47.8380764+00:00\"\r\n + \ }\r\n ]\r\n },\r\n \"timeCreated\": \"2022-10-14T15:18:48.6932141+00:00\"\r\n \ }\r\n}" headers: cache-control: - no-cache content-length: - - '3913' + - '4239' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:11:35 GMT + - Fri, 14 Oct 2022 15:19:40 GMT expires: - '-1' pragma: @@ -1050,7 +913,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/LowCostGet3Min;3992,Microsoft.Compute/LowCostGet30Min;31939 + - Microsoft.Compute/LowCostGet3Min;3993,Microsoft.Compute/LowCostGet30Min;31993 status: code: 200 message: OK @@ -1068,47 +931,48 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n - \ \"tags\": {},\r\n \"properties\": {\r\n \"vmId\": \"e048a2e9-76a7-497c-8ed7-829625b39824\",\r\n + \ \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"properties\": {\r\n \"vmId\": \"eee656e1-0c27-4e10-ba68-5622e742e308\",\r\n \ \"hardwareProfile\": {\r\n \"vmSize\": 
\"Standard_DS1_v2\"\r\n },\r\n \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": - \"18.04.202207120\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": - \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n + \"18.04.202209210\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": + \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\"\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\"\r\n \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": - {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhl\",\r\n + {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhoover\",\r\n \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n - \ \"path\": \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"patchSettings\": {\r\n \"patchMode\": \"ImageDefault\",\r\n - \ \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \"enableVMAgentPlatformUpdates\": - false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": - true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n - \ \"provisioningState\": \"Succeeded\",\r\n \"timeCreated\": \"2022-08-04T17:10:47.8380764+00:00\"\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"patchSettings\": {\r\n \"patchMode\": + \"ImageDefault\",\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\": + [],\r\n \"allowExtensionOperations\": true,\r\n 
\"requireGuestProvisionSignal\": + true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n + \ \"provisioningState\": \"Succeeded\",\r\n \"timeCreated\": \"2022-10-14T15:18:48.6932141+00:00\"\r\n \ }\r\n}" headers: cache-control: - no-cache content-length: - - '2608' + - '2933' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:11:35 GMT + - Fri, 14 Oct 2022 15:19:40 GMT expires: - '-1' pragma: @@ -1125,21 +989,23 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/LowCostGet3Min;3991,Microsoft.Compute/LowCostGet30Min;31938 + - Microsoft.Compute/LowCostGet3Min;3992,Microsoft.Compute/LowCostGet30Min;31992 status: code: 200 message: OK - request: - body: '{"location": "westus2", "tags": {}, "properties": {"hardwareProfile": {"vmSize": - "Standard_DS1_v2"}, "storageProfile": {"osDisk": {"osType": "Linux", "name": - "cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474", "caching": "ReadWrite", - "createOption": "FromImage", "diskSizeGB": 30, "managedDisk": {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474", + body: '{"location": "westus2", "tags": {"azsecpack": "nonprod", "platformsettings.host_environment.service.platform_optedin_for_rootcerts": + "true"}, "properties": {"hardwareProfile": {"vmSize": "Standard_DS1_v2"}, "storageProfile": + {"osDisk": {"osType": "Linux", "name": "cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404", + "caching": "ReadWrite", "createOption": "FromImage", "diskSizeGB": 30, "managedDisk": + {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404", "storageAccountType": "Premium_LRS"}, "deleteOption": "Detach"}, "dataDisks": - []}, "osProfile": {"computerName": "cli000003", "adminUsername": "rhl", "linuxConfiguration": - {"disablePasswordAuthentication": true, "ssh": {"publicKeys": [{"path": "/home/rhl/.ssh/authorized_keys", - "keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5"}]}, - "provisionVMAgent": true, "patchSettings": {"patchMode": "ImageDefault", "assessmentMode": - "ImageDefault"}}, "secrets": [], "allowExtensionOperations": true, "requireGuestProvisionSignal": + []}, "osProfile": {"computerName": "cli000003", "adminUsername": "rhoover", + "linuxConfiguration": {"disablePasswordAuthentication": true, "ssh": {"publicKeys": + [{"path": "/home/rhoover/.ssh/authorized_keys", "keyData": "ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\n"}]}, "provisionVMAgent": true, "patchSettings": {"patchMode": + "ImageDefault", "assessmentMode": "ImageDefault"}, "enableVMAgentPlatformUpdates": + false}, "secrets": [], "allowExtensionOperations": true, "requireGuestProvisionSignal": true}, "networkProfile": {"networkInterfaces": [{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic"}]}, "diagnosticsProfile": {"bootDiagnostics": {"enabled": true}}}}' headers: @@ -1152,58 +1018,59 @@ interactions: Connection: - keep-alive Content-Length: - - '1636' + - '1985' Content-Type: - application/json ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n - \ \"tags\": {},\r\n \"properties\": {\r\n \"vmId\": \"e048a2e9-76a7-497c-8ed7-829625b39824\",\r\n + \ \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"properties\": {\r\n \"vmId\": \"eee656e1-0c27-4e10-ba68-5622e742e308\",\r\n \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": - \"18.04.202207120\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": - \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n + \"18.04.202209210\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": + \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\"\r\n + \ \"id\": 
\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\"\r\n \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": - {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhl\",\r\n + {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhoover\",\r\n \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n - \ \"path\": \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"patchSettings\": {\r\n \"patchMode\": \"ImageDefault\",\r\n - \ \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \"enableVMAgentPlatformUpdates\": - false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": - true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"patchSettings\": {\r\n \"patchMode\": + \"ImageDefault\",\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\": + [],\r\n \"allowExtensionOperations\": true,\r\n \"requireGuestProvisionSignal\": + true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": true\r\n }\r\n },\r\n \"provisioningState\": \"Updating\",\r\n - \ \"timeCreated\": \"2022-08-04T17:10:47.8380764+00:00\"\r\n }\r\n}" + \ \"timeCreated\": \"2022-10-14T15:18:48.6932141+00:00\"\r\n }\r\n}" headers: azure-asyncnotification: - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/84c4b08a-9fe7-46e5-833f-e3caa74dba4f?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/4f6bc2e8-8a7c-4276-a7d2-9ed34c15ee1e?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - - '2706' + - '3031' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:11:36 GMT + - Fri, 14 Oct 2022 15:19:44 GMT expires: - '-1' pragma: @@ -1220,7 +1087,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/PutVM3Min;500,Microsoft.Compute/PutVM30Min;2507 + - Microsoft.Compute/PutVM3Min;594,Microsoft.Compute/PutVM30Min;2984 x-ms-ratelimit-remaining-subscription-writes: - '1199' status: @@ -1240,23 +1107,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/84c4b08a-9fe7-46e5-833f-e3caa74dba4f?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/4f6bc2e8-8a7c-4276-a7d2-9ed34c15ee1e?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:11:37.181473+00:00\",\r\n \"endTime\": - \"2022-08-04T17:11:42.3690015+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"84c4b08a-9fe7-46e5-833f-e3caa74dba4f\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:19:43.5989027+00:00\",\r\n \"endTime\": + \"2022-10-14T15:19:50.7551028+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"4f6bc2e8-8a7c-4276-a7d2-9ed34c15ee1e\"\r\n}" headers: cache-control: - no-cache content-length: - - '183' + - '184' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:12:07 GMT + - Fri, 14 Oct 2022 15:20:14 GMT expires: - '-1' pragma: @@ -1273,7 +1140,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14968,Microsoft.Compute/GetOperation30Min;29871 + - Microsoft.Compute/GetOperation3Min;14994,Microsoft.Compute/GetOperation30Min;29994 status: code: 200 message: OK @@ -1291,48 +1158,49 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n \ \"type\": 
\"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n - \ \"tags\": {},\r\n \"properties\": {\r\n \"vmId\": \"e048a2e9-76a7-497c-8ed7-829625b39824\",\r\n + \ \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"properties\": {\r\n \"vmId\": \"eee656e1-0c27-4e10-ba68-5622e742e308\",\r\n \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": - \"18.04.202207120\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": - \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n + \"18.04.202209210\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": + \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\"\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\"\r\n \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": - {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhl\",\r\n + {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhoover\",\r\n \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n - \ \"path\": \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"patchSettings\": {\r\n \"patchMode\": \"ImageDefault\",\r\n - \ \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \"enableVMAgentPlatformUpdates\": - false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": - true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"patchSettings\": {\r\n \"patchMode\": + \"ImageDefault\",\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\": + [],\r\n \"allowExtensionOperations\": true,\r\n \"requireGuestProvisionSignal\": + true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": true\r\n }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n - \ \"timeCreated\": \"2022-08-04T17:10:47.8380764+00:00\"\r\n }\r\n}" + \ \"timeCreated\": \"2022-10-14T15:18:48.6932141+00:00\"\r\n }\r\n}" headers: cache-control: - no-cache content-length: - - '2707' + - '3032' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:12:07 GMT + - Fri, 14 Oct 2022 15:20:14 GMT expires: - '-1' pragma: @@ -1349,7 +1217,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/LowCostGet3Min;3986,Microsoft.Compute/LowCostGet30Min;31933 + - Microsoft.Compute/LowCostGet3Min;3987,Microsoft.Compute/LowCostGet30Min;31987 status: code: 200 message: OK @@ -1367,28 +1235,75 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-08-01 response: body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n + \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n + \ \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"properties\": {\r\n \"vmId\": \"eee656e1-0c27-4e10-ba68-5622e742e308\",\r\n + \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n + \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": + \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": + \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": 
+ \"18.04.202209210\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": + \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n + \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n + \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\"\r\n + \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": + 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": + {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhoover\",\r\n + \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": + true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"patchSettings\": {\r\n \"patchMode\": + \"ImageDefault\",\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\": + [],\r\n \"allowExtensionOperations\": true,\r\n \"requireGuestProvisionSignal\": + true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n + \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": + true\r\n }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n + \ \"instanceView\": {\r\n \"computerName\": \"cli000003\",\r\n \"osName\": + \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n \"vmAgent\": {\r\n + \ \"vmAgentVersion\": \"2.8.0.11\",\r\n \"statuses\": [\r\n {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": + \"Guest Agent is running\",\r\n \"time\": \"2022-10-14T15:19:49+00:00\"\r\n + \ }\r\n ],\r\n \"extensionHandlers\": []\r\n },\r\n + \ \"disks\": [\r\n {\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n + \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"time\": \"2022-10-14T15:19:44.1614198+00:00\"\r\n + \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": + {},\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n + \ \"time\": \"2022-10-14T15:19:50.7551028+00:00\"\r\n },\r\n + \ {\r\n \"code\": \"PowerState/running\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"VM running\"\r\n }\r\n + \ ]\r\n },\r\n \"timeCreated\": \"2022-10-14T15:18:48.6932141+00:00\"\r\n + \ }\r\n}" headers: cache-control: - no-cache content-length: - - '43' + - '4368' 
content-type: - - application/json; charset=UTF-8 + - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:12:08 GMT + - Fri, 14 Oct 2022 15:20:15 GMT expires: - '-1' pragma: - no-cache server: - - nginx + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1397,8 +1312,8 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-frame-options: - - deny + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/LowCostGet3Min;3986,Microsoft.Compute/LowCostGet30Min;31986 status: code: 200 message: OK @@ -1416,74 +1331,28 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 response: body: - string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n - \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n - \ \"tags\": {},\r\n \"properties\": {\r\n \"vmId\": \"e048a2e9-76a7-497c-8ed7-829625b39824\",\r\n - \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n - \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": - \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": - \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": - \"18.04.202207120\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": - \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n - \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n - \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\"\r\n - \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": - 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": - {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhl\",\r\n - \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": - true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n - \ \"path\": \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"patchSettings\": {\r\n \"patchMode\": \"ImageDefault\",\r\n - \ \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \"enableVMAgentPlatformUpdates\": - false\r\n },\r\n 
\"secrets\": [],\r\n \"allowExtensionOperations\": - true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n - \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": - true\r\n }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n - \ \"instanceView\": {\r\n \"computerName\": \"cli000003\",\r\n \"osName\": - \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n \"vmAgent\": {\r\n - \ \"vmAgentVersion\": \"2.7.3.0\",\r\n \"statuses\": [\r\n {\r\n - \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": - \"Guest Agent is running\",\r\n \"time\": \"2022-08-04T17:11:48+00:00\"\r\n - \ }\r\n ],\r\n \"extensionHandlers\": []\r\n },\r\n - \ \"disks\": [\r\n {\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n - \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:11:37.6971283+00:00\"\r\n - \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": - {},\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n {\r\n - \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n - \ \"time\": \"2022-08-04T17:11:42.3533387+00:00\"\r\n },\r\n - \ {\r\n \"code\": \"PowerState/running\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"VM running\"\r\n }\r\n - \ ]\r\n },\r\n \"timeCreated\": \"2022-08-04T17:10:47.8380764+00:00\"\r\n - \ }\r\n}" + string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" headers: cache-control: - no-cache content-length: - - '4042' + - '43' content-type: - - application/json; charset=utf-8 + - application/json; charset=UTF-8 date: - - Thu, 04 Aug 2022 17:12:08 GMT + - Fri, 14 Oct 2022 15:20:16 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 + - nginx strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1492,8 +1361,8 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/LowCostGet3Min;3985,Microsoft.Compute/LowCostGet30Min;31932 + x-frame-options: + - deny status: code: 200 message: OK @@ -1513,25 +1382,27 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003/deallocate?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003/deallocate?api-version=2022-08-01 response: body: string: '' headers: + azure-asyncnotification: + - Enabled azure-asyncoperation: - - 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/d7a758b8-1915-42bb-a410-7f6f3405c9bb?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/2104914c-5652-4df7-90ea-fcc5d18bbb8c?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - '0' date: - - Thu, 04 Aug 2022 17:12:09 GMT + - Fri, 14 Oct 2022 15:20:16 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/d7a758b8-1915-42bb-a410-7f6f3405c9bb?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/2104914c-5652-4df7-90ea-fcc5d18bbb8c?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 pragma: - no-cache server: @@ -1542,9 +1413,9 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/DeleteVM3Min;239,Microsoft.Compute/DeleteVM30Min;1194 + - Microsoft.Compute/DeleteVM3Min;239,Microsoft.Compute/DeleteVM30Min;1199 x-ms-ratelimit-remaining-subscription-writes: - - '1198' + - '1199' status: code: 202 message: Accepted @@ -1562,13 +1433,13 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/d7a758b8-1915-42bb-a410-7f6f3405c9bb?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/2104914c-5652-4df7-90ea-fcc5d18bbb8c?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:12:09.1656681+00:00\",\r\n \"status\": - \"InProgress\",\r\n \"name\": \"d7a758b8-1915-42bb-a410-7f6f3405c9bb\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:20:17.0204444+00:00\",\r\n \"status\": + \"InProgress\",\r\n \"name\": \"2104914c-5652-4df7-90ea-fcc5d18bbb8c\"\r\n}" headers: cache-control: - no-cache @@ -1577,7 +1448,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:12:19 GMT + - Fri, 14 Oct 2022 15:20:26 GMT expires: - '-1' pragma: @@ -1594,7 +1465,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14967,Microsoft.Compute/GetOperation30Min;29870 + - Microsoft.Compute/GetOperation3Min;14992,Microsoft.Compute/GetOperation30Min;29992 status: code: 200 message: OK @@ -1612,23 +1483,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/d7a758b8-1915-42bb-a410-7f6f3405c9bb?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/2104914c-5652-4df7-90ea-fcc5d18bbb8c?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:12:09.1656681+00:00\",\r\n \"endTime\": - \"2022-08-04T17:12:38.2123701+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"d7a758b8-1915-42bb-a410-7f6f3405c9bb\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:20:17.0204444+00:00\",\r\n \"endTime\": + \"2022-10-14T15:20:42.926446+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"2104914c-5652-4df7-90ea-fcc5d18bbb8c\"\r\n}" headers: cache-control: - no-cache content-length: - - '184' + - '183' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:12:55 GMT + - Fri, 14 Oct 2022 15:20:56 GMT expires: - '-1' pragma: @@ -1645,7 +1516,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14962,Microsoft.Compute/GetOperation30Min;29856 + - Microsoft.Compute/GetOperation3Min;14983,Microsoft.Compute/GetOperation30Min;29983 status: code: 200 message: OK @@ -1663,9 +1534,9 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/d7a758b8-1915-42bb-a410-7f6f3405c9bb?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/2104914c-5652-4df7-90ea-fcc5d18bbb8c?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 response: body: string: '' @@ -1675,7 +1546,7 @@ interactions: content-length: - '0' date: - - Thu, 04 Aug 2022 17:12:55 GMT + - Fri, 14 Oct 2022 15:20:57 GMT expires: - '-1' pragma: @@ -1688,7 +1559,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14961,Microsoft.Compute/GetOperation30Min;29855 + - Microsoft.Compute/GetOperation3Min;14982,Microsoft.Compute/GetOperation30Min;29982 status: code: 200 message: OK @@ -1706,28 +1577,67 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-08-01 response: body: - 
string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n + \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n + \ \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"properties\": {\r\n \"vmId\": \"eee656e1-0c27-4e10-ba68-5622e742e308\",\r\n + \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n + \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": + \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": + \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": + \"18.04.202209210\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": + \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n + \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n + \ \"managedDisk\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\"\r\n + \ },\r\n \"deleteOption\": \"Detach\"\r\n },\r\n \"dataDisks\": + []\r\n },\r\n \"osProfile\": {\r\n \"computerName\": \"cli000003\",\r\n + \ \"adminUsername\": \"rhoover\",\r\n \"linuxConfiguration\": {\r\n + \ \"disablePasswordAuthentication\": true,\r\n \"ssh\": {\r\n + \ \"publicKeys\": [\r\n {\r\n \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n + \ \"keyData\": \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"patchSettings\": {\r\n \"patchMode\": + \"ImageDefault\",\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\": + [],\r\n \"allowExtensionOperations\": true,\r\n \"requireGuestProvisionSignal\": + true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n + \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": + true\r\n }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n + \ \"instanceView\": {\r\n \"disks\": [\r\n {\r\n \"name\": + \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \"statuses\": + [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"time\": \"2022-10-14T15:20:42.7389392+00:00\"\r\n + \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": + {},\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning 
succeeded\",\r\n + \ \"time\": \"2022-10-14T15:20:42.7545747+00:00\"\r\n },\r\n + \ {\r\n \"code\": \"PowerState/deallocated\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"VM deallocated\"\r\n }\r\n + \ ]\r\n },\r\n \"timeCreated\": \"2022-10-14T15:18:48.6932141+00:00\"\r\n + \ }\r\n}" headers: cache-control: - no-cache content-length: - - '43' + - '3826' content-type: - - application/json; charset=UTF-8 + - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:12:55 GMT + - Fri, 14 Oct 2022 15:20:58 GMT expires: - '-1' pragma: - no-cache server: - - nginx + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1736,8 +1646,8 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-frame-options: - - deny + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/LowCostGet3Min;3978,Microsoft.Compute/LowCostGet30Min;31978 status: code: 200 message: OK @@ -1755,66 +1665,28 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 response: body: - string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n - \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n - \ \"tags\": {},\r\n \"properties\": {\r\n \"vmId\": \"e048a2e9-76a7-497c-8ed7-829625b39824\",\r\n - \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n - \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": - \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": - \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": - \"18.04.202207120\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": - \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n - \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n - \ \"managedDisk\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\"\r\n - \ },\r\n \"deleteOption\": \"Detach\"\r\n },\r\n \"dataDisks\": - []\r\n },\r\n \"osProfile\": {\r\n \"computerName\": \"cli000003\",\r\n - \ \"adminUsername\": \"rhl\",\r\n \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": - true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n - \ \"path\": \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": - \"ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"patchSettings\": {\r\n \"patchMode\": \"ImageDefault\",\r\n - \ \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \"enableVMAgentPlatformUpdates\": - false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": - true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n - \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": - true\r\n }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n - \ \"instanceView\": {\r\n \"disks\": [\r\n {\r\n \"name\": - \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n \"statuses\": - [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:12:37.8998954+00:00\"\r\n - \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": - {},\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n {\r\n - \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n - \ \"time\": \"2022-08-04T17:12:37.9155549+00:00\"\r\n },\r\n - \ {\r\n \"code\": \"PowerState/deallocated\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"VM deallocated\"\r\n }\r\n - \ ]\r\n },\r\n \"timeCreated\": \"2022-08-04T17:10:47.8380764+00:00\"\r\n - \ }\r\n}" + string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" headers: cache-control: - no-cache content-length: - - '3501' + - '43' content-type: - - application/json; charset=utf-8 + - application/json; charset=UTF-8 date: - - Thu, 04 Aug 2022 17:12:55 GMT + - Fri, 14 Oct 2022 15:20:58 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 + - nginx strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1823,8 +1695,8 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/LowCostGet3Min;3980,Microsoft.Compute/LowCostGet30Min;31926 + x-frame-options: + - deny status: code: 200 message: OK @@ -1844,25 +1716,27 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003/start?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003/start?api-version=2022-08-01 response: body: string: '' headers: + azure-asyncnotification: + - Enabled azure-asyncoperation: - - 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/f7eea585-6ef7-480e-8e82-1309aec78928?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/8ebffdc3-4311-4dee-a126-81e84b11a5eb?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - '0' date: - - Thu, 04 Aug 2022 17:12:56 GMT + - Fri, 14 Oct 2022 15:20:58 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/f7eea585-6ef7-480e-8e82-1309aec78928?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/8ebffdc3-4311-4dee-a126-81e84b11a5eb?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 pragma: - no-cache server: @@ -1873,9 +1747,9 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/UpdateVM3Min;239,Microsoft.Compute/UpdateVM30Min;1195 + - Microsoft.Compute/UpdateVM3Min;235,Microsoft.Compute/UpdateVM30Min;1195 x-ms-ratelimit-remaining-subscription-writes: - - '1198' + - '1199' status: code: 202 message: Accepted @@ -1893,13 +1767,13 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/f7eea585-6ef7-480e-8e82-1309aec78928?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/8ebffdc3-4311-4dee-a126-81e84b11a5eb?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:12:56.6498162+00:00\",\r\n \"status\": - \"InProgress\",\r\n \"name\": \"f7eea585-6ef7-480e-8e82-1309aec78928\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:20:59.5512784+00:00\",\r\n \"status\": + \"InProgress\",\r\n \"name\": \"8ebffdc3-4311-4dee-a126-81e84b11a5eb\"\r\n}" headers: cache-control: - no-cache @@ -1908,7 +1782,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:13:06 GMT + - Fri, 14 Oct 2022 15:21:09 GMT expires: - '-1' pragma: @@ -1925,7 +1799,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14961,Microsoft.Compute/GetOperation30Min;29850 + - Microsoft.Compute/GetOperation3Min;14980,Microsoft.Compute/GetOperation30Min;29980 status: code: 200 message: OK @@ -1943,14 +1817,14 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/f7eea585-6ef7-480e-8e82-1309aec78928?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/8ebffdc3-4311-4dee-a126-81e84b11a5eb?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:12:56.6498162+00:00\",\r\n \"endTime\": - \"2022-08-04T17:13:14.8372222+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"f7eea585-6ef7-480e-8e82-1309aec78928\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:20:59.5512784+00:00\",\r\n \"endTime\": + \"2022-10-14T15:21:13.4730808+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"8ebffdc3-4311-4dee-a126-81e84b11a5eb\"\r\n}" headers: cache-control: - no-cache @@ -1959,7 +1833,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:13:36 GMT + - Fri, 14 Oct 2022 15:21:19 GMT expires: - '-1' pragma: @@ -1976,7 +1850,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14951,Microsoft.Compute/GetOperation30Min;29836 + - Microsoft.Compute/GetOperation3Min;14977,Microsoft.Compute/GetOperation30Min;29977 status: code: 200 message: OK @@ -1994,9 +1868,9 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/f7eea585-6ef7-480e-8e82-1309aec78928?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/8ebffdc3-4311-4dee-a126-81e84b11a5eb?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 response: body: string: '' @@ -2006,7 +1880,7 @@ interactions: content-length: - '0' date: - - Thu, 04 Aug 2022 17:13:36 GMT + - Fri, 14 Oct 2022 15:21:19 GMT expires: - '-1' pragma: @@ -2019,7 +1893,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14949,Microsoft.Compute/GetOperation30Min;29834 + - Microsoft.Compute/GetOperation3Min;14976,Microsoft.Compute/GetOperation30Min;29976 status: code: 200 message: OK @@ -2039,25 +1913,27 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003/powerOff?skipShutdown=false&api-version=2022-03-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003/powerOff?skipShutdown=false&api-version=2022-08-01 response: body: string: '' headers: + azure-asyncnotification: + - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/b0708181-81f0-4bad-8c47-7171f5883c6e?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/ecf1882f-e784-4fdb-a80a-7c760b34003c?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - '0' date: - - Thu, 04 Aug 2022 17:13:36 GMT + - Fri, 14 Oct 2022 15:21:19 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/b0708181-81f0-4bad-8c47-7171f5883c6e?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/ecf1882f-e784-4fdb-a80a-7c760b34003c?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 pragma: - no-cache server: @@ -2068,7 +1944,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/UpdateVM3Min;238,Microsoft.Compute/UpdateVM30Min;1194 + - Microsoft.Compute/UpdateVM3Min;234,Microsoft.Compute/UpdateVM30Min;1194 x-ms-ratelimit-remaining-subscription-writes: - '1199' status: @@ -2088,23 +1964,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/b0708181-81f0-4bad-8c47-7171f5883c6e?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/ecf1882f-e784-4fdb-a80a-7c760b34003c?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:13:37.8527352+00:00\",\r\n \"endTime\": - \"2022-08-04T17:13:46.103382+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"b0708181-81f0-4bad-8c47-7171f5883c6e\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:21:20.2542538+00:00\",\r\n \"endTime\": + \"2022-10-14T15:21:22.4729364+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"ecf1882f-e784-4fdb-a80a-7c760b34003c\"\r\n}" headers: cache-control: - no-cache content-length: - - '183' + - '184' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:14:07 GMT + - Fri, 14 Oct 2022 15:21:49 GMT expires: - '-1' pragma: @@ -2121,7 +1997,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14944,Microsoft.Compute/GetOperation30Min;29825 + - 
Microsoft.Compute/GetOperation3Min;14966,Microsoft.Compute/GetOperation30Min;29966 status: code: 200 message: OK @@ -2139,9 +2015,9 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/b0708181-81f0-4bad-8c47-7171f5883c6e?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/ecf1882f-e784-4fdb-a80a-7c760b34003c?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 response: body: string: '' @@ -2151,7 +2027,7 @@ interactions: content-length: - '0' date: - - Thu, 04 Aug 2022 17:14:07 GMT + - Fri, 14 Oct 2022 15:21:49 GMT expires: - '-1' pragma: @@ -2164,7 +2040,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14943,Microsoft.Compute/GetOperation30Min;29824 + - Microsoft.Compute/GetOperation3Min;14965,Microsoft.Compute/GetOperation30Min;29965 status: code: 200 message: OK @@ -2182,28 +2058,78 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-08-01 response: body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n + \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n + \ \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"properties\": {\r\n \"vmId\": \"eee656e1-0c27-4e10-ba68-5622e742e308\",\r\n + \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n + \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": + \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": + \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": + \"18.04.202209210\"\r\n },\r\n \"osDisk\": {\r\n 
\"osType\": + \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n + \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n + \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEWKD3ILZXQ5YDMRMOP7DOUUDMPA2UMW4REUEUIXVRLTLFIKQMHSXKN/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\"\r\n + \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": + 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": + {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhoover\",\r\n + \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": + true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"patchSettings\": {\r\n \"patchMode\": + \"ImageDefault\",\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\": + [],\r\n \"allowExtensionOperations\": true,\r\n \"requireGuestProvisionSignal\": + true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n + \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": + true\r\n }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n + \ \"instanceView\": {\r\n \"vmAgent\": {\r\n \"vmAgentVersion\": + \"Unknown\",\r\n \"statuses\": [\r\n {\r\n \"code\": + \"ProvisioningState/Unavailable\",\r\n \"level\": \"Warning\",\r\n + \ \"displayStatus\": \"Not Ready\",\r\n \"message\": + \"VM status blob is found but not yet populated.\",\r\n \"time\": + \"2022-10-14T15:21:51+00:00\"\r\n }\r\n ]\r\n },\r\n + \ \"disks\": [\r\n {\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n + \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"time\": \"2022-10-14T15:21:26.8635118+00:00\"\r\n + \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": + {},\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n + \ \"time\": \"2022-10-14T15:21:28.5822425+00:00\"\r\n },\r\n + \ {\r\n \"code\": \"PowerState/stopped\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"VM stopped\"\r\n }\r\n + \ ]\r\n },\r\n \"timeCreated\": \"2022-10-14T15:18:48.6932141+00:00\"\r\n + \ }\r\n}" headers: cache-control: - no-cache content-length: - - '43' + - '4736' content-type: - - application/json; charset=UTF-8 + - application/json; charset=utf-8 date: - - Thu, 04 Aug 
2022 17:14:08 GMT + - Fri, 14 Oct 2022 15:21:50 GMT expires: - '-1' pragma: - no-cache server: - - nginx + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -2212,8 +2138,8 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-frame-options: - - deny + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/LowCostGet3Min;3959,Microsoft.Compute/LowCostGet30Min;31959 status: code: 200 message: OK @@ -2231,74 +2157,28 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 response: body: - string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n - \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n - \ \"tags\": {},\r\n \"properties\": {\r\n \"vmId\": \"e048a2e9-76a7-497c-8ed7-829625b39824\",\r\n - \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n - \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": - \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": - \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": - \"18.04.202207120\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": - \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n - \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n - \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEFTJX7LNJTECNUAAW7NDBGC7LRALMOLPWHFEOSNW5PPIREVJDRRXVV/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\"\r\n - \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": - 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": - {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhl\",\r\n - \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": - true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n - \ \"path\": \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"patchSettings\": {\r\n \"patchMode\": \"ImageDefault\",\r\n - \ \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \"enableVMAgentPlatformUpdates\": - false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": - true,\r\n 
\"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n - \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": - true\r\n }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n - \ \"instanceView\": {\r\n \"computerName\": \"cli000003\",\r\n \"osName\": - \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n \"vmAgent\": {\r\n - \ \"vmAgentVersion\": \"2.7.3.0\",\r\n \"statuses\": [\r\n {\r\n - \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": - \"Guest Agent is running\",\r\n \"time\": \"2022-08-04T17:13:30+00:00\"\r\n - \ }\r\n ],\r\n \"extensionHandlers\": []\r\n },\r\n - \ \"disks\": [\r\n {\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n - \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:12:57.8216908+00:00\"\r\n - \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": - {},\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n {\r\n - \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n - \ \"time\": \"2022-08-04T17:13:46.0870281+00:00\"\r\n },\r\n - \ {\r\n \"code\": \"PowerState/stopped\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"VM stopped\"\r\n }\r\n - \ ]\r\n },\r\n \"timeCreated\": \"2022-08-04T17:10:47.8380764+00:00\"\r\n - \ }\r\n}" + string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" headers: cache-control: - no-cache content-length: - - '4089' + - '43' content-type: - - application/json; charset=utf-8 + - application/json; charset=UTF-8 date: - - Thu, 04 Aug 2022 17:14:09 GMT + - Fri, 14 Oct 2022 15:21:51 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 + - nginx strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -2307,8 +2187,8 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/LowCostGet3Min;3978,Microsoft.Compute/LowCostGet30Min;31919 + x-frame-options: + - deny status: code: 200 message: OK @@ -2326,48 +2206,53 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n - \ \"tags\": {},\r\n \"properties\": 
{\r\n \"vmId\": \"e048a2e9-76a7-497c-8ed7-829625b39824\",\r\n + \ \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"properties\": {\r\n \"vmId\": \"eee656e1-0c27-4e10-ba68-5622e742e308\",\r\n \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": - \"18.04.202207120\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": - \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n + \"18.04.202209210\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": + \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEFTJX7LNJTECNUAAW7NDBGC7LRALMOLPWHFEOSNW5PPIREVJDRRXVV/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\"\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEWKD3ILZXQ5YDMRMOP7DOUUDMPA2UMW4REUEUIXVRLTLFIKQMHSXKN/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\"\r\n \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": - {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhl\",\r\n + {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhoover\",\r\n \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n - \ \"path\": \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"patchSettings\": {\r\n \"patchMode\": \"ImageDefault\",\r\n - \ \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \"enableVMAgentPlatformUpdates\": - false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": - true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"patchSettings\": {\r\n \"patchMode\": + \"ImageDefault\",\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\": + [],\r\n \"allowExtensionOperations\": true,\r\n \"requireGuestProvisionSignal\": + true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": true\r\n }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n - \ \"timeCreated\": \"2022-08-04T17:10:47.8380764+00:00\"\r\n }\r\n}" + \ \"timeCreated\": \"2022-10-14T15:18:48.6932141+00:00\"\r\n }\r\n}" headers: cache-control: - no-cache content-length: - - '2754' + - '3494' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:14:09 GMT + - Fri, 14 Oct 2022 15:21:52 GMT expires: - '-1' pragma: @@ -2384,21 +2269,24 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/LowCostGet3Min;3977,Microsoft.Compute/LowCostGet30Min;31918 + - Microsoft.Compute/LowCostGet3Min;3958,Microsoft.Compute/LowCostGet30Min;31958 status: code: 200 message: OK - request: - body: '{"location": "westus2", "tags": {}, "properties": {"hardwareProfile": {"vmSize": - "Standard_DS1_v2"}, "storageProfile": {"osDisk": {"osType": "Linux", "name": - "cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474", "caching": "ReadWrite", - "createOption": "FromImage", "diskSizeGB": 30, "managedDisk": {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEFTJX7LNJTECNUAAW7NDBGC7LRALMOLPWHFEOSNW5PPIREVJDRRXVV/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474", + body: '{"location": "westus2", "tags": {"azsecpack": "nonprod", "platformsettings.host_environment.service.platform_optedin_for_rootcerts": + "true"}, "identity": {"type": "UserAssigned", "userAssignedIdentities": {"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2": + {}}}, "properties": {"hardwareProfile": {"vmSize": "Standard_DS1_v2"}, "storageProfile": + {"osDisk": {"osType": "Linux", "name": "cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404", + "caching": "ReadWrite", "createOption": "FromImage", "diskSizeGB": 30, "managedDisk": + {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEWKD3ILZXQ5YDMRMOP7DOUUDMPA2UMW4REUEUIXVRLTLFIKQMHSXKN/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404", "storageAccountType": "Premium_LRS"}, "deleteOption": "Detach"}, "dataDisks": - []}, "osProfile": {"computerName": "cli000003", 
"adminUsername": "rhl", "linuxConfiguration": - {"disablePasswordAuthentication": true, "ssh": {"publicKeys": [{"path": "/home/rhl/.ssh/authorized_keys", - "keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5"}]}, - "provisionVMAgent": true, "patchSettings": {"patchMode": "ImageDefault", "assessmentMode": - "ImageDefault"}}, "secrets": [], "allowExtensionOperations": true, "requireGuestProvisionSignal": + []}, "osProfile": {"computerName": "cli000003", "adminUsername": "rhoover", + "linuxConfiguration": {"disablePasswordAuthentication": true, "ssh": {"publicKeys": + [{"path": "/home/rhoover/.ssh/authorized_keys", "keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\n"}]}, "provisionVMAgent": true, "patchSettings": {"patchMode": + "ImageDefault", "assessmentMode": "ImageDefault"}, "enableVMAgentPlatformUpdates": + false}, "secrets": [], "allowExtensionOperations": true, "requireGuestProvisionSignal": true}, "networkProfile": {"networkInterfaces": [{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic"}]}, "diagnosticsProfile": {"bootDiagnostics": {"enabled": false}}}}' headers: @@ -2411,58 +2299,63 @@ interactions: Connection: - keep-alive Content-Length: - - '1684' + - '2284' Content-Type: - application/json ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n - \ \"tags\": {},\r\n \"properties\": {\r\n \"vmId\": \"e048a2e9-76a7-497c-8ed7-829625b39824\",\r\n + \ \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ 
\"userAssignedIdentities\": {\r\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"properties\": {\r\n \"vmId\": \"eee656e1-0c27-4e10-ba68-5622e742e308\",\r\n \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": - \"18.04.202207120\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": - \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n + \"18.04.202209210\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": + \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEFTJX7LNJTECNUAAW7NDBGC7LRALMOLPWHFEOSNW5PPIREVJDRRXVV/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\"\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEWKD3ILZXQ5YDMRMOP7DOUUDMPA2UMW4REUEUIXVRLTLFIKQMHSXKN/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\"\r\n \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": - {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhl\",\r\n + {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhoover\",\r\n \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n - \ \"path\": \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"patchSettings\": {\r\n \"patchMode\": \"ImageDefault\",\r\n - \ \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \"enableVMAgentPlatformUpdates\": - false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": - true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"patchSettings\": {\r\n \"patchMode\": + \"ImageDefault\",\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\": + [],\r\n \"allowExtensionOperations\": true,\r\n \"requireGuestProvisionSignal\": + true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": false\r\n }\r\n },\r\n \"provisioningState\": \"Updating\",\r\n - \ \"timeCreated\": \"2022-08-04T17:10:47.8380764+00:00\"\r\n }\r\n}" + \ \"timeCreated\": \"2022-10-14T15:18:48.6932141+00:00\"\r\n }\r\n}" headers: azure-asyncnotification: - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/74879055-383b-4ae3-91fb-897edde33812?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/f1cd4507-f4e8-40b7-a565-afabb1d6fecf?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - - '2754' + - '3494' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:14:10 GMT + - Fri, 14 Oct 2022 15:21:55 GMT expires: - '-1' pragma: @@ -2479,7 +2372,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/PutVM3Min;479,Microsoft.Compute/PutVM30Min;2399 + - Microsoft.Compute/PutVM3Min;593,Microsoft.Compute/PutVM30Min;2981 x-ms-ratelimit-remaining-subscription-writes: - '1199' status: @@ -2499,14 +2392,14 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/74879055-383b-4ae3-91fb-897edde33812?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/f1cd4507-f4e8-40b7-a565-afabb1d6fecf?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:14:10.4462305+00:00\",\r\n \"endTime\": - \"2022-08-04T17:14:12.3993014+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"74879055-383b-4ae3-91fb-897edde33812\"\r\n}" + string: "{\r\n 
\"startTime\": \"2022-10-14T15:21:55.0350892+00:00\",\r\n \"endTime\": + \"2022-10-14T15:22:07.0818652+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"f1cd4507-f4e8-40b7-a565-afabb1d6fecf\"\r\n}" headers: cache-control: - no-cache @@ -2515,7 +2408,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:14:40 GMT + - Fri, 14 Oct 2022 15:22:25 GMT expires: - '-1' pragma: @@ -2532,7 +2425,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14939,Microsoft.Compute/GetOperation30Min;29813 + - Microsoft.Compute/GetOperation3Min;14960,Microsoft.Compute/GetOperation30Min;29958 status: code: 200 message: OK @@ -2550,48 +2443,53 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n - \ \"tags\": {},\r\n \"properties\": {\r\n \"vmId\": \"e048a2e9-76a7-497c-8ed7-829625b39824\",\r\n + \ \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"properties\": {\r\n \"vmId\": \"eee656e1-0c27-4e10-ba68-5622e742e308\",\r\n \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": - \"18.04.202207120\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": - \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n + \"18.04.202209210\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": + \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEFTJX7LNJTECNUAAW7NDBGC7LRALMOLPWHFEOSNW5PPIREVJDRRXVV/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\"\r\n + \ \"id\": 
\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEWKD3ILZXQ5YDMRMOP7DOUUDMPA2UMW4REUEUIXVRLTLFIKQMHSXKN/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\"\r\n \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": - {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhl\",\r\n + {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhoover\",\r\n \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n - \ \"path\": \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"patchSettings\": {\r\n \"patchMode\": \"ImageDefault\",\r\n - \ \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \"enableVMAgentPlatformUpdates\": - false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": - true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"patchSettings\": {\r\n \"patchMode\": + \"ImageDefault\",\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\": + [],\r\n \"allowExtensionOperations\": true,\r\n \"requireGuestProvisionSignal\": + true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": false\r\n }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n - \ \"timeCreated\": \"2022-08-04T17:10:47.8380764+00:00\"\r\n }\r\n}" + \ \"timeCreated\": \"2022-10-14T15:18:48.6932141+00:00\"\r\n }\r\n}" headers: cache-control: - no-cache content-length: - - '2755' + - '3495' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:14:40 GMT + - Fri, 14 Oct 2022 15:22:25 GMT expires: - '-1' pragma: @@ -2608,56 +2506,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/LowCostGet3Min;3978,Microsoft.Compute/LowCostGet30Min;31915 - 
status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - vm boot-diagnostics disable - Connection: - - keep-alive - ParameterSetName: - - -g -n - User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 - response: - body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" - headers: - cache-control: - - no-cache - content-length: - - '43' - content-type: - - application/json; charset=UTF-8 - date: - - Thu, 04 Aug 2022 17:14:41 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - nginx - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-frame-options: - - deny + - Microsoft.Compute/LowCostGet3Min;3954,Microsoft.Compute/LowCostGet30Min;31950 status: code: 200 message: OK @@ -2675,66 +2524,70 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n - \ \"tags\": {},\r\n \"properties\": {\r\n \"vmId\": \"e048a2e9-76a7-497c-8ed7-829625b39824\",\r\n + \ \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"properties\": {\r\n \"vmId\": \"eee656e1-0c27-4e10-ba68-5622e742e308\",\r\n \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": - \"18.04.202207120\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": - \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n + \"18.04.202209210\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": + 
\"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEFTJX7LNJTECNUAAW7NDBGC7LRALMOLPWHFEOSNW5PPIREVJDRRXVV/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\"\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEWKD3ILZXQ5YDMRMOP7DOUUDMPA2UMW4REUEUIXVRLTLFIKQMHSXKN/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\"\r\n \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": - {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhl\",\r\n + {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhoover\",\r\n \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n - \ \"path\": \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"patchSettings\": {\r\n \"patchMode\": \"ImageDefault\",\r\n - \ \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \"enableVMAgentPlatformUpdates\": - false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": - true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"patchSettings\": {\r\n \"patchMode\": + \"ImageDefault\",\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\": + [],\r\n \"allowExtensionOperations\": true,\r\n \"requireGuestProvisionSignal\": + true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": false\r\n }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n - \ \"instanceView\": {\r\n \"computerName\": 
\"cli000003\",\r\n \"osName\": - \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n \"vmAgent\": {\r\n - \ \"vmAgentVersion\": \"2.7.3.0\",\r\n \"statuses\": [\r\n {\r\n - \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": - \"Guest Agent is running\",\r\n \"time\": \"2022-08-04T17:13:30+00:00\"\r\n - \ }\r\n ],\r\n \"extensionHandlers\": []\r\n },\r\n - \ \"disks\": [\r\n {\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n + \ \"instanceView\": {\r\n \"vmAgent\": {\r\n \"vmAgentVersion\": + \"Unknown\",\r\n \"statuses\": [\r\n {\r\n \"code\": + \"ProvisioningState/Unavailable\",\r\n \"level\": \"Warning\",\r\n + \ \"displayStatus\": \"Not Ready\",\r\n \"message\": + \"VM status blob is found but not yet populated.\",\r\n \"time\": + \"2022-10-14T15:22:26+00:00\"\r\n }\r\n ]\r\n },\r\n + \ \"disks\": [\r\n {\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:14:10.9774837+00:00\"\r\n + succeeded\",\r\n \"time\": \"2022-10-14T15:22:05.6913128+00:00\"\r\n \ }\r\n ]\r\n }\r\n ],\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:14:12.3837386+00:00\"\r\n + succeeded\",\r\n \"time\": \"2022-10-14T15:22:07.0818652+00:00\"\r\n \ },\r\n {\r\n \"code\": \"PowerState/stopped\",\r\n \ \"level\": \"Info\",\r\n \"displayStatus\": \"VM stopped\"\r\n - \ }\r\n ]\r\n },\r\n \"timeCreated\": \"2022-08-04T17:10:47.8380764+00:00\"\r\n + \ }\r\n ]\r\n },\r\n \"timeCreated\": \"2022-10-14T15:18:48.6932141+00:00\"\r\n \ }\r\n}" headers: cache-control: - no-cache content-length: - - '4060' + - '4707' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:14:41 GMT + - Fri, 14 Oct 2022 15:22:26 GMT expires: - '-1' pragma: @@ -2751,7 +2604,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/LowCostGet3Min;3977,Microsoft.Compute/LowCostGet30Min;31914 + - Microsoft.Compute/LowCostGet3Min;3953,Microsoft.Compute/LowCostGet30Min;31949 status: code: 200 message: OK @@ -2771,25 +2624,27 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003/start?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003/start?api-version=2022-08-01 response: body: string: '' headers: + azure-asyncnotification: + - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/b9043596-332a-4c89-bdc0-047ff78b5a94?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/8a232137-dc32-40fe-8c57-97f9381a8b7c?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - '0' date: - - Thu, 04 Aug 2022 17:14:42 GMT + - Fri, 14 Oct 2022 15:22:27 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/b9043596-332a-4c89-bdc0-047ff78b5a94?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/8a232137-dc32-40fe-8c57-97f9381a8b7c?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 pragma: - no-cache server: @@ -2800,7 +2655,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/UpdateVM3Min;237,Microsoft.Compute/UpdateVM30Min;1193 + - Microsoft.Compute/UpdateVM3Min;233,Microsoft.Compute/UpdateVM30Min;1193 x-ms-ratelimit-remaining-subscription-writes: - '1199' status: @@ -2820,14 +2675,14 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/b9043596-332a-4c89-bdc0-047ff78b5a94?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/8a232137-dc32-40fe-8c57-97f9381a8b7c?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:14:42.6335413+00:00\",\r\n \"endTime\": - \"2022-08-04T17:14:51.3366103+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"b9043596-332a-4c89-bdc0-047ff78b5a94\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:22:27.5661088+00:00\",\r\n \"endTime\": + \"2022-10-14T15:22:35.5972271+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"8a232137-dc32-40fe-8c57-97f9381a8b7c\"\r\n}" headers: cache-control: - no-cache @@ -2836,7 +2691,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:14:52 GMT + - Fri, 14 Oct 2022 15:22:37 GMT expires: - '-1' pragma: @@ -2853,7 +2708,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14937,Microsoft.Compute/GetOperation30Min;29811 + - Microsoft.Compute/GetOperation3Min;14958,Microsoft.Compute/GetOperation30Min;29953 status: code: 200 message: OK @@ -2871,9 +2726,9 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/b9043596-332a-4c89-bdc0-047ff78b5a94?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/8a232137-dc32-40fe-8c57-97f9381a8b7c?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 response: body: string: '' @@ -2883,7 +2738,7 @@ interactions: content-length: - '0' date: - - Thu, 04 Aug 2022 17:14:52 GMT + - Fri, 14 Oct 2022 15:22:37 GMT expires: - '-1' pragma: @@ -2896,56 +2751,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14935,Microsoft.Compute/GetOperation30Min;29809 - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - vm start - Connection: - - keep-alive - ParameterSetName: - - -g -n - User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 - response: - body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" - headers: - cache-control: - - no-cache - content-length: - - '43' - content-type: - - application/json; charset=UTF-8 - date: - - Thu, 04 Aug 2022 17:14:53 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - nginx - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-frame-options: - - deny + - Microsoft.Compute/GetOperation3Min;14957,Microsoft.Compute/GetOperation30Min;29952 status: code: 200 message: OK @@ -2963,66 +2769,70 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n - \ \"tags\": {},\r\n \"properties\": {\r\n \"vmId\": \"e048a2e9-76a7-497c-8ed7-829625b39824\",\r\n + \ \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n 
\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"properties\": {\r\n \"vmId\": \"eee656e1-0c27-4e10-ba68-5622e742e308\",\r\n \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": - \"18.04.202207120\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": - \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n + \"18.04.202209210\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": + \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEFTJX7LNJTECNUAAW7NDBGC7LRALMOLPWHFEOSNW5PPIREVJDRRXVV/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\"\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEWKD3ILZXQ5YDMRMOP7DOUUDMPA2UMW4REUEUIXVRLTLFIKQMHSXKN/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\"\r\n \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": - {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhl\",\r\n + {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhoover\",\r\n \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n - \ \"path\": \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"patchSettings\": {\r\n \"patchMode\": \"ImageDefault\",\r\n - \ \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \"enableVMAgentPlatformUpdates\": - false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": - true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"patchSettings\": {\r\n \"patchMode\": + \"ImageDefault\",\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\": + [],\r\n \"allowExtensionOperations\": true,\r\n \"requireGuestProvisionSignal\": + true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": false\r\n }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n - \ \"instanceView\": {\r\n \"computerName\": \"cli000003\",\r\n \"osName\": - \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n \"vmAgent\": {\r\n - \ \"vmAgentVersion\": \"2.7.3.0\",\r\n \"statuses\": [\r\n {\r\n - \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": - \"Guest Agent is running\",\r\n \"time\": \"2022-08-04T17:13:30+00:00\"\r\n - \ }\r\n ],\r\n \"extensionHandlers\": []\r\n },\r\n - \ \"disks\": [\r\n {\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n + \ \"instanceView\": {\r\n \"vmAgent\": {\r\n \"vmAgentVersion\": + \"Unknown\",\r\n \"statuses\": [\r\n {\r\n \"code\": + \"ProvisioningState/Unavailable\",\r\n \"level\": \"Warning\",\r\n + \ \"displayStatus\": \"Not Ready\",\r\n \"message\": + \"VM status blob is found but not yet populated.\",\r\n \"time\": + \"2022-10-14T15:22:39+00:00\"\r\n }\r\n ]\r\n },\r\n + \ \"disks\": [\r\n {\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:14:10.9774837+00:00\"\r\n + succeeded\",\r\n \"time\": \"2022-10-14T15:22:05.6913128+00:00\"\r\n \ }\r\n ]\r\n }\r\n ],\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:14:51.3209989+00:00\"\r\n + succeeded\",\r\n \"time\": \"2022-10-14T15:22:35.5816106+00:00\"\r\n \ },\r\n {\r\n \"code\": \"PowerState/running\",\r\n \ \"level\": \"Info\",\r\n \"displayStatus\": \"VM running\"\r\n - \ }\r\n ]\r\n },\r\n \"timeCreated\": \"2022-08-04T17:10:47.8380764+00:00\"\r\n + \ }\r\n ]\r\n },\r\n \"timeCreated\": \"2022-10-14T15:18:48.6932141+00:00\"\r\n \ }\r\n}" headers: cache-control: - no-cache content-length: - - '4060' + - '4707' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:14:53 GMT + - Fri, 14 Oct 2022 15:22:39 GMT expires: - '-1' pragma: @@ -3039,7 +2849,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - 
Microsoft.Compute/LowCostGet3Min;3975,Microsoft.Compute/LowCostGet30Min;31912 + - Microsoft.Compute/LowCostGet3Min;3957,Microsoft.Compute/LowCostGet30Min;31945 status: code: 200 message: OK @@ -3057,48 +2867,53 @@ interactions: ParameterSetName: - -g -n --storage User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n - \ \"tags\": {},\r\n \"properties\": {\r\n \"vmId\": \"e048a2e9-76a7-497c-8ed7-829625b39824\",\r\n + \ \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"properties\": {\r\n \"vmId\": \"eee656e1-0c27-4e10-ba68-5622e742e308\",\r\n \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": - \"18.04.202207120\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": - \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n + \"18.04.202209210\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": + \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEFTJX7LNJTECNUAAW7NDBGC7LRALMOLPWHFEOSNW5PPIREVJDRRXVV/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\"\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEWKD3ILZXQ5YDMRMOP7DOUUDMPA2UMW4REUEUIXVRLTLFIKQMHSXKN/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\"\r\n \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": - {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhl\",\r\n + {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhoover\",\r\n \ \"linuxConfiguration\": {\r\n 
\"disablePasswordAuthentication\": true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n - \ \"path\": \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"patchSettings\": {\r\n \"patchMode\": \"ImageDefault\",\r\n - \ \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \"enableVMAgentPlatformUpdates\": - false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": - true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"patchSettings\": {\r\n \"patchMode\": + \"ImageDefault\",\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\": + [],\r\n \"allowExtensionOperations\": true,\r\n \"requireGuestProvisionSignal\": + true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": false\r\n }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n - \ \"timeCreated\": \"2022-08-04T17:10:47.8380764+00:00\"\r\n }\r\n}" + \ \"timeCreated\": \"2022-10-14T15:18:48.6932141+00:00\"\r\n }\r\n}" headers: cache-control: - no-cache content-length: - - '2755' + - '3495' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:14:53 GMT + - Fri, 14 Oct 2022 15:22:39 GMT expires: - '-1' pragma: @@ -3115,7 +2930,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/LowCostGet3Min;3974,Microsoft.Compute/LowCostGet30Min;31911 + - Microsoft.Compute/LowCostGet3Min;3956,Microsoft.Compute/LowCostGet30Min;31944 status: code: 200 message: OK @@ -3133,21 +2948,21 @@ interactions: ParameterSetName: - -g -n --storage User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-storage/20.0.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-storage/20.1.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Storage/storageAccounts?api-version=2021-09-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Storage/storageAccounts?api-version=2022-05-01 response: body: - string: '{"value":[{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/bkerrigan/providers/Microsoft.Storage/storageAccounts/bkerrigandiag","name":"bkerrigandiag","type":"Microsoft.Storage/storageAccounts","location":"eastus","tags":{},"properties":{"keyCreationTime":{"key1":"2022-05-17T00:24:49.4879627Z","key2":"2022-05-17T00:24:49.4879627Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-05-17T00:24:49.4879627Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-05-17T00:24:49.4879627Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-05-17T00:24:49.3473400Z","primaryEndpoints":{"blob":"https://bkerrigandiag.blob.core.windows.net/","queue":"https://bkerrigandiag.queue.core.windows.net/","table":"https://bkerrigandiag.table.core.windows.net/","file":"https://bkerrigandiag.file.core.windows.net/"},"primaryLocation":"eastus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_RAGRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/craigw/providers/Microsoft.Storage/storageAccounts/craigwendpointtest","name":"craigwendpointtest","type":"Microsoft.Storage/storageAccounts","location":"eastus","tags":{},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"allowSharedKeyAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-01-15T21:56:49.8049186Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-01-15T21:56:49.8049186Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2021-01-15T21:56:49.7111640Z","primaryEndpoints":{"dfs":"https://craigwendpointtest.dfs.core.windows.net/","web":"https://craigwendpointtest.z13.web.core.windows.net/","blob":"https://craigwendpointtest.blob.core.windows.net/","queue":"https://craigwendpointtest.queue.core.windows.net/","table":"https://craigwendpointtest.table.core.windows.net/","file":"https://craigwendpointtest.file.core.windows.net/"},"primaryLocation":"eastus","statusOfPrimary":"available","secondaryLocation":"westus","statusOfSecondary":"available","secondaryEndpoints":{"dfs":"https://craigwendpointtest-secondary.dfs.core.windows.net/","web":"https://craigwendpointtest-secondary.z13.web.core.windows.net/","blob":"https://craigwendpointtest-secondary.blob.core.windows.net/","queue":"https://craigwendpointtest-secondary.queue.core.windows.net/","table":"https://craigwendpointtest-secondary.table.core.windows.net/"}}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000
000000000/resourceGroups/craigw-win10test/providers/Microsoft.Storage/storageAccounts/craigwwin10test","name":"craigwwin10test","type":"Microsoft.Storage/storageAccounts","location":"eastus","tags":{},"properties":{"keyCreationTime":{"key1":"2021-05-17T23:02:04.3032505Z","key2":"2021-05-17T23:02:04.3032505Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-05-17T23:02:04.3032505Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-05-17T23:02:04.3032505Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2021-05-17T23:02:04.1938884Z","primaryEndpoints":{"blob":"https://craigwwin10test.blob.core.windows.net/","queue":"https://craigwwin10test.queue.core.windows.net/","table":"https://craigwwin10test.table.core.windows.net/","file":"https://craigwwin10test.file.core.windows.net/"},"primaryLocation":"eastus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cloud-shell-storage-eastus/providers/Microsoft.Storage/storageAccounts/cs210032001f4814ba9","name":"cs210032001f4814ba9","type":"Microsoft.Storage/storageAccounts","location":"eastus","tags":{"ms-resource-usage":"azure-cloud-shell"},"properties":{"keyCreationTime":{"key1":"2022-05-16T14:16:22.3477819Z","key2":"2022-05-16T14:16:22.3477819Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":false,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-05-16T14:16:22.3477819Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-05-16T14:16:22.3477819Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-05-16T14:16:22.2227752Z","primaryEndpoints":{"dfs":"https://cs210032001f4814ba9.dfs.core.windows.net/","web":"https://cs210032001f4814ba9.z13.web.core.windows.net/","blob":"https://cs210032001f4814ba9.blob.core.windows.net/","queue":"https://cs210032001f4814ba9.queue.core.windows.net/","table":"https://cs210032001f4814ba9.table.core.windows.net/","file":"https://cs210032001f4814ba9.file.core.windows.net/"},"primaryLocation":"eastus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_RAGRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/kustoflow/providers/Microsoft.Storage/storageAccounts/csslinuxkustoflow","name":"csslinuxkustoflow","type":"Microsoft.Storage/storageAccounts","location":"eastus","tags":{"CreatedBy":"craigw"},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2019-02-01T20:08:38.6849654Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2019-02-01T20:08:38.6849654Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provi
sioningState":"Succeeded","creationTime":"2019-02-01T20:08:38.5912170Z","primaryEndpoints":{"dfs":"https://csslinuxkustoflow.dfs.core.windows.net/","web":"https://csslinuxkustoflow.z13.web.core.windows.net/","blob":"https://csslinuxkustoflow.blob.core.windows.net/","queue":"https://csslinuxkustoflow.queue.core.windows.net/","table":"https://csslinuxkustoflow.table.core.windows.net/","file":"https://csslinuxkustoflow.file.core.windows.net/"},"primaryLocation":"eastus","statusOfPrimary":"available","secondaryLocation":"westus","statusOfSecondary":"available","secondaryEndpoints":{"dfs":"https://csslinuxkustoflow-secondary.dfs.core.windows.net/","web":"https://csslinuxkustoflow-secondary.z13.web.core.windows.net/","blob":"https://csslinuxkustoflow-secondary.blob.core.windows.net/","queue":"https://csslinuxkustoflow-secondary.queue.core.windows.net/","table":"https://csslinuxkustoflow-secondary.table.core.windows.net/"}}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/gen2-linux/providers/Microsoft.Storage/storageAccounts/gen2linux3be402a0b8","name":"gen2linux3be402a0b8","type":"Microsoft.Storage/storageAccounts","location":"eastus","tags":{},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":false,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2018-10-09T22:30:46.7307987Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2018-10-09T22:30:46.7307987Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2018-10-09T22:30:46.6214203Z","primaryEndpoints":{"blob":"https://gen2linux3be402a0b8.blob.core.windows.net/","queue":"https://gen2linux3be402a0b8.queue.core.windows.net/","table":"https://gen2linux3be402a0b8.table.core.windows.net/","file":"https://gen2linux3be402a0b8.file.core.windows.net/"},"primaryLocation":"eastus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/scrunnertestvmrg-eastus/providers/Microsoft.Storage/storageAccounts/scrunnercrkwpdn5nhtgg","name":"scrunnercrkwpdn5nhtgg","type":"Microsoft.Storage/storageAccounts","location":"eastus","tags":{},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2020-05-12T20:03:57.6389684Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2020-05-12T20:03:57.6389684Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2020-05-12T20:03:57.5451905Z","primaryEndpoints":{"blob":"https://scrunnercrkwpdn5nhtgg.blob.core.windows.net/","queue":"https://scrunnercrkwpdn5nhtgg.queue.core.windows.net/","table":"https://scrunnercrkwpdn5nhtgg.table.core.windows.net/","file":"https://scrunnercrkwpdn5nhtgg.file.core.windows.net/"},"primaryLocation":"eastus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_RAGRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0
000-0000-0000-000000000000/resourceGroups/storage-RG/providers/Microsoft.Storage/storageAccounts/serialconsolepreview","name":"serialconsolepreview","type":"Microsoft.Storage/storageAccounts","location":"eastus","tags":{},"properties":{"keyCreationTime":{"key1":"2021-05-07T21:41:56.3607334Z","key2":"2021-05-07T21:41:56.3607334Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"allowSharedKeyAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-05-07T21:41:56.3607334Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-05-07T21:41:56.3607334Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2021-05-07T21:41:56.2513536Z","primaryEndpoints":{"dfs":"https://serialconsolepreview.dfs.core.windows.net/","web":"https://serialconsolepreview.z13.web.core.windows.net/","blob":"https://serialconsolepreview.blob.core.windows.net/","queue":"https://serialconsolepreview.queue.core.windows.net/","table":"https://serialconsolepreview.table.core.windows.net/","file":"https://serialconsolepreview.file.core.windows.net/"},"primaryLocation":"eastus","statusOfPrimary":"available","secondaryLocation":"westus","statusOfSecondary":"available","secondaryEndpoints":{"dfs":"https://serialconsolepreview-secondary.dfs.core.windows.net/","web":"https://serialconsolepreview-secondary.z13.web.core.windows.net/","blob":"https://serialconsolepreview-secondary.blob.core.windows.net/","queue":"https://serialconsolepreview-secondary.queue.core.windows.net/","table":"https://serialconsolepreview-secondary.table.core.windows.net/"}}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/serialconsole-test/providers/Microsoft.Storage/storageAccounts/serialconsoletestdiag","name":"serialconsoletestdiag","type":"Microsoft.Storage/storageAccounts","location":"eastus","tags":{},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"resourceAccessRules":[],"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Deny"},"supportsHttpsTrafficOnly":false,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2019-02-06T20:21:39.7019315Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2019-02-06T20:21:39.7019315Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2019-02-06T20:21:39.5925779Z","primaryEndpoints":{"blob":"https://serialconsoletestdiag.blob.core.windows.net/","queue":"https://serialconsoletestdiag.queue.core.windows.net/","table":"https://serialconsoletestdiag.table.core.windows.net/","file":"https://serialconsoletestdiag.file.core.windows.net/"},"primaryLocation":"eastus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/serialTest-EastUS/providers/Microsoft.Storage/storageAccounts/serialtesta8d7fdee41","name":"serialtesta8d7fdee41","type":"Microsoft.Storage/storageAccounts","location":"eastus","tags":{},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1
_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":false,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2019-07-11T00:38:13.5389932Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2019-07-11T00:38:13.5389932Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2019-07-11T00:38:13.4452119Z","primaryEndpoints":{"blob":"https://serialtesta8d7fdee41.blob.core.windows.net/","queue":"https://serialtesta8d7fdee41.queue.core.windows.net/","table":"https://serialtesta8d7fdee41.table.core.windows.net/","file":"https://serialtesta8d7fdee41.file.core.windows.net/"},"primaryLocation":"eastus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/storage-RG/providers/Microsoft.Storage/storageAccounts/serialtestbootdiag123","name":"serialtestbootdiag123","type":"Microsoft.Storage/storageAccounts","location":"eastus","tags":{},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":false,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2018-01-23T04:03:01.3263151Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2018-01-23T04:03:01.3263151Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2018-01-23T04:03:01.2951106Z","primaryEndpoints":{"blob":"https://serialtestbootdiag123.blob.core.windows.net/","queue":"https://serialtestbootdiag123.queue.core.windows.net/","table":"https://serialtestbootdiag123.table.core.windows.net/","file":"https://serialtestbootdiag123.file.core.windows.net/"},"primaryLocation":"eastus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_RAGRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/yuas-rg/providers/Microsoft.Storage/storageAccounts/yuasstorageacct","name":"yuasstorageacct","type":"Microsoft.Storage/storageAccounts","location":"eastus","tags":{},"properties":{"dnsEndpointType":"Standard","defaultToOAuthAuthentication":false,"publicNetworkAccess":"Enabled","keyCreationTime":{"key1":"2022-08-02T12:18:18.8547131Z","key2":"2022-08-02T12:18:18.8547131Z"},"allowCrossTenantReplication":true,"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"allowSharedKeyAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"requireInfrastructureEncryption":false,"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-08-02T12:18:18.8547131Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-08-02T12:18:18.8547131Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-08-02T12:18:18.7140969Z","primaryEndpoints":{"dfs":"https://yuasstorageacct.dfs.core.windows.net/","web":"https://yuasstorageacct.z13.web.core.windows.net/","blob":"https://yuasstorageacct.blob.core.windows.net/","queue":"https://yuasstorageacct.queue.core.windows.net/","table":"https://yuasstorageacct.table
.core.windows.net/","file":"https://yuasstorageacct.file.core.windows.net/"},"primaryLocation":"eastus","statusOfPrimary":"available","secondaryLocation":"westus","statusOfSecondary":"available","secondaryEndpoints":{"dfs":"https://yuasstorageacct-secondary.dfs.core.windows.net/","web":"https://yuasstorageacct-secondary.z13.web.core.windows.net/","blob":"https://yuasstorageacct-secondary.blob.core.windows.net/","queue":"https://yuasstorageacct-secondary.queue.core.windows.net/","table":"https://yuasstorageacct-secondary.table.core.windows.net/"}}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/bkerrigan-dev-rg/providers/Microsoft.Storage/storageAccounts/bkerrigandevrgdiag","name":"bkerrigandevrgdiag","type":"Microsoft.Storage/storageAccounts","location":"eastus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-05-18T15:22:23.0244089Z","key2":"2022-05-18T15:22:23.0244089Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-05-18T15:22:23.0400357Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-05-18T15:22:23.0400357Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-05-18T15:22:22.9150287Z","primaryEndpoints":{"blob":"https://bkerrigandevrgdiag.blob.core.windows.net/","queue":"https://bkerrigandevrgdiag.queue.core.windows.net/","table":"https://bkerrigandevrgdiag.table.core.windows.net/","file":"https://bkerrigandevrgdiag.file.core.windows.net/"},"primaryLocation":"eastus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/guptar2/providers/Microsoft.Storage/storageAccounts/guptar2eastus2storage","name":"guptar2eastus2storage","type":"Microsoft.Storage/storageAccounts","location":"eastus2","tags":{"ms-resource-usage":"azure-cloud-shell"},"properties":{"keyCreationTime":{"key1":"2022-07-28T23:08:00.6935848Z","key2":"2022-07-28T23:08:00.6935848Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":false,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-07-28T23:08:00.6935848Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-07-28T23:08:00.6935848Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-07-28T23:08:00.5840608Z","primaryEndpoints":{"dfs":"https://guptar2eastus2storage.dfs.core.windows.net/","web":"https://guptar2eastus2storage.z20.web.core.windows.net/","blob":"https://guptar2eastus2storage.blob.core.windows.net/","queue":"https://guptar2eastus2storage.queue.core.windows.net/","table":"https://guptar2eastus2storage.table.core.windows.net/","file":"https://guptar2eastus2storage.file.core.windows.net/"},"primaryLocation":"eastus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rhel-test/providers/Microsoft.Storage/storageAccounts/rhel77acct
","name":"rhel77acct","type":"Microsoft.Storage/storageAccounts","location":"eastus2","tags":{},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2020-08-13T20:31:30.8995173Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2020-08-13T20:31:30.8995173Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2020-08-13T20:31:30.8215811Z","primaryEndpoints":{"blob":"https://rhel77acct.blob.core.windows.net/","queue":"https://rhel77acct.queue.core.windows.net/","table":"https://rhel77acct.table.core.windows.net/","file":"https://rhel77acct.file.core.windows.net/"},"primaryLocation":"eastus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cloud-shell-storage-westus/providers/Microsoft.Storage/storageAccounts/cs4100320010c152e3d","name":"cs4100320010c152e3d","type":"Microsoft.Storage/storageAccounts","location":"westus","tags":{"ms-resource-usage":"azure-cloud-shell"},"properties":{"keyCreationTime":{"key1":"2022-02-07T20:19:42.9636823Z","key2":"2022-02-07T20:19:42.9636823Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":false,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-02-07T20:19:42.9636823Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-02-07T20:19:42.9636823Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-02-07T20:19:42.8699133Z","primaryEndpoints":{"dfs":"https://cs4100320010c152e3d.dfs.core.windows.net/","web":"https://cs4100320010c152e3d.z22.web.core.windows.net/","blob":"https://cs4100320010c152e3d.blob.core.windows.net/","queue":"https://cs4100320010c152e3d.queue.core.windows.net/","table":"https://cs4100320010c152e3d.table.core.windows.net/","file":"https://cs4100320010c152e3d.file.core.windows.net/"},"primaryLocation":"westus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cloud-shell-storage-westus/providers/Microsoft.Storage/storageAccounts/cs410037ffea943c134","name":"cs410037ffea943c134","type":"Microsoft.Storage/storageAccounts","location":"westus","tags":{"ms-resource-usage":"azure-cloud-shell"},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2020-03-23T23:07:16.0114253Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2020-03-23T23:07:16.0114253Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2020-03-23T23:07:15.9333036Z","primaryEndpoints":{"dfs":"https://cs410037ffea943c134.dfs.core.wi
ndows.net/","web":"https://cs410037ffea943c134.z22.web.core.windows.net/","blob":"https://cs410037ffea943c134.blob.core.windows.net/","queue":"https://cs410037ffea943c134.queue.core.windows.net/","table":"https://cs410037ffea943c134.table.core.windows.net/","file":"https://cs410037ffea943c134.file.core.windows.net/"},"primaryLocation":"westus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cloud-shell-storage-westus/providers/Microsoft.Storage/storageAccounts/cs41003bffd81f3ab32","name":"cs41003bffd81f3ab32","type":"Microsoft.Storage/storageAccounts","location":"westus","tags":{"ms-resource-usage":"azure-cloud-shell"},"properties":{"keyCreationTime":{"key1":"2022-07-29T00:18:56.4686445Z","key2":"2022-07-29T00:18:56.4686445Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":false,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-07-29T00:18:56.4842807Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-07-29T00:18:56.4842807Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-07-29T00:18:56.3748663Z","primaryEndpoints":{"dfs":"https://cs41003bffd81f3ab32.dfs.core.windows.net/","web":"https://cs41003bffd81f3ab32.z22.web.core.windows.net/","blob":"https://cs41003bffd81f3ab32.blob.core.windows.net/","queue":"https://cs41003bffd81f3ab32.queue.core.windows.net/","table":"https://cs41003bffd81f3ab32.table.core.windows.net/","file":"https://cs41003bffd81f3ab32.file.core.windows.net/"},"primaryLocation":"westus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cloud-shell-storage-westus/providers/Microsoft.Storage/storageAccounts/cs4aa22d82de270x4becxb48","name":"cs4aa22d82de270x4becxb48","type":"Microsoft.Storage/storageAccounts","location":"westus","tags":{"ms-resource-usage":"azure-cloud-shell"},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2018-11-29T23:39:30.3657182Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2018-11-29T23:39:30.3657182Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2018-11-29T23:39:30.2563159Z","primaryEndpoints":{"blob":"https://cs4aa22d82de270x4becxb48.blob.core.windows.net/","queue":"https://cs4aa22d82de270x4becxb48.queue.core.windows.net/","table":"https://cs4aa22d82de270x4becxb48.table.core.windows.net/","file":"https://cs4aa22d82de270x4becxb48.file.core.windows.net/"},"primaryLocation":"westus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/guptar/providers/Microsoft.Storage/storageAccounts/guptardevstorage","name":"guptardevstorage","type":"Microsoft.Storage/storageAccounts","location":"westus","tags":{"ms-resource-usage":"azure-cloud-shell"}
,"properties":{"keyCreationTime":{"key1":"2022-02-15T16:49:43.1435156Z","key2":"2022-02-15T16:49:43.1435156Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":false,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-02-15T16:49:43.1591440Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-02-15T16:49:43.1591440Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-02-15T16:49:43.0341047Z","primaryEndpoints":{"dfs":"https://guptardevstorage.dfs.core.windows.net/","web":"https://guptardevstorage.z22.web.core.windows.net/","blob":"https://guptardevstorage.blob.core.windows.net/","queue":"https://guptardevstorage.queue.core.windows.net/","table":"https://guptardevstorage.table.core.windows.net/","file":"https://guptardevstorage.file.core.windows.net/"},"primaryLocation":"westus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/SCRunner/providers/Microsoft.Storage/storageAccounts/scrunnerstorage","name":"scrunnerstorage","type":"Microsoft.Storage/storageAccounts","location":"westus","tags":{},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":false,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2018-03-06T00:42:11.7016543Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2018-03-06T00:42:11.7016543Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2018-03-06T00:42:11.6234985Z","primaryEndpoints":{"blob":"https://scrunnerstorage.blob.core.windows.net/","queue":"https://scrunnerstorage.queue.core.windows.net/","table":"https://scrunnerstorage.table.core.windows.net/","file":"https://scrunnerstorage.file.core.windows.net/"},"primaryLocation":"westus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_RAGRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/lt2-rg/providers/Microsoft.Storage/storageAccounts/sericonjziofteihi","name":"sericonjziofteihi","type":"Microsoft.Storage/storageAccounts","location":"westeurope","tags":{},"properties":{"keyCreationTime":{"key1":"2022-07-29T22:14:56.3530002Z","key2":"2022-07-29T22:14:56.3530002Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-07-29T22:14:56.3686352Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-07-29T22:14:56.3686352Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-07-29T22:14:56.2123903Z","primaryEndpoints":{"dfs":"https://sericonjziofteihi.dfs.core.windows.net/","web":"https://sericonjziofteihi.z6.web.core.windows.net/","blob":"https://sericonjziofteihi.blob.core.windows.net/","queue":"https://sericonjziofteihi.que
ue.core.windows.net/","table":"https://sericonjziofteihi.table.core.windows.net/","file":"https://sericonjziofteihi.file.core.windows.net/"},"primaryLocation":"westeurope","statusOfPrimary":"available","secondaryLocation":"northeurope","statusOfSecondary":"available","secondaryEndpoints":{"dfs":"https://sericonjziofteihi-secondary.dfs.core.windows.net/","web":"https://sericonjziofteihi-secondary.z6.web.core.windows.net/","blob":"https://sericonjziofteihi-secondary.blob.core.windows.net/","queue":"https://sericonjziofteihi-secondary.queue.core.windows.net/","table":"https://sericonjziofteihi-secondary.table.core.windows.net/"}}},{"sku":{"name":"Standard_RAGRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/load-rg/providers/Microsoft.Storage/storageAccounts/sericonmvudqscfyk","name":"sericonmvudqscfyk","type":"Microsoft.Storage/storageAccounts","location":"westeurope","tags":{},"properties":{"keyCreationTime":{"key1":"2022-07-29T20:00:48.2252183Z","key2":"2022-07-29T20:00:48.2252183Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-07-29T20:00:48.2408363Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-07-29T20:00:48.2408363Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-07-29T20:00:48.1001806Z","primaryEndpoints":{"dfs":"https://sericonmvudqscfyk.dfs.core.windows.net/","web":"https://sericonmvudqscfyk.z6.web.core.windows.net/","blob":"https://sericonmvudqscfyk.blob.core.windows.net/","queue":"https://sericonmvudqscfyk.queue.core.windows.net/","table":"https://sericonmvudqscfyk.table.core.windows.net/","file":"https://sericonmvudqscfyk.file.core.windows.net/"},"primaryLocation":"westeurope","statusOfPrimary":"available","secondaryLocation":"northeurope","statusOfSecondary":"available","secondaryEndpoints":{"dfs":"https://sericonmvudqscfyk-secondary.dfs.core.windows.net/","web":"https://sericonmvudqscfyk-secondary.z6.web.core.windows.net/","blob":"https://sericonmvudqscfyk-secondary.blob.core.windows.net/","queue":"https://sericonmvudqscfyk-secondary.queue.core.windows.net/","table":"https://sericonmvudqscfyk-secondary.table.core.windows.net/"}}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cloud-shell-storage-southcentralus/providers/Microsoft.Storage/storageAccounts/cs710032001417ec1a8","name":"cs710032001417ec1a8","type":"Microsoft.Storage/storageAccounts","location":"southcentralus","tags":{"ms-resource-usage":"azure-cloud-shell"},"properties":{"keyCreationTime":{"key1":"2021-05-18T22:07:33.4170256Z","key2":"2021-05-18T22:07:33.4170256Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":false,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-05-18T22:07:33.4170256Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-05-18T22:07:33.4170256Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2021-05-18T22:07:
33.3389725Z","primaryEndpoints":{"dfs":"https://cs710032001417ec1a8.dfs.core.windows.net/","web":"https://cs710032001417ec1a8.z21.web.core.windows.net/","blob":"https://cs710032001417ec1a8.blob.core.windows.net/","queue":"https://cs710032001417ec1a8.queue.core.windows.net/","table":"https://cs710032001417ec1a8.table.core.windows.net/","file":"https://cs710032001417ec1a8.file.core.windows.net/"},"primaryLocation":"southcentralus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rhoover/providers/Microsoft.Storage/storageAccounts/rhooverstorage","name":"rhooverstorage","type":"Microsoft.Storage/storageAccounts","location":"southcentralus","tags":{"ms-resource-usage":"azure-cloud-shell"},"properties":{"keyCreationTime":{"key1":"2022-05-26T17:14:23.5085026Z","key2":"2022-05-26T17:14:23.5085026Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":false,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-05-26T17:14:23.5241285Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-05-26T17:14:23.5241285Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-05-26T17:14:23.4147520Z","primaryEndpoints":{"dfs":"https://rhooverstorage.dfs.core.windows.net/","web":"https://rhooverstorage.z21.web.core.windows.net/","blob":"https://rhooverstorage.blob.core.windows.net/","queue":"https://rhooverstorage.queue.core.windows.net/","table":"https://rhooverstorage.table.core.windows.net/","file":"https://rhooverstorage.file.core.windows.net/"},"primaryLocation":"southcentralus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/harish-storage/providers/Microsoft.Storage/storageAccounts/aueastsarestricted","name":"aueastsarestricted","type":"Microsoft.Storage/storageAccounts","location":"australiaeast","tags":{},"properties":{"dnsEndpointType":"Standard","defaultToOAuthAuthentication":false,"publicNetworkAccess":"Enabled","keyCreationTime":{"key1":"2022-07-17T04:32:04.7486474Z","key2":"2022-07-17T04:32:04.7486474Z"},"allowCrossTenantReplication":true,"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"allowSharedKeyAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/harish-networking/providers/Microsoft.Network/virtualNetworks/aueast-vnet/subnets/testing","action":"Allow","state":"Succeeded"}],"ipRules":[],"defaultAction":"Deny"},"supportsHttpsTrafficOnly":true,"encryption":{"requireInfrastructureEncryption":false,"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-07-17T04:32:04.7486474Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-07-17T04:32:04.7486474Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-07-17T04:32:04.6861236Z","primaryEndpoints":{"dfs":"https://aueastsarestricted.dfs.core.windows.net/","web":"https://aueastsarestricted.z8.web.core.windows.net/","blob":"https://aueastsarestricted.blob.core.windows.net/","queue":"https://aueastsarestric
ted.queue.core.windows.net/","table":"https://aueastsarestricted.table.core.windows.net/","file":"https://aueastsarestricted.file.core.windows.net/"},"primaryLocation":"australiaeast","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/harish-storage/providers/Microsoft.Storage/storageAccounts/aueastsastd","name":"aueastsastd","type":"Microsoft.Storage/storageAccounts","location":"australiaeast","tags":{},"properties":{"dnsEndpointType":"Standard","defaultToOAuthAuthentication":false,"publicNetworkAccess":"Enabled","keyCreationTime":{"key1":"2022-07-17T04:28:55.7260171Z","key2":"2022-07-17T04:28:55.7260171Z"},"allowCrossTenantReplication":true,"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"allowSharedKeyAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"requireInfrastructureEncryption":false,"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-07-17T04:28:55.7416401Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-07-17T04:28:55.7416401Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-07-17T04:28:55.6634675Z","primaryEndpoints":{"dfs":"https://aueastsastd.dfs.core.windows.net/","web":"https://aueastsastd.z8.web.core.windows.net/","blob":"https://aueastsastd.blob.core.windows.net/","queue":"https://aueastsastd.queue.core.windows.net/","table":"https://aueastsastd.table.core.windows.net/","file":"https://aueastsastd.file.core.windows.net/"},"primaryLocation":"australiaeast","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/SCRunnertestvmrg-AustraliaEast/providers/Microsoft.Storage/storageAccounts/scrunner4p3t72mzheluc","name":"scrunner4p3t72mzheluc","type":"Microsoft.Storage/storageAccounts","location":"australiaeast","tags":{},"properties":{"keyCreationTime":{"key1":"2021-04-13T22:35:36.6210942Z","key2":"2021-04-13T22:35:36.6210942Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-04-13T22:35:36.6210942Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-04-13T22:35:36.6210942Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2021-04-13T22:35:36.5429508Z","primaryEndpoints":{"blob":"https://scrunner4p3t72mzheluc.blob.core.windows.net/","queue":"https://scrunner4p3t72mzheluc.queue.core.windows.net/","table":"https://scrunner4p3t72mzheluc.table.core.windows.net/","file":"https://scrunner4p3t72mzheluc.file.core.windows.net/"},"primaryLocation":"australiaeast","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/aabedon/providers/Microsoft.Storage/storageAccounts/aabedondiag","name":"aabedondiag","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2021-05-18T23:08:58.5284733Z","key2":"2021-05-18T23:08:58.
5284733Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-05-18T23:08:58.5284733Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-05-18T23:08:58.5284733Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2021-05-18T23:08:58.4503170Z","primaryEndpoints":{"blob":"https://aabedondiag.blob.core.windows.net/","queue":"https://aabedondiag.queue.core.windows.net/","table":"https://aabedondiag.table.core.windows.net/","file":"https://aabedondiag.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsolec6uoh7l3xzbaesvvht7o4yymqvrtjxzjp6xhzlr6uq33wrwalkutf/providers/Microsoft.Storage/storageAccounts/cli2nzt6cf7elcnalnul745f","name":"cli2nzt6cf7elcnalnul745f","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-08-04T17:10:10.8883580Z","key2":"2022-08-04T17:10:10.8883580Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-08-04T17:10:10.9039542Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-08-04T17:10:10.9039542Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-08-04T17:10:10.7946308Z","primaryEndpoints":{"blob":"https://cli2nzt6cf7elcnalnul745f.blob.core.windows.net/","queue":"https://cli2nzt6cf7elcnalnul745f.queue.core.windows.net/","table":"https://cli2nzt6cf7elcnalnul745f.table.core.windows.net/","file":"https://cli2nzt6cf7elcnalnul745f.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsolesaxgnjdvlvrazb2d7kzgbif6iqxouncy2ql4je4i6xagycvbdwqy3/providers/Microsoft.Storage/storageAccounts/cli4f6xxtlnjteigl6v5t4lv","name":"cli4f6xxtlnjteigl6v5t4lv","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-08-04T17:10:09.1695976Z","key2":"2022-08-04T17:10:09.1695976Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-08-04T17:10:09.1852249Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-08-04T17:10:09.1852249Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-08-04T17:10:09.0758710Z","primaryEndpoints":{"blob":"https://cli4f6xxtlnjteigl6v5t4lv.blob.core.windows.net/","queue":"https://cli4f6xxtlnjteigl6v5t4lv.queue.core.windows.net/","table":"https://cli4f6xxtlnjteigl6v5t4lv.table.core.windows.net/","file":"https://cli4f6xxtlnjteigl6
v5t4lv.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsoley3ugi77omtuicfeufj7airfyly2mx6q5werdzcrsihvoga5v3vqgw/providers/Microsoft.Storage/storageAccounts/clicnww5fgwrdzxocqcrnpfj","name":"clicnww5fgwrdzxocqcrnpfj","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-08-04T17:12:57.9815047Z","key2":"2022-08-04T17:12:57.9815047Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-08-04T17:12:57.9971598Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-08-04T17:12:57.9971598Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-08-04T17:12:57.8878066Z","primaryEndpoints":{"blob":"https://clicnww5fgwrdzxocqcrnpfj.blob.core.windows.net/","queue":"https://clicnww5fgwrdzxocqcrnpfj.queue.core.windows.net/","table":"https://clicnww5fgwrdzxocqcrnpfj.table.core.windows.net/","file":"https://clicnww5fgwrdzxocqcrnpfj.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsolexz7y33ftmrn4mdow7xasofcbz7532co6uk6mo33fg3frfnznzd7mt/providers/Microsoft.Storage/storageAccounts/clid32qhbjy4liaqoy6z3cot","name":"clid32qhbjy4liaqoy6z3cot","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-08-04T05:30:13.6295332Z","key2":"2022-08-04T05:30:13.6295332Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-08-04T05:30:13.6451524Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-08-04T05:30:13.6451524Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-08-04T05:30:13.5514577Z","primaryEndpoints":{"blob":"https://clid32qhbjy4liaqoy6z3cot.blob.core.windows.net/","queue":"https://clid32qhbjy4liaqoy6z3cot.queue.core.windows.net/","table":"https://clid32qhbjy4liaqoy6z3cot.table.core.windows.net/","file":"https://clid32qhbjy4liaqoy6z3cot.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsolej3epdwbxyovtvemgmi2jcjmyfpfaq62os62nnyx3awxoyb54aq5e3/providers/Microsoft.Storage/storageAccounts/clielzocytedvrayy4w4hdmz","name":"clielzocytedvrayy4w4hdmz","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-08-04T17:14:50.2782645Z","key2":"2022-08-04T17:14:50.2782645Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[
],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-08-04T17:14:50.2938724Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-08-04T17:14:50.2938724Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"ResolvingDns","creationTime":"2022-08-04T17:14:50.1845479Z","primaryEndpoints":{"blob":"https://clielzocytedvrayy4w4hdmz.blob.core.windows.net/","queue":"https://clielzocytedvrayy4w4hdmz.queue.core.windows.net/","table":"https://clielzocytedvrayy4w4hdmz.table.core.windows.net/","file":"https://clielzocytedvrayy4w4hdmz.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002","name":"cli000002","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-08-04T17:10:09.6539719Z","key2":"2022-08-04T17:10:09.6539719Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-08-04T17:10:09.6696214Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-08-04T17:10:09.6696214Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-08-04T17:10:09.5446548Z","primaryEndpoints":{"blob":"https://cli000002.blob.core.windows.net/","queue":"https://cli000002.queue.core.windows.net/","table":"https://cli000002.table.core.windows.net/","file":"https://cli000002.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/craigw-gui-test_group/providers/Microsoft.Storage/storageAccounts/craigwguitestgroupdiag","name":"craigwguitestgroupdiag","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2021-06-25T20:43:28.9782992Z","key2":"2021-06-25T20:43:28.9782992Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-06-25T20:43:28.9782992Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-06-25T20:43:28.9782992Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2021-06-25T20:43:28.9001463Z","primaryEndpoints":{"blob":"https://craigwguitestgroupdiag.blob.core.windows.net/","queue":"https://craigwguitestgroupdiag.queue.core.windows.net/","table":"https://craigwguitestgroupdiag.table.core.windows.net/","file":"https://craigwguitestgroupdiag.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/guptar2/providers/Microsoft.Storage/storageAccounts/guptar2diagnosticsv1","name":"guptar2diagn
osticsv1","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-04-05T17:21:41.8250582Z","key2":"2022-04-05T17:21:41.8250582Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-04-05T17:21:41.8250582Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-04-05T17:21:41.8250582Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-04-05T17:21:41.7313240Z","primaryEndpoints":{"blob":"https://guptar2diagnosticsv1.blob.core.windows.net/","queue":"https://guptar2diagnosticsv1.queue.core.windows.net/","table":"https://guptar2diagnosticsv1.table.core.windows.net/","file":"https://guptar2diagnosticsv1.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/guptar2/providers/Microsoft.Storage/storageAccounts/guptar2diagnosticsv2","name":"guptar2diagnosticsv2","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-04-05T17:22:55.8411567Z","key2":"2022-04-05T17:22:55.8411567Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-04-05T17:22:55.8411567Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-04-05T17:22:55.8411567Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-04-05T17:22:55.7318000Z","primaryEndpoints":{"dfs":"https://guptar2diagnosticsv2.dfs.core.windows.net/","web":"https://guptar2diagnosticsv2.z5.web.core.windows.net/","blob":"https://guptar2diagnosticsv2.blob.core.windows.net/","queue":"https://guptar2diagnosticsv2.queue.core.windows.net/","table":"https://guptar2diagnosticsv2.table.core.windows.net/","file":"https://guptar2diagnosticsv2.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/sericonrp-trafficmanager/providers/Microsoft.Storage/storageAccounts/sericonrpdevtmstorage","name":"sericonrpdevtmstorage","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"defaultToOAuthAuthentication":false,"keyCreationTime":{"key1":"2021-09-15T09:23:38.0203325Z","key2":"2021-09-15T09:23:38.0203325Z"},"allowCrossTenantReplication":true,"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"allowSharedKeyAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-09-15T09:23:38.0360009Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-09-15T09:23:38.0360009Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot
","provisioningState":"Succeeded","creationTime":"2021-09-15T09:23:37.9265953Z","primaryEndpoints":{"dfs":"https://sericonrpdevtmstorage.dfs.core.windows.net/","web":"https://sericonrpdevtmstorage.z5.web.core.windows.net/","blob":"https://sericonrpdevtmstorage.blob.core.windows.net/","queue":"https://sericonrpdevtmstorage.queue.core.windows.net/","table":"https://sericonrpdevtmstorage.table.core.windows.net/","file":"https://sericonrpdevtmstorage.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rhoover-dev-rg/providers/Microsoft.Storage/storageAccounts/rhooverdevrgdiag","name":"rhooverdevrgdiag","type":"Microsoft.Storage/storageAccounts","location":"westcentralus","tags":{},"properties":{"keyCreationTime":{"key1":"2022-06-20T19:39:24.4605968Z","key2":"2022-06-20T19:39:24.4605968Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-06-20T19:39:24.4762287Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-06-20T19:39:24.4762287Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-06-20T19:39:24.4137057Z","primaryEndpoints":{"blob":"https://rhooverdevrgdiag.blob.core.windows.net/","queue":"https://rhooverdevrgdiag.queue.core.windows.net/","table":"https://rhooverdevrgdiag.table.core.windows.net/","file":"https://rhooverdevrgdiag.file.core.windows.net/"},"primaryLocation":"westcentralus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/scrunnertestvmrg-westcentralus/providers/Microsoft.Storage/storageAccounts/scrunnerrfscmqxeni3uq","name":"scrunnerrfscmqxeni3uq","type":"Microsoft.Storage/storageAccounts","location":"westcentralus","tags":{},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2020-04-10T22:28:55.2104910Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2020-04-10T22:28:55.2104910Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2020-04-10T22:28:55.1479670Z","primaryEndpoints":{"blob":"https://scrunnerrfscmqxeni3uq.blob.core.windows.net/","queue":"https://scrunnerrfscmqxeni3uq.queue.core.windows.net/","table":"https://scrunnerrfscmqxeni3uq.table.core.windows.net/","file":"https://scrunnerrfscmqxeni3uq.file.core.windows.net/"},"primaryLocation":"westcentralus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/ubuntu-westus3_group/providers/Microsoft.Storage/storageAccounts/ubuntuwestus3groupdiag","name":"ubuntuwestus3groupdiag","type":"Microsoft.Storage/storageAccounts","location":"westus3","tags":{},"properties":{"keyCreationTime":{"key1":"2022-04-18T19:48:38.9882588Z","key2":"2022-04-18T1
9:48:38.9882588Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-04-18T19:48:38.9882588Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-04-18T19:48:38.9882588Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-04-18T19:48:38.9258191Z","primaryEndpoints":{"blob":"https://ubuntuwestus3groupdiag.blob.core.windows.net/","queue":"https://ubuntuwestus3groupdiag.queue.core.windows.net/","table":"https://ubuntuwestus3groupdiag.table.core.windows.net/","file":"https://ubuntuwestus3groupdiag.file.core.windows.net/"},"primaryLocation":"westus3","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/guptar2/providers/Microsoft.Storage/storageAccounts/cloudshellcanarystorage","name":"cloudshellcanarystorage","type":"Microsoft.Storage/storageAccounts","location":"eastus2euap","tags":{},"properties":{"keyCreationTime":{"key1":"2022-08-01T21:16:45.8824319Z","key2":"2022-08-01T21:16:45.8824319Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-08-01T21:16:46.0855567Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-08-01T21:16:46.0855567Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-08-01T21:16:45.8043474Z","primaryEndpoints":{"dfs":"https://cloudshellcanarystorage.dfs.core.windows.net/","web":"https://cloudshellcanarystorage.z3.web.core.windows.net/","blob":"https://cloudshellcanarystorage.blob.core.windows.net/","queue":"https://cloudshellcanarystorage.queue.core.windows.net/","table":"https://cloudshellcanarystorage.table.core.windows.net/","file":"https://cloudshellcanarystorage.file.core.windows.net/"},"primaryLocation":"eastus2euap","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/craigwEv5-1_group/providers/Microsoft.Storage/storageAccounts/craigwev51groupdiag","name":"craigwev51groupdiag","type":"Microsoft.Storage/storageAccounts","location":"eastus2euap","tags":{},"properties":{"keyCreationTime":{"key1":"2022-01-13T14:34:32.7433319Z","key2":"2022-01-13T14:34:32.7433319Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-01-13T14:34:32.7433319Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-01-13T14:34:32.7433319Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-01-13T14:34:32.6652176Z","primaryEndpoints":{"blob":"https://craigwev51groupdiag.blob.core.windows.net/","queue":"https://craigwev51groupdiag.queue.core.windows.net/","table":"https://craigwev51groupdiag.tabl
e.core.windows.net/","file":"https://craigwev51groupdiag.file.core.windows.net/"},"primaryLocation":"eastus2euap","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/arm64-centraluseuap/providers/Microsoft.Storage/storageAccounts/sericonarm64euap","name":"sericonarm64euap","type":"Microsoft.Storage/storageAccounts","location":"centraluseuap","tags":{},"properties":{"keyCreationTime":{"key1":"2022-01-05T18:15:35.3504562Z","key2":"2022-01-05T18:15:35.3504562Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-01-05T18:15:35.3504562Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-01-05T18:15:35.3504562Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-01-05T18:15:35.3035816Z","primaryEndpoints":{"blob":"https://sericonarm64euap.blob.core.windows.net/","queue":"https://sericonarm64euap.queue.core.windows.net/","table":"https://sericonarm64euap.table.core.windows.net/","file":"https://sericonarm64euap.file.core.windows.net/"},"primaryLocation":"centraluseuap","statusOfPrimary":"available"}}]}' + string: '{"value":[{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/bkerrigan/providers/Microsoft.Storage/storageAccounts/bkerrigandiag","name":"bkerrigandiag","type":"Microsoft.Storage/storageAccounts","location":"eastus","tags":{},"properties":{"keyCreationTime":{"key1":"2022-05-17T00:24:49.4879627Z","key2":"2022-05-17T00:24:49.4879627Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-05-17T00:24:49.4879627Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-05-17T00:24:49.4879627Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-05-17T00:24:49.3473400Z","primaryEndpoints":{"blob":"https://bkerrigandiag.blob.core.windows.net/","queue":"https://bkerrigandiag.queue.core.windows.net/","table":"https://bkerrigandiag.table.core.windows.net/","file":"https://bkerrigandiag.file.core.windows.net/"},"primaryLocation":"eastus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cloud-shell-storage-eastus/providers/Microsoft.Storage/storageAccounts/cs210032001f4814ba9","name":"cs210032001f4814ba9","type":"Microsoft.Storage/storageAccounts","location":"eastus","tags":{"ms-resource-usage":"azure-cloud-shell"},"properties":{"keyCreationTime":{"key1":"2022-05-16T14:16:22.3477819Z","key2":"2022-05-16T14:16:22.3477819Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":false,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-05-16T14:16:22
.3477819Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-05-16T14:16:22.3477819Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-05-16T14:16:22.2227752Z","primaryEndpoints":{"dfs":"https://cs210032001f4814ba9.dfs.core.windows.net/","web":"https://cs210032001f4814ba9.z13.web.core.windows.net/","blob":"https://cs210032001f4814ba9.blob.core.windows.net/","queue":"https://cs210032001f4814ba9.queue.core.windows.net/","table":"https://cs210032001f4814ba9.table.core.windows.net/","file":"https://cs210032001f4814ba9.file.core.windows.net/"},"primaryLocation":"eastus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_RAGRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/kustoflow/providers/Microsoft.Storage/storageAccounts/csslinuxkustoflow","name":"csslinuxkustoflow","type":"Microsoft.Storage/storageAccounts","location":"eastus","tags":{"CreatedBy":"craigw"},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2019-02-01T20:08:38.6849654Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2019-02-01T20:08:38.6849654Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2019-02-01T20:08:38.5912170Z","primaryEndpoints":{"dfs":"https://csslinuxkustoflow.dfs.core.windows.net/","web":"https://csslinuxkustoflow.z13.web.core.windows.net/","blob":"https://csslinuxkustoflow.blob.core.windows.net/","queue":"https://csslinuxkustoflow.queue.core.windows.net/","table":"https://csslinuxkustoflow.table.core.windows.net/","file":"https://csslinuxkustoflow.file.core.windows.net/"},"primaryLocation":"eastus","statusOfPrimary":"available","secondaryLocation":"westus","statusOfSecondary":"available","secondaryEndpoints":{"dfs":"https://csslinuxkustoflow-secondary.dfs.core.windows.net/","web":"https://csslinuxkustoflow-secondary.z13.web.core.windows.net/","blob":"https://csslinuxkustoflow-secondary.blob.core.windows.net/","queue":"https://csslinuxkustoflow-secondary.queue.core.windows.net/","table":"https://csslinuxkustoflow-secondary.table.core.windows.net/"}}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/scrunnertestvmrg-eastus/providers/Microsoft.Storage/storageAccounts/scrunnercrkwpdn5nhtgg","name":"scrunnercrkwpdn5nhtgg","type":"Microsoft.Storage/storageAccounts","location":"eastus","tags":{},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2020-05-12T20:03:57.6389684Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2020-05-12T20:03:57.6389684Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2020-05-12T20:03:57.5451905Z","primaryEndpoints":{"blob":"https://scrunnercrkwpdn5nhtgg.blob.core.windows.net/","queue":"https://scrunnercrkwp
dn5nhtgg.queue.core.windows.net/","table":"https://scrunnercrkwpdn5nhtgg.table.core.windows.net/","file":"https://scrunnercrkwpdn5nhtgg.file.core.windows.net/"},"primaryLocation":"eastus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_RAGRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/storage-RG/providers/Microsoft.Storage/storageAccounts/serialconsolepreview","name":"serialconsolepreview","type":"Microsoft.Storage/storageAccounts","location":"eastus","tags":{},"properties":{"keyCreationTime":{"key1":"2021-05-07T21:41:56.3607334Z","key2":"2021-05-07T21:41:56.3607334Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"allowSharedKeyAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-05-07T21:41:56.3607334Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-05-07T21:41:56.3607334Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2021-05-07T21:41:56.2513536Z","primaryEndpoints":{"dfs":"https://serialconsolepreview.dfs.core.windows.net/","web":"https://serialconsolepreview.z13.web.core.windows.net/","blob":"https://serialconsolepreview.blob.core.windows.net/","queue":"https://serialconsolepreview.queue.core.windows.net/","table":"https://serialconsolepreview.table.core.windows.net/","file":"https://serialconsolepreview.file.core.windows.net/"},"primaryLocation":"eastus","statusOfPrimary":"available","secondaryLocation":"westus","statusOfSecondary":"available","secondaryEndpoints":{"dfs":"https://serialconsolepreview-secondary.dfs.core.windows.net/","web":"https://serialconsolepreview-secondary.z13.web.core.windows.net/","blob":"https://serialconsolepreview-secondary.blob.core.windows.net/","queue":"https://serialconsolepreview-secondary.queue.core.windows.net/","table":"https://serialconsolepreview-secondary.table.core.windows.net/"}}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/serialconsole-test/providers/Microsoft.Storage/storageAccounts/serialconsoletestdiag","name":"serialconsoletestdiag","type":"Microsoft.Storage/storageAccounts","location":"eastus","tags":{},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"resourceAccessRules":[],"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Deny"},"supportsHttpsTrafficOnly":false,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2019-02-06T20:21:39.7019315Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2019-02-06T20:21:39.7019315Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2019-02-06T20:21:39.5925779Z","primaryEndpoints":{"blob":"https://serialconsoletestdiag.blob.core.windows.net/","queue":"https://serialconsoletestdiag.queue.core.windows.net/","table":"https://serialconsoletestdiag.table.core.windows.net/","file":"https://serialconsoletestdiag.file.core.windows.net/"},"primaryLocation":"eastus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000
000/resourceGroups/serialTest-EastUS/providers/Microsoft.Storage/storageAccounts/serialtesta8d7fdee41","name":"serialtesta8d7fdee41","type":"Microsoft.Storage/storageAccounts","location":"eastus","tags":{},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":false,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2019-07-11T00:38:13.5389932Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2019-07-11T00:38:13.5389932Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2019-07-11T00:38:13.4452119Z","primaryEndpoints":{"blob":"https://serialtesta8d7fdee41.blob.core.windows.net/","queue":"https://serialtesta8d7fdee41.queue.core.windows.net/","table":"https://serialtesta8d7fdee41.table.core.windows.net/","file":"https://serialtesta8d7fdee41.file.core.windows.net/"},"primaryLocation":"eastus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/storage-RG/providers/Microsoft.Storage/storageAccounts/serialtestbootdiag123","name":"serialtestbootdiag123","type":"Microsoft.Storage/storageAccounts","location":"eastus","tags":{},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":false,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2018-01-23T04:03:01.3263151Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2018-01-23T04:03:01.3263151Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2018-01-23T04:03:01.2951106Z","primaryEndpoints":{"blob":"https://serialtestbootdiag123.blob.core.windows.net/","queue":"https://serialtestbootdiag123.queue.core.windows.net/","table":"https://serialtestbootdiag123.table.core.windows.net/","file":"https://serialtestbootdiag123.file.core.windows.net/"},"primaryLocation":"eastus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_RAGRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/yuas-rg/providers/Microsoft.Storage/storageAccounts/yuasstorageacct","name":"yuasstorageacct","type":"Microsoft.Storage/storageAccounts","location":"eastus","tags":{},"properties":{"dnsEndpointType":"Standard","defaultToOAuthAuthentication":false,"publicNetworkAccess":"Enabled","keyCreationTime":{"key1":"2022-08-02T12:18:18.8547131Z","key2":"2022-08-02T12:18:18.8547131Z"},"allowCrossTenantReplication":true,"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"allowSharedKeyAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"requireInfrastructureEncryption":false,"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-08-02T12:18:18.8547131Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-08-02T12:18:18.8547131Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creation
Time":"2022-08-02T12:18:18.7140969Z","primaryEndpoints":{"dfs":"https://yuasstorageacct.dfs.core.windows.net/","web":"https://yuasstorageacct.z13.web.core.windows.net/","blob":"https://yuasstorageacct.blob.core.windows.net/","queue":"https://yuasstorageacct.queue.core.windows.net/","table":"https://yuasstorageacct.table.core.windows.net/","file":"https://yuasstorageacct.file.core.windows.net/"},"primaryLocation":"eastus","statusOfPrimary":"available","secondaryLocation":"westus","statusOfSecondary":"available","secondaryEndpoints":{"dfs":"https://yuasstorageacct-secondary.dfs.core.windows.net/","web":"https://yuasstorageacct-secondary.z13.web.core.windows.net/","blob":"https://yuasstorageacct-secondary.blob.core.windows.net/","queue":"https://yuasstorageacct-secondary.queue.core.windows.net/","table":"https://yuasstorageacct-secondary.table.core.windows.net/"}}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/bkerrigan-dev-rg/providers/Microsoft.Storage/storageAccounts/bkerriganbootdiag","name":"bkerriganbootdiag","type":"Microsoft.Storage/storageAccounts","location":"eastus2","tags":{},"properties":{"defaultToOAuthAuthentication":false,"publicNetworkAccess":"Enabled","keyCreationTime":{"key1":"2022-09-06T13:46:17.8293781Z","key2":"2022-09-06T13:46:17.8293781Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"allowSharedKeyAccess":true,"networkAcls":{"resourceAccessRules":[],"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-09-06T13:46:18.0015953Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-09-06T13:46:18.0015953Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Cool","provisioningState":"Succeeded","creationTime":"2022-09-06T13:46:17.7200090Z","primaryEndpoints":{"dfs":"https://bkerriganbootdiag.dfs.core.windows.net/","web":"https://bkerriganbootdiag.z20.web.core.windows.net/","blob":"https://bkerriganbootdiag.blob.core.windows.net/","queue":"https://bkerriganbootdiag.queue.core.windows.net/","table":"https://bkerriganbootdiag.table.core.windows.net/","file":"https://bkerriganbootdiag.file.core.windows.net/"},"primaryLocation":"eastus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/bkerrigan-dev-rg/providers/Microsoft.Storage/storageAccounts/bktestsa2","name":"bktestsa2","type":"Microsoft.Storage/storageAccounts","location":"eastus2","tags":{},"properties":{"dnsEndpointType":"Standard","defaultToOAuthAuthentication":false,"publicNetworkAccess":"Enabled","immutableStorageWithVersioning":{"enabled":true},"keyCreationTime":{"key1":"2022-09-27T23:58:45.6496284Z","key2":"2022-09-27T23:58:45.6496284Z"},"allowCrossTenantReplication":true,"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"allowSharedKeyAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"requireInfrastructureEncryption":false,"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-09-27T23:58:46.2902461Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-09-27T23:58:46.2902461Z"}},"keySource":"Micros
oft.Storage"},"accessTier":"Cool","provisioningState":"Succeeded","creationTime":"2022-09-27T23:58:45.5558609Z","primaryEndpoints":{"dfs":"https://bktestsa2.dfs.core.windows.net/","web":"https://bktestsa2.z20.web.core.windows.net/","blob":"https://bktestsa2.blob.core.windows.net/","queue":"https://bktestsa2.queue.core.windows.net/","table":"https://bktestsa2.table.core.windows.net/","file":"https://bktestsa2.file.core.windows.net/"},"primaryLocation":"eastus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/guptar2/providers/Microsoft.Storage/storageAccounts/guptar2eastus2storage","name":"guptar2eastus2storage","type":"Microsoft.Storage/storageAccounts","location":"eastus2","tags":{"ms-resource-usage":"azure-cloud-shell"},"properties":{"publicNetworkAccess":"Enabled","keyCreationTime":{"key1":"2022-07-28T23:08:00.6935848Z","key2":"2022-07-28T23:08:00.6935848Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":false,"networkAcls":{"resourceAccessRules":[],"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[{"value":"20.98.146.84","action":"Allow"},{"value":"20.83.222.102","action":"Allow"},{"value":"20.69.5.162","action":"Allow"},{"value":"20.98.194.64","action":"Allow"}],"defaultAction":"Deny"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-07-28T23:08:00.6935848Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-07-28T23:08:00.6935848Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-07-28T23:08:00.5840608Z","primaryEndpoints":{"dfs":"https://guptar2eastus2storage.dfs.core.windows.net/","web":"https://guptar2eastus2storage.z20.web.core.windows.net/","blob":"https://guptar2eastus2storage.blob.core.windows.net/","queue":"https://guptar2eastus2storage.queue.core.windows.net/","table":"https://guptar2eastus2storage.table.core.windows.net/","file":"https://guptar2eastus2storage.file.core.windows.net/"},"primaryLocation":"eastus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rhel-test/providers/Microsoft.Storage/storageAccounts/rhel77acct","name":"rhel77acct","type":"Microsoft.Storage/storageAccounts","location":"eastus2","tags":{},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2020-08-13T20:31:30.8995173Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2020-08-13T20:31:30.8995173Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2020-08-13T20:31:30.8215811Z","primaryEndpoints":{"blob":"https://rhel77acct.blob.core.windows.net/","queue":"https://rhel77acct.queue.core.windows.net/","table":"https://rhel77acct.table.core.windows.net/","file":"https://rhel77acct.file.core.windows.net/"},"primaryLocation":"eastus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clo
ud-shell-storage-westus/providers/Microsoft.Storage/storageAccounts/cs4100320010c152e3d","name":"cs4100320010c152e3d","type":"Microsoft.Storage/storageAccounts","location":"westus","tags":{"ms-resource-usage":"azure-cloud-shell"},"properties":{"keyCreationTime":{"key1":"2022-02-07T20:19:42.9636823Z","key2":"2022-02-07T20:19:42.9636823Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":false,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-02-07T20:19:42.9636823Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-02-07T20:19:42.9636823Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-02-07T20:19:42.8699133Z","primaryEndpoints":{"dfs":"https://cs4100320010c152e3d.dfs.core.windows.net/","web":"https://cs4100320010c152e3d.z22.web.core.windows.net/","blob":"https://cs4100320010c152e3d.blob.core.windows.net/","queue":"https://cs4100320010c152e3d.queue.core.windows.net/","table":"https://cs4100320010c152e3d.table.core.windows.net/","file":"https://cs4100320010c152e3d.file.core.windows.net/"},"primaryLocation":"westus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cloud-shell-storage-westus/providers/Microsoft.Storage/storageAccounts/cs410037ffea943c134","name":"cs410037ffea943c134","type":"Microsoft.Storage/storageAccounts","location":"westus","tags":{"ms-resource-usage":"azure-cloud-shell"},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2020-03-23T23:07:16.0114253Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2020-03-23T23:07:16.0114253Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2020-03-23T23:07:15.9333036Z","primaryEndpoints":{"dfs":"https://cs410037ffea943c134.dfs.core.windows.net/","web":"https://cs410037ffea943c134.z22.web.core.windows.net/","blob":"https://cs410037ffea943c134.blob.core.windows.net/","queue":"https://cs410037ffea943c134.queue.core.windows.net/","table":"https://cs410037ffea943c134.table.core.windows.net/","file":"https://cs410037ffea943c134.file.core.windows.net/"},"primaryLocation":"westus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cloud-shell-storage-westus/providers/Microsoft.Storage/storageAccounts/cs41003bffd81f3ab32","name":"cs41003bffd81f3ab32","type":"Microsoft.Storage/storageAccounts","location":"westus","tags":{"ms-resource-usage":"azure-cloud-shell"},"properties":{"keyCreationTime":{"key1":"2022-07-29T00:18:56.4686445Z","key2":"2022-07-29T00:18:56.4686445Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":false,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Acc
ount","enabled":true,"lastEnabledTime":"2022-07-29T00:18:56.4842807Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-07-29T00:18:56.4842807Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-07-29T00:18:56.3748663Z","primaryEndpoints":{"dfs":"https://cs41003bffd81f3ab32.dfs.core.windows.net/","web":"https://cs41003bffd81f3ab32.z22.web.core.windows.net/","blob":"https://cs41003bffd81f3ab32.blob.core.windows.net/","queue":"https://cs41003bffd81f3ab32.queue.core.windows.net/","table":"https://cs41003bffd81f3ab32.table.core.windows.net/","file":"https://cs41003bffd81f3ab32.file.core.windows.net/"},"primaryLocation":"westus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cloud-shell-storage-westus/providers/Microsoft.Storage/storageAccounts/cs4aa22d82de270x4becxb48","name":"cs4aa22d82de270x4becxb48","type":"Microsoft.Storage/storageAccounts","location":"westus","tags":{"ms-resource-usage":"azure-cloud-shell"},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2018-11-29T23:39:30.3657182Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2018-11-29T23:39:30.3657182Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2018-11-29T23:39:30.2563159Z","primaryEndpoints":{"blob":"https://cs4aa22d82de270x4becxb48.blob.core.windows.net/","queue":"https://cs4aa22d82de270x4becxb48.queue.core.windows.net/","table":"https://cs4aa22d82de270x4becxb48.table.core.windows.net/","file":"https://cs4aa22d82de270x4becxb48.file.core.windows.net/"},"primaryLocation":"westus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/guptar2/providers/Microsoft.Storage/storageAccounts/guptar2storagecloudshell","name":"guptar2storagecloudshell","type":"Microsoft.Storage/storageAccounts","location":"westus","tags":{"ms-resource-usage":"azure-cloud-shell"},"properties":{"keyCreationTime":{"key1":"2022-09-13T23:27:57.8525804Z","key2":"2022-09-13T23:27:57.8525804Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":false,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-09-13T23:27:57.8525804Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-09-13T23:27:57.8525804Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-09-13T23:27:57.7431842Z","primaryEndpoints":{"dfs":"https://guptar2storagecloudshell.dfs.core.windows.net/","web":"https://guptar2storagecloudshell.z22.web.core.windows.net/","blob":"https://guptar2storagecloudshell.blob.core.windows.net/","queue":"https://guptar2storagecloudshell.queue.core.windows.net/","table":"https://guptar2storagecloudshell.table.core.windows.net/","file":"https://guptar2storagecloudshell.file.core.windows.net/"},"primaryLocation":"westu
s","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/guptar/providers/Microsoft.Storage/storageAccounts/guptardevstorage","name":"guptardevstorage","type":"Microsoft.Storage/storageAccounts","location":"westus","tags":{"ms-resource-usage":"azure-cloud-shell"},"properties":{"keyCreationTime":{"key1":"2022-02-15T16:49:43.1435156Z","key2":"2022-02-15T16:49:43.1435156Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":false,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-02-15T16:49:43.1591440Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-02-15T16:49:43.1591440Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-02-15T16:49:43.0341047Z","primaryEndpoints":{"dfs":"https://guptardevstorage.dfs.core.windows.net/","web":"https://guptardevstorage.z22.web.core.windows.net/","blob":"https://guptardevstorage.blob.core.windows.net/","queue":"https://guptardevstorage.queue.core.windows.net/","table":"https://guptardevstorage.table.core.windows.net/","file":"https://guptardevstorage.file.core.windows.net/"},"primaryLocation":"westus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/SCRunner/providers/Microsoft.Storage/storageAccounts/scrunnerstorage","name":"scrunnerstorage","type":"Microsoft.Storage/storageAccounts","location":"westus","tags":{},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":false,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2018-03-06T00:42:11.7016543Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2018-03-06T00:42:11.7016543Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2018-03-06T00:42:11.6234985Z","primaryEndpoints":{"blob":"https://scrunnerstorage.blob.core.windows.net/","queue":"https://scrunnerstorage.queue.core.windows.net/","table":"https://scrunnerstorage.table.core.windows.net/","file":"https://scrunnerstorage.file.core.windows.net/"},"primaryLocation":"westus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cloud-shell-storage-southcentralus/providers/Microsoft.Storage/storageAccounts/cs710032001417ec1a8","name":"cs710032001417ec1a8","type":"Microsoft.Storage/storageAccounts","location":"southcentralus","tags":{"ms-resource-usage":"azure-cloud-shell"},"properties":{"keyCreationTime":{"key1":"2021-05-18T22:07:33.4170256Z","key2":"2021-05-18T22:07:33.4170256Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":false,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-05-18T22:07:33.4170256Z"}
,"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-05-18T22:07:33.4170256Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2021-05-18T22:07:33.3389725Z","primaryEndpoints":{"dfs":"https://cs710032001417ec1a8.dfs.core.windows.net/","web":"https://cs710032001417ec1a8.z21.web.core.windows.net/","blob":"https://cs710032001417ec1a8.blob.core.windows.net/","queue":"https://cs710032001417ec1a8.queue.core.windows.net/","table":"https://cs710032001417ec1a8.table.core.windows.net/","file":"https://cs710032001417ec1a8.file.core.windows.net/"},"primaryLocation":"southcentralus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rhoover/providers/Microsoft.Storage/storageAccounts/rhooverstorage","name":"rhooverstorage","type":"Microsoft.Storage/storageAccounts","location":"southcentralus","tags":{"ms-resource-usage":"azure-cloud-shell"},"properties":{"keyCreationTime":{"key1":"2022-05-26T17:14:23.5085026Z","key2":"2022-05-26T17:14:23.5085026Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":false,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-05-26T17:14:23.5241285Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-05-26T17:14:23.5241285Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-05-26T17:14:23.4147520Z","primaryEndpoints":{"dfs":"https://rhooverstorage.dfs.core.windows.net/","web":"https://rhooverstorage.z21.web.core.windows.net/","blob":"https://rhooverstorage.blob.core.windows.net/","queue":"https://rhooverstorage.queue.core.windows.net/","table":"https://rhooverstorage.table.core.windows.net/","file":"https://rhooverstorage.file.core.windows.net/"},"primaryLocation":"southcentralus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/harish-storage/providers/Microsoft.Storage/storageAccounts/aueastsarestricted","name":"aueastsarestricted","type":"Microsoft.Storage/storageAccounts","location":"australiaeast","tags":{},"properties":{"dnsEndpointType":"Standard","defaultToOAuthAuthentication":false,"publicNetworkAccess":"Enabled","keyCreationTime":{"key1":"2022-07-17T04:32:04.7486474Z","key2":"2022-07-17T04:32:04.7486474Z"},"allowCrossTenantReplication":true,"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"allowSharedKeyAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/harish-networking/providers/Microsoft.Network/virtualNetworks/aueast-vnet/subnets/testing","action":"Allow","state":"Succeeded"}],"ipRules":[],"defaultAction":"Deny"},"supportsHttpsTrafficOnly":true,"encryption":{"requireInfrastructureEncryption":false,"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-07-17T04:32:04.7486474Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-07-17T04:32:04.7486474Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-07-17T04:32:04.6861236Z","primaryEndpoint
s":{"dfs":"https://aueastsarestricted.dfs.core.windows.net/","web":"https://aueastsarestricted.z8.web.core.windows.net/","blob":"https://aueastsarestricted.blob.core.windows.net/","queue":"https://aueastsarestricted.queue.core.windows.net/","table":"https://aueastsarestricted.table.core.windows.net/","file":"https://aueastsarestricted.file.core.windows.net/"},"primaryLocation":"australiaeast","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/harish-storage/providers/Microsoft.Storage/storageAccounts/aueastsastd","name":"aueastsastd","type":"Microsoft.Storage/storageAccounts","location":"australiaeast","tags":{},"properties":{"dnsEndpointType":"Standard","defaultToOAuthAuthentication":false,"publicNetworkAccess":"Enabled","keyCreationTime":{"key1":"2022-07-17T04:28:55.7260171Z","key2":"2022-07-17T04:28:55.7260171Z"},"allowCrossTenantReplication":true,"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"allowSharedKeyAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"requireInfrastructureEncryption":false,"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-07-17T04:28:55.7416401Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-07-17T04:28:55.7416401Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-07-17T04:28:55.6634675Z","primaryEndpoints":{"dfs":"https://aueastsastd.dfs.core.windows.net/","web":"https://aueastsastd.z8.web.core.windows.net/","blob":"https://aueastsastd.blob.core.windows.net/","queue":"https://aueastsastd.queue.core.windows.net/","table":"https://aueastsastd.table.core.windows.net/","file":"https://aueastsastd.file.core.windows.net/"},"primaryLocation":"australiaeast","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/SCRunnertestvmrg-AustraliaEast/providers/Microsoft.Storage/storageAccounts/scrunner4p3t72mzheluc","name":"scrunner4p3t72mzheluc","type":"Microsoft.Storage/storageAccounts","location":"australiaeast","tags":{},"properties":{"keyCreationTime":{"key1":"2021-04-13T22:35:36.6210942Z","key2":"2021-04-13T22:35:36.6210942Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-04-13T22:35:36.6210942Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-04-13T22:35:36.6210942Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2021-04-13T22:35:36.5429508Z","primaryEndpoints":{"blob":"https://scrunner4p3t72mzheluc.blob.core.windows.net/","queue":"https://scrunner4p3t72mzheluc.queue.core.windows.net/","table":"https://scrunner4p3t72mzheluc.table.core.windows.net/","file":"https://scrunner4p3t72mzheluc.file.core.windows.net/"},"primaryLocation":"australiaeast","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsoledii6eozvcuwpj2avhf7oovhly
nnodojeilddqv4awkk363btwtaf3/providers/Microsoft.Storage/storageAccounts/cli6poljdp7io7zprgoxf4ut","name":"cli6poljdp7io7zprgoxf4ut","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-10-14T15:18:09.2031765Z","key2":"2022-10-14T15:18:09.2031765Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.3282100Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.3282100Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-10-14T15:18:09.0938358Z","primaryEndpoints":{"blob":"https://cli6poljdp7io7zprgoxf4ut.blob.core.windows.net/","queue":"https://cli6poljdp7io7zprgoxf4ut.queue.core.windows.net/","table":"https://cli6poljdp7io7zprgoxf4ut.table.core.windows.net/","file":"https://cli6poljdp7io7zprgoxf4ut.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsoleimai244luxkv4qi3pzelwnexgjxzvq5n6g6erlqn6mucntnjyn2lt/providers/Microsoft.Storage/storageAccounts/cliaa3vl7jr6zshgrsmarb6m","name":"cliaa3vl7jr6zshgrsmarb6m","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-10-12T19:18:45.9704319Z","key2":"2022-10-12T19:18:45.9704319Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-12T19:18:46.6892063Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-12T19:18:46.6892063Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-10-12T19:18:45.8923129Z","primaryEndpoints":{"blob":"https://cliaa3vl7jr6zshgrsmarb6m.blob.core.windows.net/","queue":"https://cliaa3vl7jr6zshgrsmarb6m.queue.core.windows.net/","table":"https://cliaa3vl7jr6zshgrsmarb6m.table.core.windows.net/","file":"https://cliaa3vl7jr6zshgrsmarb6m.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsoleki72ehqhjg2r4e7pjpdigny2x6btssuanj3xoh4kzjzraj3htdmcf/providers/Microsoft.Storage/storageAccounts/cliiajrcdqs24ifs47d6yatp","name":"cliiajrcdqs24ifs47d6yatp","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-10-14T15:18:11.3594600Z","key2":"2022-10-14T15:18:11.3594600Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:11.5781836Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:11.5781836Z"}},"keySo
urce":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-10-14T15:18:11.2656994Z","primaryEndpoints":{"blob":"https://cliiajrcdqs24ifs47d6yatp.blob.core.windows.net/","queue":"https://cliiajrcdqs24ifs47d6yatp.queue.core.windows.net/","table":"https://cliiajrcdqs24ifs47d6yatp.table.core.windows.net/","file":"https://cliiajrcdqs24ifs47d6yatp.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsoleuprs6wnhhkthfvk3g347c54erbdpkxj7og4vdd5jrhlsj2i6a4y7z/providers/Microsoft.Storage/storageAccounts/cliit6yvjuhqdolxqkfrxi5p","name":"cliit6yvjuhqdolxqkfrxi5p","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-09-28T02:55:03.4203450Z","key2":"2022-09-28T02:55:03.4203450Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-09-28T02:55:03.8890983Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-09-28T02:55:03.8890983Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-09-28T02:55:03.3421930Z","primaryEndpoints":{"blob":"https://cliit6yvjuhqdolxqkfrxi5p.blob.core.windows.net/","queue":"https://cliit6yvjuhqdolxqkfrxi5p.queue.core.windows.net/","table":"https://cliit6yvjuhqdolxqkfrxi5p.table.core.windows.net/","file":"https://cliit6yvjuhqdolxqkfrxi5p.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002","name":"cli000002","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-10-14T15:18:09.5625791Z","key2":"2022-10-14T15:18:09.5625791Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.8282521Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.8282521Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-10-14T15:18:09.4688102Z","primaryEndpoints":{"blob":"https://cli000002.blob.core.windows.net/","queue":"https://cli000002.queue.core.windows.net/","table":"https://cli000002.table.core.windows.net/","file":"https://cli000002.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsolezqkq37ew5u2eysorcr5mcbsuuavu5dzl77dzdvk4cusb4sxbo43tx/providers/Microsoft.Storage/storageAccounts/clin5xvasz3jj33radbzq2jx","name":"clin5xvasz3jj33radbzq2jx","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-10-14T15:
18:09.1406755Z","key2":"2022-10-14T15:18:09.1406755Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.4376040Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.4376040Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-10-14T15:18:09.0469947Z","primaryEndpoints":{"blob":"https://clin5xvasz3jj33radbzq2jx.blob.core.windows.net/","queue":"https://clin5xvasz3jj33radbzq2jx.queue.core.windows.net/","table":"https://clin5xvasz3jj33radbzq2jx.table.core.windows.net/","file":"https://clin5xvasz3jj33radbzq2jx.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsolefxqkhafj52ep4gg474dntlscgprhtrbvwhs2ff4fvipbw5tg2cexz/providers/Microsoft.Storage/storageAccounts/clipfdv4l2hramy2ysuln7bs","name":"clipfdv4l2hramy2ysuln7bs","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-10-12T19:55:35.0170190Z","key2":"2022-10-12T19:55:35.0170190Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-12T19:55:35.4701769Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-12T19:55:35.4701769Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-10-12T19:55:34.9076957Z","primaryEndpoints":{"blob":"https://clipfdv4l2hramy2ysuln7bs.blob.core.windows.net/","queue":"https://clipfdv4l2hramy2ysuln7bs.queue.core.windows.net/","table":"https://clipfdv4l2hramy2ysuln7bs.table.core.windows.net/","file":"https://clipfdv4l2hramy2ysuln7bs.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/craigw-gui-test_group/providers/Microsoft.Storage/storageAccounts/craigwguitestgroupdiag","name":"craigwguitestgroupdiag","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2021-06-25T20:43:28.9782992Z","key2":"2021-06-25T20:43:28.9782992Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-06-25T20:43:28.9782992Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-06-25T20:43:28.9782992Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2021-06-25T20:43:28.9001463Z","primaryEndpoints":{"blob":"https://craigwguitestgroupdiag.blob.core.windows.net/","queue":"https://craigwguitestgroupdiag.queue.core.windows.net/","table":"https://craigwguitestgroupdiag.table.core.windows.net/","f
ile":"https://craigwguitestgroupdiag.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/guptar2/providers/Microsoft.Storage/storageAccounts/guptar2diagnosticsv1","name":"guptar2diagnosticsv1","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-04-05T17:21:41.8250582Z","key2":"2022-04-05T17:21:41.8250582Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-04-05T17:21:41.8250582Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-04-05T17:21:41.8250582Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-04-05T17:21:41.7313240Z","primaryEndpoints":{"blob":"https://guptar2diagnosticsv1.blob.core.windows.net/","queue":"https://guptar2diagnosticsv1.queue.core.windows.net/","table":"https://guptar2diagnosticsv1.table.core.windows.net/","file":"https://guptar2diagnosticsv1.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/guptar2/providers/Microsoft.Storage/storageAccounts/guptar2diagnosticsv2","name":"guptar2diagnosticsv2","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"publicNetworkAccess":"Enabled","keyCreationTime":{"key1":"2022-04-05T17:22:55.8411567Z","key2":"2022-04-05T17:22:55.8411567Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"resourceAccessRules":[],"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[{"value":"20.98.146.84","action":"Allow"},{"value":"20.83.222.102","action":"Allow"},{"value":"20.98.194.64","action":"Allow"},{"value":"20.69.5.162","action":"Allow"}],"defaultAction":"Deny"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-04-05T17:22:55.8411567Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-04-05T17:22:55.8411567Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-04-05T17:22:55.7318000Z","primaryEndpoints":{"dfs":"https://guptar2diagnosticsv2.dfs.core.windows.net/","web":"https://guptar2diagnosticsv2.z5.web.core.windows.net/","blob":"https://guptar2diagnosticsv2.blob.core.windows.net/","queue":"https://guptar2diagnosticsv2.queue.core.windows.net/","table":"https://guptar2diagnosticsv2.table.core.windows.net/","file":"https://guptar2diagnosticsv2.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/sericonrp-trafficmanager/providers/Microsoft.Storage/storageAccounts/sericonrpdevtmstorage","name":"sericonrpdevtmstorage","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"defaultToOAuthAuthentication":false,"keyCreationTime":{"key1":"2021-09-15T09:23:38.0203325Z","key2":"2021
-09-15T09:23:38.0203325Z"},"allowCrossTenantReplication":true,"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"allowSharedKeyAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-09-15T09:23:38.0360009Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2021-09-15T09:23:38.0360009Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2021-09-15T09:23:37.9265953Z","primaryEndpoints":{"dfs":"https://sericonrpdevtmstorage.dfs.core.windows.net/","web":"https://sericonrpdevtmstorage.z5.web.core.windows.net/","blob":"https://sericonrpdevtmstorage.blob.core.windows.net/","queue":"https://sericonrpdevtmstorage.queue.core.windows.net/","table":"https://sericonrpdevtmstorage.table.core.windows.net/","file":"https://sericonrpdevtmstorage.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/guptar2/providers/Microsoft.Storage/storageAccounts/guptar3storage","name":"guptar3storage","type":"Microsoft.Storage/storageAccounts","location":"westcentralus","tags":{},"properties":{"publicNetworkAccess":"Enabled","keyCreationTime":{"key1":"2022-09-20T21:34:53.7867708Z","key2":"2022-09-20T21:34:53.7867708Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"resourceAccessRules":[],"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[{"value":"20.98.146.84","action":"Allow"},{"value":"20.98.194.64","action":"Allow"},{"value":"20.69.5.162","action":"Allow"},{"value":"20.83.222.102","action":"Allow"}],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-09-20T21:34:53.8024125Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-09-20T21:34:53.8024125Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-09-20T21:34:53.7086332Z","primaryEndpoints":{"blob":"https://guptar3storage.blob.core.windows.net/","queue":"https://guptar3storage.queue.core.windows.net/","table":"https://guptar3storage.table.core.windows.net/","file":"https://guptar3storage.file.core.windows.net/"},"primaryLocation":"westcentralus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rhoover-dev-rg/providers/Microsoft.Storage/storageAccounts/rhooverdevrgdiag","name":"rhooverdevrgdiag","type":"Microsoft.Storage/storageAccounts","location":"westcentralus","tags":{},"properties":{"publicNetworkAccess":"Enabled","keyCreationTime":{"key1":"2022-06-20T19:39:24.4605968Z","key2":"2022-06-20T19:39:24.4605968Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"networkAcls":{"resourceAccessRules":[],"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[{"value":"20.98.146.84","action":"Allow"},{"value":"20.98.194.64","action":"Allow"},{"value":"20.69.5.162","action":"Allow"},{"value":"20.83.222.102","action":"Allow"}],"defaultAction":"Deny"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","e
nabled":true,"lastEnabledTime":"2022-06-20T19:39:24.4762287Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-06-20T19:39:24.4762287Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-06-20T19:39:24.4137057Z","primaryEndpoints":{"blob":"https://rhooverdevrgdiag.blob.core.windows.net/","queue":"https://rhooverdevrgdiag.queue.core.windows.net/","table":"https://rhooverdevrgdiag.table.core.windows.net/","file":"https://rhooverdevrgdiag.file.core.windows.net/"},"primaryLocation":"westcentralus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/scrunnertestvmrg-westcentralus/providers/Microsoft.Storage/storageAccounts/scrunnerrfscmqxeni3uq","name":"scrunnerrfscmqxeni3uq","type":"Microsoft.Storage/storageAccounts","location":"westcentralus","tags":{},"properties":{"keyCreationTime":{"key1":null,"key2":null},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2020-04-10T22:28:55.2104910Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2020-04-10T22:28:55.2104910Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2020-04-10T22:28:55.1479670Z","primaryEndpoints":{"blob":"https://scrunnerrfscmqxeni3uq.blob.core.windows.net/","queue":"https://scrunnerrfscmqxeni3uq.queue.core.windows.net/","table":"https://scrunnerrfscmqxeni3uq.table.core.windows.net/","file":"https://scrunnerrfscmqxeni3uq.file.core.windows.net/"},"primaryLocation":"westcentralus","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/ubuntu-westus3_group/providers/Microsoft.Storage/storageAccounts/ubuntuwestus3groupdiag","name":"ubuntuwestus3groupdiag","type":"Microsoft.Storage/storageAccounts","location":"westus3","tags":{},"properties":{"keyCreationTime":{"key1":"2022-04-18T19:48:38.9882588Z","key2":"2022-04-18T19:48:38.9882588Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_2","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-04-18T19:48:38.9882588Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-04-18T19:48:38.9882588Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-04-18T19:48:38.9258191Z","primaryEndpoints":{"blob":"https://ubuntuwestus3groupdiag.blob.core.windows.net/","queue":"https://ubuntuwestus3groupdiag.queue.core.windows.net/","table":"https://ubuntuwestus3groupdiag.table.core.windows.net/","file":"https://ubuntuwestus3groupdiag.file.core.windows.net/"},"primaryLocation":"westus3","statusOfPrimary":"available"}},{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"StorageV2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/guptar2/providers/Microsoft.Storage/storageAccounts/cloudshellcanarystorage","name":"cloudshellcanarystorage","type":"Microsoft.Storage/storageAccounts","location":"eastus2euap","tags":{},"properties":{"
keyCreationTime":{"key1":"2022-08-01T21:16:45.8824319Z","key2":"2022-08-01T21:16:45.8824319Z"},"allowCrossTenantReplication":false,"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-08-01T21:16:46.0855567Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-08-01T21:16:46.0855567Z"}},"keySource":"Microsoft.Storage"},"accessTier":"Hot","provisioningState":"Succeeded","creationTime":"2022-08-01T21:16:45.8043474Z","primaryEndpoints":{"dfs":"https://cloudshellcanarystorage.dfs.core.windows.net/","web":"https://cloudshellcanarystorage.z3.web.core.windows.net/","blob":"https://cloudshellcanarystorage.blob.core.windows.net/","queue":"https://cloudshellcanarystorage.queue.core.windows.net/","table":"https://cloudshellcanarystorage.table.core.windows.net/","file":"https://cloudshellcanarystorage.file.core.windows.net/"},"primaryLocation":"eastus2euap","statusOfPrimary":"available"}}]}' headers: cache-control: - no-cache content-length: - - '65114' + - '60142' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:14:54 GMT + - Fri, 14 Oct 2022 15:22:40 GMT expires: - '-1' pragma: @@ -3159,31 +2974,32 @@ interactions: x-content-type-options: - nosniff x-ms-original-request-ids: - - 8e0ff669-ebcd-4345-b378-e3788d45d5eb - - 0baff437-1edb-4aad-bed2-70ca6524f4f2 - - bf505621-337a-469d-b3de-4b51c2d20478 - - c5eddcd4-929c-43ff-8e08-002e52ce3ac1 - - f2d0db79-8325-4630-a726-98452a4e2399 - - 969149af-9b76-4e82-8f2f-d6b7ab7b6f20 - - f7ebe0b9-29c7-49f8-bea0-2863f110e65a - - a791869c-9f98-4a0e-9774-19f0b3f93a18 - - 40ca744b-cb2a-4fe1-ac3f-b4419a69271c - - ddc0071b-97f5-4544-a28d-3b09030a109f - - 87b2d234-b821-4dfb-9a9d-f83401774c34 + - effadb6b-d910-4764-955c-450c766ba213 + - 7ea80db4-198a-41b0-b4c0-5723c001878a + - b113a79a-13d1-40f0-adca-fb140d4269c7 + - d6c50a16-fa7c-4f23-8e6d-904b8a047234 + - 4faca546-fae8-40e0-af5f-eaf77b16cae3 + - d45f244c-5d2b-479b-a897-4db755d525a8 + - ed410a59-ab05-43a5-a0e4-2d4d12c9b952 + - ac1f6468-69c7-4891-9522-ae62a1663327 + - dc44407f-9a7f-4061-bc06-97a71e9d9c43 status: code: 200 message: OK - request: - body: '{"location": "westus2", "tags": {}, "properties": {"hardwareProfile": {"vmSize": - "Standard_DS1_v2"}, "storageProfile": {"osDisk": {"osType": "Linux", "name": - "cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474", "caching": "ReadWrite", - "createOption": "FromImage", "diskSizeGB": 30, "managedDisk": {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEFTJX7LNJTECNUAAW7NDBGC7LRALMOLPWHFEOSNW5PPIREVJDRRXVV/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474", + body: '{"location": "westus2", "tags": {"azsecpack": "nonprod", "platformsettings.host_environment.service.platform_optedin_for_rootcerts": + "true"}, "identity": {"type": "UserAssigned", "userAssignedIdentities": {"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2": + {}}}, "properties": {"hardwareProfile": {"vmSize": "Standard_DS1_v2"}, "storageProfile": + {"osDisk": {"osType": "Linux", "name": "cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404", + "caching": "ReadWrite", "createOption": "FromImage", 
"diskSizeGB": 30, "managedDisk": + {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEWKD3ILZXQ5YDMRMOP7DOUUDMPA2UMW4REUEUIXVRLTLFIKQMHSXKN/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404", "storageAccountType": "Premium_LRS"}, "deleteOption": "Detach"}, "dataDisks": - []}, "osProfile": {"computerName": "cli000003", "adminUsername": "rhl", "linuxConfiguration": - {"disablePasswordAuthentication": true, "ssh": {"publicKeys": [{"path": "/home/rhl/.ssh/authorized_keys", - "keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5"}]}, - "provisionVMAgent": true, "patchSettings": {"patchMode": "ImageDefault", "assessmentMode": - "ImageDefault"}}, "secrets": [], "allowExtensionOperations": true, "requireGuestProvisionSignal": + []}, "osProfile": {"computerName": "cli000003", "adminUsername": "rhoover", + "linuxConfiguration": {"disablePasswordAuthentication": true, "ssh": {"publicKeys": + [{"path": "/home/rhoover/.ssh/authorized_keys", "keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\n"}]}, "provisionVMAgent": true, "patchSettings": {"patchMode": + "ImageDefault", "assessmentMode": "ImageDefault"}, "enableVMAgentPlatformUpdates": + false}, "secrets": [], "allowExtensionOperations": true, "requireGuestProvisionSignal": true}, "networkProfile": {"networkInterfaces": [{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic"}]}, "diagnosticsProfile": {"bootDiagnostics": {"enabled": true, "storageUri": "https://cli000002.blob.core.windows.net/"}}}}' headers: @@ -3196,59 +3012,64 @@ interactions: Connection: - keep-alive Content-Length: - - '1741' + - '2341' Content-Type: - application/json ParameterSetName: - -g -n --storage User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": 
\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n - \ \"tags\": {},\r\n \"properties\": {\r\n \"vmId\": \"e048a2e9-76a7-497c-8ed7-829625b39824\",\r\n + \ \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"properties\": {\r\n \"vmId\": \"eee656e1-0c27-4e10-ba68-5622e742e308\",\r\n \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": - \"18.04.202207120\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": - \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n + \"18.04.202209210\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": + \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEFTJX7LNJTECNUAAW7NDBGC7LRALMOLPWHFEOSNW5PPIREVJDRRXVV/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\"\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEWKD3ILZXQ5YDMRMOP7DOUUDMPA2UMW4REUEUIXVRLTLFIKQMHSXKN/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\"\r\n \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": - {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhl\",\r\n + {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhoover\",\r\n \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n - \ \"path\": \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"patchSettings\": {\r\n \"patchMode\": \"ImageDefault\",\r\n - \ \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \"enableVMAgentPlatformUpdates\": - false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": - true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - 
{\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"patchSettings\": {\r\n \"patchMode\": + \"ImageDefault\",\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\": + [],\r\n \"allowExtensionOperations\": true,\r\n \"requireGuestProvisionSignal\": + true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": true,\r\n \"storageUri\": \"https://cli000002.blob.core.windows.net/\"\r\n \ }\r\n },\r\n \"provisioningState\": \"Updating\",\r\n \"timeCreated\": - \"2022-08-04T17:10:47.8380764+00:00\"\r\n }\r\n}" + \"2022-10-14T15:18:48.6932141+00:00\"\r\n }\r\n}" headers: azure-asyncnotification: - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/0ced3b93-2308-4f13-aee3-40e5678e1217?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/b4acdcb1-3dfc-40a9-ba3c-edf25bffc154?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - - '2820' + - '3560' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:14:54 GMT + - Fri, 14 Oct 2022 15:22:42 GMT expires: - '-1' pragma: @@ -3265,7 +3086,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/PutVM3Min;464,Microsoft.Compute/PutVM30Min;2327 + - Microsoft.Compute/PutVM3Min;593,Microsoft.Compute/PutVM30Min;2980 x-ms-ratelimit-remaining-subscription-writes: - '1199' status: @@ -3285,23 +3106,23 @@ interactions: ParameterSetName: - -g -n --storage User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/0ced3b93-2308-4f13-aee3-40e5678e1217?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/b4acdcb1-3dfc-40a9-ba3c-edf25bffc154?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:14:55.5397029+00:00\",\r\n \"endTime\": - \"2022-08-04T17:15:03.508426+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"0ced3b93-2308-4f13-aee3-40e5678e1217\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:22:42.1284354+00:00\",\r\n \"endTime\": + \"2022-10-14T15:22:50.9095971+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"b4acdcb1-3dfc-40a9-ba3c-edf25bffc154\"\r\n}" headers: cache-control: - no-cache content-length: - - '183' + - '184' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:15:25 GMT + - Fri, 14 Oct 2022 15:23:13 GMT expires: - '-1' pragma: @@ -3318,7 +3139,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14935,Microsoft.Compute/GetOperation30Min;29801 + - Microsoft.Compute/GetOperation3Min;14950,Microsoft.Compute/GetOperation30Min;29942 status: code: 200 message: OK @@ -3336,49 +3157,54 @@ interactions: ParameterSetName: - -g -n --storage User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n - \ \"tags\": {},\r\n \"properties\": {\r\n \"vmId\": \"e048a2e9-76a7-497c-8ed7-829625b39824\",\r\n + \ \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"properties\": {\r\n \"vmId\": \"eee656e1-0c27-4e10-ba68-5622e742e308\",\r\n \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": - \"18.04.202207120\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": - \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n + \"18.04.202209210\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": + \"Linux\",\r\n 
\"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEFTJX7LNJTECNUAAW7NDBGC7LRALMOLPWHFEOSNW5PPIREVJDRRXVV/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\"\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEWKD3ILZXQ5YDMRMOP7DOUUDMPA2UMW4REUEUIXVRLTLFIKQMHSXKN/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\"\r\n \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": - {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhl\",\r\n + {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhoover\",\r\n \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n - \ \"path\": \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"patchSettings\": {\r\n \"patchMode\": \"ImageDefault\",\r\n - \ \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \"enableVMAgentPlatformUpdates\": - false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": - true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"patchSettings\": {\r\n \"patchMode\": + \"ImageDefault\",\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\": + [],\r\n \"allowExtensionOperations\": true,\r\n \"requireGuestProvisionSignal\": + true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": true,\r\n \"storageUri\": \"https://cli000002.blob.core.windows.net/\"\r\n \ }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n 
\"timeCreated\": - \"2022-08-04T17:10:47.8380764+00:00\"\r\n }\r\n}" + \"2022-10-14T15:18:48.6932141+00:00\"\r\n }\r\n}" headers: cache-control: - no-cache content-length: - - '2821' + - '3561' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:15:26 GMT + - Fri, 14 Oct 2022 15:23:13 GMT expires: - '-1' pragma: @@ -3395,7 +3221,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/LowCostGet3Min;3974,Microsoft.Compute/LowCostGet30Min;31904 + - Microsoft.Compute/LowCostGet3Min;3954,Microsoft.Compute/LowCostGet30Min;31939 status: code: 200 message: OK @@ -3413,28 +3239,81 @@ interactions: ParameterSetName: - -g -n --storage User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-08-01 response: body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n + \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n + \ \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"properties\": {\r\n \"vmId\": \"eee656e1-0c27-4e10-ba68-5622e742e308\",\r\n + \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n + \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": + \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": + \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": + \"18.04.202209210\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": + \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n + \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n + \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEWKD3ILZXQ5YDMRMOP7DOUUDMPA2UMW4REUEUIXVRLTLFIKQMHSXKN/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\"\r\n + \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": + 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": + {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhoover\",\r\n + \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": + true,\r\n \"ssh\": {\r\n 
\"publicKeys\": [\r\n {\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"patchSettings\": {\r\n \"patchMode\": + \"ImageDefault\",\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\": + [],\r\n \"allowExtensionOperations\": true,\r\n \"requireGuestProvisionSignal\": + true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n + \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": + true,\r\n \"storageUri\": \"https://cli000002.blob.core.windows.net/\"\r\n + \ }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n \"instanceView\": + {\r\n \"vmAgent\": {\r\n \"vmAgentVersion\": \"Unknown\",\r\n + \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/Unavailable\",\r\n + \ \"level\": \"Warning\",\r\n \"displayStatus\": \"Not + Ready\",\r\n \"message\": \"VM status blob is found but not yet + populated.\",\r\n \"time\": \"2022-10-14T15:23:13+00:00\"\r\n }\r\n + \ ]\r\n },\r\n \"disks\": [\r\n {\r\n \"name\": + \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \"statuses\": + [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"time\": \"2022-10-14T15:22:05.6913128+00:00\"\r\n + \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": + {\r\n \"consoleScreenshotBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-clixgzadl-eee656e1-0c27-4e10-ba68-5622e742e308/cli000003.eee656e1-0c27-4e10-ba68-5622e742e308.screenshot.bmp\",\r\n + \ \"serialConsoleLogBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-clixgzadl-eee656e1-0c27-4e10-ba68-5622e742e308/cli000003.eee656e1-0c27-4e10-ba68-5622e742e308.serialconsole.log\"\r\n + \ },\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n + \ {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n + \ \"time\": \"2022-10-14T15:22:50.8939726+00:00\"\r\n },\r\n + \ {\r\n \"code\": \"PowerState/running\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"VM running\"\r\n }\r\n + \ ]\r\n },\r\n \"timeCreated\": \"2022-10-14T15:18:48.6932141+00:00\"\r\n + \ }\r\n}" headers: cache-control: - no-cache content-length: - - '43' + - '5222' content-type: - - application/json; charset=UTF-8 + - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:15:26 GMT + - Fri, 14 Oct 2022 15:23:13 GMT expires: - '-1' pragma: - no-cache server: - - nginx + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3443,8 +3322,8 @@ interactions: - Accept-Encoding 
x-content-type-options: - nosniff - x-frame-options: - - deny + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/LowCostGet3Min;3953,Microsoft.Compute/LowCostGet30Min;31938 status: code: 200 message: OK @@ -3462,76 +3341,27 @@ interactions: ParameterSetName: - -g -n --storage User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-storage/20.1.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002?api-version=2022-05-01 response: body: - string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n - \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n - \ \"tags\": {},\r\n \"properties\": {\r\n \"vmId\": \"e048a2e9-76a7-497c-8ed7-829625b39824\",\r\n - \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n - \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": - \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": - \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": - \"18.04.202207120\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": - \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n - \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n - \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEFTJX7LNJTECNUAAW7NDBGC7LRALMOLPWHFEOSNW5PPIREVJDRRXVV/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\"\r\n - \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": - 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": - {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhl\",\r\n - \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": - true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n - \ \"path\": \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"patchSettings\": {\r\n \"patchMode\": \"ImageDefault\",\r\n - \ \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \"enableVMAgentPlatformUpdates\": - false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": - true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - 
{\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n - \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": - true,\r\n \"storageUri\": \"https://cli000002.blob.core.windows.net/\"\r\n - \ }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n \"instanceView\": - {\r\n \"computerName\": \"cli000003\",\r\n \"osName\": \"ubuntu\",\r\n - \ \"osVersion\": \"18.04\",\r\n \"vmAgent\": {\r\n \"vmAgentVersion\": - \"2.7.3.0\",\r\n \"statuses\": [\r\n {\r\n \"code\": - \"ProvisioningState/succeeded\",\r\n \"level\": \"Info\",\r\n \"displayStatus\": - \"Ready\",\r\n \"message\": \"Guest Agent is running\",\r\n \"time\": - \"2022-08-04T17:15:04+00:00\"\r\n }\r\n ],\r\n \"extensionHandlers\": - []\r\n },\r\n \"disks\": [\r\n {\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n - \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:14:10.9774837+00:00\"\r\n - \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": - {\r\n \"consoleScreenshotBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-cliqeh2b2-e048a2e9-76a7-497c-8ed7-829625b39824/cli000003.e048a2e9-76a7-497c-8ed7-829625b39824.screenshot.bmp\",\r\n - \ \"serialConsoleLogBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-cliqeh2b2-e048a2e9-76a7-497c-8ed7-829625b39824/cli000003.e048a2e9-76a7-497c-8ed7-829625b39824.serialconsole.log\"\r\n - \ },\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n - \ {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n - \ \"time\": \"2022-08-04T17:15:03.4927468+00:00\"\r\n },\r\n - \ {\r\n \"code\": \"PowerState/running\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"VM running\"\r\n }\r\n - \ ]\r\n },\r\n \"timeCreated\": \"2022-08-04T17:10:47.8380764+00:00\"\r\n - \ }\r\n}" + string: '{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002","name":"cli000002","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-10-14T15:18:09.5625791Z","key2":"2022-10-14T15:18:09.5625791Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.8282521Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.8282521Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-10-14T15:18:09.4688102Z","primaryEndpoints":{"blob":"https://cli000002.blob.core.windows.net/","queue":"https://cli000002.queue.core.windows.net/","table":"https://cli000002.table.core.windows.net/","file":"https://cli000002.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}}' headers: cache-control: - no-cache content-length: - - '4575' + - '1259' content-type: - - application/json; charset=utf-8 + - application/json date: - - Thu, 04 
Aug 2022 17:15:26 GMT + - Fri, 14 Oct 2022 15:23:14 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 + - Microsoft-Azure-Storage-Resource-Provider/1.0,Microsoft-HTTPAPI/2.0 Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3540,8 +3370,55 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/LowCostGet3Min;3973,Microsoft.Compute/LowCostGet30Min;31903 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - vm boot-diagnostics enable + Connection: + - keep-alive + ParameterSetName: + - -g -n --storage + User-Agent: + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + response: + body: + string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + headers: + cache-control: + - no-cache + content-length: + - '43' + content-type: + - application/json; charset=UTF-8 + date: + - Fri, 14 Oct 2022 15:23:14 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - deny status: code: 200 message: OK @@ -3561,8 +3438,8 @@ interactions: Content-Type: - application/json User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) method: POST uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default/disableConsole?api-version=2018-05-01 response: @@ -3576,7 +3453,7 @@ interactions: content-type: - application/json; charset=UTF-8 date: - - Thu, 04 Aug 2022 17:15:27 GMT + - Fri, 14 Oct 2022 15:23:15 GMT expires: - '-1' pragma: @@ -3594,7 +3471,7 @@ interactions: x-frame-options: - deny x-ms-ratelimit-remaining-subscription-writes: - - '1198' + - '1199' status: code: 200 message: OK @@ -3610,28 +3487,81 @@ interactions: Connection: - keep-alive User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-08-01 response: body: - string: "{\n \"properties\": {\n \"disabled\": true\n }\n}" + string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n + 
\ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n + \ \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"properties\": {\r\n \"vmId\": \"eee656e1-0c27-4e10-ba68-5622e742e308\",\r\n + \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n + \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": + \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": + \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": + \"18.04.202209210\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": + \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n + \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n + \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEWKD3ILZXQ5YDMRMOP7DOUUDMPA2UMW4REUEUIXVRLTLFIKQMHSXKN/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\"\r\n + \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": + 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": + {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhoover\",\r\n + \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": + true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"patchSettings\": {\r\n \"patchMode\": + \"ImageDefault\",\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\": + [],\r\n \"allowExtensionOperations\": true,\r\n \"requireGuestProvisionSignal\": + true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n + \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": + true,\r\n \"storageUri\": \"https://cli000002.blob.core.windows.net/\"\r\n + \ }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n \"instanceView\": + {\r\n \"computerName\": \"cli000003\",\r\n \"osName\": \"ubuntu\",\r\n + \ \"osVersion\": \"18.04\",\r\n \"vmAgent\": {\r\n \"vmAgentVersion\": + \"2.8.0.11\",\r\n \"statuses\": [\r\n {\r\n \"code\": + 
\"ProvisioningState/succeeded\",\r\n \"level\": \"Info\",\r\n \"displayStatus\": + \"Ready\",\r\n \"message\": \"Guest Agent is running\",\r\n \"time\": + \"2022-10-14T15:23:15+00:00\"\r\n }\r\n ],\r\n \"extensionHandlers\": + []\r\n },\r\n \"disks\": [\r\n {\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n + \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"time\": \"2022-10-14T15:22:05.6913128+00:00\"\r\n + \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": + {\r\n \"consoleScreenshotBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-clixgzadl-eee656e1-0c27-4e10-ba68-5622e742e308/cli000003.eee656e1-0c27-4e10-ba68-5622e742e308.screenshot.bmp\",\r\n + \ \"serialConsoleLogBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-clixgzadl-eee656e1-0c27-4e10-ba68-5622e742e308/cli000003.eee656e1-0c27-4e10-ba68-5622e742e308.serialconsole.log\"\r\n + \ },\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n + \ {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n + \ \"time\": \"2022-10-14T15:22:50.8939726+00:00\"\r\n },\r\n + \ {\r\n \"code\": \"PowerState/running\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"VM running\"\r\n }\r\n + \ ]\r\n },\r\n \"timeCreated\": \"2022-10-14T15:18:48.6932141+00:00\"\r\n + \ }\r\n}" headers: cache-control: - no-cache content-length: - - '42' + - '5316' content-type: - - application/json; charset=UTF-8 + - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:15:27 GMT + - Fri, 14 Oct 2022 15:23:15 GMT expires: - '-1' pragma: - no-cache server: - - nginx + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3640,8 +3570,8 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-frame-options: - - deny + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/LowCostGet3Min;3952,Microsoft.Compute/LowCostGet30Min;31937 status: code: 200 message: OK @@ -3653,30 +3583,70 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - serial-console enable + - serial-console disable Connection: - keep-alive - Content-Length: - - '0' - Content-Type: + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-storage/20.1.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002?api-version=2022-05-01 + response: + body: + string: 
'{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002","name":"cli000002","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-10-14T15:18:09.5625791Z","key2":"2022-10-14T15:18:09.5625791Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.8282521Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.8282521Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-10-14T15:18:09.4688102Z","primaryEndpoints":{"blob":"https://cli000002.blob.core.windows.net/","queue":"https://cli000002.queue.core.windows.net/","table":"https://cli000002.table.core.windows.net/","file":"https://cli000002.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}}' + headers: + cache-control: + - no-cache + content-length: + - '1259' + content-type: - application/json + date: + - Fri, 14 Oct 2022 15:23:16 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-Azure-Storage-Resource-Provider/1.0,Microsoft-HTTPAPI/2.0 Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - serial-console disable + Connection: + - keep-alive User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default/enableConsole?api-version=2018-05-01 + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 response: body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + string: "{\n \"properties\": {\n \"disabled\": true\n }\n}" headers: cache-control: - no-cache content-length: - - '43' + - '42' content-type: - application/json; charset=UTF-8 date: - - Thu, 04 Aug 2022 17:15:28 GMT + - Fri, 14 Oct 2022 15:23:16 GMT expires: - '-1' pragma: @@ -3693,8 +3663,6 @@ interactions: - nosniff x-frame-options: - deny - x-ms-ratelimit-remaining-subscription-writes: - - '1198' status: code: 200 message: OK @@ -3709,11 +3677,15 @@ interactions: - serial-console enable Connection: - keep-alive + Content-Length: + - '0' + Content-Type: + - application/json User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) - method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default/enableConsole?api-version=2018-05-01 response: body: string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" @@ -3725,7 +3697,7 @@ interactions: content-type: - application/json; charset=UTF-8 date: - - Thu, 04 Aug 2022 17:15:28 GMT + - Fri, 14 Oct 2022 15:23:17 GMT expires: - '-1' pragma: @@ -3742,6 +3714,8 @@ interactions: - nosniff x-frame-options: - deny + x-ms-ratelimit-remaining-subscription-writes: + - '1199' status: code: 200 message: OK @@ -3757,69 +3731,74 @@ interactions: Connection: - keep-alive User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n - \ \"tags\": {},\r\n \"properties\": {\r\n \"vmId\": \"e048a2e9-76a7-497c-8ed7-829625b39824\",\r\n + \ \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"properties\": {\r\n \"vmId\": \"eee656e1-0c27-4e10-ba68-5622e742e308\",\r\n \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": - \"18.04.202207120\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": - \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n + \"18.04.202209210\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": + \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n - \ \"id\": 
\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEFTJX7LNJTECNUAAW7NDBGC7LRALMOLPWHFEOSNW5PPIREVJDRRXVV/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\"\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEWKD3ILZXQ5YDMRMOP7DOUUDMPA2UMW4REUEUIXVRLTLFIKQMHSXKN/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\"\r\n \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": - {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhl\",\r\n + {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhoover\",\r\n \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n - \ \"path\": \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"patchSettings\": {\r\n \"patchMode\": \"ImageDefault\",\r\n - \ \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \"enableVMAgentPlatformUpdates\": - false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": - true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"patchSettings\": {\r\n \"patchMode\": + \"ImageDefault\",\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\": + [],\r\n \"allowExtensionOperations\": true,\r\n \"requireGuestProvisionSignal\": + true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": true,\r\n \"storageUri\": \"https://cli000002.blob.core.windows.net/\"\r\n \ }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n \"instanceView\": {\r\n \"computerName\": \"cli000003\",\r\n \"osName\": \"ubuntu\",\r\n \ \"osVersion\": \"18.04\",\r\n \"vmAgent\": {\r\n \"vmAgentVersion\": - \"2.7.3.0\",\r\n \"statuses\": [\r\n {\r\n \"code\": + \"2.8.0.11\",\r\n 
\"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": \"Guest Agent is running\",\r\n \"time\": - \"2022-08-04T17:15:04+00:00\"\r\n }\r\n ],\r\n \"extensionHandlers\": - []\r\n },\r\n \"disks\": [\r\n {\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n + \"2022-10-14T15:23:15+00:00\"\r\n }\r\n ],\r\n \"extensionHandlers\": + []\r\n },\r\n \"disks\": [\r\n {\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:14:10.9774837+00:00\"\r\n + succeeded\",\r\n \"time\": \"2022-10-14T15:22:05.6913128+00:00\"\r\n \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": - {\r\n \"consoleScreenshotBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-cliqeh2b2-e048a2e9-76a7-497c-8ed7-829625b39824/cli000003.e048a2e9-76a7-497c-8ed7-829625b39824.screenshot.bmp\",\r\n - \ \"serialConsoleLogBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-cliqeh2b2-e048a2e9-76a7-497c-8ed7-829625b39824/cli000003.e048a2e9-76a7-497c-8ed7-829625b39824.serialconsole.log\"\r\n + {\r\n \"consoleScreenshotBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-clixgzadl-eee656e1-0c27-4e10-ba68-5622e742e308/cli000003.eee656e1-0c27-4e10-ba68-5622e742e308.screenshot.bmp\",\r\n + \ \"serialConsoleLogBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-clixgzadl-eee656e1-0c27-4e10-ba68-5622e742e308/cli000003.eee656e1-0c27-4e10-ba68-5622e742e308.serialconsole.log\"\r\n \ },\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n \ {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n - \ \"time\": \"2022-08-04T17:15:03.4927468+00:00\"\r\n },\r\n + \ \"time\": \"2022-10-14T15:22:50.8939726+00:00\"\r\n },\r\n \ {\r\n \"code\": \"PowerState/running\",\r\n \"level\": \"Info\",\r\n \"displayStatus\": \"VM running\"\r\n }\r\n - \ ]\r\n },\r\n \"timeCreated\": \"2022-08-04T17:10:47.8380764+00:00\"\r\n + \ ]\r\n },\r\n \"timeCreated\": \"2022-10-14T15:18:48.6932141+00:00\"\r\n \ }\r\n}" headers: cache-control: - no-cache content-length: - - '4575' + - '5316' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:15:28 GMT + - Fri, 14 Oct 2022 15:23:18 GMT expires: - '-1' pragma: @@ -3836,7 +3815,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/LowCostGet3Min;3972,Microsoft.Compute/LowCostGet30Min;31902 + - Microsoft.Compute/LowCostGet3Min;3951,Microsoft.Compute/LowCostGet30Min;31936 status: code: 200 message: OK @@ -3848,87 +3827,76 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - vm stop + - serial-console enable Connection: - keep-alive - Content-Length: - - '0' - ParameterSetName: - - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003/powerOff?skipShutdown=false&api-version=2022-03-01 + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-storage/20.1.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) + method: GET + 
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002?api-version=2022-05-01 response: body: - string: '' + string: '{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002","name":"cli000002","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-10-14T15:18:09.5625791Z","key2":"2022-10-14T15:18:09.5625791Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.8282521Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.8282521Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-10-14T15:18:09.4688102Z","primaryEndpoints":{"blob":"https://cli000002.blob.core.windows.net/","queue":"https://cli000002.queue.core.windows.net/","table":"https://cli000002.table.core.windows.net/","file":"https://cli000002.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}}' headers: - azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/db0e3f78-459d-4638-961f-72e4a31d6a15?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 cache-control: - no-cache content-length: - - '0' + - '1259' + content-type: + - application/json date: - - Thu, 04 Aug 2022 17:15:29 GMT + - Fri, 14 Oct 2022 15:23:18 GMT expires: - '-1' - location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/db0e3f78-459d-4638-961f-72e4a31d6a15?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 pragma: - no-cache server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 + - Microsoft-Azure-Storage-Resource-Provider/1.0,Microsoft-HTTPAPI/2.0 Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/UpdateVM3Min;236,Microsoft.Compute/UpdateVM30Min;1192 - x-ms-ratelimit-remaining-subscription-writes: - - '1198' status: - code: 202 - message: Accepted + code: 200 + message: OK - request: body: null headers: Accept: - - '*/*' + - application/json Accept-Encoding: - gzip, deflate CommandName: - - vm stop + - serial-console enable Connection: - keep-alive - ParameterSetName: - - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/db0e3f78-459d-4638-961f-72e4a31d6a15?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:15:29.3363648+00:00\",\r\n \"endTime\": - \"2022-08-04T17:15:36.2269196+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"db0e3f78-459d-4638-961f-72e4a31d6a15\"\r\n}" + string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" headers: cache-control: - no-cache content-length: - - '184' + - '43' content-type: - - application/json; charset=utf-8 + - application/json; charset=UTF-8 date: - - Thu, 04 Aug 2022 17:15:58 GMT + - Fri, 14 Oct 2022 15:23:18 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 + - nginx strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3937,8 +3905,8 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14940,Microsoft.Compute/GetOperation30Min;29794 + x-frame-options: + - deny status: code: 200 message: OK @@ -3946,31 +3914,39 @@ interactions: body: null headers: Accept: - - '*/*' + - application/json Accept-Encoding: - gzip, deflate CommandName: - vm stop Connection: - keep-alive + Content-Length: + - '0' ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/db0e3f78-459d-4638-961f-72e4a31d6a15?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003/powerOff?skipShutdown=false&api-version=2022-08-01 response: body: string: '' headers: + azure-asyncnotification: + - Enabled + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/b46d7d91-5254-4d91-a114-623644cc760d?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - '0' date: - - Thu, 04 Aug 2022 17:15:58 GMT + - Fri, 14 Oct 2022 15:23:20 GMT expires: - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/b46d7d91-5254-4d91-a114-623644cc760d?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 pragma: - no-cache server: @@ -3981,15 +3957,17 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14939,Microsoft.Compute/GetOperation30Min;29793 + - Microsoft.Compute/UpdateVM3Min;234,Microsoft.Compute/UpdateVM30Min;1192 + x-ms-ratelimit-remaining-subscription-writes: + - '1199' status: - code: 200 - message: OK + code: 202 + message: Accepted - request: body: null headers: Accept: - - application/json + - '*/*' Accept-Encoding: - gzip, deflate CommandName: @@ -3999,28 +3977,30 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 
azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/b46d7d91-5254-4d91-a114-623644cc760d?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:23:20.4405599+00:00\",\r\n \"endTime\": + \"2022-10-14T15:23:28.4092348+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"b46d7d91-5254-4d91-a114-623644cc760d\"\r\n}" headers: cache-control: - no-cache content-length: - - '43' + - '184' content-type: - - application/json; charset=UTF-8 + - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:16:00 GMT + - Fri, 14 Oct 2022 15:23:49 GMT expires: - '-1' pragma: - no-cache server: - - nginx + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -4029,8 +4009,51 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-frame-options: - - deny + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/GetOperation3Min;14947,Microsoft.Compute/GetOperation30Min;29929 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - vm stop + Connection: + - keep-alive + ParameterSetName: + - -g -n + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/b46d7d91-5254-4d91-a114-623644cc760d?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Fri, 14 Oct 2022 15:23:49 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/GetOperation3Min;14946,Microsoft.Compute/GetOperation30Min;29928 status: code: 200 message: OK @@ -4048,69 +4071,74 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-08-01 response: body: string: "{\r\n \"name\": 
\"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n - \ \"tags\": {},\r\n \"properties\": {\r\n \"vmId\": \"e048a2e9-76a7-497c-8ed7-829625b39824\",\r\n + \ \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"properties\": {\r\n \"vmId\": \"eee656e1-0c27-4e10-ba68-5622e742e308\",\r\n \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": - \"18.04.202207120\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": - \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n + \"18.04.202209210\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": + \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEFTJX7LNJTECNUAAW7NDBGC7LRALMOLPWHFEOSNW5PPIREVJDRRXVV/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\"\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEWKD3ILZXQ5YDMRMOP7DOUUDMPA2UMW4REUEUIXVRLTLFIKQMHSXKN/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\"\r\n \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": - {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhl\",\r\n + {\r\n \"computerName\": \"cli000003\",\r\n \"adminUsername\": \"rhoover\",\r\n \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n - \ \"path\": \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"patchSettings\": {\r\n \"patchMode\": \"ImageDefault\",\r\n - \ \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \"enableVMAgentPlatformUpdates\": - false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": - true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - 
{\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"patchSettings\": {\r\n \"patchMode\": + \"ImageDefault\",\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\": + [],\r\n \"allowExtensionOperations\": true,\r\n \"requireGuestProvisionSignal\": + true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": true,\r\n \"storageUri\": \"https://cli000002.blob.core.windows.net/\"\r\n \ }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n \"instanceView\": {\r\n \"computerName\": \"cli000003\",\r\n \"osName\": \"ubuntu\",\r\n \ \"osVersion\": \"18.04\",\r\n \"vmAgent\": {\r\n \"vmAgentVersion\": - \"2.7.3.0\",\r\n \"statuses\": [\r\n {\r\n \"code\": + \"2.8.0.11\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": \"Guest Agent is running\",\r\n \"time\": - \"2022-08-04T17:15:04+00:00\"\r\n }\r\n ],\r\n \"extensionHandlers\": - []\r\n },\r\n \"disks\": [\r\n {\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n + \"2022-10-14T15:23:15+00:00\"\r\n }\r\n ],\r\n \"extensionHandlers\": + []\r\n },\r\n \"disks\": [\r\n {\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:14:10.9774837+00:00\"\r\n + succeeded\",\r\n \"time\": \"2022-10-14T15:22:05.6913128+00:00\"\r\n \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": - {\r\n \"consoleScreenshotBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-cliqeh2b2-e048a2e9-76a7-497c-8ed7-829625b39824/cli000003.e048a2e9-76a7-497c-8ed7-829625b39824.screenshot.bmp\",\r\n - \ \"serialConsoleLogBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-cliqeh2b2-e048a2e9-76a7-497c-8ed7-829625b39824/cli000003.e048a2e9-76a7-497c-8ed7-829625b39824.serialconsole.log\"\r\n + {\r\n \"consoleScreenshotBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-clixgzadl-eee656e1-0c27-4e10-ba68-5622e742e308/cli000003.eee656e1-0c27-4e10-ba68-5622e742e308.screenshot.bmp\",\r\n + \ \"serialConsoleLogBlobUri\": 
\"https://cli000002.blob.core.windows.net/bootdiagnostics-clixgzadl-eee656e1-0c27-4e10-ba68-5622e742e308/cli000003.eee656e1-0c27-4e10-ba68-5622e742e308.serialconsole.log\"\r\n \ },\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n \ {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n - \ \"time\": \"2022-08-04T17:15:36.2269196+00:00\"\r\n },\r\n + \ \"time\": \"2022-10-14T15:23:28.3936754+00:00\"\r\n },\r\n \ {\r\n \"code\": \"PowerState/stopped\",\r\n \"level\": \"Info\",\r\n \"displayStatus\": \"VM stopped\"\r\n }\r\n - \ ]\r\n },\r\n \"timeCreated\": \"2022-08-04T17:10:47.8380764+00:00\"\r\n + \ ]\r\n },\r\n \"timeCreated\": \"2022-10-14T15:18:48.6932141+00:00\"\r\n \ }\r\n}" headers: cache-control: - no-cache content-length: - - '4575' + - '5316' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:16:00 GMT + - Fri, 14 Oct 2022 15:23:51 GMT expires: - '-1' pragma: @@ -4127,7 +4155,102 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/LowCostGet3Min;3974,Microsoft.Compute/LowCostGet30Min;31900 + - Microsoft.Compute/LowCostGet3Min;3957,Microsoft.Compute/LowCostGet30Min;31934 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - vm stop + Connection: + - keep-alive + ParameterSetName: + - -g -n + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-storage/20.1.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002?api-version=2022-05-01 + response: + body: + string: '{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002","name":"cli000002","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-10-14T15:18:09.5625791Z","key2":"2022-10-14T15:18:09.5625791Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.8282521Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.8282521Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-10-14T15:18:09.4688102Z","primaryEndpoints":{"blob":"https://cli000002.blob.core.windows.net/","queue":"https://cli000002.queue.core.windows.net/","table":"https://cli000002.table.core.windows.net/","file":"https://cli000002.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}}' + headers: + cache-control: + - no-cache + content-length: + - '1259' + content-type: + - application/json + date: + - Fri, 14 Oct 2022 15:23:51 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-Azure-Storage-Resource-Provider/1.0,Microsoft-HTTPAPI/2.0 Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + 
x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - vm stop + Connection: + - keep-alive + ParameterSetName: + - -g -n + User-Agent: + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + response: + body: + string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + headers: + cache-control: + - no-cache + content-length: + - '43' + content-type: + - application/json; charset=UTF-8 + date: + - Fri, 14 Oct 2022 15:23:52 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - deny status: code: 200 message: OK @@ -4147,25 +4270,27 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003/deallocate?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003/deallocate?api-version=2022-08-01 response: body: string: '' headers: + azure-asyncnotification: + - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/bdff402c-bf07-4db1-90ea-42de7c3442b5?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/cc0ddb8b-c140-459c-b25d-d4da08a880cd?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - '0' date: - - Thu, 04 Aug 2022 17:16:00 GMT + - Fri, 14 Oct 2022 15:23:53 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/bdff402c-bf07-4db1-90ea-42de7c3442b5?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/cc0ddb8b-c140-459c-b25d-d4da08a880cd?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 pragma: - no-cache server: @@ -4176,9 +4301,9 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/DeleteVM3Min;238,Microsoft.Compute/DeleteVM30Min;1191 + - Microsoft.Compute/DeleteVM3Min;238,Microsoft.Compute/DeleteVM30Min;1197 x-ms-ratelimit-remaining-subscription-writes: - - '1198' + - '1199' status: code: 202 message: Accepted @@ -4196,22 +4321,22 @@ interactions: ParameterSetName: - -g -n 
User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/bdff402c-bf07-4db1-90ea-42de7c3442b5?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/cc0ddb8b-c140-459c-b25d-d4da08a880cd?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:16:01.1486551+00:00\",\r\n \"status\": - \"InProgress\",\r\n \"name\": \"bdff402c-bf07-4db1-90ea-42de7c3442b5\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:23:53.174687+00:00\",\r\n \"status\": + \"InProgress\",\r\n \"name\": \"cc0ddb8b-c140-459c-b25d-d4da08a880cd\"\r\n}" headers: cache-control: - no-cache content-length: - - '134' + - '133' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:16:11 GMT + - Fri, 14 Oct 2022 15:24:03 GMT expires: - '-1' pragma: @@ -4228,7 +4353,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14949,Microsoft.Compute/GetOperation30Min;29789 + - Microsoft.Compute/GetOperation3Min;14950,Microsoft.Compute/GetOperation30Min;29924 status: code: 200 message: OK @@ -4246,23 +4371,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/bdff402c-bf07-4db1-90ea-42de7c3442b5?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/cc0ddb8b-c140-459c-b25d-d4da08a880cd?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:16:01.1486551+00:00\",\r\n \"endTime\": - \"2022-08-04T17:16:34.8359408+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"bdff402c-bf07-4db1-90ea-42de7c3442b5\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:23:53.174687+00:00\",\r\n \"endTime\": + \"2022-10-14T15:24:27.299336+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"cc0ddb8b-c140-459c-b25d-d4da08a880cd\"\r\n}" headers: cache-control: - no-cache content-length: - - '184' + - '182' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:16:47 GMT + - Fri, 14 Oct 2022 15:24:38 GMT expires: - '-1' pragma: @@ -4279,7 +4404,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14952,Microsoft.Compute/GetOperation30Min;29778 + - Microsoft.Compute/GetOperation3Min;14954,Microsoft.Compute/GetOperation30Min;29916 status: code: 200 message: OK @@ -4297,9 +4422,9 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 
azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/bdff402c-bf07-4db1-90ea-42de7c3442b5?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/cc0ddb8b-c140-459c-b25d-d4da08a880cd?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 response: body: string: '' @@ -4309,7 +4434,7 @@ interactions: content-length: - '0' date: - - Thu, 04 Aug 2022 17:16:47 GMT + - Fri, 14 Oct 2022 15:24:38 GMT expires: - '-1' pragma: @@ -4322,7 +4447,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14951,Microsoft.Compute/GetOperation30Min;29777 + - Microsoft.Compute/GetOperation3Min;14953,Microsoft.Compute/GetOperation30Min;29915 status: code: 200 message: OK @@ -4340,28 +4465,73 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-08-01 response: body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n + \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n + \ \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"properties\": {\r\n \"vmId\": \"eee656e1-0c27-4e10-ba68-5622e742e308\",\r\n + \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n + \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": + \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": + \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": + \"18.04.202209210\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": + \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n + \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n + \ \"managedDisk\": {\r\n \"id\": 
\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEWKD3ILZXQ5YDMRMOP7DOUUDMPA2UMW4REUEUIXVRLTLFIKQMHSXKN/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\"\r\n + \ },\r\n \"deleteOption\": \"Detach\"\r\n },\r\n \"dataDisks\": + []\r\n },\r\n \"osProfile\": {\r\n \"computerName\": \"cli000003\",\r\n + \ \"adminUsername\": \"rhoover\",\r\n \"linuxConfiguration\": {\r\n + \ \"disablePasswordAuthentication\": true,\r\n \"ssh\": {\r\n + \ \"publicKeys\": [\r\n {\r\n \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n + \ \"keyData\": \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"patchSettings\": {\r\n \"patchMode\": + \"ImageDefault\",\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\": + [],\r\n \"allowExtensionOperations\": true,\r\n \"requireGuestProvisionSignal\": + true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n + \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": + true,\r\n \"storageUri\": \"https://cli000002.blob.core.windows.net/\"\r\n + \ }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n \"instanceView\": + {\r\n \"disks\": [\r\n {\r\n \"name\": \"cli000003_OsDisk_1_7d6d86c836b3467ba1558092f932f404\",\r\n + \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"time\": \"2022-10-14T15:24:27.1118215+00:00\"\r\n + \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": + {\r\n \"consoleScreenshotBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-clixgzadl-eee656e1-0c27-4e10-ba68-5622e742e308/cli000003.eee656e1-0c27-4e10-ba68-5622e742e308.screenshot.bmp\",\r\n + \ \"serialConsoleLogBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-clixgzadl-eee656e1-0c27-4e10-ba68-5622e742e308/cli000003.eee656e1-0c27-4e10-ba68-5622e742e308.serialconsole.log\"\r\n + \ },\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n + \ {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n + \ \"time\": \"2022-10-14T15:24:27.1274768+00:00\"\r\n },\r\n + \ {\r\n \"code\": \"PowerState/deallocated\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"VM deallocated\"\r\n }\r\n + \ ]\r\n },\r\n \"timeCreated\": \"2022-10-14T15:18:48.6932141+00:00\"\r\n + \ }\r\n}" headers: cache-control: - no-cache content-length: - - '43' + - '4774' content-type: - - application/json; charset=UTF-8 + - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:16:47 GMT + - Fri, 14 Oct 2022 15:24:40 GMT expires: - '-1' pragma: - no-cache server: - - nginx + - 
Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -4370,8 +4540,8 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-frame-options: - - deny + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/LowCostGet3Min;3970,Microsoft.Compute/LowCostGet30Min;31925 status: code: 200 message: OK @@ -4389,68 +4559,27 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-storage/20.1.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002?api-version=2022-05-01 response: body: - string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003\",\r\n - \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus2\",\r\n - \ \"tags\": {},\r\n \"properties\": {\r\n \"vmId\": \"e048a2e9-76a7-497c-8ed7-829625b39824\",\r\n - \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n - \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": - \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": - \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": - \"18.04.202207120\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": - \"Linux\",\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n - \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n - \ \"managedDisk\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/CLI_TEST_SERIALCONSOLEFTJX7LNJTECNUAAW7NDBGC7LRALMOLPWHFEOSNW5PPIREVJDRRXVV/providers/Microsoft.Compute/disks/cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\"\r\n - \ },\r\n \"deleteOption\": \"Detach\"\r\n },\r\n \"dataDisks\": - []\r\n },\r\n \"osProfile\": {\r\n \"computerName\": \"cli000003\",\r\n - \ \"adminUsername\": \"rhl\",\r\n \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": - true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n - \ \"path\": \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"patchSettings\": {\r\n \"patchMode\": \"ImageDefault\",\r\n - \ \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \"enableVMAgentPlatformUpdates\": - false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": - true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - 
{\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/networkInterfaces/cli000003VMNic\"}]},\r\n - \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": - true,\r\n \"storageUri\": \"https://cli000002.blob.core.windows.net/\"\r\n - \ }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n \"instanceView\": - {\r\n \"disks\": [\r\n {\r\n \"name\": \"cli000003_OsDisk_1_fac1eec67dc244d1a0ebc351341c4474\",\r\n - \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:16:34.5390781+00:00\"\r\n - \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": - {\r\n \"consoleScreenshotBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-cliqeh2b2-e048a2e9-76a7-497c-8ed7-829625b39824/cli000003.e048a2e9-76a7-497c-8ed7-829625b39824.screenshot.bmp\",\r\n - \ \"serialConsoleLogBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-cliqeh2b2-e048a2e9-76a7-497c-8ed7-829625b39824/cli000003.e048a2e9-76a7-497c-8ed7-829625b39824.serialconsole.log\"\r\n - \ },\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n - \ {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n - \ \"time\": \"2022-08-04T17:16:34.5547235+00:00\"\r\n },\r\n - \ {\r\n \"code\": \"PowerState/deallocated\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"VM deallocated\"\r\n }\r\n - \ ]\r\n },\r\n \"timeCreated\": \"2022-08-04T17:10:47.8380764+00:00\"\r\n - \ }\r\n}" + string: '{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002","name":"cli000002","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-10-14T15:18:09.5625791Z","key2":"2022-10-14T15:18:09.5625791Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.8282521Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.8282521Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-10-14T15:18:09.4688102Z","primaryEndpoints":{"blob":"https://cli000002.blob.core.windows.net/","queue":"https://cli000002.queue.core.windows.net/","table":"https://cli000002.table.core.windows.net/","file":"https://cli000002.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}}' headers: cache-control: - no-cache content-length: - - '4034' + - '1259' content-type: - - application/json; charset=utf-8 + - application/json date: - - Thu, 04 Aug 2022 17:16:48 GMT + - Fri, 14 Oct 2022 15:24:40 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 + - Microsoft-Azure-Storage-Resource-Provider/1.0,Microsoft-HTTPAPI/2.0 Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -4459,8 +4588,55 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - 
x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/LowCostGet3Min;3976,Microsoft.Compute/LowCostGet30Min;31897 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - vm deallocate + Connection: + - keep-alive + ParameterSetName: + - -g -n + User-Agent: + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + response: + body: + string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + headers: + cache-control: + - no-cache + content-length: + - '43' + content-type: + - application/json; charset=UTF-8 + date: + - Fri, 14 Oct 2022 15:24:40 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - deny status: code: 200 message: OK diff --git a/src/serial-console/azext_serialconsole/tests/latest/recordings/test_check_resource_VMSS.yaml b/src/serial-console/azext_serialconsole/tests/latest/recordings/test_check_resource_VMSS.yaml index 6be68f040c8..f64b8046922 100644 --- a/src/serial-console/azext_serialconsole/tests/latest/recordings/test_check_resource_VMSS.yaml +++ b/src/serial-console/azext_serialconsole/tests/latest/recordings/test_check_resource_VMSS.yaml @@ -11,56 +11,9 @@ interactions: Connection: - keep-alive User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 - response: - body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" - headers: - cache-control: - - no-cache - content-length: - - '43' - content-type: - - application/json; charset=UTF-8 - date: - - Thu, 04 Aug 2022 17:10:29 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - nginx - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-frame-options: - - deny - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - unknown - Connection: - - keep-alive - User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-08-01 response: body: string: '{"error":{"code":"ResourceNotFound","message":"The Resource 
''Microsoft.Compute/virtualMachines/cli000003'' @@ -74,7 +27,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:10:30 GMT + - Fri, 14 Oct 2022 15:18:30 GMT expires: - '-1' pragma: @@ -100,9 +53,9 @@ interactions: Connection: - keep-alive User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-08-01 response: body: string: '{"error":{"code":"ResourceNotFound","message":"The Resource ''Microsoft.Compute/virtualMachineScaleSets/cli000003'' @@ -116,7 +69,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:10:30 GMT + - Fri, 14 Oct 2022 15:18:30 GMT expires: - '-1' pragma: @@ -142,56 +95,9 @@ interactions: Connection: - keep-alive User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 - response: - body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" - headers: - cache-control: - - no-cache - content-length: - - '43' - content-type: - - application/json; charset=UTF-8 - date: - - Thu, 04 Aug 2022 17:10:30 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - nginx - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-frame-options: - - deny - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - unknown - Connection: - - keep-alive - User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/0/instanceView?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/0/instanceView?api-version=2022-08-01 response: body: string: '{"error":{"code":"ParentResourceNotFound","message":"Can not perform @@ -205,7 +111,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:10:30 GMT + - Fri, 14 Oct 2022 15:18:30 GMT expires: - '-1' pragma: @@ -296,13 +202,13 @@ interactions: content-type: - text/plain; charset=utf-8 date: - - Thu, 04 Aug 2022 17:10:31 GMT 
+ - Fri, 14 Oct 2022 15:18:33 GMT etag: - W/"41b202f4dc5098d126019dc00721a4c5e30df0c5196794514fadc3710ee2a5cb" expires: - - Thu, 04 Aug 2022 17:15:31 GMT + - Fri, 14 Oct 2022 15:23:33 GMT source-age: - - '153' + - '1' strict-transport-security: - max-age=31536000 vary: @@ -316,15 +222,15 @@ interactions: x-content-type-options: - nosniff x-fastly-request-id: - - 2efe8a199373fd72a8df5a2a83bccb2a320d768c + - 1c63a2f9d8f5190151db137dbbdeefb50ea190df x-frame-options: - deny x-github-request-id: - - 5064:23C7:122D3E:1DAA5F:62EBFB90 + - 0807:11F5:8F72A:FB4B1:63497DC8 x-served-by: - - cache-pao17443-PAO + - cache-dal2120089-DAL x-timer: - - S1659633032.524216,VS0,VE1 + - S1665760713.155581,VS0,VE1 x-xss-protection: - 1; mode=block status: @@ -344,13 +250,13 @@ interactions: ParameterSetName: - -g -n --image --instance-count -l User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/publishers/Canonical/artifacttypes/vmimage/offers/UbuntuServer/skus/18.04-LTS/versions?$top=1&$orderby=name%20desc&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/publishers/Canonical/artifacttypes/vmimage/offers/UbuntuServer/skus/18.04-LTS/versions?$top=1&$orderby=name%20desc&api-version=2022-08-01 response: body: - string: "[\r\n {\r\n \"location\": \"westus2\",\r\n \"name\": \"18.04.202207120\",\r\n - \ \"id\": \"/Subscriptions/00000000-0000-0000-0000-000000000000/Providers/Microsoft.Compute/Locations/westus2/Publishers/Canonical/ArtifactTypes/VMImage/Offers/UbuntuServer/Skus/18.04-LTS/Versions/18.04.202207120\"\r\n + string: "[\r\n {\r\n \"location\": \"westus2\",\r\n \"name\": \"18.04.202209210\",\r\n + \ \"id\": \"/Subscriptions/00000000-0000-0000-0000-000000000000/Providers/Microsoft.Compute/Locations/westus2/Publishers/Canonical/ArtifactTypes/VMImage/Offers/UbuntuServer/Skus/18.04-LTS/Versions/18.04.202209210\"\r\n \ }\r\n]" headers: cache-control: @@ -360,7 +266,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:10:31 GMT + - Fri, 14 Oct 2022 15:18:33 GMT expires: - '-1' pragma: @@ -377,7 +283,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/ListVMImagesVersionsFromLocation3Min;15993,Microsoft.Compute/ListVMImagesVersionsFromLocation30Min;43973 + - Microsoft.Compute/ListVMImagesVersionsFromLocation3Min;15998,Microsoft.Compute/ListVMImagesVersionsFromLocation30Min;43998 status: code: 200 message: OK @@ -395,9 +301,9 @@ interactions: ParameterSetName: - -g -n --image --instance-count -l User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/publishers/Canonical/artifacttypes/vmimage/offers/UbuntuServer/skus/18.04-LTS/versions/18.04.202207120?api-version=2022-03-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/publishers/Canonical/artifacttypes/vmimage/offers/UbuntuServer/skus/18.04-LTS/versions/18.04.202209210?api-version=2022-08-01 response: body: string: "{\r\n \"properties\": {\r\n \"hyperVGeneration\": \"V1\",\r\n \"architecture\": @@ -407,20 +313,21 @@ interactions: {\r\n \"imageState\": \"Active\"\r\n },\r\n \"features\": [\r\n \ {\r\n \"name\": \"IsAcceleratedNetworkSupported\",\r\n \"value\": \"True\"\r\n },\r\n {\r\n \"name\": \"DiskControllerTypes\",\r\n - \ \"value\": \"SCSI\"\r\n },\r\n {\r\n \"name\": \"IsHibernateSupported\",\r\n - \ \"value\": \"True\"\r\n }\r\n ],\r\n \"osDiskImage\": {\r\n - \ \"operatingSystem\": \"Linux\",\r\n \"sizeInGb\": 31,\r\n \"sizeInBytes\": - 32213303808\r\n },\r\n \"dataDiskImages\": []\r\n },\r\n \"location\": - \"westus2\",\r\n \"name\": \"18.04.202207120\",\r\n \"id\": \"/Subscriptions/00000000-0000-0000-0000-000000000000/Providers/Microsoft.Compute/Locations/westus2/Publishers/Canonical/ArtifactTypes/VMImage/Offers/UbuntuServer/Skus/18.04-LTS/Versions/18.04.202207120\"\r\n}" + \ \"value\": \"SCSI, NVMe\"\r\n },\r\n {\r\n \"name\": + \"IsHibernateSupported\",\r\n \"value\": \"True\"\r\n }\r\n ],\r\n + \ \"osDiskImage\": {\r\n \"operatingSystem\": \"Linux\",\r\n \"sizeInGb\": + 31,\r\n \"sizeInBytes\": 32213303808\r\n },\r\n \"dataDiskImages\": + []\r\n },\r\n \"location\": \"westus2\",\r\n \"name\": \"18.04.202209210\",\r\n + \ \"id\": \"/Subscriptions/00000000-0000-0000-0000-000000000000/Providers/Microsoft.Compute/Locations/westus2/Publishers/Canonical/ArtifactTypes/VMImage/Offers/UbuntuServer/Skus/18.04-LTS/Versions/18.04.202209210\"\r\n}" headers: cache-control: - no-cache content-length: - - '1044' + - '1050' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:10:31 GMT + - Fri, 14 Oct 2022 15:18:33 GMT expires: - '-1' pragma: @@ -437,7 +344,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMImageFromLocation3Min;12998,Microsoft.Compute/GetVMImageFromLocation30Min;73989 + - Microsoft.Compute/GetVMImageFromLocation3Min;12998,Microsoft.Compute/GetVMImageFromLocation30Min;73998 status: code: 200 message: OK @@ -445,7 +352,7 @@ interactions: body: null headers: Accept: - - application/json, text/json + - application/json Accept-Encoding: - gzip, deflate CommandName: @@ -455,9 +362,9 @@ interactions: ParameterSetName: - -g -n --image --instance-count -l User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-network/20.0.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-network/21.0.1 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks?api-version=2018-01-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks?api-version=2022-01-01 response: body: string: '{"value":[]}' @@ -469,7 +376,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:10:31 GMT + - Fri, 14 Oct 2022 15:18:33 GMT expires: - '-1' pragma: @@ -490,10 +397,10 @@ interactions: "westus2", "apiVersion": "2015-06-15", "dependsOn": [], "tags": {}, "properties": 
{"addressSpace": {"addressPrefixes": ["10.0.0.0/16"]}, "subnets": [{"name": "cli000003Subnet", "properties": {"addressPrefix": "10.0.0.0/24"}}]}}, {"apiVersion": - "2018-01-01", "type": "Microsoft.Network/publicIPAddresses", "name": "cli000003LBPublicIP", + "2022-01-01", "type": "Microsoft.Network/publicIPAddresses", "name": "cli000003LBPublicIP", "location": "westus2", "tags": {}, "dependsOn": [], "properties": {"publicIPAllocationMethod": "Dynamic"}}, {"type": "Microsoft.Network/loadBalancers", "name": "cli000003LB", - "location": "westus2", "tags": {}, "apiVersion": "2018-01-01", "dependsOn": + "location": "westus2", "tags": {}, "apiVersion": "2022-01-01", "dependsOn": ["Microsoft.Network/virtualNetworks/cli000003VNET", "Microsoft.Network/publicIpAddresses/cli000003LBPublicIP"], "properties": {"backendAddressPools": [{"name": "cli000003LBBEPool"}], "frontendIPConfigurations": [{"name": "loadBalancerFrontEnd", "properties": {"publicIPAddress": {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/publicIPAddresses/cli000003LBPublicIP"}}}], @@ -502,19 +409,19 @@ interactions: ''/frontendIPConfigurations/'', ''loadBalancerFrontEnd'')]"}, "protocol": "tcp", "frontendPortRangeStart": "50000", "frontendPortRangeEnd": "50119", "backendPort": 22}}]}}, {"type": "Microsoft.Compute/virtualMachineScaleSets", "name": "cli000003", - "location": "westus2", "tags": {}, "apiVersion": "2022-03-01", "dependsOn": + "location": "westus2", "tags": {}, "apiVersion": "2022-08-01", "dependsOn": ["Microsoft.Network/virtualNetworks/cli000003VNET", "Microsoft.Network/loadBalancers/cli000003LB"], "properties": {"overprovision": true, "upgradePolicy": {"mode": "manual", "rollingUpgradePolicy": {}}, "singlePlacementGroup": null, "virtualMachineProfile": {"storageProfile": {"osDisk": {"createOption": "FromImage", "caching": "ReadWrite", "managedDisk": {"storageAccountType": null}}, "imageReference": {"publisher": "Canonical", "offer": "UbuntuServer", "sku": "18.04-LTS", "version": "latest"}}, "osProfile": - {"computerNamePrefix": "clinqc38b", "adminUsername": "rhl", "linuxConfiguration": - {"disablePasswordAuthentication": true, "ssh": {"publicKeys": [{"path": "/home/rhl/.ssh/authorized_keys", - "keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5"}]}}}, - "networkProfile": {"networkInterfaceConfigurations": [{"name": "clinqc38bNic", - "properties": {"ipConfigurations": [{"name": "clinqc38bIPConfig", "properties": - {"subnet": {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet"}, + {"computerNamePrefix": "cliouf96e", "adminUsername": "rhoover", "linuxConfiguration": + {"disablePasswordAuthentication": true, "ssh": {"publicKeys": [{"path": "/home/rhoover/.ssh/authorized_keys", + "keyData": "ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\n"}]}}}, "networkProfile": {"networkInterfaceConfigurations": + [{"name": "cliouf96eNic", "properties": {"ipConfigurations": [{"name": "cliouf96eIPConfig", + "properties": {"subnet": {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet"}, "loadBalancerBackendAddressPools": [{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool"}], "loadBalancerInboundNatPools": [{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool"}]}}], "primary": "true"}}]}}, "orchestrationMode": "Uniform"}, "sku": {"name": "Standard_DS1_v2", @@ -531,29 +438,29 @@ interactions: Connection: - keep-alive Content-Length: - - '4106' + - '4310' Content-Type: - application/json ParameterSetName: - -g -n --image --instance-count -l User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2021-04-01 response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/vmss_deploy_mqVrveBcuXIOanW0hg0AJuSsCVgT6U7K","name":"vmss_deploy_mqVrveBcuXIOanW0hg0AJuSsCVgT6U7K","type":"Microsoft.Resources/deployments","properties":{"templateHash":"5012540532131786740","parameters":{},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2022-08-04T17:10:43.791516Z","duration":"PT0.0002535S","correlationId":"1e5a3c82-ee7f-4fa4-a4ea-4df37be452a2","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"virtualNetworks","locations":["westus2"]},{"resourceType":"publicIPAddresses","locations":["westus2"]},{"resourceType":"loadBalancers","locations":["westus2"]}]},{"namespace":"Microsoft.Compute","resourceTypes":[{"resourceType":"virtualMachineScaleSets","locations":["westus2"]}]}],"dependencies":[{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET","resourceType":"Microsoft.Network/virtualNetworks","resourceName":"cli000003VNET"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/publicIPAddresses/cli000003LBPublicIP","resourceType":"Microsoft.Network/publicIPAddresses","resourceName":"cli000003LBPublicIP"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB","resourceType":"Microsoft.Network/loadBalancers","resourceName":"cli000003LB"},{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET","resourceType":"Microsoft.Network/virtualNetworks","resourceName":"cli000003VNET"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB","resourceType":"Microsoft.Network/loadBalancers","resourceName":"cli000003LB"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003","resourceType":"Microsoft.Compute/virtualMachineScaleSets","resourceName":"cli000003"}]}}' + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/vmss_deploy_t5llprO4gRjggLaAddTNGJULYsdpSyzF","name":"vmss_deploy_t5llprO4gRjggLaAddTNGJULYsdpSyzF","type":"Microsoft.Resources/deployments","properties":{"templateHash":"3541347179006122922","parameters":{},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2022-10-14T15:18:39.8643364Z","duration":"PT0.0000837S","correlationId":"177bf8d7-dc02-49b3-a649-a91eb2d280a7","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"virtualNetworks","locations":["westus2"]},{"resourceType":"publicIPAddresses","locations":["westus2"]},{"resourceType":"loadBalancers","locations":["westus2"]}]},{"namespace":"Microsoft.Compute","resourceTypes":[{"resourceType":"virtualMachineScaleSets","locations":["westus2"]}]}],"dependencies":[{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET","resourceType":"Microsoft.Network/virtualNetworks","resourceName":"cli000003VNET"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/publicIPAddresses/cli000003LBPublicIP","resourceType":"Microsoft.Network/publicIPAddresses","resourceName":"cli000003LBPublicIP"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB","resourceType":"Microsoft.Network/loadBalancers","resourceName":"cli000003LB"},{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET","resourceType":"Microsoft.Network/virtualNetworks","resourceName":"cli000003VNET"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB","resourceType":"Microsoft.Network/loadBalancers","resourceName":"cli000003LB"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003","resourceType":"Microsoft.Compute/virtualMachineScaleSets","resourceName":"cli000003"}]}}' headers: azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/vmss_deploy_mqVrveBcuXIOanW0hg0AJuSsCVgT6U7K/operationStatuses/08585419738427778273?api-version=2021-04-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/vmss_deploy_t5llprO4gRjggLaAddTNGJULYsdpSyzF/operationStatuses/08585358461674764940?api-version=2021-04-01 cache-control: - no-cache content-length: - - '2414' + - '2415' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:10:43 GMT + - Fri, 14 Oct 2022 15:18:39 GMT expires: - '-1' pragma: @@ -581,9 +488,9 @@ interactions: ParameterSetName: - -g -n --image --instance-count -l User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.5 
(Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08585419738427778273?api-version=2021-04-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08585358461674764940?api-version=2021-04-01 response: body: string: '{"status":"Running"}' @@ -595,7 +502,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:11:13 GMT + - Fri, 14 Oct 2022 15:19:11 GMT expires: - '-1' pragma: @@ -623,9 +530,9 @@ interactions: ParameterSetName: - -g -n --image --instance-count -l User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08585419738427778273?api-version=2021-04-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08585358461674764940?api-version=2021-04-01 response: body: string: '{"status":"Running"}' @@ -637,7 +544,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:11:44 GMT + - Fri, 14 Oct 2022 15:19:41 GMT expires: - '-1' pragma: @@ -665,21 +572,21 @@ interactions: ParameterSetName: - -g -n --image --instance-count -l User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08585419738427778273?api-version=2021-04-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08585358461674764940?api-version=2021-04-01 response: body: - string: '{"status":"Succeeded"}' + string: '{"status":"Running"}' headers: cache-control: - no-cache content-length: - - '22' + - '20' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:12:14 GMT + - Fri, 14 Oct 2022 15:20:11 GMT expires: - '-1' pragma: @@ -707,22 +614,21 @@ interactions: ParameterSetName: - -g -n --image --instance-count -l User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2021-04-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08585358461674764940?api-version=2021-04-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/vmss_deploy_mqVrveBcuXIOanW0hg0AJuSsCVgT6U7K","name":"vmss_deploy_mqVrveBcuXIOanW0hg0AJuSsCVgT6U7K","type":"Microsoft.Resources/deployments","properties":{"templateHash":"5012540532131786740","parameters":{},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2022-08-04T17:11:45.7224405Z","duration":"PT1M1.931178S","correlationId":"1e5a3c82-ee7f-4fa4-a4ea-4df37be452a2","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"virtualNetworks","locations":["westus2"]},{"resourceType":"publicIPAddresses","locations":["westus2"]},{"resourceType":"loadBalancers","locations":["westus2"]}]},{"namespace":"Microsoft.Compute","resourceTypes":[{"resourceType":"virtualMachineScaleSets","locations":["westus2"]}]}],"dependencies":[{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET","resourceType":"Microsoft.Network/virtualNetworks","resourceName":"cli000003VNET"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/publicIPAddresses/cli000003LBPublicIP","resourceType":"Microsoft.Network/publicIPAddresses","resourceName":"cli000003LBPublicIP"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB","resourceType":"Microsoft.Network/loadBalancers","resourceName":"cli000003LB"},{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET","resourceType":"Microsoft.Network/virtualNetworks","resourceName":"cli000003VNET"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB","resourceType":"Microsoft.Network/loadBalancers","resourceName":"cli000003LB"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003","resourceType":"Microsoft.Compute/virtualMachineScaleSets","resourceName":"cli000003"}],"outputs":{"vmss":{"type":"Object","value":{"singlePlacementGroup":true,"upgradePolicy":{"mode":"Manual","rollingUpgradePolicy":{"maxBatchInstancePercent":20,"maxUnhealthyInstancePercent":20,"maxUnhealthyUpgradedInstancePercent":20,"pauseTimeBetweenBatches":"PT0S"}},"virtualMachineProfile":{"osProfile":{"computerNamePrefix":"clinqc38b","adminUsername":"rhl","linuxConfiguration":{"disablePasswordAuthentication":true,"ssh":{"publicKeys":[{"path":"/home/rhl/.ssh/authorized_keys","keyData":"ssh-rsa - 
AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5"}]},"provisionVMAgent":true,"enableVMAgentPlatformUpdates":false},"secrets":[],"allowExtensionOperations":true,"requireGuestProvisionSignal":true},"storageProfile":{"osDisk":{"osType":"Linux","createOption":"FromImage","caching":"ReadWrite","managedDisk":{"storageAccountType":"Premium_LRS"},"diskSizeGB":30},"imageReference":{"publisher":"Canonical","offer":"UbuntuServer","sku":"18.04-LTS","version":"latest"}},"networkProfile":{"networkInterfaceConfigurations":[{"name":"clinqc38bNic","properties":{"primary":true,"enableAcceleratedNetworking":false,"dnsSettings":{"dnsServers":[]},"enableIPForwarding":false,"ipConfigurations":[{"name":"clinqc38bIPConfig","properties":{"subnet":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet"},"privateIPAddressVersion":"IPv4","loadBalancerBackendAddressPools":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool"}],"loadBalancerInboundNatPools":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool"}]}}]}}]}},"provisioningState":"Succeeded","overprovision":true,"doNotRunExtensionsOnOverprovisionedVMs":false,"uniqueId":"239f9fe3-2e0c-403b-8c3a-55cda6cd7b9a","timeCreated":"2022-08-04T17:10:51.2598866+00:00"}}},"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/publicIPAddresses/cli000003LBPublicIP"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET"}]}}' + string: '{"status":"Running"}' headers: cache-control: - no-cache content-length: - - '5543' + - '20' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:12:14 GMT + - Fri, 14 Oct 2022 15:20:41 GMT expires: - '-1' pragma: @@ -740,7 +646,7 @@ interactions: body: null headers: Accept: - - application/json + - '*/*' Accept-Encoding: - gzip, deflate CommandName: @@ -750,38 +656,117 @@ interactions: ParameterSetName: - -g -n --image --instance-count -l User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08585358461674764940?api-version=2021-04-01 response: body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + string: '{"status":"Running"}' headers: cache-control: - no-cache content-length: - - '43' + - '20' content-type: - - application/json; charset=UTF-8 + - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:12:14 GMT + - Fri, 14 Oct 2022 15:21:11 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - vmss create + Connection: + - keep-alive + ParameterSetName: + - -g -n --image --instance-count -l + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.5 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08585358461674764940?api-version=2021-04-01 + response: + body: + string: '{"status":"Succeeded"}' + headers: + cache-control: + - no-cache + content-length: + - '22' + content-type: + - application/json; charset=utf-8 + date: + - Fri, 14 Oct 2022 15:21:41 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - vmss create + Connection: + - keep-alive + ParameterSetName: + - -g -n --image --instance-count -l + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.5 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2021-04-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Resources/deployments/vmss_deploy_t5llprO4gRjggLaAddTNGJULYsdpSyzF","name":"vmss_deploy_t5llprO4gRjggLaAddTNGJULYsdpSyzF","type":"Microsoft.Resources/deployments","properties":{"templateHash":"3541347179006122922","parameters":{},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2022-10-14T15:21:33.3001464Z","duration":"PT2M53.4358937S","correlationId":"177bf8d7-dc02-49b3-a649-a91eb2d280a7","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"virtualNetworks","locations":["westus2"]},{"resourceType":"publicIPAddresses","locations":["westus2"]},{"resourceType":"loadBalancers","locations":["westus2"]}]},{"namespace":"Microsoft.Compute","resourceTypes":[{"resourceType":"virtualMachineScaleSets","locations":["westus2"]}]}],"dependencies":[{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET","resourceType":"Microsoft.Network/virtualNetworks","resourceName":"cli000003VNET"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/publicIPAddresses/cli000003LBPublicIP","resourceType":"Microsoft.Network/publicIPAddresses","resourceName":"cli000003LBPublicIP"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB","resourceType":"Microsoft.Network/loadBalancers","resourceName":"cli000003LB"},{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET","resourceType":"Microsoft.Network/virtualNetworks","resourceName":"cli000003VNET"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB","resourceType":"Microsoft.Network/loadBalancers","resourceName":"cli000003LB"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003","resourceType":"Microsoft.Compute/virtualMachineScaleSets","resourceName":"cli000003"}],"outputs":{"vmss":{"type":"Object","value":{"singlePlacementGroup":true,"orchestrationMode":"Uniform","upgradePolicy":{"mode":"Manual","rollingUpgradePolicy":{"maxBatchInstancePercent":20,"maxUnhealthyInstancePercent":20,"maxUnhealthyUpgradedInstancePercent":20,"pauseTimeBetweenBatches":"PT0S"}},"virtualMachineProfile":{"osProfile":{"computerNamePrefix":"cliouf96e","adminUsername":"rhoover","linuxConfiguration":{"disablePasswordAuthentication":true,"ssh":{"publicKeys":[{"path":"/home/rhoover/.ssh/authorized_keys","keyData":"ssh-rsa + AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + 
rhoover@microsoft.com\n"}]},"provisionVMAgent":true,"enableVMAgentPlatformUpdates":false},"secrets":[],"allowExtensionOperations":true,"requireGuestProvisionSignal":true},"storageProfile":{"osDisk":{"osType":"Linux","createOption":"FromImage","caching":"ReadWrite","managedDisk":{"storageAccountType":"Premium_LRS"},"diskSizeGB":30},"imageReference":{"publisher":"Canonical","offer":"UbuntuServer","sku":"18.04-LTS","version":"latest"}},"networkProfile":{"networkInterfaceConfigurations":[{"name":"cliouf96eNic","properties":{"primary":true,"enableAcceleratedNetworking":false,"disableTcpStateTracking":false,"dnsSettings":{"dnsServers":[]},"enableIPForwarding":false,"ipConfigurations":[{"name":"cliouf96eIPConfig","properties":{"subnet":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet"},"privateIPAddressVersion":"IPv4","loadBalancerBackendAddressPools":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool"}],"loadBalancerInboundNatPools":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool"}]}}]}}]},"extensionProfile":{"extensions":[{"name":"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent","properties":{"autoUpgradeMinorVersion":true,"enableAutomaticUpgrade":true,"publisher":"Microsoft.Azure.Monitor","type":"AzureMonitorLinuxAgent","typeHandlerVersion":"1.0","settings":{"GCS_AUTO_CONFIG":true}}},{"name":"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent","properties":{"autoUpgradeMinorVersion":true,"enableAutomaticUpgrade":true,"publisher":"Microsoft.Azure.Security.Monitoring","type":"AzureSecurityLinuxAgent","typeHandlerVersion":"2.0","settings":{"enableGenevaUpload":true,"enableAutoConfig":true,"reportSuccessOnUnsupportedDistro":true}}}]}},"provisioningState":"Succeeded","overprovision":true,"doNotRunExtensionsOnOverprovisionedVMs":false,"uniqueId":"793988fd-7a65-472d-9472-7470271a360c","timeCreated":"2022-10-14T15:18:49.0838434+00:00"}}},"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/publicIPAddresses/cli000003LBPublicIP"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET"}]}}' + headers: + cache-control: + - no-cache + content-length: + - '6474' + content-type: + - application/json; charset=utf-8 + date: + - Fri, 14 Oct 2022 15:21:41 GMT expires: - '-1' pragma: - no-cache - server: - - nginx strict-transport-security: - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked vary: - Accept-Encoding x-content-type-options: - nosniff - x-frame-options: - - deny status: code: 200 message: OK @@ -799,9 +784,9 @@ interactions: ParameterSetName: - -g -n --image --instance-count -l User-Agent: - - AZURECLI/2.39.0 
azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003?$expand=instanceView&api-version=2022-08-01 response: body: string: '{"error":{"code":"ResourceNotFound","message":"The Resource ''Microsoft.Compute/virtualMachines/cli000003'' @@ -815,7 +800,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:12:15 GMT + - Fri, 14 Oct 2022 15:21:42 GMT expires: - '-1' pragma: @@ -843,49 +828,63 @@ interactions: ParameterSetName: - -g -n --image --instance-count -l User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachineScaleSets\",\r\n \"location\": - \"westus2\",\r\n \"tags\": {},\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n + \"westus2\",\r\n \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n \ \"tier\": \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": - {\r\n \"singlePlacementGroup\": true,\r\n \"upgradePolicy\": {\r\n \"mode\": - \"Manual\",\r\n \"rollingUpgradePolicy\": {\r\n \"maxBatchInstancePercent\": - 20,\r\n \"maxUnhealthyInstancePercent\": 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": - 20,\r\n \"pauseTimeBetweenBatches\": \"PT0S\"\r\n }\r\n },\r\n - \ \"virtualMachineProfile\": {\r\n \"osProfile\": {\r\n \"computerNamePrefix\": - \"clinqc38b\",\r\n \"adminUsername\": \"rhl\",\r\n \"linuxConfiguration\": - {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": - {\r\n \"publicKeys\": [\r\n {\r\n \"path\": - \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": \"ssh-rsa - AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - 
true,\r\n \"enableVMAgentPlatformUpdates\": false\r\n },\r\n - \ \"secrets\": [],\r\n \"allowExtensionOperations\": true,\r\n - \ \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": + {\r\n \"singlePlacementGroup\": true,\r\n \"orchestrationMode\": \"Uniform\",\r\n + \ \"upgradePolicy\": {\r\n \"mode\": \"Manual\",\r\n \"rollingUpgradePolicy\": + {\r\n \"maxBatchInstancePercent\": 20,\r\n \"maxUnhealthyInstancePercent\": + 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": 20,\r\n \"pauseTimeBetweenBatches\": + \"PT0S\"\r\n }\r\n },\r\n \"virtualMachineProfile\": {\r\n \"osProfile\": + {\r\n \"computerNamePrefix\": \"cliouf96e\",\r\n \"adminUsername\": + \"rhoover\",\r\n \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": + true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"enableVMAgentPlatformUpdates\": + false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": + true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": {\r\n \"osDisk\": {\r\n \"osType\": \"Linux\",\r\n \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\"\r\n },\r\n \ \"diskSizeGB\": 30\r\n },\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \ \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\"\r\n - \ }\r\n },\r\n \"networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"clinqc38bNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"clinqc38bIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]}\r\n - \ },\r\n \"provisioningState\": \"Succeeded\",\r\n \"overprovision\": - true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": false,\r\n \"uniqueId\": - \"239f9fe3-2e0c-403b-8c3a-55cda6cd7b9a\",\r\n \"timeCreated\": \"2022-08-04T17:10:51.2598866+00:00\"\r\n - \ }\r\n}" + \ }\r\n },\r\n \"networkProfile\": 
{\"networkInterfaceConfigurations\":[{\"name\":\"cliouf96eNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"disableTcpStateTracking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"cliouf96eIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n + \ \"extensionProfile\": {\r\n \"extensions\": [\r\n {\r\n + \ \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Monitor\",\r\n \"type\": \"AzureMonitorLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"1.0\",\r\n \"settings\": + {\"GCS_AUTO_CONFIG\":true}\r\n }\r\n },\r\n {\r\n + \ \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Security.Monitoring\",\r\n \"type\": \"AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.0\",\r\n \"settings\": + {\"enableGenevaUpload\":true,\"enableAutoConfig\":true,\"reportSuccessOnUnsupportedDistro\":true}\r\n + \ }\r\n }\r\n ]\r\n }\r\n },\r\n \"provisioningState\": + \"Succeeded\",\r\n \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": + false,\r\n \"uniqueId\": \"793988fd-7a65-472d-9472-7470271a360c\",\r\n + \ \"timeCreated\": \"2022-10-14T15:18:49.0838434+00:00\"\r\n }\r\n}" headers: cache-control: - no-cache content-length: - - '3417' + - '4852' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:12:15 GMT + - Fri, 14 Oct 2022 15:21:43 GMT expires: - '-1' pragma: @@ -902,7 +901,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMScaleSet3Min;384,Microsoft.Compute/GetVMScaleSet30Min;2538 + - Microsoft.Compute/GetVMScaleSet3Min;390,Microsoft.Compute/GetVMScaleSet30Min;2590 status: code: 200 message: OK @@ -920,83 +919,121 @@ interactions: ParameterSetName: - --resource-group --name --query User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines?api-version=2022-08-01 
response: body: - string: "{\r\n \"value\": [\r\n {\r\n \"name\": \"cli000003_0\",\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/0\",\r\n + string: "{\r\n \"value\": [\r\n {\r\n \"name\": \"cli000003_2\",\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/2\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachineScaleSets/virtualMachines\",\r\n - \ \"location\": \"westus2\",\r\n \"tags\": {},\r\n \"instanceId\": - \"0\",\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n \"tier\": - \"Standard\"\r\n },\r\n \"properties\": {\r\n \"latestModelApplied\": - true,\r\n \"modelDefinitionApplied\": \"VirtualMachineScaleSet\",\r\n - \ \"networkProfileConfiguration\": {\"networkInterfaceConfigurations\":[{\"name\":\"clinqc38bNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"clinqc38bIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n - \ \"vmId\": \"3d348973-8135-413e-93ca-7215f26d1bac\",\r\n \"hardwareProfile\": + \ \"location\": \"westus2\",\r\n \"tags\": {\r\n \"azsecpack\": + \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"instanceId\": \"2\",\r\n \"sku\": {\r\n + \ \"name\": \"Standard_DS1_v2\",\r\n \"tier\": \"Standard\"\r\n + \ },\r\n \"properties\": {\r\n \"latestModelApplied\": true,\r\n + \ \"modelDefinitionApplied\": \"VirtualMachineScaleSet\",\r\n \"networkProfileConfiguration\": + {\"networkInterfaceConfigurations\":[{\"name\":\"cliouf96eNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"disableTcpStateTracking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"cliouf96eIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n + \ \"vmId\": 
\"2e7f392d-c7c5-4450-821e-74e86b1b548b\",\r\n \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \ \"offer\": \"UbuntuServer\",\r\n \"sku\": \"18.04-LTS\",\r\n - \ \"version\": \"latest\",\r\n \"exactVersion\": \"18.04.202207120\"\r\n + \ \"version\": \"latest\",\r\n \"exactVersion\": \"18.04.202209210\"\r\n \ },\r\n \"osDisk\": {\r\n \"osType\": \"Linux\",\r\n - \ \"name\": \"clinqhzpoczvy5m2spufclinqhzpoczvy5m2spufqOS__1_9d3b4aa626d54d8e9c891331fdca0d7f\",\r\n + \ \"name\": \"cliou5z7nax54kj6yubjcliou5z7nax54kj6yubj5OS__1_1953d79714594ce28dcbbbe477234ca8\",\r\n \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/disks/clinqhzpoczvy5m2spufclinqhzpoczvy5m2spufqOS__1_9d3b4aa626d54d8e9c891331fdca0d7f\"\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/disks/cliou5z7nax54kj6yubjcliou5z7nax54kj6yubj5OS__1_1953d79714594ce28dcbbbe477234ca8\"\r\n \ },\r\n \"diskSizeGB\": 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": {\r\n \"computerName\": - \"clinqc38b000000\",\r\n \"adminUsername\": \"rhl\",\r\n \"linuxConfiguration\": + \"cliouf96e000002\",\r\n \"adminUsername\": \"rhoover\",\r\n \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n \"path\": - \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": \"ssh-rsa - AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"enableVMAgentPlatformUpdates\": false\r\n },\r\n - \ \"secrets\": [],\r\n \"allowExtensionOperations\": true,\r\n - \ \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/0/networkInterfaces/clinqc38bNic\"}]},\r\n - \ \"provisioningState\": \"Updating\",\r\n \"timeCreated\": \"2022-08-04T17:10:51.3692529+00:00\"\r\n - \ }\r\n },\r\n {\r\n \"name\": \"cli000003_3\",\r\n \"id\": - \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3\",\r\n + \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"enableVMAgentPlatformUpdates\": + false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": + true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n + \ \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/2/networkInterfaces/cliouf96eNic\"}]},\r\n + \ \"provisioningState\": \"Succeeded\",\r\n \"timeCreated\": + \"2022-10-14T15:18:49.2869754+00:00\"\r\n },\r\n \"resources\": + [\r\n {\r\n \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003_2/extensions/Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Compute/virtualMachines/extensions\",\r\n + \ \"location\": \"westus2\",\r\n \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"provisioningState\": \"Succeeded\",\r\n \"enableAutomaticUpgrade\": + true,\r\n \"publisher\": \"Microsoft.Azure.Monitor\",\r\n \"type\": + \"AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": \"1.0\",\r\n + \ \"settings\": {\"GCS_AUTO_CONFIG\":true}\r\n }\r\n },\r\n + \ {\r\n \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003_2/extensions/Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Compute/virtualMachines/extensions\",\r\n + \ \"location\": \"westus2\",\r\n \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"provisioningState\": \"Succeeded\",\r\n \"enableAutomaticUpgrade\": + true,\r\n \"publisher\": \"Microsoft.Azure.Security.Monitoring\",\r\n + \ \"type\": \"AzureSecurityLinuxAgent\",\r\n \"typeHandlerVersion\": + \"2.0\",\r\n \"settings\": {\"enableGenevaUpload\":true,\"enableAutoConfig\":true,\"reportSuccessOnUnsupportedDistro\":true}\r\n + \ }\r\n }\r\n ]\r\n },\r\n {\r\n \"name\": + \"cli000003_3\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachineScaleSets/virtualMachines\",\r\n - \ \"location\": \"westus2\",\r\n \"tags\": {},\r\n \"instanceId\": - \"3\",\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n \"tier\": - \"Standard\"\r\n },\r\n \"properties\": {\r\n \"latestModelApplied\": - true,\r\n \"modelDefinitionApplied\": \"VirtualMachineScaleSet\",\r\n - \ \"networkProfileConfiguration\": 
{\"networkInterfaceConfigurations\":[{\"name\":\"clinqc38bNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"clinqc38bIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n - \ \"vmId\": \"c47ce47c-9b4d-461d-8f0e-b5a271ca2265\",\r\n \"hardwareProfile\": + \ \"location\": \"westus2\",\r\n \"tags\": {\r\n \"azsecpack\": + \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"instanceId\": \"3\",\r\n \"sku\": {\r\n + \ \"name\": \"Standard_DS1_v2\",\r\n \"tier\": \"Standard\"\r\n + \ },\r\n \"properties\": {\r\n \"latestModelApplied\": true,\r\n + \ \"modelDefinitionApplied\": \"VirtualMachineScaleSet\",\r\n \"networkProfileConfiguration\": + {\"networkInterfaceConfigurations\":[{\"name\":\"cliouf96eNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"disableTcpStateTracking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"cliouf96eIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n + \ \"vmId\": \"c3d44363-484f-435b-bb1d-61e4ddddcb55\",\r\n \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \ \"offer\": \"UbuntuServer\",\r\n \"sku\": \"18.04-LTS\",\r\n - \ \"version\": \"latest\",\r\n \"exactVersion\": \"18.04.202207120\"\r\n + \ \"version\": \"latest\",\r\n \"exactVersion\": \"18.04.202209210\"\r\n \ },\r\n \"osDisk\": {\r\n \"osType\": \"Linux\",\r\n - \ \"name\": \"clinqhzpoczvy5m2spufclinqhzpoczvy5m2spufqOS__1_00d110bc7aaf4fbebbd4095a4a610862\",\r\n + \ \"name\": \"cliou5z7nax54kj6yubjcliou5z7nax54kj6yubj5OS__1_474db27698dd49ddab2454669aab0333\",\r\n \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n - \ \"id\": 
\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/disks/clinqhzpoczvy5m2spufclinqhzpoczvy5m2spufqOS__1_00d110bc7aaf4fbebbd4095a4a610862\"\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/disks/cliou5z7nax54kj6yubjcliou5z7nax54kj6yubj5OS__1_474db27698dd49ddab2454669aab0333\"\r\n \ },\r\n \"diskSizeGB\": 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": {\r\n \"computerName\": - \"clinqc38b000003\",\r\n \"adminUsername\": \"rhl\",\r\n \"linuxConfiguration\": + \"cliouf96e000003\",\r\n \"adminUsername\": \"rhoover\",\r\n \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n \"path\": - \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": \"ssh-rsa - AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"enableVMAgentPlatformUpdates\": false\r\n },\r\n - \ \"secrets\": [],\r\n \"allowExtensionOperations\": true,\r\n - \ \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/networkInterfaces/clinqc38bNic\"}]},\r\n + \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"enableVMAgentPlatformUpdates\": + false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": + true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n + \ \"networkProfile\": {\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/networkInterfaces/cliouf96eNic\"}]},\r\n \ \"provisioningState\": \"Succeeded\",\r\n \"timeCreated\": - \"2022-08-04T17:10:51.3692529+00:00\"\r\n }\r\n }\r\n ]\r\n}" + \"2022-10-14T15:18:49.2869754+00:00\"\r\n },\r\n \"resources\": + [\r\n {\r\n \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003_3/extensions/Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Compute/virtualMachines/extensions\",\r\n + \ \"location\": \"westus2\",\r\n \"properties\": {\r\n 
\"autoUpgradeMinorVersion\": + true,\r\n \"provisioningState\": \"Succeeded\",\r\n \"enableAutomaticUpgrade\": + true,\r\n \"publisher\": \"Microsoft.Azure.Monitor\",\r\n \"type\": + \"AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": \"1.0\",\r\n + \ \"settings\": {\"GCS_AUTO_CONFIG\":true}\r\n }\r\n },\r\n + \ {\r\n \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachines/cli000003_3/extensions/Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Compute/virtualMachines/extensions\",\r\n + \ \"location\": \"westus2\",\r\n \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"provisioningState\": \"Succeeded\",\r\n \"enableAutomaticUpgrade\": + true,\r\n \"publisher\": \"Microsoft.Azure.Security.Monitoring\",\r\n + \ \"type\": \"AzureSecurityLinuxAgent\",\r\n \"typeHandlerVersion\": + \"2.0\",\r\n \"settings\": {\"enableGenevaUpload\":true,\"enableAutoConfig\":true,\"reportSuccessOnUnsupportedDistro\":true}\r\n + \ }\r\n }\r\n ]\r\n }\r\n ]\r\n}" headers: cache-control: - no-cache content-length: - - '8097' + - '12282' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:12:15 GMT + - Fri, 14 Oct 2022 15:21:44 GMT expires: - '-1' pragma: @@ -1013,7 +1050,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/HighCostGetVMScaleSet3Min;179,Microsoft.Compute/HighCostGetVMScaleSet30Min;884,Microsoft.Compute/VMScaleSetVMViews3Min;4981 + - Microsoft.Compute/HighCostGetVMScaleSet3Min;179,Microsoft.Compute/HighCostGetVMScaleSet30Min;899,Microsoft.Compute/VMScaleSetVMViews3Min;4996 x-ms-request-charge: - '4' status: @@ -1033,49 +1070,63 @@ interactions: ParameterSetName: - --name --resource-group --set User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachineScaleSets\",\r\n \"location\": - \"westus2\",\r\n \"tags\": {},\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n + \"westus2\",\r\n \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n \ \"tier\": \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": - {\r\n \"singlePlacementGroup\": true,\r\n \"upgradePolicy\": {\r\n \"mode\": - \"Manual\",\r\n \"rollingUpgradePolicy\": {\r\n \"maxBatchInstancePercent\": - 20,\r\n \"maxUnhealthyInstancePercent\": 20,\r\n 
\"maxUnhealthyUpgradedInstancePercent\": - 20,\r\n \"pauseTimeBetweenBatches\": \"PT0S\"\r\n }\r\n },\r\n - \ \"virtualMachineProfile\": {\r\n \"osProfile\": {\r\n \"computerNamePrefix\": - \"clinqc38b\",\r\n \"adminUsername\": \"rhl\",\r\n \"linuxConfiguration\": - {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": - {\r\n \"publicKeys\": [\r\n {\r\n \"path\": - \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": \"ssh-rsa - AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"enableVMAgentPlatformUpdates\": false\r\n },\r\n - \ \"secrets\": [],\r\n \"allowExtensionOperations\": true,\r\n - \ \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": + {\r\n \"singlePlacementGroup\": true,\r\n \"orchestrationMode\": \"Uniform\",\r\n + \ \"upgradePolicy\": {\r\n \"mode\": \"Manual\",\r\n \"rollingUpgradePolicy\": + {\r\n \"maxBatchInstancePercent\": 20,\r\n \"maxUnhealthyInstancePercent\": + 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": 20,\r\n \"pauseTimeBetweenBatches\": + \"PT0S\"\r\n }\r\n },\r\n \"virtualMachineProfile\": {\r\n \"osProfile\": + {\r\n \"computerNamePrefix\": \"cliouf96e\",\r\n \"adminUsername\": + \"rhoover\",\r\n \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": + true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"enableVMAgentPlatformUpdates\": + false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": + true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": {\r\n \"osDisk\": {\r\n \"osType\": \"Linux\",\r\n \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\"\r\n },\r\n \ \"diskSizeGB\": 30\r\n },\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \ \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\"\r\n - \ }\r\n },\r\n \"networkProfile\": 
{\"networkInterfaceConfigurations\":[{\"name\":\"clinqc38bNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"clinqc38bIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]}\r\n - \ },\r\n \"provisioningState\": \"Succeeded\",\r\n \"overprovision\": - true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": false,\r\n \"uniqueId\": - \"239f9fe3-2e0c-403b-8c3a-55cda6cd7b9a\",\r\n \"timeCreated\": \"2022-08-04T17:10:51.2598866+00:00\"\r\n - \ }\r\n}" + \ }\r\n },\r\n \"networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"cliouf96eNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"disableTcpStateTracking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"cliouf96eIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n + \ \"extensionProfile\": {\r\n \"extensions\": [\r\n {\r\n + \ \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Monitor\",\r\n \"type\": \"AzureMonitorLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"1.0\",\r\n \"settings\": + {\"GCS_AUTO_CONFIG\":true}\r\n }\r\n },\r\n {\r\n + \ \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Security.Monitoring\",\r\n \"type\": \"AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.0\",\r\n \"settings\": + {\"enableGenevaUpload\":true,\"enableAutoConfig\":true,\"reportSuccessOnUnsupportedDistro\":true}\r\n + \ }\r\n }\r\n ]\r\n }\r\n },\r\n \"provisioningState\": + \"Succeeded\",\r\n \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": + false,\r\n \"uniqueId\": \"793988fd-7a65-472d-9472-7470271a360c\",\r\n + \ \"timeCreated\": \"2022-10-14T15:18:49.0838434+00:00\"\r\n }\r\n}" headers: cache-control: - no-cache 
content-length: - - '3417' + - '4852' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:12:15 GMT + - Fri, 14 Oct 2022 15:21:44 GMT expires: - '-1' pragma: @@ -1092,32 +1143,40 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMScaleSet3Min;381,Microsoft.Compute/GetVMScaleSet30Min;2535 + - Microsoft.Compute/GetVMScaleSet3Min;389,Microsoft.Compute/GetVMScaleSet30Min;2589 status: code: 200 message: OK - request: - body: '{"location": "westus2", "tags": {}, "sku": {"name": "Standard_DS1_v2", - "tier": "Standard", "capacity": 2}, "properties": {"upgradePolicy": {"mode": - "Manual", "rollingUpgradePolicy": {"maxBatchInstancePercent": 20, "maxUnhealthyInstancePercent": - 20, "maxUnhealthyUpgradedInstancePercent": 20, "pauseTimeBetweenBatches": "PT0S"}}, - "virtualMachineProfile": {"osProfile": {"computerNamePrefix": "clinqc38b", "adminUsername": - "rhl", "linuxConfiguration": {"disablePasswordAuthentication": true, "ssh": - {"publicKeys": [{"path": "/home/rhl/.ssh/authorized_keys", "keyData": "ssh-rsa - AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5"}]}, - "provisionVMAgent": true}, "secrets": [], "allowExtensionOperations": true}, - "storageProfile": {"osDisk": {"caching": "ReadWrite", "createOption": "FromImage", - "diskSizeGB": 30, "osType": "Linux", "managedDisk": {"storageAccountType": "Premium_LRS"}}}, - "networkProfile": {"networkInterfaceConfigurations": [{"name": "clinqc38bNic", - "properties": {"primary": true, "enableAcceleratedNetworking": false, "dnsSettings": - {"dnsServers": []}, "ipConfigurations": [{"name": "clinqc38bIPConfig", "properties": - {"subnet": {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet"}, + body: '{"location": "westus2", "tags": {"azsecpack": "nonprod", "platformsettings.host_environment.service.platform_optedin_for_rootcerts": + "true"}, "sku": {"name": "Standard_DS1_v2", "tier": "Standard", "capacity": + 2}, "properties": {"upgradePolicy": {"mode": "Manual", "rollingUpgradePolicy": + {"maxBatchInstancePercent": 20, "maxUnhealthyInstancePercent": 20, "maxUnhealthyUpgradedInstancePercent": + 20, "pauseTimeBetweenBatches": "PT0S"}}, "virtualMachineProfile": {"osProfile": + {"computerNamePrefix": "cliouf96e", "adminUsername": "rhoover", "linuxConfiguration": + {"disablePasswordAuthentication": true, "ssh": {"publicKeys": [{"path": "/home/rhoover/.ssh/authorized_keys", + "keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\n"}]}, "provisionVMAgent": true, "enableVMAgentPlatformUpdates": + false}, 
"secrets": [], "allowExtensionOperations": true}, "storageProfile": + {"osDisk": {"caching": "ReadWrite", "createOption": "FromImage", "diskSizeGB": + 30, "osType": "Linux", "managedDisk": {"storageAccountType": "Premium_LRS"}}}, + "networkProfile": {"networkInterfaceConfigurations": [{"name": "cliouf96eNic", + "properties": {"primary": true, "enableAcceleratedNetworking": false, "disableTcpStateTracking": + false, "dnsSettings": {"dnsServers": []}, "ipConfigurations": [{"name": "cliouf96eIPConfig", + "properties": {"subnet": {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet"}, "privateIPAddressVersion": "IPv4", "loadBalancerBackendAddressPools": [{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool"}], "loadBalancerInboundNatPools": [{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool"}]}}], "enableIPForwarding": false}}]}, "diagnosticsProfile": {"bootDiagnostics": {"enabled": - true}}}, "overprovision": true, "doNotRunExtensionsOnOverprovisionedVMs": false, - "singlePlacementGroup": true}}' + true}}, "extensionProfile": {"extensions": [{"name": "Microsoft.Azure.Monitor.AzureMonitorLinuxAgent", + "properties": {"publisher": "Microsoft.Azure.Monitor", "type": "AzureMonitorLinuxAgent", + "typeHandlerVersion": "1.0", "autoUpgradeMinorVersion": true, "enableAutomaticUpgrade": + true, "settings": {"GCS_AUTO_CONFIG": true}}}, {"name": "Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent", + "properties": {"publisher": "Microsoft.Azure.Security.Monitoring", "type": "AzureSecurityLinuxAgent", + "typeHandlerVersion": "2.0", "autoUpgradeMinorVersion": true, "enableAutomaticUpgrade": + true, "settings": {"enableGenevaUpload": true, "enableAutoConfig": true, "reportSuccessOnUnsupportedDistro": + true}}}]}}, "overprovision": true, "doNotRunExtensionsOnOverprovisionedVMs": + false, "singlePlacementGroup": true, "orchestrationMode": "Uniform"}}' headers: Accept: - application/json @@ -1128,60 +1187,74 @@ interactions: Connection: - keep-alive Content-Length: - - '2357' + - '3473' Content-Type: - application/json ParameterSetName: - --name --resource-group --set User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachineScaleSets\",\r\n \"location\": - \"westus2\",\r\n \"tags\": 
{},\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n + \"westus2\",\r\n \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n \ \"tier\": \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": - {\r\n \"singlePlacementGroup\": true,\r\n \"upgradePolicy\": {\r\n \"mode\": - \"Manual\",\r\n \"rollingUpgradePolicy\": {\r\n \"maxBatchInstancePercent\": - 20,\r\n \"maxUnhealthyInstancePercent\": 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": - 20,\r\n \"pauseTimeBetweenBatches\": \"PT0S\"\r\n }\r\n },\r\n - \ \"virtualMachineProfile\": {\r\n \"osProfile\": {\r\n \"computerNamePrefix\": - \"clinqc38b\",\r\n \"adminUsername\": \"rhl\",\r\n \"linuxConfiguration\": - {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": - {\r\n \"publicKeys\": [\r\n {\r\n \"path\": - \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": \"ssh-rsa - AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"enableVMAgentPlatformUpdates\": false\r\n },\r\n - \ \"secrets\": [],\r\n \"allowExtensionOperations\": true,\r\n - \ \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": + {\r\n \"singlePlacementGroup\": true,\r\n \"orchestrationMode\": \"Uniform\",\r\n + \ \"upgradePolicy\": {\r\n \"mode\": \"Manual\",\r\n \"rollingUpgradePolicy\": + {\r\n \"maxBatchInstancePercent\": 20,\r\n \"maxUnhealthyInstancePercent\": + 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": 20,\r\n \"pauseTimeBetweenBatches\": + \"PT0S\"\r\n }\r\n },\r\n \"virtualMachineProfile\": {\r\n \"osProfile\": + {\r\n \"computerNamePrefix\": \"cliouf96e\",\r\n \"adminUsername\": + \"rhoover\",\r\n \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": + true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"enableVMAgentPlatformUpdates\": + false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": + true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": {\r\n \"osDisk\": {\r\n \"osType\": \"Linux\",\r\n \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\"\r\n },\r\n \ \"diskSizeGB\": 30\r\n },\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \ \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\"\r\n - \ }\r\n },\r\n \"networkProfile\": 
{\"networkInterfaceConfigurations\":[{\"name\":\"clinqc38bNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"clinqc38bIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n + \ }\r\n },\r\n \"networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"cliouf96eNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"disableTcpStateTracking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"cliouf96eIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": - true\r\n }\r\n }\r\n },\r\n \"provisioningState\": \"Updating\",\r\n - \ \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": - false,\r\n \"uniqueId\": \"239f9fe3-2e0c-403b-8c3a-55cda6cd7b9a\",\r\n - \ \"timeCreated\": \"2022-08-04T17:10:51.2598866+00:00\"\r\n }\r\n}" + true\r\n }\r\n },\r\n \"extensionProfile\": {\r\n \"extensions\": + [\r\n {\r\n \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Monitor\",\r\n \"type\": \"AzureMonitorLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"1.0\",\r\n \"settings\": + {\"GCS_AUTO_CONFIG\":true}\r\n }\r\n },\r\n {\r\n + \ \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Security.Monitoring\",\r\n \"type\": \"AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.0\",\r\n \"settings\": + {\"enableGenevaUpload\":true,\"enableAutoConfig\":true,\"reportSuccessOnUnsupportedDistro\":true}\r\n + \ }\r\n }\r\n ]\r\n }\r\n },\r\n \"provisioningState\": + \"Updating\",\r\n \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": + false,\r\n \"uniqueId\": \"793988fd-7a65-472d-9472-7470271a360c\",\r\n + \ 
\"timeCreated\": \"2022-10-14T15:18:49.0838434+00:00\"\r\n }\r\n}" headers: azure-asyncnotification: - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/5a472f80-cb97-46d3-8e9a-773ea3a5ae28?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/3a9c922e-332b-4f2a-86bf-ac046382627a?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - - '3525' + - '4960' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:12:18 GMT + - Fri, 14 Oct 2022 15:21:49 GMT expires: - '-1' pragma: @@ -1198,7 +1271,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/CreateVMScaleSet3Min;118,Microsoft.Compute/CreateVMScaleSet30Min;603,Microsoft.Compute/VmssQueuedVMOperations;0 + - Microsoft.Compute/CreateVMScaleSet3Min;147,Microsoft.Compute/CreateVMScaleSet30Min;742,Microsoft.Compute/VmssQueuedVMOperations;0 x-ms-ratelimit-remaining-subscription-writes: - '1199' x-ms-request-charge: @@ -1220,13 +1293,13 @@ interactions: ParameterSetName: - --name --resource-group --set User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/5a472f80-cb97-46d3-8e9a-773ea3a5ae28?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/3a9c922e-332b-4f2a-86bf-ac046382627a?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:12:18.6030877+00:00\",\r\n \"status\": - \"InProgress\",\r\n \"name\": \"5a472f80-cb97-46d3-8e9a-773ea3a5ae28\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:21:48.5664318+00:00\",\r\n \"status\": + \"InProgress\",\r\n \"name\": \"3a9c922e-332b-4f2a-86bf-ac046382627a\"\r\n}" headers: cache-control: - no-cache @@ -1235,7 +1308,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:12:28 GMT + - Fri, 14 Oct 2022 15:21:58 GMT expires: - '-1' pragma: @@ -1252,7 +1325,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14963,Microsoft.Compute/GetOperation30Min;29866 + - Microsoft.Compute/GetOperation3Min;14962,Microsoft.Compute/GetOperation30Min;29962 status: code: 200 message: OK @@ -1270,14 +1343,14 @@ interactions: ParameterSetName: - --name --resource-group --set User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/5a472f80-cb97-46d3-8e9a-773ea3a5ae28?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/3a9c922e-332b-4f2a-86bf-ac046382627a?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:12:18.6030877+00:00\",\r\n \"endTime\": - \"2022-08-04T17:12:43.6654917+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"5a472f80-cb97-46d3-8e9a-773ea3a5ae28\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:21:48.5664318+00:00\",\r\n \"endTime\": + \"2022-10-14T15:21:59.6288416+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"3a9c922e-332b-4f2a-86bf-ac046382627a\"\r\n}" headers: cache-control: - no-cache @@ -1286,7 +1359,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:13:05 GMT + - Fri, 14 Oct 2022 15:22:35 GMT expires: - '-1' pragma: @@ -1303,7 +1376,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14963,Microsoft.Compute/GetOperation30Min;29852 + - Microsoft.Compute/GetOperation3Min;14959,Microsoft.Compute/GetOperation30Min;29954 status: code: 200 message: OK @@ -1321,50 +1394,64 @@ interactions: ParameterSetName: - --name --resource-group --set User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachineScaleSets\",\r\n \"location\": - \"westus2\",\r\n \"tags\": {},\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n + \"westus2\",\r\n \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n \ \"tier\": \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": - {\r\n \"singlePlacementGroup\": true,\r\n \"upgradePolicy\": {\r\n \"mode\": - \"Manual\",\r\n \"rollingUpgradePolicy\": {\r\n \"maxBatchInstancePercent\": - 20,\r\n \"maxUnhealthyInstancePercent\": 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": - 20,\r\n \"pauseTimeBetweenBatches\": \"PT0S\"\r\n }\r\n },\r\n - \ \"virtualMachineProfile\": {\r\n \"osProfile\": {\r\n \"computerNamePrefix\": - \"clinqc38b\",\r\n \"adminUsername\": \"rhl\",\r\n \"linuxConfiguration\": - {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": - {\r\n \"publicKeys\": [\r\n {\r\n \"path\": - 
\"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": \"ssh-rsa - AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"enableVMAgentPlatformUpdates\": false\r\n },\r\n - \ \"secrets\": [],\r\n \"allowExtensionOperations\": true,\r\n - \ \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": + {\r\n \"singlePlacementGroup\": true,\r\n \"orchestrationMode\": \"Uniform\",\r\n + \ \"upgradePolicy\": {\r\n \"mode\": \"Manual\",\r\n \"rollingUpgradePolicy\": + {\r\n \"maxBatchInstancePercent\": 20,\r\n \"maxUnhealthyInstancePercent\": + 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": 20,\r\n \"pauseTimeBetweenBatches\": + \"PT0S\"\r\n }\r\n },\r\n \"virtualMachineProfile\": {\r\n \"osProfile\": + {\r\n \"computerNamePrefix\": \"cliouf96e\",\r\n \"adminUsername\": + \"rhoover\",\r\n \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": + true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"enableVMAgentPlatformUpdates\": + false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": + true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": {\r\n \"osDisk\": {\r\n \"osType\": \"Linux\",\r\n \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\"\r\n },\r\n \ \"diskSizeGB\": 30\r\n },\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \ \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\"\r\n - \ }\r\n },\r\n \"networkProfile\": 
{\"networkInterfaceConfigurations\":[{\"name\":\"clinqc38bNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"clinqc38bIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n + \ }\r\n },\r\n \"networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"cliouf96eNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"disableTcpStateTracking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"cliouf96eIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": - true\r\n }\r\n }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n - \ \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": - false,\r\n \"uniqueId\": \"239f9fe3-2e0c-403b-8c3a-55cda6cd7b9a\",\r\n - \ \"timeCreated\": \"2022-08-04T17:10:51.2598866+00:00\"\r\n }\r\n}" + true\r\n }\r\n },\r\n \"extensionProfile\": {\r\n \"extensions\": + [\r\n {\r\n \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Monitor\",\r\n \"type\": \"AzureMonitorLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"1.0\",\r\n \"settings\": + {\"GCS_AUTO_CONFIG\":true}\r\n }\r\n },\r\n {\r\n + \ \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Security.Monitoring\",\r\n \"type\": \"AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.0\",\r\n \"settings\": + {\"enableGenevaUpload\":true,\"enableAutoConfig\":true,\"reportSuccessOnUnsupportedDistro\":true}\r\n + \ }\r\n }\r\n ]\r\n }\r\n },\r\n \"provisioningState\": + \"Succeeded\",\r\n \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": + false,\r\n \"uniqueId\": \"793988fd-7a65-472d-9472-7470271a360c\",\r\n + \ 
\"timeCreated\": \"2022-10-14T15:18:49.0838434+00:00\"\r\n }\r\n}" headers: cache-control: - no-cache content-length: - - '3526' + - '4961' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:13:05 GMT + - Fri, 14 Oct 2022 15:22:35 GMT expires: - '-1' pragma: @@ -1381,7 +1468,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMScaleSet3Min;375,Microsoft.Compute/GetVMScaleSet30Min;2525 + - Microsoft.Compute/GetVMScaleSet3Min;381,Microsoft.Compute/GetVMScaleSet30Min;2581 status: code: 200 message: OK @@ -1403,25 +1490,27 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/manualupgrade?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/manualupgrade?api-version=2022-08-01 response: body: string: '' headers: + azure-asyncnotification: + - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/889d317a-45cb-49ea-9939-4c22229be58e?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/325a1e30-b80a-4ade-aa02-67a7605ebce6?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - '0' date: - - Thu, 04 Aug 2022 17:13:06 GMT + - Fri, 14 Oct 2022 15:22:37 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/889d317a-45cb-49ea-9939-4c22229be58e?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/325a1e30-b80a-4ade-aa02-67a7605ebce6?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 pragma: - no-cache server: @@ -1432,7 +1521,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/VMScaleSetActions3Min;236,Microsoft.Compute/VMScaleSetActions30Min;1187,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2447,Microsoft.Compute/VmssQueuedVMOperations;0 + - Microsoft.Compute/VMScaleSetActions3Min;238,Microsoft.Compute/VMScaleSetActions30Min;1198,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2977,Microsoft.Compute/VmssQueuedVMOperations;0 x-ms-ratelimit-remaining-subscription-writes: - '1199' x-ms-request-charge: @@ -1454,14 +1543,14 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) 
method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/889d317a-45cb-49ea-9939-4c22229be58e?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/325a1e30-b80a-4ade-aa02-67a7605ebce6?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:13:06.5872597+00:00\",\r\n \"endTime\": - \"2022-08-04T17:13:16.493456+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"889d317a-45cb-49ea-9939-4c22229be58e\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:22:37.909696+00:00\",\r\n \"endTime\": + \"2022-10-14T15:22:53.2532899+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"325a1e30-b80a-4ade-aa02-67a7605ebce6\"\r\n}" headers: cache-control: - no-cache @@ -1470,7 +1559,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:13:35 GMT + - Fri, 14 Oct 2022 15:23:07 GMT expires: - '-1' pragma: @@ -1487,7 +1576,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14952,Microsoft.Compute/GetOperation30Min;29837 + - Microsoft.Compute/GetOperation3Min;14953,Microsoft.Compute/GetOperation30Min;29945 status: code: 200 message: OK @@ -1505,9 +1594,9 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/889d317a-45cb-49ea-9939-4c22229be58e?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/325a1e30-b80a-4ade-aa02-67a7605ebce6?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 response: body: string: '' @@ -1517,7 +1606,7 @@ interactions: content-length: - '0' date: - - Thu, 04 Aug 2022 17:13:36 GMT + - Fri, 14 Oct 2022 15:23:07 GMT expires: - '-1' pragma: @@ -1530,7 +1619,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14950,Microsoft.Compute/GetOperation30Min;29835 + - Microsoft.Compute/GetOperation3Min;14952,Microsoft.Compute/GetOperation30Min;29944 status: code: 200 message: OK @@ -1548,28 +1637,66 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-08-01 response: body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + string: "{\r\n \"placementGroupId\": \"900064c0-d742-4569-a06c-dcca8072c0c3\",\r\n + \ \"platformUpdateDomain\": 3,\r\n \"platformFaultDomain\": 3,\r\n \"computerName\": + \"cliouf96e000003\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n + \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.8.0.11\",\r\n \"statuses\": + [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": + \"Guest Agent is running\",\r\n \"time\": \"2022-10-14T15:23:03+00:00\"\r\n + \ }\r\n ],\r\n \"extensionHandlers\": [\r\n {\r\n \"type\": + \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": + \"1.22.2\",\r\n \"status\": {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n + \ \"message\": \"Plugin enabled\"\r\n }\r\n },\r\n {\r\n + \ \"type\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.20.58\",\r\n \"status\": {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": + \"Plugin enabled\"\r\n }\r\n }\r\n ]\r\n },\r\n \"disks\": + [\r\n {\r\n \"name\": \"cliou5z7nax54kj6yubjcliou5z7nax54kj6yubj5OS__1_474db27698dd49ddab2454669aab0333\",\r\n + \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"time\": \"2022-10-14T15:22:48.6908712+00:00\"\r\n + \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": {},\r\n \"extensions\": + [\r\n {\r\n \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": + \"1.22.2\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"message\": \"Enable succeeded\"\r\n }\r\n + \ ]\r\n },\r\n {\r\n \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.20.58\",\r\n \"statuses\": [\r\n {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n + \ \"message\": \"Enable ASM succeeded\"\r\n }\r\n ]\r\n + \ }\r\n ],\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n + \ {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n \"time\": + \"2022-10-14T15:22:53.2220428+00:00\"\r\n },\r\n {\r\n \"code\": + \"PowerState/running\",\r\n \"level\": \"Info\",\r\n \"displayStatus\": + \"VM running\"\r\n }\r\n ]\r\n}" headers: cache-control: - no-cache content-length: - - '43' + - '2807' content-type: - - application/json; charset=UTF-8 + - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:13:37 GMT + - Fri, 14 Oct 2022 15:23:09 GMT expires: - '-1' pragma: - no-cache server: - - nginx + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; 
includeSubDomains transfer-encoding: @@ -1578,8 +1705,10 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-frame-options: - - deny + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/GetVMScaleSetVM3Min;499,Microsoft.Compute/GetVMScaleSetVM30Min;2499,Microsoft.Compute/VMScaleSetVMViews3Min;4993 + x-ms-request-charge: + - '1' status: code: 200 message: OK @@ -1597,45 +1726,28 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 response: body: - string: "{\r\n \"placementGroupId\": \"d026ef0f-b482-4884-8583-16ebe50a963b\",\r\n - \ \"platformUpdateDomain\": 3,\r\n \"platformFaultDomain\": 3,\r\n \"computerName\": - \"clinqc38b000003\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n - \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.7.3.0\",\r\n \"statuses\": - [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": - \"Guest Agent is running\",\r\n \"time\": \"2022-08-04T17:13:16+00:00\"\r\n - \ }\r\n ],\r\n \"extensionHandlers\": []\r\n },\r\n \"disks\": - [\r\n {\r\n \"name\": \"clinqhzpoczvy5m2spufclinqhzpoczvy5m2spufqOS__1_00d110bc7aaf4fbebbd4095a4a610862\",\r\n - \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:13:07.4309807+00:00\"\r\n - \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": {},\r\n \"hyperVGeneration\": - \"V1\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n - \ \"time\": \"2022-08-04T17:13:16.462172+00:00\"\r\n },\r\n {\r\n - \ \"code\": \"PowerState/running\",\r\n \"level\": \"Info\",\r\n - \ \"displayStatus\": \"VM running\"\r\n }\r\n ]\r\n}" + string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" headers: cache-control: - no-cache content-length: - - '1287' + - '43' content-type: - - application/json; charset=utf-8 + - application/json; charset=UTF-8 date: - - Thu, 04 Aug 2022 17:13:37 GMT + - Fri, 14 Oct 2022 15:23:14 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 + - nginx strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1644,10 +1756,8 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMScaleSetVM3Min;492,Microsoft.Compute/GetVMScaleSetVM30Min;2472,Microsoft.Compute/VMScaleSetVMViews3Min;4986 - x-ms-request-charge: - - '1' + x-frame-options: + - deny status: code: 200 message: OK @@ -1667,25 +1777,27 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 
(Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/deallocate?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/deallocate?api-version=2022-08-01 response: body: string: '' headers: + azure-asyncnotification: + - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/07890245-8ecb-4b70-b4cd-d9fc163cb566?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/2c379d80-4c1d-4e2b-afca-2630e2c759d4?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - '0' date: - - Thu, 04 Aug 2022 17:13:37 GMT + - Fri, 14 Oct 2022 15:23:14 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/07890245-8ecb-4b70-b4cd-d9fc163cb566?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/2c379d80-4c1d-4e2b-afca-2630e2c759d4?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 pragma: - no-cache server: @@ -1696,7 +1808,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/DeleteVMScaleSetVM3Min;238,Microsoft.Compute/DeleteVMScaleSetVM30Min;1196,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2421,Microsoft.Compute/VmssQueuedVMOperations;0 + - Microsoft.Compute/DeleteVMScaleSetVM3Min;239,Microsoft.Compute/DeleteVMScaleSetVM30Min;1199,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2984,Microsoft.Compute/VmssQueuedVMOperations;0 x-ms-ratelimit-remaining-subscription-writes: - '1199' x-ms-request-charge: @@ -1718,22 +1830,23 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/07890245-8ecb-4b70-b4cd-d9fc163cb566?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/2c379d80-4c1d-4e2b-afca-2630e2c759d4?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:13:38.2433222+00:00\",\r\n \"status\": - \"InProgress\",\r\n \"name\": \"07890245-8ecb-4b70-b4cd-d9fc163cb566\"\r\n}" + string: "{\r\n \"startTime\": 
\"2022-10-14T15:23:14.7843898+00:00\",\r\n \"endTime\": + \"2022-10-14T15:23:39.6278845+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"2c379d80-4c1d-4e2b-afca-2630e2c759d4\"\r\n}" headers: cache-control: - no-cache content-length: - - '134' + - '184' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:14:08 GMT + - Fri, 14 Oct 2022 15:23:44 GMT expires: - '-1' pragma: @@ -1750,7 +1863,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14942,Microsoft.Compute/GetOperation30Min;29823 + - Microsoft.Compute/GetOperation3Min;14950,Microsoft.Compute/GetOperation30Min;29932 status: code: 200 message: OK @@ -1768,23 +1881,19 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/07890245-8ecb-4b70-b4cd-d9fc163cb566?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/2c379d80-4c1d-4e2b-afca-2630e2c759d4?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:13:38.2433222+00:00\",\r\n \"endTime\": - \"2022-08-04T17:14:29.914845+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"07890245-8ecb-4b70-b4cd-d9fc163cb566\"\r\n}" + string: '' headers: cache-control: - no-cache content-length: - - '183' - content-type: - - application/json; charset=utf-8 + - '0' date: - - Thu, 04 Aug 2022 17:14:38 GMT + - Fri, 14 Oct 2022 15:23:44 GMT expires: - '-1' pragma: @@ -1794,14 +1903,10 @@ interactions: - Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14942,Microsoft.Compute/GetOperation30Min;29816 + - Microsoft.Compute/GetOperation3Min;14949,Microsoft.Compute/GetOperation30Min;29931 status: code: 200 message: OK @@ -1809,7 +1914,7 @@ interactions: body: null headers: Accept: - - '*/*' + - application/json Accept-Encoding: - gzip, deflate CommandName: @@ -1819,71 +1924,39 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/07890245-8ecb-4b70-b4cd-d9fc163cb566?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-08-01 response: body: - string: '' - headers: - cache-control: - - no-cache - 
content-length: - - '0' - date: - - Thu, 04 Aug 2022 17:14:38 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14941,Microsoft.Compute/GetOperation30Min;29815 - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - vmss deallocate - Connection: - - keep-alive - ParameterSetName: - - -g -n --instance-ids - User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 - response: - body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + string: "{\r\n \"placementGroupId\": \"900064c0-d742-4569-a06c-dcca8072c0c3\",\r\n + \ \"platformUpdateDomain\": 3,\r\n \"platformFaultDomain\": 3,\r\n \"disks\": + [\r\n {\r\n \"name\": \"cliou5z7nax54kj6yubjcliou5z7nax54kj6yubj5OS__1_474db27698dd49ddab2454669aab0333\",\r\n + \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"time\": \"2022-10-14T15:23:37.3623283+00:00\"\r\n + \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": {},\r\n \"hyperVGeneration\": + \"V1\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n + \ \"time\": \"2022-10-14T15:23:37.3935543+00:00\"\r\n },\r\n {\r\n + \ \"code\": \"PowerState/deallocated\",\r\n \"level\": \"Info\",\r\n + \ \"displayStatus\": \"VM deallocated\"\r\n }\r\n ]\r\n}" headers: cache-control: - no-cache content-length: - - '43' + - '880' content-type: - - application/json; charset=UTF-8 + - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:14:39 GMT + - Fri, 14 Oct 2022 15:23:45 GMT expires: - '-1' pragma: - no-cache server: - - nginx + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1892,8 +1965,10 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-frame-options: - - deny + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/GetVMScaleSetVM3Min;494,Microsoft.Compute/GetVMScaleSetVM30Min;2494,Microsoft.Compute/VMScaleSetVMViews3Min;4988 + x-ms-request-charge: + - '1' status: code: 200 message: OK @@ -1911,39 +1986,28 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-03-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 response: body: - string: "{\r\n \"placementGroupId\": \"d026ef0f-b482-4884-8583-16ebe50a963b\",\r\n - \ \"platformUpdateDomain\": 3,\r\n \"platformFaultDomain\": 3,\r\n \"disks\": - [\r\n {\r\n \"name\": \"clinqhzpoczvy5m2spufclinqhzpoczvy5m2spufqOS__1_00d110bc7aaf4fbebbd4095a4a610862\",\r\n - \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:14:29.7273434+00:00\"\r\n - \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": {},\r\n \"hyperVGeneration\": - \"V1\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n - \ \"time\": \"2022-08-04T17:14:29.7429651+00:00\"\r\n },\r\n {\r\n - \ \"code\": \"PowerState/deallocated\",\r\n \"level\": \"Info\",\r\n - \ \"displayStatus\": \"VM deallocated\"\r\n }\r\n ]\r\n}" + string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" headers: cache-control: - no-cache content-length: - - '880' + - '43' content-type: - - application/json; charset=utf-8 + - application/json; charset=UTF-8 date: - - Thu, 04 Aug 2022 17:14:39 GMT + - Fri, 14 Oct 2022 15:23:46 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 + - nginx strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1952,10 +2016,8 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMScaleSetVM3Min;492,Microsoft.Compute/GetVMScaleSetVM30Min;2467,Microsoft.Compute/VMScaleSetVMViews3Min;4984 - x-ms-request-charge: - - '1' + x-frame-options: + - deny status: code: 200 message: OK @@ -1977,25 +2039,27 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/start?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/start?api-version=2022-08-01 response: body: string: '' headers: + azure-asyncnotification: + - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/e546c0e8-722a-4ed0-a0f2-829e86242608?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/df884a6a-a08b-476b-a588-25f2e3e0aae1?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - '0' date: - - Thu, 04 Aug 2022 17:14:40 GMT + - Fri, 14 Oct 2022 15:23:46 GMT expires: - '-1' location: - - 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/e546c0e8-722a-4ed0-a0f2-829e86242608?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/df884a6a-a08b-476b-a588-25f2e3e0aae1?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 pragma: - no-cache server: @@ -2006,9 +2070,9 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/VMScaleSetActions3Min;235,Microsoft.Compute/VMScaleSetActions30Min;1185,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2387,Microsoft.Compute/VmssQueuedVMOperations;0 + - Microsoft.Compute/VMScaleSetActions3Min;235,Microsoft.Compute/VMScaleSetActions30Min;1195,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2992,Microsoft.Compute/VmssQueuedVMOperations;0 x-ms-ratelimit-remaining-subscription-writes: - - '1198' + - '1199' x-ms-request-charge: - '1' status: @@ -2028,22 +2092,22 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/e546c0e8-722a-4ed0-a0f2-829e86242608?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/df884a6a-a08b-476b-a588-25f2e3e0aae1?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:14:40.086698+00:00\",\r\n \"status\": - \"InProgress\",\r\n \"name\": \"e546c0e8-722a-4ed0-a0f2-829e86242608\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:23:47.6278821+00:00\",\r\n \"status\": + \"InProgress\",\r\n \"name\": \"df884a6a-a08b-476b-a588-25f2e3e0aae1\"\r\n}" headers: cache-control: - no-cache content-length: - - '133' + - '134' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:15:09 GMT + - Fri, 14 Oct 2022 15:24:17 GMT expires: - '-1' pragma: @@ -2060,7 +2124,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14940,Microsoft.Compute/GetOperation30Min;29806 + - Microsoft.Compute/GetOperation3Min;14946,Microsoft.Compute/GetOperation30Min;29920 status: code: 200 message: OK @@ -2078,23 +2142,23 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/e546c0e8-722a-4ed0-a0f2-829e86242608?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/df884a6a-a08b-476b-a588-25f2e3e0aae1?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:14:40.086698+00:00\",\r\n \"endTime\": - \"2022-08-04T17:15:15.6177127+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"e546c0e8-722a-4ed0-a0f2-829e86242608\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:23:47.6278821+00:00\",\r\n \"endTime\": + \"2022-10-14T15:24:31.3930251+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"df884a6a-a08b-476b-a588-25f2e3e0aae1\"\r\n}" headers: cache-control: - no-cache content-length: - - '183' + - '184' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:15:39 GMT + - Fri, 14 Oct 2022 15:24:47 GMT expires: - '-1' pragma: @@ -2111,7 +2175,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14946,Microsoft.Compute/GetOperation30Min;29800 + - Microsoft.Compute/GetOperation3Min;14950,Microsoft.Compute/GetOperation30Min;29912 status: code: 200 message: OK @@ -2129,9 +2193,9 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/e546c0e8-722a-4ed0-a0f2-829e86242608?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/df884a6a-a08b-476b-a588-25f2e3e0aae1?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 response: body: string: '' @@ -2141,7 +2205,7 @@ interactions: content-length: - '0' date: - - Thu, 04 Aug 2022 17:15:39 GMT + - Fri, 14 Oct 2022 15:24:48 GMT expires: - '-1' pragma: @@ -2154,7 +2218,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14945,Microsoft.Compute/GetOperation30Min;29799 + - Microsoft.Compute/GetOperation3Min;14949,Microsoft.Compute/GetOperation30Min;29911 status: code: 200 message: OK @@ -2176,25 +2240,27 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/poweroff?skipShutdown=false&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/poweroff?skipShutdown=false&api-version=2022-08-01 response: body: string: '' headers: + azure-asyncnotification: + - Enabled azure-asyncoperation: - - 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/07fc9e39-a652-4a04-8796-e82dea9cb84b?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/fc6e7a20-dfcd-40ce-80e9-1f44723d4148?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - '0' date: - - Thu, 04 Aug 2022 17:15:40 GMT + - Fri, 14 Oct 2022 15:24:48 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/07fc9e39-a652-4a04-8796-e82dea9cb84b?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/fc6e7a20-dfcd-40ce-80e9-1f44723d4148?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 pragma: - no-cache server: @@ -2205,7 +2271,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/DeleteVMScaleSet3Min;77,Microsoft.Compute/DeleteVMScaleSet30Min;392,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2318,Microsoft.Compute/VmssQueuedVMOperations;0 + - Microsoft.Compute/DeleteVMScaleSet3Min;79,Microsoft.Compute/DeleteVMScaleSet30Min;399,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;3003,Microsoft.Compute/VmssQueuedVMOperations;0 x-ms-ratelimit-remaining-subscription-writes: - '1199' x-ms-request-charge: @@ -2227,14 +2293,14 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/07fc9e39-a652-4a04-8796-e82dea9cb84b?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/fc6e7a20-dfcd-40ce-80e9-1f44723d4148?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:15:41.1331519+00:00\",\r\n \"endTime\": - \"2022-08-04T17:15:50.3830897+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"07fc9e39-a652-4a04-8796-e82dea9cb84b\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:24:48.7209789+00:00\",\r\n \"endTime\": + \"2022-10-14T15:24:55.1740793+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"fc6e7a20-dfcd-40ce-80e9-1f44723d4148\"\r\n}" headers: cache-control: - no-cache @@ -2243,7 +2309,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:16:11 GMT + - Fri, 14 Oct 2022 15:25:18 GMT expires: - '-1' pragma: @@ -2260,7 +2326,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14950,Microsoft.Compute/GetOperation30Min;29790 + - Microsoft.Compute/GetOperation3Min;14949,Microsoft.Compute/GetOperation30Min;29906 status: code: 200 message: OK @@ -2278,9 +2344,9 
@@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/07fc9e39-a652-4a04-8796-e82dea9cb84b?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/fc6e7a20-dfcd-40ce-80e9-1f44723d4148?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 response: body: string: '' @@ -2290,7 +2356,7 @@ interactions: content-length: - '0' date: - - Thu, 04 Aug 2022 17:16:11 GMT + - Fri, 14 Oct 2022 15:25:18 GMT expires: - '-1' pragma: @@ -2303,7 +2369,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14948,Microsoft.Compute/GetOperation30Min;29788 + - Microsoft.Compute/GetOperation3Min;14948,Microsoft.Compute/GetOperation30Min;29905 status: code: 200 message: OK @@ -2321,28 +2387,66 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-08-01 response: body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + string: "{\r\n \"placementGroupId\": \"900064c0-d742-4569-a06c-dcca8072c0c3\",\r\n + \ \"platformUpdateDomain\": 0,\r\n \"platformFaultDomain\": 0,\r\n \"computerName\": + \"cliouf96e000003\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n + \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.8.0.11\",\r\n \"statuses\": + [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": + \"Guest Agent is running\",\r\n \"time\": \"2022-10-14T15:24:26+00:00\"\r\n + \ }\r\n ],\r\n \"extensionHandlers\": [\r\n {\r\n \"type\": + \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": + \"1.22.2\",\r\n \"status\": {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n + \ \"message\": \"Plugin enabled\"\r\n }\r\n },\r\n {\r\n + \ \"type\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.20.58\",\r\n \"status\": {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": + \"Plugin enabled\"\r\n }\r\n }\r\n ]\r\n },\r\n \"disks\": + [\r\n {\r\n \"name\": \"cliou5z7nax54kj6yubjcliou5z7nax54kj6yubj5OS__1_474db27698dd49ddab2454669aab0333\",\r\n + \ \"statuses\": [\r\n {\r\n \"code\": 
\"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"time\": \"2022-10-14T15:23:48.3934894+00:00\"\r\n + \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": {},\r\n \"extensions\": + [\r\n {\r\n \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": + \"1.22.2\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"message\": \"Enable succeeded\"\r\n }\r\n + \ ]\r\n },\r\n {\r\n \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.20.58\",\r\n \"statuses\": [\r\n {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n + \ \"message\": \"Enable ASM succeeded\"\r\n }\r\n ]\r\n + \ }\r\n ],\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n + \ {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n \"time\": + \"2022-10-14T15:24:55.1428197+00:00\"\r\n },\r\n {\r\n \"code\": + \"PowerState/stopped\",\r\n \"level\": \"Info\",\r\n \"displayStatus\": + \"VM stopped\"\r\n }\r\n ]\r\n}" headers: cache-control: - no-cache content-length: - - '43' + - '2807' content-type: - - application/json; charset=UTF-8 + - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:16:11 GMT + - Fri, 14 Oct 2022 15:25:19 GMT expires: - '-1' pragma: - no-cache server: - - nginx + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -2351,8 +2455,10 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-frame-options: - - deny + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/GetVMScaleSetVM3Min;492,Microsoft.Compute/GetVMScaleSetVM30Min;2492,Microsoft.Compute/VMScaleSetVMViews3Min;4992 + x-ms-request-charge: + - '1' status: code: 200 message: OK @@ -2370,45 +2476,28 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 response: body: - string: "{\r\n \"placementGroupId\": \"d026ef0f-b482-4884-8583-16ebe50a963b\",\r\n - \ \"platformUpdateDomain\": 1,\r\n \"platformFaultDomain\": 1,\r\n \"computerName\": - \"clinqc38b000003\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n - \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.7.3.0\",\r\n \"statuses\": - [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": - \"Guest Agent is running\",\r\n \"time\": 
\"2022-08-04T17:15:31+00:00\"\r\n - \ }\r\n ],\r\n \"extensionHandlers\": []\r\n },\r\n \"disks\": - [\r\n {\r\n \"name\": \"clinqhzpoczvy5m2spufclinqhzpoczvy5m2spufqOS__1_00d110bc7aaf4fbebbd4095a4a610862\",\r\n - \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:15:03.1334389+00:00\"\r\n - \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": {},\r\n \"hyperVGeneration\": - \"V1\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n - \ \"time\": \"2022-08-04T17:15:50.3518389+00:00\"\r\n },\r\n {\r\n - \ \"code\": \"PowerState/stopped\",\r\n \"level\": \"Info\",\r\n - \ \"displayStatus\": \"VM stopped\"\r\n }\r\n ]\r\n}" + string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" headers: cache-control: - no-cache content-length: - - '1288' + - '43' content-type: - - application/json; charset=utf-8 + - application/json; charset=UTF-8 date: - - Thu, 04 Aug 2022 17:16:12 GMT + - Fri, 14 Oct 2022 15:25:20 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 + - nginx strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -2417,10 +2506,8 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMScaleSetVM3Min;492,Microsoft.Compute/GetVMScaleSetVM30Min;2465,Microsoft.Compute/VMScaleSetVMViews3Min;4990 - x-ms-request-charge: - - '1' + x-frame-options: + - deny status: code: 200 message: OK @@ -2442,25 +2529,27 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/start?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/start?api-version=2022-08-01 response: body: string: '' headers: + azure-asyncnotification: + - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/1243d5f7-28ce-4878-9dbe-390b02fbb686?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/a5d80113-6003-44d2-b7ae-3aec93392654?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - '0' date: - - Thu, 04 Aug 2022 17:16:12 GMT + - Fri, 14 Oct 2022 15:25:20 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/1243d5f7-28ce-4878-9dbe-390b02fbb686?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + - 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/a5d80113-6003-44d2-b7ae-3aec93392654?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 pragma: - no-cache server: @@ -2471,9 +2560,9 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/VMScaleSetActions3Min;238,Microsoft.Compute/VMScaleSetActions30Min;1184,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2321,Microsoft.Compute/VmssQueuedVMOperations;0 + - Microsoft.Compute/VMScaleSetActions3Min;235,Microsoft.Compute/VMScaleSetActions30Min;1194,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2984,Microsoft.Compute/VmssQueuedVMOperations;0 x-ms-ratelimit-remaining-subscription-writes: - - '1197' + - '1199' x-ms-request-charge: - '1' status: @@ -2493,23 +2582,23 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/1243d5f7-28ce-4878-9dbe-390b02fbb686?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/a5d80113-6003-44d2-b7ae-3aec93392654?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:16:12.9142137+00:00\",\r\n \"endTime\": - \"2022-08-04T17:16:16.3048315+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"1243d5f7-28ce-4878-9dbe-390b02fbb686\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:25:21.3144206+00:00\",\r\n \"endTime\": + \"2022-10-14T15:25:25.501922+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"a5d80113-6003-44d2-b7ae-3aec93392654\"\r\n}" headers: cache-control: - no-cache content-length: - - '184' + - '183' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:16:42 GMT + - Fri, 14 Oct 2022 15:25:50 GMT expires: - '-1' pragma: @@ -2526,7 +2615,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14956,Microsoft.Compute/GetOperation30Min;29782 + - Microsoft.Compute/GetOperation3Min;14952,Microsoft.Compute/GetOperation30Min;29898 status: code: 200 message: OK @@ -2544,9 +2633,9 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/1243d5f7-28ce-4878-9dbe-390b02fbb686?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/a5d80113-6003-44d2-b7ae-3aec93392654?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 response: body: 
string: '' @@ -2556,7 +2645,7 @@ interactions: content-length: - '0' date: - - Thu, 04 Aug 2022 17:16:42 GMT + - Fri, 14 Oct 2022 15:25:50 GMT expires: - '-1' pragma: @@ -2569,7 +2658,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14955,Microsoft.Compute/GetOperation30Min;29781 + - Microsoft.Compute/GetOperation3Min;14951,Microsoft.Compute/GetOperation30Min;29897 status: code: 200 message: OK @@ -2587,50 +2676,68 @@ interactions: ParameterSetName: - --name --resource-group --set User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachineScaleSets\",\r\n \"location\": - \"westus2\",\r\n \"tags\": {},\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n - \ \"tier\": \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": - {\r\n \"singlePlacementGroup\": true,\r\n \"upgradePolicy\": {\r\n \"mode\": - \"Manual\",\r\n \"rollingUpgradePolicy\": {\r\n \"maxBatchInstancePercent\": - 20,\r\n \"maxUnhealthyInstancePercent\": 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": - 20,\r\n \"pauseTimeBetweenBatches\": \"PT0S\"\r\n }\r\n },\r\n - \ \"virtualMachineProfile\": {\r\n \"osProfile\": {\r\n \"computerNamePrefix\": - \"clinqc38b\",\r\n \"adminUsername\": \"rhl\",\r\n \"linuxConfiguration\": - {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": - {\r\n \"publicKeys\": [\r\n {\r\n \"path\": - \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": \"ssh-rsa - AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"enableVMAgentPlatformUpdates\": false\r\n },\r\n - \ \"secrets\": [],\r\n \"allowExtensionOperations\": true,\r\n - \ \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": + \"westus2\",\r\n \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": 
\"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n \"tier\": + \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": {\r\n \"singlePlacementGroup\": + true,\r\n \"orchestrationMode\": \"Uniform\",\r\n \"upgradePolicy\": + {\r\n \"mode\": \"Manual\",\r\n \"rollingUpgradePolicy\": {\r\n + \ \"maxBatchInstancePercent\": 20,\r\n \"maxUnhealthyInstancePercent\": + 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": 20,\r\n \"pauseTimeBetweenBatches\": + \"PT0S\"\r\n }\r\n },\r\n \"virtualMachineProfile\": {\r\n \"osProfile\": + {\r\n \"computerNamePrefix\": \"cliouf96e\",\r\n \"adminUsername\": + \"rhoover\",\r\n \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": + true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"enableVMAgentPlatformUpdates\": + false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": + true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": {\r\n \"osDisk\": {\r\n \"osType\": \"Linux\",\r\n \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\"\r\n },\r\n \ \"diskSizeGB\": 30\r\n },\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \ \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\"\r\n - \ }\r\n },\r\n \"networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"clinqc38bNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"clinqc38bIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n + \ }\r\n },\r\n \"networkProfile\": 
{\"networkInterfaceConfigurations\":[{\"name\":\"cliouf96eNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"disableTcpStateTracking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"cliouf96eIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": - true\r\n }\r\n }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n - \ \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": - false,\r\n \"uniqueId\": \"239f9fe3-2e0c-403b-8c3a-55cda6cd7b9a\",\r\n - \ \"timeCreated\": \"2022-08-04T17:10:51.2598866+00:00\"\r\n }\r\n}" + true\r\n }\r\n },\r\n \"extensionProfile\": {\r\n \"extensions\": + [\r\n {\r\n \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Monitor\",\r\n \"type\": \"AzureMonitorLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"1.0\",\r\n \"settings\": + {\"GCS_AUTO_CONFIG\":true}\r\n }\r\n },\r\n {\r\n + \ \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Security.Monitoring\",\r\n \"type\": \"AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.0\",\r\n \"settings\": + {\"enableGenevaUpload\":true,\"enableAutoConfig\":true,\"reportSuccessOnUnsupportedDistro\":true}\r\n + \ }\r\n }\r\n ]\r\n }\r\n },\r\n \"provisioningState\": + \"Succeeded\",\r\n \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": + false,\r\n \"uniqueId\": \"793988fd-7a65-472d-9472-7470271a360c\",\r\n + \ \"timeCreated\": \"2022-10-14T15:18:49.0838434+00:00\"\r\n }\r\n}" headers: cache-control: - no-cache content-length: - - '3526' + - '5376' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:16:43 GMT + - Fri, 14 Oct 2022 15:25:52 GMT expires: - '-1' pragma: @@ -2647,33 +2754,42 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMScaleSet3Min;391,Microsoft.Compute/GetVMScaleSet30Min;2513 + - Microsoft.Compute/GetVMScaleSet3Min;383,Microsoft.Compute/GetVMScaleSet30Min;2560 status: code: 200 message: OK - request: - body: '{"location": "westus2", "tags": {}, "sku": {"name": "Standard_DS1_v2", - "tier": "Standard", "capacity": 2}, "properties": {"upgradePolicy": {"mode": - "Manual", "rollingUpgradePolicy": {"maxBatchInstancePercent": 20, "maxUnhealthyInstancePercent": - 20, "maxUnhealthyUpgradedInstancePercent": 20, "pauseTimeBetweenBatches": "PT0S"}}, - "virtualMachineProfile": {"osProfile": {"computerNamePrefix": "clinqc38b", 
"adminUsername": - "rhl", "linuxConfiguration": {"disablePasswordAuthentication": true, "ssh": - {"publicKeys": [{"path": "/home/rhl/.ssh/authorized_keys", "keyData": "ssh-rsa - AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5"}]}, - "provisionVMAgent": true}, "secrets": [], "allowExtensionOperations": true}, - "storageProfile": {"osDisk": {"caching": "ReadWrite", "createOption": "FromImage", - "diskSizeGB": 30, "osType": "Linux", "managedDisk": {"storageAccountType": "Premium_LRS"}}}, - "networkProfile": {"networkInterfaceConfigurations": [{"name": "clinqc38bNic", - "properties": {"primary": true, "enableAcceleratedNetworking": false, "dnsSettings": - {"dnsServers": []}, "ipConfigurations": [{"name": "clinqc38bIPConfig", "properties": - {"subnet": {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet"}, + body: '{"location": "westus2", "tags": {"azsecpack": "nonprod", "platformsettings.host_environment.service.platform_optedin_for_rootcerts": + "true"}, "sku": {"name": "Standard_DS1_v2", "tier": "Standard", "capacity": + 2}, "identity": {"type": "UserAssigned", "userAssignedIdentities": {"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2": + {}}}, "properties": {"upgradePolicy": {"mode": "Manual", "rollingUpgradePolicy": + {"maxBatchInstancePercent": 20, "maxUnhealthyInstancePercent": 20, "maxUnhealthyUpgradedInstancePercent": + 20, "pauseTimeBetweenBatches": "PT0S"}}, "virtualMachineProfile": {"osProfile": + {"computerNamePrefix": "cliouf96e", "adminUsername": "rhoover", "linuxConfiguration": + {"disablePasswordAuthentication": true, "ssh": {"publicKeys": [{"path": "/home/rhoover/.ssh/authorized_keys", + "keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\n"}]}, "provisionVMAgent": true, "enableVMAgentPlatformUpdates": + false}, "secrets": [], "allowExtensionOperations": true}, "storageProfile": + {"osDisk": {"caching": "ReadWrite", "createOption": "FromImage", "diskSizeGB": + 30, "osType": "Linux", "managedDisk": {"storageAccountType": "Premium_LRS"}}}, + "networkProfile": {"networkInterfaceConfigurations": [{"name": "cliouf96eNic", + "properties": {"primary": true, "enableAcceleratedNetworking": false, "disableTcpStateTracking": + false, "dnsSettings": {"dnsServers": []}, "ipConfigurations": [{"name": "cliouf96eIPConfig", + "properties": {"subnet": {"id": 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet"}, "privateIPAddressVersion": "IPv4", "loadBalancerBackendAddressPools": [{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool"}], "loadBalancerInboundNatPools": [{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool"}]}}], "enableIPForwarding": false}}]}, "diagnosticsProfile": {"bootDiagnostics": {"enabled": - true, "storageUri": "https://cli000002.blob.core.windows.net/"}}}, "overprovision": - true, "doNotRunExtensionsOnOverprovisionedVMs": false, "singlePlacementGroup": - true}}' + true, "storageUri": "https://cli000002.blob.core.windows.net/"}}, "extensionProfile": + {"extensions": [{"name": "Microsoft.Azure.Monitor.AzureMonitorLinuxAgent", "properties": + {"publisher": "Microsoft.Azure.Monitor", "type": "AzureMonitorLinuxAgent", "typeHandlerVersion": + "1.0", "autoUpgradeMinorVersion": true, "enableAutomaticUpgrade": true, "settings": + {"GCS_AUTO_CONFIG": true}}}, {"name": "Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent", + "properties": {"publisher": "Microsoft.Azure.Security.Monitoring", "type": "AzureSecurityLinuxAgent", + "typeHandlerVersion": "2.0", "autoUpgradeMinorVersion": true, "enableAutomaticUpgrade": + true, "settings": {"enableGenevaUpload": true, "enableAutoConfig": true, "reportSuccessOnUnsupportedDistro": + true}}}]}}, "overprovision": true, "doNotRunExtensionsOnOverprovisionedVMs": + false, "singlePlacementGroup": true, "orchestrationMode": "Uniform"}}' headers: Accept: - application/json @@ -2684,61 +2800,79 @@ interactions: Connection: - keep-alive Content-Length: - - '2415' + - '3782' Content-Type: - application/json ParameterSetName: - --name --resource-group --set User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachineScaleSets\",\r\n \"location\": - \"westus2\",\r\n \"tags\": {},\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n - \ \"tier\": \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": - {\r\n \"singlePlacementGroup\": true,\r\n \"upgradePolicy\": {\r\n \"mode\": - \"Manual\",\r\n \"rollingUpgradePolicy\": {\r\n \"maxBatchInstancePercent\": - 20,\r\n \"maxUnhealthyInstancePercent\": 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": - 20,\r\n \"pauseTimeBetweenBatches\": 
\"PT0S\"\r\n }\r\n },\r\n - \ \"virtualMachineProfile\": {\r\n \"osProfile\": {\r\n \"computerNamePrefix\": - \"clinqc38b\",\r\n \"adminUsername\": \"rhl\",\r\n \"linuxConfiguration\": - {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": - {\r\n \"publicKeys\": [\r\n {\r\n \"path\": - \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": \"ssh-rsa - AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"enableVMAgentPlatformUpdates\": false\r\n },\r\n - \ \"secrets\": [],\r\n \"allowExtensionOperations\": true,\r\n - \ \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": + \"westus2\",\r\n \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n \"tier\": + \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": {\r\n \"singlePlacementGroup\": + true,\r\n \"orchestrationMode\": \"Uniform\",\r\n \"upgradePolicy\": + {\r\n \"mode\": \"Manual\",\r\n \"rollingUpgradePolicy\": {\r\n + \ \"maxBatchInstancePercent\": 20,\r\n \"maxUnhealthyInstancePercent\": + 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": 20,\r\n \"pauseTimeBetweenBatches\": + \"PT0S\"\r\n }\r\n },\r\n \"virtualMachineProfile\": {\r\n \"osProfile\": + {\r\n \"computerNamePrefix\": \"cliouf96e\",\r\n \"adminUsername\": + \"rhoover\",\r\n \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": + true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"enableVMAgentPlatformUpdates\": + false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": + true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": {\r\n \"osDisk\": {\r\n \"osType\": \"Linux\",\r\n \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\"\r\n },\r\n \ \"diskSizeGB\": 30\r\n },\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \ \"sku\": 
\"18.04-LTS\",\r\n \"version\": \"latest\"\r\n - \ }\r\n },\r\n \"networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"clinqc38bNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"clinqc38bIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n + \ }\r\n },\r\n \"networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"cliouf96eNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"disableTcpStateTracking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"cliouf96eIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": true,\r\n \"storageUri\": \"https://cli000002.blob.core.windows.net/\"\r\n - \ }\r\n }\r\n },\r\n \"provisioningState\": \"Updating\",\r\n - \ \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": - false,\r\n \"uniqueId\": \"239f9fe3-2e0c-403b-8c3a-55cda6cd7b9a\",\r\n - \ \"timeCreated\": \"2022-08-04T17:10:51.2598866+00:00\"\r\n }\r\n}" + \ }\r\n },\r\n \"extensionProfile\": {\r\n \"extensions\": + [\r\n {\r\n \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Monitor\",\r\n \"type\": \"AzureMonitorLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"1.0\",\r\n \"settings\": + {\"GCS_AUTO_CONFIG\":true}\r\n }\r\n },\r\n {\r\n + \ \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Security.Monitoring\",\r\n \"type\": \"AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.0\",\r\n \"settings\": + {\"enableGenevaUpload\":true,\"enableAutoConfig\":true,\"reportSuccessOnUnsupportedDistro\":true}\r\n + \ }\r\n }\r\n ]\r\n }\r\n },\r\n \"provisioningState\": + \"Updating\",\r\n 
\"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": + false,\r\n \"uniqueId\": \"793988fd-7a65-472d-9472-7470271a360c\",\r\n + \ \"timeCreated\": \"2022-10-14T15:18:49.0838434+00:00\"\r\n }\r\n}" headers: azure-asyncnotification: - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/8153e4c3-cdab-4eed-9544-7226ec957d7b?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/2b73d0ef-9378-43a2-976e-aba0ae465449?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - - '3594' + - '5444' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:16:46 GMT + - Fri, 14 Oct 2022 15:25:56 GMT expires: - '-1' pragma: @@ -2755,7 +2889,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/CreateVMScaleSet3Min;113,Microsoft.Compute/CreateVMScaleSet30Min;558,Microsoft.Compute/VmssQueuedVMOperations;0 + - Microsoft.Compute/CreateVMScaleSet3Min;145,Microsoft.Compute/CreateVMScaleSet30Min;744,Microsoft.Compute/VmssQueuedVMOperations;0 x-ms-ratelimit-remaining-subscription-writes: - '1199' x-ms-request-charge: @@ -2777,14 +2911,14 @@ interactions: ParameterSetName: - --name --resource-group --set User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/8153e4c3-cdab-4eed-9544-7226ec957d7b?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/2b73d0ef-9378-43a2-976e-aba0ae465449?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:16:46.4765007+00:00\",\r\n \"endTime\": - \"2022-08-04T17:16:46.7421169+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"8153e4c3-cdab-4eed-9544-7226ec957d7b\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:25:56.6891099+00:00\",\r\n \"endTime\": + \"2022-10-14T15:25:57.0797314+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"2b73d0ef-9378-43a2-976e-aba0ae465449\"\r\n}" headers: cache-control: - no-cache @@ -2793,7 +2927,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:16:56 GMT + - Fri, 14 Oct 2022 15:26:07 GMT expires: - '-1' pragma: @@ -2810,7 +2944,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14949,Microsoft.Compute/GetOperation30Min;29775 + - Microsoft.Compute/GetOperation3Min;14958,Microsoft.Compute/GetOperation30Min;29896 status: code: 200 message: OK @@ -2828,51 +2962,69 @@ interactions: ParameterSetName: - --name --resource-group --set User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 
Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachineScaleSets\",\r\n \"location\": - \"westus2\",\r\n \"tags\": {},\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n - \ \"tier\": \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": - {\r\n \"singlePlacementGroup\": true,\r\n \"upgradePolicy\": {\r\n \"mode\": - \"Manual\",\r\n \"rollingUpgradePolicy\": {\r\n \"maxBatchInstancePercent\": - 20,\r\n \"maxUnhealthyInstancePercent\": 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": - 20,\r\n \"pauseTimeBetweenBatches\": \"PT0S\"\r\n }\r\n },\r\n - \ \"virtualMachineProfile\": {\r\n \"osProfile\": {\r\n \"computerNamePrefix\": - \"clinqc38b\",\r\n \"adminUsername\": \"rhl\",\r\n \"linuxConfiguration\": - {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": - {\r\n \"publicKeys\": [\r\n {\r\n \"path\": - \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": \"ssh-rsa - AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"enableVMAgentPlatformUpdates\": false\r\n },\r\n - \ \"secrets\": [],\r\n \"allowExtensionOperations\": true,\r\n - \ \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": + \"westus2\",\r\n \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n \"tier\": + \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": {\r\n \"singlePlacementGroup\": + true,\r\n \"orchestrationMode\": \"Uniform\",\r\n \"upgradePolicy\": + {\r\n \"mode\": \"Manual\",\r\n \"rollingUpgradePolicy\": {\r\n + \ \"maxBatchInstancePercent\": 20,\r\n \"maxUnhealthyInstancePercent\": + 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": 20,\r\n \"pauseTimeBetweenBatches\": + \"PT0S\"\r\n }\r\n },\r\n \"virtualMachineProfile\": {\r\n \"osProfile\": + {\r\n \"computerNamePrefix\": \"cliouf96e\",\r\n \"adminUsername\": + \"rhoover\",\r\n \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": + true,\r\n \"ssh\": {\r\n 
\"publicKeys\": [\r\n {\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"enableVMAgentPlatformUpdates\": + false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": + true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": {\r\n \"osDisk\": {\r\n \"osType\": \"Linux\",\r\n \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\"\r\n },\r\n \ \"diskSizeGB\": 30\r\n },\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \ \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\"\r\n - \ }\r\n },\r\n \"networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"clinqc38bNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"clinqc38bIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n + \ }\r\n },\r\n \"networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"cliouf96eNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"disableTcpStateTracking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"cliouf96eIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": true,\r\n \"storageUri\": \"https://cli000002.blob.core.windows.net/\"\r\n - \ }\r\n 
}\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n - \ \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": - false,\r\n \"uniqueId\": \"239f9fe3-2e0c-403b-8c3a-55cda6cd7b9a\",\r\n - \ \"timeCreated\": \"2022-08-04T17:10:51.2598866+00:00\"\r\n }\r\n}" + \ }\r\n },\r\n \"extensionProfile\": {\r\n \"extensions\": + [\r\n {\r\n \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Monitor\",\r\n \"type\": \"AzureMonitorLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"1.0\",\r\n \"settings\": + {\"GCS_AUTO_CONFIG\":true}\r\n }\r\n },\r\n {\r\n + \ \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Security.Monitoring\",\r\n \"type\": \"AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.0\",\r\n \"settings\": + {\"enableGenevaUpload\":true,\"enableAutoConfig\":true,\"reportSuccessOnUnsupportedDistro\":true}\r\n + \ }\r\n }\r\n ]\r\n }\r\n },\r\n \"provisioningState\": + \"Succeeded\",\r\n \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": + false,\r\n \"uniqueId\": \"793988fd-7a65-472d-9472-7470271a360c\",\r\n + \ \"timeCreated\": \"2022-10-14T15:18:49.0838434+00:00\"\r\n }\r\n}" headers: cache-control: - no-cache content-length: - - '3595' + - '5445' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:16:56 GMT + - Fri, 14 Oct 2022 15:26:07 GMT expires: - '-1' pragma: @@ -2889,7 +3041,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMScaleSet3Min;382,Microsoft.Compute/GetVMScaleSet30Min;2504 + - Microsoft.Compute/GetVMScaleSet3Min;386,Microsoft.Compute/GetVMScaleSet30Min;2556 status: code: 200 message: OK @@ -2911,25 +3063,27 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/manualupgrade?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/manualupgrade?api-version=2022-08-01 response: body: string: '' headers: + azure-asyncnotification: + - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/5af238b5-2789-45a9-860f-6060cfaaa57c?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/dcb50831-7d49-4cf9-8d92-4e1be0499d7d?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - '0' date: - - Thu, 04 Aug 2022 17:16:57 GMT + - Fri, 14 Oct 2022 15:26:08 GMT expires: - '-1' location: - - 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/5af238b5-2789-45a9-860f-6060cfaaa57c?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/dcb50831-7d49-4cf9-8d92-4e1be0499d7d?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 pragma: - no-cache server: @@ -2940,9 +3094,9 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/VMScaleSetActions3Min;237,Microsoft.Compute/VMScaleSetActions30Min;1183,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2231,Microsoft.Compute/VmssQueuedVMOperations;0 + - Microsoft.Compute/VMScaleSetActions3Min;237,Microsoft.Compute/VMScaleSetActions30Min;1193,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2995,Microsoft.Compute/VmssQueuedVMOperations;0 x-ms-ratelimit-remaining-subscription-writes: - - '1199' + - '1198' x-ms-request-charge: - '1' status: @@ -2962,14 +3116,14 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/5af238b5-2789-45a9-860f-6060cfaaa57c?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/dcb50831-7d49-4cf9-8d92-4e1be0499d7d?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:16:57.6015185+00:00\",\r\n \"endTime\": - \"2022-08-04T17:17:10.288885+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"5af238b5-2789-45a9-860f-6060cfaaa57c\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:26:08.3921237+00:00\",\r\n \"endTime\": + \"2022-10-14T15:26:24.220085+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"dcb50831-7d49-4cf9-8d92-4e1be0499d7d\"\r\n}" headers: cache-control: - no-cache @@ -2978,7 +3132,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:17:27 GMT + - Fri, 14 Oct 2022 15:26:37 GMT expires: - '-1' pragma: @@ -2995,7 +3149,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14954,Microsoft.Compute/GetOperation30Min;29771 + - Microsoft.Compute/GetOperation3Min;14964,Microsoft.Compute/GetOperation30Min;29890 status: code: 200 message: OK @@ -3013,9 +3167,9 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/5af238b5-2789-45a9-860f-6060cfaaa57c?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/dcb50831-7d49-4cf9-8d92-4e1be0499d7d?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 response: body: string: '' @@ -3025,7 +3179,7 @@ interactions: content-length: - '0' date: - - Thu, 04 Aug 2022 17:17:27 GMT + - Fri, 14 Oct 2022 15:26:37 GMT expires: - '-1' pragma: @@ -3038,7 +3192,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14953,Microsoft.Compute/GetOperation30Min;29770 + - Microsoft.Compute/GetOperation3Min;14963,Microsoft.Compute/GetOperation30Min;29889 status: code: 200 message: OK @@ -3056,28 +3210,68 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-08-01 response: body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + string: "{\r\n \"placementGroupId\": \"900064c0-d742-4569-a06c-dcca8072c0c3\",\r\n + \ \"platformUpdateDomain\": 0,\r\n \"platformFaultDomain\": 0,\r\n \"computerName\": + \"cliouf96e000003\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n + \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.8.0.11\",\r\n \"statuses\": + [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": + \"Guest Agent is running\",\r\n \"time\": \"2022-10-14T15:26:22+00:00\"\r\n + \ }\r\n ],\r\n \"extensionHandlers\": [\r\n {\r\n \"type\": + \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": + \"1.22.2\",\r\n \"status\": {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n + \ \"message\": \"Plugin enabled\"\r\n }\r\n },\r\n {\r\n + \ \"type\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.20.58\",\r\n \"status\": {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": + \"Plugin enabled\"\r\n }\r\n }\r\n ]\r\n },\r\n \"disks\": + [\r\n {\r\n \"name\": \"cliou5z7nax54kj6yubjcliou5z7nax54kj6yubj5OS__1_474db27698dd49ddab2454669aab0333\",\r\n + \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"time\": \"2022-10-14T15:26:09.4077099+00:00\"\r\n + \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": {\r\n \"consoleScreenshotBlobUri\": + \"https://cli000002.blob.core.windows.net/bootdiagnostics-cliou5z7n-c3d44363-484f-435b-bb1d-61e4ddddcb55/cli000003_3.c3d44363-484f-435b-bb1d-61e4ddddcb55.screenshot.bmp\",\r\n + \ \"serialConsoleLogBlobUri\": 
\"https://cli000002.blob.core.windows.net/bootdiagnostics-cliou5z7n-c3d44363-484f-435b-bb1d-61e4ddddcb55/cli000003_3.c3d44363-484f-435b-bb1d-61e4ddddcb55.serialconsole.log\"\r\n + \ },\r\n \"extensions\": [\r\n {\r\n \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": + \"1.22.2\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"message\": \"Enable succeeded\"\r\n }\r\n + \ ]\r\n },\r\n {\r\n \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.20.58\",\r\n \"statuses\": [\r\n {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n + \ \"message\": \"Enable ASM succeeded\"\r\n }\r\n ]\r\n + \ }\r\n ],\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n + \ {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n \"time\": + \"2022-10-14T15:26:24.1732214+00:00\"\r\n },\r\n {\r\n \"code\": + \"PowerState/running\",\r\n \"level\": \"Info\",\r\n \"displayStatus\": + \"VM running\"\r\n }\r\n ]\r\n}" headers: cache-control: - no-cache content-length: - - '43' + - '3218' content-type: - - application/json; charset=UTF-8 + - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:17:28 GMT + - Fri, 14 Oct 2022 15:26:39 GMT expires: - '-1' pragma: - no-cache server: - - nginx + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3086,8 +3280,10 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-frame-options: - - deny + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/GetVMScaleSetVM3Min;498,Microsoft.Compute/GetVMScaleSetVM30Min;2491,Microsoft.Compute/VMScaleSetVMViews3Min;4998 + x-ms-request-charge: + - '1' status: code: 200 message: OK @@ -3105,47 +3301,27 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-storage/20.1.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002?api-version=2022-05-01 response: body: - string: "{\r\n \"placementGroupId\": \"d026ef0f-b482-4884-8583-16ebe50a963b\",\r\n - \ \"platformUpdateDomain\": 1,\r\n \"platformFaultDomain\": 1,\r\n \"computerName\": - \"clinqc38b000003\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n - \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.7.3.0\",\r\n \"statuses\": - [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": - \"Guest Agent is running\",\r\n \"time\": 
\"2022-08-04T17:17:09+00:00\"\r\n - \ }\r\n ],\r\n \"extensionHandlers\": []\r\n },\r\n \"disks\": - [\r\n {\r\n \"name\": \"clinqhzpoczvy5m2spufclinqhzpoczvy5m2spufqOS__1_00d110bc7aaf4fbebbd4095a4a610862\",\r\n - \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:17:00.6326722+00:00\"\r\n - \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": {\r\n \"consoleScreenshotBlobUri\": - \"https://cli000002.blob.core.windows.net/bootdiagnostics-clinqhzpo-c47ce47c-9b4d-461d-8f0e-b5a271ca2265/cli000003_3.c47ce47c-9b4d-461d-8f0e-b5a271ca2265.screenshot.bmp\",\r\n - \ \"serialConsoleLogBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-clinqhzpo-c47ce47c-9b4d-461d-8f0e-b5a271ca2265/cli000003_3.c47ce47c-9b4d-461d-8f0e-b5a271ca2265.serialconsole.log\"\r\n - \ },\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n {\r\n - \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": \"Info\",\r\n - \ \"displayStatus\": \"Provisioning succeeded\",\r\n \"time\": \"2022-08-04T17:17:10.2576133+00:00\"\r\n - \ },\r\n {\r\n \"code\": \"PowerState/running\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"VM running\"\r\n }\r\n ]\r\n}" + string: '{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002","name":"cli000002","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-10-14T15:18:09.2031765Z","key2":"2022-10-14T15:18:09.2031765Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.3282100Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.3282100Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-10-14T15:18:09.0938358Z","primaryEndpoints":{"blob":"https://cli000002.blob.core.windows.net/","queue":"https://cli000002.queue.core.windows.net/","table":"https://cli000002.table.core.windows.net/","file":"https://cli000002.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}}' headers: cache-control: - no-cache content-length: - - '1699' + - '1259' content-type: - - application/json; charset=utf-8 + - application/json date: - - Thu, 04 Aug 2022 17:17:28 GMT + - Fri, 14 Oct 2022 15:26:39 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 + - Microsoft-Azure-Storage-Resource-Provider/1.0,Microsoft-HTTPAPI/2.0 Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3154,10 +3330,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMScaleSetVM3Min;493,Microsoft.Compute/GetVMScaleSetVM30Min;2464,Microsoft.Compute/VMScaleSetVMViews3Min;4993 - x-ms-request-charge: - - '1' status: code: 200 message: OK @@ -3169,30 +3341,28 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - serial-console disable + - vmss update-instances Connection: - keep-alive - Content-Length: - - 
'0' - Content-Type: - - application/json + ParameterSetName: + - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default/disableConsole?api-version=2018-05-01 + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 response: body: - string: "{\n \"properties\": {\n \"disabled\": true\n }\n}" + string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" headers: cache-control: - no-cache content-length: - - '42' + - '43' content-type: - application/json; charset=UTF-8 date: - - Thu, 04 Aug 2022 17:17:28 GMT + - Fri, 14 Oct 2022 15:26:39 GMT expires: - '-1' pragma: @@ -3209,8 +3379,6 @@ interactions: - nosniff x-frame-options: - deny - x-ms-ratelimit-remaining-subscription-writes: - - '1199' status: code: 200 message: OK @@ -3225,11 +3393,15 @@ interactions: - serial-console disable Connection: - keep-alive + Content-Length: + - '0' + Content-Type: + - application/json User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default/disableConsole?api-version=2018-05-01 response: body: string: "{\n \"properties\": {\n \"disabled\": true\n }\n}" @@ -3241,7 +3413,7 @@ interactions: content-type: - application/json; charset=UTF-8 date: - - Thu, 04 Aug 2022 17:17:29 GMT + - Fri, 14 Oct 2022 15:26:39 GMT expires: - '-1' pragma: @@ -3258,6 +3430,8 @@ interactions: - nosniff x-frame-options: - deny + x-ms-ratelimit-remaining-subscription-writes: + - '1199' status: code: 200 message: OK @@ -3269,36 +3443,72 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - serial-console enable + - serial-console disable Connection: - keep-alive - Content-Length: - - '0' - Content-Type: - - application/json User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default/enableConsole?api-version=2018-05-01 + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-08-01 response: body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + string: "{\r\n \"placementGroupId\": 
\"900064c0-d742-4569-a06c-dcca8072c0c3\",\r\n + \ \"platformUpdateDomain\": 0,\r\n \"platformFaultDomain\": 0,\r\n \"computerName\": + \"cliouf96e000003\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n + \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.8.0.11\",\r\n \"statuses\": + [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": + \"Guest Agent is running\",\r\n \"time\": \"2022-10-14T15:26:22+00:00\"\r\n + \ }\r\n ],\r\n \"extensionHandlers\": [\r\n {\r\n \"type\": + \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": + \"1.22.2\",\r\n \"status\": {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n + \ \"message\": \"Plugin enabled\"\r\n }\r\n },\r\n {\r\n + \ \"type\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.20.58\",\r\n \"status\": {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": + \"Plugin enabled\"\r\n }\r\n }\r\n ]\r\n },\r\n \"disks\": + [\r\n {\r\n \"name\": \"cliou5z7nax54kj6yubjcliou5z7nax54kj6yubj5OS__1_474db27698dd49ddab2454669aab0333\",\r\n + \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"time\": \"2022-10-14T15:26:09.4077099+00:00\"\r\n + \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": {\r\n \"consoleScreenshotBlobUri\": + \"https://cli000002.blob.core.windows.net/bootdiagnostics-cliou5z7n-c3d44363-484f-435b-bb1d-61e4ddddcb55/cli000003_3.c3d44363-484f-435b-bb1d-61e4ddddcb55.screenshot.bmp\",\r\n + \ \"serialConsoleLogBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-cliou5z7n-c3d44363-484f-435b-bb1d-61e4ddddcb55/cli000003_3.c3d44363-484f-435b-bb1d-61e4ddddcb55.serialconsole.log\"\r\n + \ },\r\n \"extensions\": [\r\n {\r\n \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": + \"1.22.2\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"message\": \"Enable succeeded\"\r\n }\r\n + \ ]\r\n },\r\n {\r\n \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.20.58\",\r\n \"statuses\": [\r\n {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n + \ \"message\": \"Enable ASM succeeded\"\r\n }\r\n ]\r\n + \ }\r\n ],\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n + \ {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n \"time\": + \"2022-10-14T15:26:24.1732214+00:00\"\r\n },\r\n {\r\n \"code\": + \"PowerState/running\",\r\n \"level\": \"Info\",\r\n \"displayStatus\": + \"VM running\"\r\n }\r\n ]\r\n}" headers: cache-control: - no-cache content-length: - - '43' + - '3218' content-type: - - application/json; charset=UTF-8 + - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:17:29 GMT + - Fri, 14 Oct 2022 15:26:40 GMT expires: - '-1' pragma: - no-cache server: - - nginx + - Microsoft-HTTPAPI/2.0 + - 
Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3307,10 +3517,10 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-frame-options: - - deny - x-ms-ratelimit-remaining-subscription-writes: - - '1198' + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/GetVMScaleSetVM3Min;497,Microsoft.Compute/GetVMScaleSetVM30Min;2490,Microsoft.Compute/VMScaleSetVMViews3Min;4997 + x-ms-request-charge: + - '1' status: code: 200 message: OK @@ -3322,32 +3532,31 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - serial-console enable + - serial-console disable Connection: - keep-alive User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-storage/20.1.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002?api-version=2022-05-01 response: body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + string: '{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002","name":"cli000002","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-10-14T15:18:09.2031765Z","key2":"2022-10-14T15:18:09.2031765Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.3282100Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.3282100Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-10-14T15:18:09.0938358Z","primaryEndpoints":{"blob":"https://cli000002.blob.core.windows.net/","queue":"https://cli000002.queue.core.windows.net/","table":"https://cli000002.table.core.windows.net/","file":"https://cli000002.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}}' headers: cache-control: - no-cache content-length: - - '43' + - '1259' content-type: - - application/json; charset=UTF-8 + - application/json date: - - Thu, 04 Aug 2022 17:17:30 GMT + - Fri, 14 Oct 2022 15:26:40 GMT expires: - '-1' pragma: - no-cache server: - - nginx + - Microsoft-Azure-Storage-Resource-Provider/1.0,Microsoft-HTTPAPI/2.0 Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3356,8 +3565,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-frame-options: - - deny status: code: 200 message: OK @@ -3369,51 +3576,32 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - serial-console enable + - serial-console disable Connection: - keep-alive User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 
(Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 response: body: - string: "{\r\n \"placementGroupId\": \"d026ef0f-b482-4884-8583-16ebe50a963b\",\r\n - \ \"platformUpdateDomain\": 1,\r\n \"platformFaultDomain\": 1,\r\n \"computerName\": - \"clinqc38b000003\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n - \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.7.3.0\",\r\n \"statuses\": - [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": - \"Guest Agent is running\",\r\n \"time\": \"2022-08-04T17:17:09+00:00\"\r\n - \ }\r\n ],\r\n \"extensionHandlers\": []\r\n },\r\n \"disks\": - [\r\n {\r\n \"name\": \"clinqhzpoczvy5m2spufclinqhzpoczvy5m2spufqOS__1_00d110bc7aaf4fbebbd4095a4a610862\",\r\n - \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:17:00.6326722+00:00\"\r\n - \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": {\r\n \"consoleScreenshotBlobUri\": - \"https://cli000002.blob.core.windows.net/bootdiagnostics-clinqhzpo-c47ce47c-9b4d-461d-8f0e-b5a271ca2265/cli000003_3.c47ce47c-9b4d-461d-8f0e-b5a271ca2265.screenshot.bmp\",\r\n - \ \"serialConsoleLogBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-clinqhzpo-c47ce47c-9b4d-461d-8f0e-b5a271ca2265/cli000003_3.c47ce47c-9b4d-461d-8f0e-b5a271ca2265.serialconsole.log\"\r\n - \ },\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n {\r\n - \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": \"Info\",\r\n - \ \"displayStatus\": \"Provisioning succeeded\",\r\n \"time\": \"2022-08-04T17:17:10.2576133+00:00\"\r\n - \ },\r\n {\r\n \"code\": \"PowerState/running\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"VM running\"\r\n }\r\n ]\r\n}" + string: "{\n \"properties\": {\n \"disabled\": true\n }\n}" headers: cache-control: - no-cache content-length: - - '1699' + - '42' content-type: - - application/json; charset=utf-8 + - application/json; charset=UTF-8 date: - - Thu, 04 Aug 2022 17:17:30 GMT + - Fri, 14 Oct 2022 15:26:41 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 + - nginx strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3422,11 +3610,242 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMScaleSetVM3Min;497,Microsoft.Compute/GetVMScaleSetVM30Min;2463,Microsoft.Compute/VMScaleSetVMViews3Min;4997 - x-ms-request-charge: - - '1' - status: + x-frame-options: + - deny + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - serial-console enable + Connection: + - keep-alive + Content-Length: + - '0' + Content-Type: + - application/json + User-Agent: + - AZURECLI/2.41.0 
azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default/enableConsole?api-version=2018-05-01 + response: + body: + string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + headers: + cache-control: + - no-cache + content-length: + - '43' + content-type: + - application/json; charset=UTF-8 + date: + - Fri, 14 Oct 2022 15:26:42 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - deny + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - serial-console enable + Connection: + - keep-alive + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-08-01 + response: + body: + string: "{\r\n \"placementGroupId\": \"900064c0-d742-4569-a06c-dcca8072c0c3\",\r\n + \ \"platformUpdateDomain\": 0,\r\n \"platformFaultDomain\": 0,\r\n \"computerName\": + \"cliouf96e000003\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n + \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.8.0.11\",\r\n \"statuses\": + [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": + \"Guest Agent is running\",\r\n \"time\": \"2022-10-14T15:26:22+00:00\"\r\n + \ }\r\n ],\r\n \"extensionHandlers\": [\r\n {\r\n \"type\": + \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": + \"1.22.2\",\r\n \"status\": {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n + \ \"message\": \"Plugin enabled\"\r\n }\r\n },\r\n {\r\n + \ \"type\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.20.58\",\r\n \"status\": {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": + \"Plugin enabled\"\r\n }\r\n }\r\n ]\r\n },\r\n \"disks\": + [\r\n {\r\n \"name\": \"cliou5z7nax54kj6yubjcliou5z7nax54kj6yubj5OS__1_474db27698dd49ddab2454669aab0333\",\r\n + \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"time\": \"2022-10-14T15:26:09.4077099+00:00\"\r\n + \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": {\r\n \"consoleScreenshotBlobUri\": + \"https://cli000002.blob.core.windows.net/bootdiagnostics-cliou5z7n-c3d44363-484f-435b-bb1d-61e4ddddcb55/cli000003_3.c3d44363-484f-435b-bb1d-61e4ddddcb55.screenshot.bmp\",\r\n + \ \"serialConsoleLogBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-cliou5z7n-c3d44363-484f-435b-bb1d-61e4ddddcb55/cli000003_3.c3d44363-484f-435b-bb1d-61e4ddddcb55.serialconsole.log\"\r\n + \ },\r\n \"extensions\": [\r\n {\r\n \"name\": 
\"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": + \"1.22.2\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"message\": \"Enable succeeded\"\r\n }\r\n + \ ]\r\n },\r\n {\r\n \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.20.58\",\r\n \"statuses\": [\r\n {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n + \ \"message\": \"Enable ASM succeeded\"\r\n }\r\n ]\r\n + \ }\r\n ],\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n + \ {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n \"time\": + \"2022-10-14T15:26:24.1732214+00:00\"\r\n },\r\n {\r\n \"code\": + \"PowerState/running\",\r\n \"level\": \"Info\",\r\n \"displayStatus\": + \"VM running\"\r\n }\r\n ]\r\n}" + headers: + cache-control: + - no-cache + content-length: + - '3218' + content-type: + - application/json; charset=utf-8 + date: + - Fri, 14 Oct 2022 15:26:42 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/GetVMScaleSetVM3Min;496,Microsoft.Compute/GetVMScaleSetVM30Min;2489,Microsoft.Compute/VMScaleSetVMViews3Min;4996 + x-ms-request-charge: + - '1' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - serial-console enable + Connection: + - keep-alive + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-storage/20.1.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002?api-version=2022-05-01 + response: + body: + string: 
'{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002","name":"cli000002","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-10-14T15:18:09.2031765Z","key2":"2022-10-14T15:18:09.2031765Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.3282100Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.3282100Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-10-14T15:18:09.0938358Z","primaryEndpoints":{"blob":"https://cli000002.blob.core.windows.net/","queue":"https://cli000002.queue.core.windows.net/","table":"https://cli000002.table.core.windows.net/","file":"https://cli000002.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}}' + headers: + cache-control: + - no-cache + content-length: + - '1259' + content-type: + - application/json + date: + - Fri, 14 Oct 2022 15:26:43 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-Azure-Storage-Resource-Provider/1.0,Microsoft-HTTPAPI/2.0 Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - serial-console enable + Connection: + - keep-alive + User-Agent: + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + response: + body: + string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + headers: + cache-control: + - no-cache + content-length: + - '43' + content-type: + - application/json; charset=UTF-8 + date: + - Fri, 14 Oct 2022 15:26:43 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - deny + status: code: 200 message: OK - request: @@ -3445,25 +3864,27 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/deallocate?api-version=2022-03-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/deallocate?api-version=2022-08-01 response: body: string: '' headers: + azure-asyncnotification: + - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/86816ea3-665f-4227-97b1-cbdbc2d6fbe3?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/287566c4-9f48-4f4b-9051-1714edef25dd?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - '0' date: - - Thu, 04 Aug 2022 17:17:30 GMT + - Fri, 14 Oct 2022 15:26:43 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/86816ea3-665f-4227-97b1-cbdbc2d6fbe3?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/287566c4-9f48-4f4b-9051-1714edef25dd?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 pragma: - no-cache server: @@ -3474,7 +3895,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/DeleteVMScaleSetVM3Min;239,Microsoft.Compute/DeleteVMScaleSetVM30Min;1195,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2233,Microsoft.Compute/VmssQueuedVMOperations;0 + - Microsoft.Compute/DeleteVMScaleSetVM3Min;239,Microsoft.Compute/DeleteVMScaleSetVM30Min;1198,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2994,Microsoft.Compute/VmssQueuedVMOperations;0 x-ms-ratelimit-remaining-subscription-writes: - '1199' x-ms-request-charge: @@ -3496,13 +3917,13 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/86816ea3-665f-4227-97b1-cbdbc2d6fbe3?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/287566c4-9f48-4f4b-9051-1714edef25dd?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:17:31.6324677+00:00\",\r\n \"status\": - \"InProgress\",\r\n \"name\": \"86816ea3-665f-4227-97b1-cbdbc2d6fbe3\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:26:43.8761617+00:00\",\r\n \"status\": + \"InProgress\",\r\n \"name\": \"287566c4-9f48-4f4b-9051-1714edef25dd\"\r\n}" headers: cache-control: - no-cache @@ -3511,7 +3932,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:18:01 GMT + - Fri, 14 Oct 2022 15:27:13 GMT expires: - '-1' pragma: @@ -3528,7 +3949,7 @@ interactions: x-content-type-options: - nosniff 
x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14964,Microsoft.Compute/GetOperation30Min;29765 + - Microsoft.Compute/GetOperation3Min;14960,Microsoft.Compute/GetOperation30Min;29879 status: code: 200 message: OK @@ -3546,14 +3967,14 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/86816ea3-665f-4227-97b1-cbdbc2d6fbe3?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/287566c4-9f48-4f4b-9051-1714edef25dd?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:17:31.6324677+00:00\",\r\n \"endTime\": - \"2022-08-04T17:18:18.0071799+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"86816ea3-665f-4227-97b1-cbdbc2d6fbe3\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:26:43.8761617+00:00\",\r\n \"endTime\": + \"2022-10-14T15:27:29.4694405+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"287566c4-9f48-4f4b-9051-1714edef25dd\"\r\n}" headers: cache-control: - no-cache @@ -3562,7 +3983,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:18:31 GMT + - Fri, 14 Oct 2022 15:27:43 GMT expires: - '-1' pragma: @@ -3579,7 +4000,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14966,Microsoft.Compute/GetOperation30Min;29759 + - Microsoft.Compute/GetOperation3Min;14962,Microsoft.Compute/GetOperation30Min;29872 status: code: 200 message: OK @@ -3597,9 +4018,9 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/86816ea3-665f-4227-97b1-cbdbc2d6fbe3?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/287566c4-9f48-4f4b-9051-1714edef25dd?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 response: body: string: '' @@ -3609,7 +4030,7 @@ interactions: content-length: - '0' date: - - Thu, 04 Aug 2022 17:18:31 GMT + - Fri, 14 Oct 2022 15:27:43 GMT expires: - '-1' pragma: @@ -3622,7 +4043,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14965,Microsoft.Compute/GetOperation30Min;29758 + - Microsoft.Compute/GetOperation3Min;14961,Microsoft.Compute/GetOperation30Min;29871 status: code: 200 message: OK @@ -3640,28 +4061,41 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 
azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-08-01 response: body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + string: "{\r\n \"placementGroupId\": \"900064c0-d742-4569-a06c-dcca8072c0c3\",\r\n + \ \"platformUpdateDomain\": 0,\r\n \"platformFaultDomain\": 0,\r\n \"disks\": + [\r\n {\r\n \"name\": \"cliou5z7nax54kj6yubjcliou5z7nax54kj6yubj5OS__1_474db27698dd49ddab2454669aab0333\",\r\n + \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"time\": \"2022-10-14T15:27:29.3913238+00:00\"\r\n + \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": {\r\n \"consoleScreenshotBlobUri\": + \"https://cli000002.blob.core.windows.net/bootdiagnostics-cliou5z7n-c3d44363-484f-435b-bb1d-61e4ddddcb55/cli000003_3.c3d44363-484f-435b-bb1d-61e4ddddcb55.screenshot.bmp\",\r\n + \ \"serialConsoleLogBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-cliou5z7n-c3d44363-484f-435b-bb1d-61e4ddddcb55/cli000003_3.c3d44363-484f-435b-bb1d-61e4ddddcb55.serialconsole.log\"\r\n + \ },\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": \"Info\",\r\n + \ \"displayStatus\": \"Provisioning succeeded\",\r\n \"time\": \"2022-10-14T15:27:29.4382113+00:00\"\r\n + \ },\r\n {\r\n \"code\": \"PowerState/deallocated\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"VM deallocated\"\r\n }\r\n ]\r\n}" headers: cache-control: - no-cache content-length: - - '43' + - '1291' content-type: - - application/json; charset=UTF-8 + - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:18:31 GMT + - Fri, 14 Oct 2022 15:27:44 GMT expires: - '-1' pragma: - no-cache server: - - nginx + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3670,8 +4104,10 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-frame-options: - - deny + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/GetVMScaleSetVM3Min;495,Microsoft.Compute/GetVMScaleSetVM30Min;2488,Microsoft.Compute/VMScaleSetVMViews3Min;4995 + x-ms-request-charge: + - '1' status: code: 200 message: OK @@ -3689,41 +4125,27 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-storage/20.1.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-03-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002?api-version=2022-05-01 response: body: - string: "{\r\n \"placementGroupId\": \"d026ef0f-b482-4884-8583-16ebe50a963b\",\r\n - \ \"platformUpdateDomain\": 1,\r\n \"platformFaultDomain\": 1,\r\n \"disks\": - [\r\n {\r\n \"name\": \"clinqhzpoczvy5m2spufclinqhzpoczvy5m2spufqOS__1_00d110bc7aaf4fbebbd4095a4a610862\",\r\n - \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:18:17.8509313+00:00\"\r\n - \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": {\r\n \"consoleScreenshotBlobUri\": - \"https://cli000002.blob.core.windows.net/bootdiagnostics-clinqhzpo-c47ce47c-9b4d-461d-8f0e-b5a271ca2265/cli000003_3.c47ce47c-9b4d-461d-8f0e-b5a271ca2265.screenshot.bmp\",\r\n - \ \"serialConsoleLogBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-clinqhzpo-c47ce47c-9b4d-461d-8f0e-b5a271ca2265/cli000003_3.c47ce47c-9b4d-461d-8f0e-b5a271ca2265.serialconsole.log\"\r\n - \ },\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n {\r\n - \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": \"Info\",\r\n - \ \"displayStatus\": \"Provisioning succeeded\",\r\n \"time\": \"2022-08-04T17:18:17.8665399+00:00\"\r\n - \ },\r\n {\r\n \"code\": \"PowerState/deallocated\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"VM deallocated\"\r\n }\r\n ]\r\n}" + string: '{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002","name":"cli000002","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-10-14T15:18:09.2031765Z","key2":"2022-10-14T15:18:09.2031765Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.3282100Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.3282100Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-10-14T15:18:09.0938358Z","primaryEndpoints":{"blob":"https://cli000002.blob.core.windows.net/","queue":"https://cli000002.queue.core.windows.net/","table":"https://cli000002.table.core.windows.net/","file":"https://cli000002.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}}' headers: cache-control: - no-cache content-length: - - '1291' + - '1259' content-type: - - application/json; charset=utf-8 + - application/json date: - - Thu, 04 Aug 2022 17:18:32 GMT + - Fri, 14 Oct 2022 15:27:44 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 + - Microsoft-Azure-Storage-Resource-Provider/1.0,Microsoft-HTTPAPI/2.0 Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3732,10 +4154,55 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-resource: - - 
Microsoft.Compute/GetVMScaleSetVM3Min;496,Microsoft.Compute/GetVMScaleSetVM30Min;2462,Microsoft.Compute/VMScaleSetVMViews3Min;4994 - x-ms-request-charge: - - '1' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - vmss deallocate + Connection: + - keep-alive + ParameterSetName: + - -g -n --instance-ids + User-Agent: + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + response: + body: + string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + headers: + cache-control: + - no-cache + content-length: + - '43' + content-type: + - application/json; charset=UTF-8 + date: + - Fri, 14 Oct 2022 15:27:44 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - deny status: code: 200 message: OK @@ -3757,25 +4224,27 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/start?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/start?api-version=2022-08-01 response: body: string: '' headers: + azure-asyncnotification: + - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/c86462cd-02d5-4754-8f50-ce258bd14d92?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/06b0da8a-8c5b-46e9-97b9-b337e906b010?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - '0' date: - - Thu, 04 Aug 2022 17:18:32 GMT + - Fri, 14 Oct 2022 15:27:46 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/c86462cd-02d5-4754-8f50-ce258bd14d92?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/06b0da8a-8c5b-46e9-97b9-b337e906b010?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 pragma: - no-cache server: @@ -3786,9 +4255,9 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - 
Microsoft.Compute/VMScaleSetActions3Min;236,Microsoft.Compute/VMScaleSetActions30Min;1181,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2320,Microsoft.Compute/VmssQueuedVMOperations;0 + - Microsoft.Compute/VMScaleSetActions3Min;235,Microsoft.Compute/VMScaleSetActions30Min;1190,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2979,Microsoft.Compute/VmssQueuedVMOperations;0 x-ms-ratelimit-remaining-subscription-writes: - - '1197' + - '1199' x-ms-request-charge: - '1' status: @@ -3808,14 +4277,64 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/c86462cd-02d5-4754-8f50-ce258bd14d92?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/06b0da8a-8c5b-46e9-97b9-b337e906b010?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:18:33.2883201+00:00\",\r\n \"endTime\": - \"2022-08-04T17:18:50.2569874+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"c86462cd-02d5-4754-8f50-ce258bd14d92\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:27:46.3755593+00:00\",\r\n \"status\": + \"InProgress\",\r\n \"name\": \"06b0da8a-8c5b-46e9-97b9-b337e906b010\"\r\n}" + headers: + cache-control: + - no-cache + content-length: + - '134' + content-type: + - application/json; charset=utf-8 + date: + - Fri, 14 Oct 2022 15:28:16 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/GetOperation3Min;14964,Microsoft.Compute/GetOperation30Min;29866 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - vmss start + Connection: + - keep-alive + ParameterSetName: + - -g -n --instance-ids + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/06b0da8a-8c5b-46e9-97b9-b337e906b010?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 + response: + body: + string: "{\r\n \"startTime\": \"2022-10-14T15:27:46.3755593+00:00\",\r\n \"endTime\": + \"2022-10-14T15:28:27.4062444+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"06b0da8a-8c5b-46e9-97b9-b337e906b010\"\r\n}" headers: cache-control: - no-cache @@ -3824,7 +4343,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:19:03 GMT + - Fri, 14 Oct 2022 15:28:45 GMT expires: - '-1' pragma: @@ -3841,7 +4360,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14966,Microsoft.Compute/GetOperation30Min;29751 + - 
Microsoft.Compute/GetOperation3Min;14961,Microsoft.Compute/GetOperation30Min;29858 status: code: 200 message: OK @@ -3859,9 +4378,9 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/c86462cd-02d5-4754-8f50-ce258bd14d92?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/06b0da8a-8c5b-46e9-97b9-b337e906b010?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 response: body: string: '' @@ -3871,7 +4390,7 @@ interactions: content-length: - '0' date: - - Thu, 04 Aug 2022 17:19:03 GMT + - Fri, 14 Oct 2022 15:28:45 GMT expires: - '-1' pragma: @@ -3884,7 +4403,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14964,Microsoft.Compute/GetOperation30Min;29749 + - Microsoft.Compute/GetOperation3Min;14960,Microsoft.Compute/GetOperation30Min;29857 status: code: 200 message: OK @@ -3906,25 +4425,27 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/poweroff?skipShutdown=false&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/poweroff?skipShutdown=false&api-version=2022-08-01 response: body: string: '' headers: + azure-asyncnotification: + - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/3a99ab9a-1e7e-4f29-b923-fedb1e11a0d8?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/13ae8617-0817-42d9-b8fc-357ee7bc412d?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - '0' date: - - Thu, 04 Aug 2022 17:19:03 GMT + - Fri, 14 Oct 2022 15:28:47 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/3a99ab9a-1e7e-4f29-b923-fedb1e11a0d8?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/13ae8617-0817-42d9-b8fc-357ee7bc412d?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 pragma: - no-cache server: @@ 
-3935,9 +4456,9 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/DeleteVMScaleSet3Min;79,Microsoft.Compute/DeleteVMScaleSet30Min;391,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2324,Microsoft.Compute/VmssQueuedVMOperations;0 + - Microsoft.Compute/DeleteVMScaleSet3Min;79,Microsoft.Compute/DeleteVMScaleSet30Min;397,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2979,Microsoft.Compute/VmssQueuedVMOperations;0 x-ms-ratelimit-remaining-subscription-writes: - - '1197' + - '1199' x-ms-request-charge: - '1' status: @@ -3957,14 +4478,14 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/3a99ab9a-1e7e-4f29-b923-fedb1e11a0d8?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/13ae8617-0817-42d9-b8fc-357ee7bc412d?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:19:04.2881363+00:00\",\r\n \"endTime\": - \"2022-08-04T17:19:09.5068433+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"3a99ab9a-1e7e-4f29-b923-fedb1e11a0d8\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:28:47.9998204+00:00\",\r\n \"endTime\": + \"2022-10-14T15:28:56.1402327+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"13ae8617-0817-42d9-b8fc-357ee7bc412d\"\r\n}" headers: cache-control: - no-cache @@ -3973,7 +4494,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:19:33 GMT + - Fri, 14 Oct 2022 15:29:17 GMT expires: - '-1' pragma: @@ -3990,7 +4511,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14969,Microsoft.Compute/GetOperation30Min;29744 + - Microsoft.Compute/GetOperation3Min;14952,Microsoft.Compute/GetOperation30Min;29846 status: code: 200 message: OK @@ -4008,9 +4529,9 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/3a99ab9a-1e7e-4f29-b923-fedb1e11a0d8?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/13ae8617-0817-42d9-b8fc-357ee7bc412d?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 response: body: string: '' @@ -4020,7 +4541,7 @@ interactions: content-length: - '0' date: - - Thu, 04 Aug 2022 17:19:33 GMT + - Fri, 14 Oct 2022 15:29:17 GMT expires: - '-1' pragma: @@ -4033,7 +4554,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - 
Microsoft.Compute/GetOperation3Min;14968,Microsoft.Compute/GetOperation30Min;29743 + - Microsoft.Compute/GetOperation3Min;14951,Microsoft.Compute/GetOperation30Min;29845 status: code: 200 message: OK @@ -4051,28 +4572,68 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-08-01 response: body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + string: "{\r\n \"placementGroupId\": \"900064c0-d742-4569-a06c-dcca8072c0c3\",\r\n + \ \"platformUpdateDomain\": 0,\r\n \"platformFaultDomain\": 0,\r\n \"computerName\": + \"cliouf96e000003\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n + \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.8.0.11\",\r\n \"statuses\": + [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": + \"Guest Agent is running\",\r\n \"time\": \"2022-10-14T15:28:19+00:00\"\r\n + \ }\r\n ],\r\n \"extensionHandlers\": [\r\n {\r\n \"type\": + \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": + \"1.22.2\",\r\n \"status\": {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n + \ \"message\": \"Plugin enabled\"\r\n }\r\n },\r\n {\r\n + \ \"type\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.20.58\",\r\n \"status\": {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": + \"Plugin enabled\"\r\n }\r\n }\r\n ]\r\n },\r\n \"disks\": + [\r\n {\r\n \"name\": \"cliou5z7nax54kj6yubjcliou5z7nax54kj6yubj5OS__1_474db27698dd49ddab2454669aab0333\",\r\n + \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"time\": \"2022-10-14T15:27:47.1724508+00:00\"\r\n + \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": {\r\n \"consoleScreenshotBlobUri\": + \"https://cli000002.blob.core.windows.net/bootdiagnostics-cliou5z7n-c3d44363-484f-435b-bb1d-61e4ddddcb55/cli000003_3.c3d44363-484f-435b-bb1d-61e4ddddcb55.screenshot.bmp\",\r\n + \ \"serialConsoleLogBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-cliou5z7n-c3d44363-484f-435b-bb1d-61e4ddddcb55/cli000003_3.c3d44363-484f-435b-bb1d-61e4ddddcb55.serialconsole.log\"\r\n + \ },\r\n \"extensions\": [\r\n {\r\n \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": + \"1.22.2\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"message\": \"Enable succeeded\"\r\n }\r\n + \ ]\r\n },\r\n {\r\n \"name\": 
\"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.20.58\",\r\n \"statuses\": [\r\n {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n + \ \"message\": \"Enable ASM succeeded\"\r\n }\r\n ]\r\n + \ }\r\n ],\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n + \ {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n \"time\": + \"2022-10-14T15:28:56.1090117+00:00\"\r\n },\r\n {\r\n \"code\": + \"PowerState/stopped\",\r\n \"level\": \"Info\",\r\n \"displayStatus\": + \"VM stopped\"\r\n }\r\n ]\r\n}" headers: cache-control: - no-cache content-length: - - '43' + - '3218' content-type: - - application/json; charset=UTF-8 + - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:19:35 GMT + - Fri, 14 Oct 2022 15:29:19 GMT expires: - '-1' pragma: - no-cache server: - - nginx + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -4081,8 +4642,10 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-frame-options: - - deny + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/GetVMScaleSetVM3Min;495,Microsoft.Compute/GetVMScaleSetVM30Min;2487,Microsoft.Compute/VMScaleSetVMViews3Min;4993 + x-ms-request-charge: + - '1' status: code: 200 message: OK @@ -4100,47 +4663,27 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-storage/20.1.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002?api-version=2022-05-01 response: body: - string: "{\r\n \"placementGroupId\": \"d026ef0f-b482-4884-8583-16ebe50a963b\",\r\n - \ \"platformUpdateDomain\": 1,\r\n \"platformFaultDomain\": 1,\r\n \"computerName\": - \"clinqc38b000003\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n - \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.7.3.0\",\r\n \"statuses\": - [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": - \"Guest Agent is running\",\r\n \"time\": \"2022-08-04T17:19:03+00:00\"\r\n - \ }\r\n ],\r\n \"extensionHandlers\": []\r\n },\r\n \"disks\": - [\r\n {\r\n \"name\": \"clinqhzpoczvy5m2spufclinqhzpoczvy5m2spufqOS__1_00d110bc7aaf4fbebbd4095a4a610862\",\r\n - \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:18:33.8976766+00:00\"\r\n - \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": {\r\n \"consoleScreenshotBlobUri\": - 
\"https://cli000002.blob.core.windows.net/bootdiagnostics-clinqhzpo-c47ce47c-9b4d-461d-8f0e-b5a271ca2265/cli000003_3.c47ce47c-9b4d-461d-8f0e-b5a271ca2265.screenshot.bmp\",\r\n - \ \"serialConsoleLogBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-clinqhzpo-c47ce47c-9b4d-461d-8f0e-b5a271ca2265/cli000003_3.c47ce47c-9b4d-461d-8f0e-b5a271ca2265.serialconsole.log\"\r\n - \ },\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n {\r\n - \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": \"Info\",\r\n - \ \"displayStatus\": \"Provisioning succeeded\",\r\n \"time\": \"2022-08-04T17:19:09.4756347+00:00\"\r\n - \ },\r\n {\r\n \"code\": \"PowerState/stopped\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"VM stopped\"\r\n }\r\n ]\r\n}" + string: '{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002","name":"cli000002","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-10-14T15:18:09.2031765Z","key2":"2022-10-14T15:18:09.2031765Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.3282100Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.3282100Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-10-14T15:18:09.0938358Z","primaryEndpoints":{"blob":"https://cli000002.blob.core.windows.net/","queue":"https://cli000002.queue.core.windows.net/","table":"https://cli000002.table.core.windows.net/","file":"https://cli000002.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}}' headers: cache-control: - no-cache content-length: - - '1699' + - '1259' content-type: - - application/json; charset=utf-8 + - application/json date: - - Thu, 04 Aug 2022 17:19:35 GMT + - Fri, 14 Oct 2022 15:29:19 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 + - Microsoft-Azure-Storage-Resource-Provider/1.0,Microsoft-HTTPAPI/2.0 Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -4149,10 +4692,55 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMScaleSetVM3Min;490,Microsoft.Compute/GetVMScaleSetVM30Min;2455,Microsoft.Compute/VMScaleSetVMViews3Min;4984 - x-ms-request-charge: - - '1' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - vmss stop + Connection: + - keep-alive + ParameterSetName: + - -g -n --instance-ids + User-Agent: + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + response: + body: + string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + headers: + cache-control: + - no-cache + content-length: + - '43' + content-type: + - 
application/json; charset=UTF-8 + date: + - Fri, 14 Oct 2022 15:29:20 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - deny status: code: 200 message: OK @@ -4174,25 +4762,27 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/start?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/start?api-version=2022-08-01 response: body: string: '' headers: + azure-asyncnotification: + - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/10e5b448-a2b8-4053-8fa4-5e07732fe234?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/3e3890a8-9afc-4bd7-be91-bc0d9f16e4f0?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - '0' date: - - Thu, 04 Aug 2022 17:19:35 GMT + - Fri, 14 Oct 2022 15:29:20 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/10e5b448-a2b8-4053-8fa4-5e07732fe234?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/3e3890a8-9afc-4bd7-be91-bc0d9f16e4f0?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 pragma: - no-cache server: @@ -4203,9 +4793,9 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/VMScaleSetActions3Min;236,Microsoft.Compute/VMScaleSetActions30Min;1179,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2322,Microsoft.Compute/VmssQueuedVMOperations;0 + - Microsoft.Compute/VMScaleSetActions3Min;233,Microsoft.Compute/VMScaleSetActions30Min;1186,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2985,Microsoft.Compute/VmssQueuedVMOperations;0 x-ms-ratelimit-remaining-subscription-writes: - - '1198' + - '1199' x-ms-request-charge: - '1' status: @@ -4225,14 +4815,14 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/10e5b448-a2b8-4053-8fa4-5e07732fe234?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/3e3890a8-9afc-4bd7-be91-bc0d9f16e4f0?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:19:35.8816777+00:00\",\r\n \"endTime\": - \"2022-08-04T17:19:44.1785086+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"10e5b448-a2b8-4053-8fa4-5e07732fe234\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:29:20.8744467+00:00\",\r\n \"endTime\": + \"2022-10-14T15:29:28.6400093+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"3e3890a8-9afc-4bd7-be91-bc0d9f16e4f0\"\r\n}" headers: cache-control: - no-cache @@ -4241,7 +4831,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:20:05 GMT + - Fri, 14 Oct 2022 15:29:50 GMT expires: - '-1' pragma: @@ -4258,7 +4848,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14969,Microsoft.Compute/GetOperation30Min;29739 + - Microsoft.Compute/GetOperation3Min;14956,Microsoft.Compute/GetOperation30Min;29838 status: code: 200 message: OK @@ -4276,9 +4866,9 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/10e5b448-a2b8-4053-8fa4-5e07732fe234?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/3e3890a8-9afc-4bd7-be91-bc0d9f16e4f0?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 response: body: string: '' @@ -4288,7 +4878,7 @@ interactions: content-length: - '0' date: - - Thu, 04 Aug 2022 17:20:05 GMT + - Fri, 14 Oct 2022 15:29:51 GMT expires: - '-1' pragma: @@ -4301,7 +4891,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14968,Microsoft.Compute/GetOperation30Min;29738 + - Microsoft.Compute/GetOperation3Min;14955,Microsoft.Compute/GetOperation30Min;29837 status: code: 200 message: OK @@ -4319,51 +4909,69 @@ interactions: ParameterSetName: - --name --resource-group --set User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-03-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachineScaleSets\",\r\n \"location\": - \"westus2\",\r\n \"tags\": {},\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n - \ \"tier\": \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": - {\r\n \"singlePlacementGroup\": true,\r\n \"upgradePolicy\": {\r\n \"mode\": - \"Manual\",\r\n \"rollingUpgradePolicy\": {\r\n \"maxBatchInstancePercent\": - 20,\r\n \"maxUnhealthyInstancePercent\": 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": - 20,\r\n \"pauseTimeBetweenBatches\": \"PT0S\"\r\n }\r\n },\r\n - \ \"virtualMachineProfile\": {\r\n \"osProfile\": {\r\n \"computerNamePrefix\": - \"clinqc38b\",\r\n \"adminUsername\": \"rhl\",\r\n \"linuxConfiguration\": - {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": - {\r\n \"publicKeys\": [\r\n {\r\n \"path\": - \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": \"ssh-rsa - AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"enableVMAgentPlatformUpdates\": false\r\n },\r\n - \ \"secrets\": [],\r\n \"allowExtensionOperations\": true,\r\n - \ \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": + \"westus2\",\r\n \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n \"tier\": + \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": {\r\n \"singlePlacementGroup\": + true,\r\n \"orchestrationMode\": \"Uniform\",\r\n \"upgradePolicy\": + {\r\n \"mode\": \"Manual\",\r\n \"rollingUpgradePolicy\": {\r\n + \ \"maxBatchInstancePercent\": 20,\r\n \"maxUnhealthyInstancePercent\": + 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": 20,\r\n \"pauseTimeBetweenBatches\": + \"PT0S\"\r\n }\r\n },\r\n \"virtualMachineProfile\": {\r\n \"osProfile\": + {\r\n \"computerNamePrefix\": \"cliouf96e\",\r\n \"adminUsername\": + \"rhoover\",\r\n \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": + true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"enableVMAgentPlatformUpdates\": + false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": + true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": {\r\n \"osDisk\": {\r\n \"osType\": \"Linux\",\r\n \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\"\r\n },\r\n \ \"diskSizeGB\": 30\r\n },\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \ \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\"\r\n - \ }\r\n },\r\n \"networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"clinqc38bNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"clinqc38bIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n + \ }\r\n },\r\n \"networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"cliouf96eNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"disableTcpStateTracking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"cliouf96eIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": true,\r\n \"storageUri\": \"https://cli000002.blob.core.windows.net/\"\r\n - \ }\r\n }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n - \ \"overprovision\": true,\r\n 
\"doNotRunExtensionsOnOverprovisionedVMs\": - false,\r\n \"uniqueId\": \"239f9fe3-2e0c-403b-8c3a-55cda6cd7b9a\",\r\n - \ \"timeCreated\": \"2022-08-04T17:10:51.2598866+00:00\"\r\n }\r\n}" + \ }\r\n },\r\n \"extensionProfile\": {\r\n \"extensions\": + [\r\n {\r\n \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Monitor\",\r\n \"type\": \"AzureMonitorLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"1.0\",\r\n \"settings\": + {\"GCS_AUTO_CONFIG\":true}\r\n }\r\n },\r\n {\r\n + \ \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Security.Monitoring\",\r\n \"type\": \"AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.0\",\r\n \"settings\": + {\"enableGenevaUpload\":true,\"enableAutoConfig\":true,\"reportSuccessOnUnsupportedDistro\":true}\r\n + \ }\r\n }\r\n ]\r\n }\r\n },\r\n \"provisioningState\": + \"Succeeded\",\r\n \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": + false,\r\n \"uniqueId\": \"793988fd-7a65-472d-9472-7470271a360c\",\r\n + \ \"timeCreated\": \"2022-10-14T15:18:49.0838434+00:00\"\r\n }\r\n}" headers: cache-control: - no-cache content-length: - - '3595' + - '5445' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:20:06 GMT + - Fri, 14 Oct 2022 15:29:51 GMT expires: - '-1' pragma: @@ -4380,32 +4988,41 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMScaleSet3Min;383,Microsoft.Compute/GetVMScaleSet30Min;2487 + - Microsoft.Compute/GetVMScaleSet3Min;358,Microsoft.Compute/GetVMScaleSet30Min;2514 status: code: 200 message: OK - request: - body: '{"location": "westus2", "tags": {}, "sku": {"name": "Standard_DS1_v2", - "tier": "Standard", "capacity": 2}, "properties": {"upgradePolicy": {"mode": - "Manual", "rollingUpgradePolicy": {"maxBatchInstancePercent": 20, "maxUnhealthyInstancePercent": - 20, "maxUnhealthyUpgradedInstancePercent": 20, "pauseTimeBetweenBatches": "PT0S"}}, - "virtualMachineProfile": {"osProfile": {"computerNamePrefix": "clinqc38b", "adminUsername": - "rhl", "linuxConfiguration": {"disablePasswordAuthentication": true, "ssh": - {"publicKeys": [{"path": "/home/rhl/.ssh/authorized_keys", "keyData": "ssh-rsa - AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5"}]}, - "provisionVMAgent": true}, "secrets": [], "allowExtensionOperations": true}, - "storageProfile": {"osDisk": {"caching": "ReadWrite", "createOption": "FromImage", - "diskSizeGB": 30, "osType": "Linux", "managedDisk": {"storageAccountType": "Premium_LRS"}}}, - "networkProfile": {"networkInterfaceConfigurations": [{"name": "clinqc38bNic", - "properties": {"primary": true, "enableAcceleratedNetworking": false, "dnsSettings": - {"dnsServers": []}, "ipConfigurations": [{"name": "clinqc38bIPConfig", "properties": - {"subnet": {"id": 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet"}, + body: '{"location": "westus2", "tags": {"azsecpack": "nonprod", "platformsettings.host_environment.service.platform_optedin_for_rootcerts": + "true"}, "sku": {"name": "Standard_DS1_v2", "tier": "Standard", "capacity": + 2}, "identity": {"type": "UserAssigned", "userAssignedIdentities": {"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2": + {}}}, "properties": {"upgradePolicy": {"mode": "Manual", "rollingUpgradePolicy": + {"maxBatchInstancePercent": 20, "maxUnhealthyInstancePercent": 20, "maxUnhealthyUpgradedInstancePercent": + 20, "pauseTimeBetweenBatches": "PT0S"}}, "virtualMachineProfile": {"osProfile": + {"computerNamePrefix": "cliouf96e", "adminUsername": "rhoover", "linuxConfiguration": + {"disablePasswordAuthentication": true, "ssh": {"publicKeys": [{"path": "/home/rhoover/.ssh/authorized_keys", + "keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\n"}]}, "provisionVMAgent": true, "enableVMAgentPlatformUpdates": + false}, "secrets": [], "allowExtensionOperations": true}, "storageProfile": + {"osDisk": {"caching": "ReadWrite", "createOption": "FromImage", "diskSizeGB": + 30, "osType": "Linux", "managedDisk": {"storageAccountType": "Premium_LRS"}}}, + "networkProfile": {"networkInterfaceConfigurations": [{"name": "cliouf96eNic", + "properties": {"primary": true, "enableAcceleratedNetworking": false, "disableTcpStateTracking": + false, "dnsSettings": {"dnsServers": []}, "ipConfigurations": [{"name": "cliouf96eIPConfig", + "properties": {"subnet": {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet"}, "privateIPAddressVersion": "IPv4", "loadBalancerBackendAddressPools": [{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool"}], "loadBalancerInboundNatPools": [{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool"}]}}], "enableIPForwarding": false}}]}, "diagnosticsProfile": {"bootDiagnostics": {"enabled": - false}}}, "overprovision": true, "doNotRunExtensionsOnOverprovisionedVMs": false, - "singlePlacementGroup": true}}' + false}}, "extensionProfile": {"extensions": [{"name": "Microsoft.Azure.Monitor.AzureMonitorLinuxAgent", + "properties": {"publisher": "Microsoft.Azure.Monitor", "type": "AzureMonitorLinuxAgent", + "typeHandlerVersion": "1.0", "autoUpgradeMinorVersion": true, "enableAutomaticUpgrade": + true, "settings": {"GCS_AUTO_CONFIG": 
true}}}, {"name": "Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent", + "properties": {"publisher": "Microsoft.Azure.Security.Monitoring", "type": "AzureSecurityLinuxAgent", + "typeHandlerVersion": "2.0", "autoUpgradeMinorVersion": true, "enableAutomaticUpgrade": + true, "settings": {"enableGenevaUpload": true, "enableAutoConfig": true, "reportSuccessOnUnsupportedDistro": + true}}}]}}, "overprovision": true, "doNotRunExtensionsOnOverprovisionedVMs": + false, "singlePlacementGroup": true, "orchestrationMode": "Uniform"}}' headers: Accept: - application/json @@ -4416,61 +5033,79 @@ interactions: Connection: - keep-alive Content-Length: - - '2358' + - '3725' Content-Type: - application/json ParameterSetName: - --name --resource-group --set User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachineScaleSets\",\r\n \"location\": - \"westus2\",\r\n \"tags\": {},\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n - \ \"tier\": \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": - {\r\n \"singlePlacementGroup\": true,\r\n \"upgradePolicy\": {\r\n \"mode\": - \"Manual\",\r\n \"rollingUpgradePolicy\": {\r\n \"maxBatchInstancePercent\": - 20,\r\n \"maxUnhealthyInstancePercent\": 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": - 20,\r\n \"pauseTimeBetweenBatches\": \"PT0S\"\r\n }\r\n },\r\n - \ \"virtualMachineProfile\": {\r\n \"osProfile\": {\r\n \"computerNamePrefix\": - \"clinqc38b\",\r\n \"adminUsername\": \"rhl\",\r\n \"linuxConfiguration\": - {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": - {\r\n \"publicKeys\": [\r\n {\r\n \"path\": - \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": \"ssh-rsa - AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"enableVMAgentPlatformUpdates\": false\r\n },\r\n - \ \"secrets\": [],\r\n \"allowExtensionOperations\": true,\r\n - \ \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": + \"westus2\",\r\n \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n 
\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n \"tier\": + \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": {\r\n \"singlePlacementGroup\": + true,\r\n \"orchestrationMode\": \"Uniform\",\r\n \"upgradePolicy\": + {\r\n \"mode\": \"Manual\",\r\n \"rollingUpgradePolicy\": {\r\n + \ \"maxBatchInstancePercent\": 20,\r\n \"maxUnhealthyInstancePercent\": + 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": 20,\r\n \"pauseTimeBetweenBatches\": + \"PT0S\"\r\n }\r\n },\r\n \"virtualMachineProfile\": {\r\n \"osProfile\": + {\r\n \"computerNamePrefix\": \"cliouf96e\",\r\n \"adminUsername\": + \"rhoover\",\r\n \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": + true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"enableVMAgentPlatformUpdates\": + false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": + true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": {\r\n \"osDisk\": {\r\n \"osType\": \"Linux\",\r\n \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\"\r\n },\r\n \ \"diskSizeGB\": 30\r\n },\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \ \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\"\r\n - \ }\r\n },\r\n \"networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"clinqc38bNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"clinqc38bIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n + \ }\r\n },\r\n \"networkProfile\": 
{\"networkInterfaceConfigurations\":[{\"name\":\"cliouf96eNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"disableTcpStateTracking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"cliouf96eIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": false,\r\n \"storageUri\": \"https://cli000002.blob.core.windows.net/\"\r\n - \ }\r\n }\r\n },\r\n \"provisioningState\": \"Updating\",\r\n - \ \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": - false,\r\n \"uniqueId\": \"239f9fe3-2e0c-403b-8c3a-55cda6cd7b9a\",\r\n - \ \"timeCreated\": \"2022-08-04T17:10:51.2598866+00:00\"\r\n }\r\n}" + \ }\r\n },\r\n \"extensionProfile\": {\r\n \"extensions\": + [\r\n {\r\n \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Monitor\",\r\n \"type\": \"AzureMonitorLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"1.0\",\r\n \"settings\": + {\"GCS_AUTO_CONFIG\":true}\r\n }\r\n },\r\n {\r\n + \ \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Security.Monitoring\",\r\n \"type\": \"AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.0\",\r\n \"settings\": + {\"enableGenevaUpload\":true,\"enableAutoConfig\":true,\"reportSuccessOnUnsupportedDistro\":true}\r\n + \ }\r\n }\r\n ]\r\n }\r\n },\r\n \"provisioningState\": + \"Updating\",\r\n \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": + false,\r\n \"uniqueId\": \"793988fd-7a65-472d-9472-7470271a360c\",\r\n + \ \"timeCreated\": \"2022-10-14T15:18:49.0838434+00:00\"\r\n }\r\n}" headers: azure-asyncnotification: - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/5c3a3ee3-26fb-4b32-bb36-bd7c71317d8a?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/30c56bf9-3e40-4b43-ba6b-d402607e9915?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - - '3595' + - '5445' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:20:08 GMT + - Fri, 14 Oct 2022 15:29:56 GMT expires: - '-1' pragma: @@ -4487,9 +5122,9 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - 
Microsoft.Compute/CreateVMScaleSet3Min;113,Microsoft.Compute/CreateVMScaleSet30Min;563,Microsoft.Compute/VmssQueuedVMOperations;0 + - Microsoft.Compute/CreateVMScaleSet3Min;144,Microsoft.Compute/CreateVMScaleSet30Min;735,Microsoft.Compute/VmssQueuedVMOperations;0 x-ms-ratelimit-remaining-subscription-writes: - - '1198' + - '1199' x-ms-request-charge: - '0' status: @@ -4509,14 +5144,14 @@ interactions: ParameterSetName: - --name --resource-group --set User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/5c3a3ee3-26fb-4b32-bb36-bd7c71317d8a?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/30c56bf9-3e40-4b43-ba6b-d402607e9915?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:20:08.9283778+00:00\",\r\n \"endTime\": - \"2022-08-04T17:20:09.084623+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"5c3a3ee3-26fb-4b32-bb36-bd7c71317d8a\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:29:56.0772226+00:00\",\r\n \"endTime\": + \"2022-10-14T15:29:56.358454+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"30c56bf9-3e40-4b43-ba6b-d402607e9915\"\r\n}" headers: cache-control: - no-cache @@ -4525,7 +5160,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:20:18 GMT + - Fri, 14 Oct 2022 15:30:06 GMT expires: - '-1' pragma: @@ -4542,7 +5177,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14967,Microsoft.Compute/GetOperation30Min;29737 + - Microsoft.Compute/GetOperation3Min;14956,Microsoft.Compute/GetOperation30Min;29834 status: code: 200 message: OK @@ -4560,51 +5195,69 @@ interactions: ParameterSetName: - --name --resource-group --set User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachineScaleSets\",\r\n \"location\": - \"westus2\",\r\n \"tags\": {},\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n - \ \"tier\": \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": - {\r\n \"singlePlacementGroup\": true,\r\n \"upgradePolicy\": {\r\n \"mode\": - \"Manual\",\r\n 
\"rollingUpgradePolicy\": {\r\n \"maxBatchInstancePercent\": - 20,\r\n \"maxUnhealthyInstancePercent\": 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": - 20,\r\n \"pauseTimeBetweenBatches\": \"PT0S\"\r\n }\r\n },\r\n - \ \"virtualMachineProfile\": {\r\n \"osProfile\": {\r\n \"computerNamePrefix\": - \"clinqc38b\",\r\n \"adminUsername\": \"rhl\",\r\n \"linuxConfiguration\": - {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": - {\r\n \"publicKeys\": [\r\n {\r\n \"path\": - \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": \"ssh-rsa - AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"enableVMAgentPlatformUpdates\": false\r\n },\r\n - \ \"secrets\": [],\r\n \"allowExtensionOperations\": true,\r\n - \ \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": + \"westus2\",\r\n \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n \"tier\": + \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": {\r\n \"singlePlacementGroup\": + true,\r\n \"orchestrationMode\": \"Uniform\",\r\n \"upgradePolicy\": + {\r\n \"mode\": \"Manual\",\r\n \"rollingUpgradePolicy\": {\r\n + \ \"maxBatchInstancePercent\": 20,\r\n \"maxUnhealthyInstancePercent\": + 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": 20,\r\n \"pauseTimeBetweenBatches\": + \"PT0S\"\r\n }\r\n },\r\n \"virtualMachineProfile\": {\r\n \"osProfile\": + {\r\n \"computerNamePrefix\": \"cliouf96e\",\r\n \"adminUsername\": + \"rhoover\",\r\n \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": + true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"enableVMAgentPlatformUpdates\": + false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": + true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": {\r\n \"osDisk\": {\r\n \"osType\": \"Linux\",\r\n \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \"managedDisk\": 
{\r\n \"storageAccountType\": \"Premium_LRS\"\r\n },\r\n \ \"diskSizeGB\": 30\r\n },\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \ \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\"\r\n - \ }\r\n },\r\n \"networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"clinqc38bNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"clinqc38bIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n + \ }\r\n },\r\n \"networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"cliouf96eNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"disableTcpStateTracking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"cliouf96eIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": false,\r\n \"storageUri\": \"https://cli000002.blob.core.windows.net/\"\r\n - \ }\r\n }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n - \ \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": - false,\r\n \"uniqueId\": \"239f9fe3-2e0c-403b-8c3a-55cda6cd7b9a\",\r\n - \ \"timeCreated\": \"2022-08-04T17:10:51.2598866+00:00\"\r\n }\r\n}" + \ }\r\n },\r\n \"extensionProfile\": {\r\n \"extensions\": + [\r\n {\r\n \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Monitor\",\r\n \"type\": \"AzureMonitorLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"1.0\",\r\n \"settings\": + {\"GCS_AUTO_CONFIG\":true}\r\n }\r\n },\r\n {\r\n + \ \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Security.Monitoring\",\r\n \"type\": \"AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.0\",\r\n 
\"settings\": + {\"enableGenevaUpload\":true,\"enableAutoConfig\":true,\"reportSuccessOnUnsupportedDistro\":true}\r\n + \ }\r\n }\r\n ]\r\n }\r\n },\r\n \"provisioningState\": + \"Succeeded\",\r\n \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": + false,\r\n \"uniqueId\": \"793988fd-7a65-472d-9472-7470271a360c\",\r\n + \ \"timeCreated\": \"2022-10-14T15:18:49.0838434+00:00\"\r\n }\r\n}" headers: cache-control: - no-cache content-length: - - '3596' + - '5446' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:20:18 GMT + - Fri, 14 Oct 2022 15:30:06 GMT expires: - '-1' pragma: @@ -4621,7 +5274,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMScaleSet3Min;379,Microsoft.Compute/GetVMScaleSet30Min;2483 + - Microsoft.Compute/GetVMScaleSet3Min;360,Microsoft.Compute/GetVMScaleSet30Min;2509 status: code: 200 message: OK @@ -4639,28 +5292,68 @@ interactions: ParameterSetName: - --name --resource-group --set User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-08-01 response: body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + string: "{\r\n \"placementGroupId\": \"900064c0-d742-4569-a06c-dcca8072c0c3\",\r\n + \ \"platformUpdateDomain\": 0,\r\n \"platformFaultDomain\": 0,\r\n \"computerName\": + \"cliouf96e000003\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n + \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.8.0.11\",\r\n \"statuses\": + [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": + \"Guest Agent is running\",\r\n \"time\": \"2022-10-14T15:29:45+00:00\"\r\n + \ }\r\n ],\r\n \"extensionHandlers\": [\r\n {\r\n \"type\": + \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": + \"1.22.2\",\r\n \"status\": {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n + \ \"message\": \"Plugin enabled\"\r\n }\r\n },\r\n {\r\n + \ \"type\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.20.58\",\r\n \"status\": {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": + \"Plugin enabled\"\r\n }\r\n }\r\n ]\r\n },\r\n \"disks\": + [\r\n {\r\n \"name\": \"cliou5z7nax54kj6yubjcliou5z7nax54kj6yubj5OS__1_474db27698dd49ddab2454669aab0333\",\r\n + \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"time\": \"2022-10-14T15:27:47.1724508+00:00\"\r\n + \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": {\r\n \"consoleScreenshotBlobUri\": + 
\"https://cli000002.blob.core.windows.net/bootdiagnostics-cliou5z7n-c3d44363-484f-435b-bb1d-61e4ddddcb55/cli000003_3.c3d44363-484f-435b-bb1d-61e4ddddcb55.screenshot.bmp\",\r\n + \ \"serialConsoleLogBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-cliou5z7n-c3d44363-484f-435b-bb1d-61e4ddddcb55/cli000003_3.c3d44363-484f-435b-bb1d-61e4ddddcb55.serialconsole.log\"\r\n + \ },\r\n \"extensions\": [\r\n {\r\n \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": + \"1.22.2\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"message\": \"Enable succeeded\"\r\n }\r\n + \ ]\r\n },\r\n {\r\n \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.20.58\",\r\n \"statuses\": [\r\n {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n + \ \"message\": \"Enable ASM succeeded\"\r\n }\r\n ]\r\n + \ }\r\n ],\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n + \ {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n \"time\": + \"2022-10-14T15:29:28.6087302+00:00\"\r\n },\r\n {\r\n \"code\": + \"PowerState/running\",\r\n \"level\": \"Info\",\r\n \"displayStatus\": + \"VM running\"\r\n }\r\n ]\r\n}" headers: cache-control: - no-cache content-length: - - '43' + - '3218' content-type: - - application/json; charset=UTF-8 + - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:20:19 GMT + - Fri, 14 Oct 2022 15:30:07 GMT expires: - '-1' pragma: - no-cache server: - - nginx + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -4669,8 +5362,10 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-frame-options: - - deny + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/GetVMScaleSetVM3Min;497,Microsoft.Compute/GetVMScaleSetVM30Min;2486,Microsoft.Compute/VMScaleSetVMViews3Min;4993 + x-ms-request-charge: + - '1' status: code: 200 message: OK @@ -4688,47 +5383,27 @@ interactions: ParameterSetName: - --name --resource-group --set User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-storage/20.1.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002?api-version=2022-05-01 response: body: - string: "{\r\n \"placementGroupId\": \"d026ef0f-b482-4884-8583-16ebe50a963b\",\r\n - \ \"platformUpdateDomain\": 1,\r\n \"platformFaultDomain\": 1,\r\n \"computerName\": - \"clinqc38b000003\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n - \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.7.3.0\",\r\n 
\"statuses\": - [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": - \"Guest Agent is running\",\r\n \"time\": \"2022-08-04T17:20:07+00:00\"\r\n - \ }\r\n ],\r\n \"extensionHandlers\": []\r\n },\r\n \"disks\": - [\r\n {\r\n \"name\": \"clinqhzpoczvy5m2spufclinqhzpoczvy5m2spufqOS__1_00d110bc7aaf4fbebbd4095a4a610862\",\r\n - \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:18:33.8976766+00:00\"\r\n - \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": {\r\n \"consoleScreenshotBlobUri\": - \"https://cli000002.blob.core.windows.net/bootdiagnostics-clinqhzpo-c47ce47c-9b4d-461d-8f0e-b5a271ca2265/cli000003_3.c47ce47c-9b4d-461d-8f0e-b5a271ca2265.screenshot.bmp\",\r\n - \ \"serialConsoleLogBlobUri\": \"https://cli000002.blob.core.windows.net/bootdiagnostics-clinqhzpo-c47ce47c-9b4d-461d-8f0e-b5a271ca2265/cli000003_3.c47ce47c-9b4d-461d-8f0e-b5a271ca2265.serialconsole.log\"\r\n - \ },\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n {\r\n - \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": \"Info\",\r\n - \ \"displayStatus\": \"Provisioning succeeded\",\r\n \"time\": \"2022-08-04T17:19:44.1628937+00:00\"\r\n - \ },\r\n {\r\n \"code\": \"PowerState/running\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"VM running\"\r\n }\r\n ]\r\n}" + string: '{"sku":{"name":"Standard_LRS","tier":"Standard"},"kind":"Storage","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Storage/storageAccounts/cli000002","name":"cli000002","type":"Microsoft.Storage/storageAccounts","location":"westus2","tags":{},"properties":{"keyCreationTime":{"key1":"2022-10-14T15:18:09.2031765Z","key2":"2022-10-14T15:18:09.2031765Z"},"privateEndpointConnections":[],"minimumTlsVersion":"TLS1_0","allowBlobPublicAccess":true,"networkAcls":{"bypass":"AzureServices","virtualNetworkRules":[],"ipRules":[],"defaultAction":"Allow"},"supportsHttpsTrafficOnly":true,"encryption":{"services":{"file":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.3282100Z"},"blob":{"keyType":"Account","enabled":true,"lastEnabledTime":"2022-10-14T15:18:09.3282100Z"}},"keySource":"Microsoft.Storage"},"provisioningState":"Succeeded","creationTime":"2022-10-14T15:18:09.0938358Z","primaryEndpoints":{"blob":"https://cli000002.blob.core.windows.net/","queue":"https://cli000002.queue.core.windows.net/","table":"https://cli000002.table.core.windows.net/","file":"https://cli000002.file.core.windows.net/"},"primaryLocation":"westus2","statusOfPrimary":"available"}}' headers: cache-control: - no-cache content-length: - - '1699' + - '1259' content-type: - - application/json; charset=utf-8 + - application/json date: - - Thu, 04 Aug 2022 17:20:20 GMT + - Fri, 14 Oct 2022 15:30:07 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 + - Microsoft-Azure-Storage-Resource-Provider/1.0,Microsoft-HTTPAPI/2.0 Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -4737,10 +5412,55 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMScaleSetVM3Min;490,Microsoft.Compute/GetVMScaleSetVM30Min;2454,Microsoft.Compute/VMScaleSetVMViews3Min;4984 - x-ms-request-charge: - - '1' + status: + code: 
200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - vmss update + Connection: + - keep-alive + ParameterSetName: + - --name --resource-group --set + User-Agent: + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + response: + body: + string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + headers: + cache-control: + - no-cache + content-length: + - '43' + content-type: + - application/json; charset=UTF-8 + date: + - Fri, 14 Oct 2022 15:30:08 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - deny status: code: 200 message: OK @@ -4762,25 +5482,27 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/manualupgrade?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/manualupgrade?api-version=2022-08-01 response: body: string: '' headers: + azure-asyncnotification: + - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/03de0886-4d01-4fcf-9950-acda6f1e833f?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/6fa287ec-b1cf-4439-a805-b1fc36c972d3?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - '0' date: - - Thu, 04 Aug 2022 17:20:20 GMT + - Fri, 14 Oct 2022 15:30:09 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/03de0886-4d01-4fcf-9950-acda6f1e833f?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/6fa287ec-b1cf-4439-a805-b1fc36c972d3?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 pragma: - no-cache server: @@ -4791,9 +5513,9 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/VMScaleSetActions3Min;235,Microsoft.Compute/VMScaleSetActions30Min;1178,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2324,Microsoft.Compute/VmssQueuedVMOperations;0 + - 
Microsoft.Compute/VMScaleSetActions3Min;229,Microsoft.Compute/VMScaleSetActions30Min;1182,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2988,Microsoft.Compute/VmssQueuedVMOperations;0 x-ms-ratelimit-remaining-subscription-writes: - - '1199' + - '1198' x-ms-request-charge: - '1' status: @@ -4813,14 +5535,14 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/03de0886-4d01-4fcf-9950-acda6f1e833f?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/6fa287ec-b1cf-4439-a805-b1fc36c972d3?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:20:20.6470228+00:00\",\r\n \"endTime\": - \"2022-08-04T17:20:25.8657649+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"03de0886-4d01-4fcf-9950-acda6f1e833f\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:30:09.3895467+00:00\",\r\n \"endTime\": + \"2022-10-14T15:30:17.2644986+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"6fa287ec-b1cf-4439-a805-b1fc36c972d3\"\r\n}" headers: cache-control: - no-cache @@ -4829,7 +5551,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:20:50 GMT + - Fri, 14 Oct 2022 15:30:39 GMT expires: - '-1' pragma: @@ -4846,7 +5568,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14965,Microsoft.Compute/GetOperation30Min;29732 + - Microsoft.Compute/GetOperation3Min;14957,Microsoft.Compute/GetOperation30Min;29827 status: code: 200 message: OK @@ -4864,9 +5586,9 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/03de0886-4d01-4fcf-9950-acda6f1e833f?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/6fa287ec-b1cf-4439-a805-b1fc36c972d3?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 response: body: string: '' @@ -4876,7 +5598,7 @@ interactions: content-length: - '0' date: - - Thu, 04 Aug 2022 17:20:50 GMT + - Fri, 14 Oct 2022 15:30:39 GMT expires: - '-1' pragma: @@ -4889,7 +5611,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14964,Microsoft.Compute/GetOperation30Min;29731 + - Microsoft.Compute/GetOperation3Min;14956,Microsoft.Compute/GetOperation30Min;29826 status: code: 200 message: OK @@ -4907,28 +5629,66 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - 
AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-08-01 response: body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + string: "{\r\n \"placementGroupId\": \"900064c0-d742-4569-a06c-dcca8072c0c3\",\r\n + \ \"platformUpdateDomain\": 0,\r\n \"platformFaultDomain\": 0,\r\n \"computerName\": + \"cliouf96e000003\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n + \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.8.0.11\",\r\n \"statuses\": + [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": + \"Guest Agent is running\",\r\n \"time\": \"2022-10-14T15:30:21+00:00\"\r\n + \ }\r\n ],\r\n \"extensionHandlers\": [\r\n {\r\n \"type\": + \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": + \"1.22.2\",\r\n \"status\": {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n + \ \"message\": \"Plugin enabled\"\r\n }\r\n },\r\n {\r\n + \ \"type\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.20.58\",\r\n \"status\": {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": + \"Plugin enabled\"\r\n }\r\n }\r\n ]\r\n },\r\n \"disks\": + [\r\n {\r\n \"name\": \"cliou5z7nax54kj6yubjcliou5z7nax54kj6yubj5OS__1_474db27698dd49ddab2454669aab0333\",\r\n + \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"time\": \"2022-10-14T15:27:47.1724508+00:00\"\r\n + \ }\r\n ]\r\n }\r\n ],\r\n \"extensions\": [\r\n {\r\n + \ \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"type\": + \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": + \"1.22.2\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"message\": \"Enable succeeded\"\r\n }\r\n + \ ]\r\n },\r\n {\r\n \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.20.58\",\r\n \"statuses\": [\r\n {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n + \ \"message\": \"Enable ASM succeeded\"\r\n }\r\n ]\r\n + \ }\r\n ],\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n + \ {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n \"time\": + \"2022-10-14T15:30:17.2332747+00:00\"\r\n },\r\n {\r\n \"code\": + \"PowerState/running\",\r\n \"level\": \"Info\",\r\n \"displayStatus\": + \"VM 
running\"\r\n }\r\n ]\r\n}" headers: cache-control: - no-cache content-length: - - '43' + - '2781' content-type: - - application/json; charset=UTF-8 + - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:20:51 GMT + - Fri, 14 Oct 2022 15:30:40 GMT expires: - '-1' pragma: - no-cache server: - - nginx + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -4937,8 +5697,10 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-frame-options: - - deny + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/GetVMScaleSetVM3Min;490,Microsoft.Compute/GetVMScaleSetVM30Min;2478,Microsoft.Compute/VMScaleSetVMViews3Min;4986 + x-ms-request-charge: + - '1' status: code: 200 message: OK @@ -4950,46 +5712,35 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - vmss update-instances + - vmss deallocate Connection: - keep-alive + Content-Length: + - '0' ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-03-01 + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/deallocate?api-version=2022-08-01 response: body: - string: "{\r\n \"placementGroupId\": \"d026ef0f-b482-4884-8583-16ebe50a963b\",\r\n - \ \"platformUpdateDomain\": 1,\r\n \"platformFaultDomain\": 1,\r\n \"computerName\": - \"clinqc38b000003\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n - \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.7.3.0\",\r\n \"statuses\": - [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": - \"Guest Agent is running\",\r\n \"time\": \"2022-08-04T17:20:26+00:00\"\r\n - \ }\r\n ],\r\n \"extensionHandlers\": []\r\n },\r\n \"disks\": - [\r\n {\r\n \"name\": \"clinqhzpoczvy5m2spufclinqhzpoczvy5m2spufqOS__1_00d110bc7aaf4fbebbd4095a4a610862\",\r\n - \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:18:33.8976766+00:00\"\r\n - \ }\r\n ]\r\n }\r\n ],\r\n \"hyperVGeneration\": \"V1\",\r\n - \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n - \ \"time\": \"2022-08-04T17:20:25.8188856+00:00\"\r\n },\r\n {\r\n - \ \"code\": \"PowerState/running\",\r\n \"level\": \"Info\",\r\n - \ \"displayStatus\": \"VM running\"\r\n }\r\n ]\r\n}" + string: '' headers: + azure-asyncnotification: + - Enabled + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/298bea43-d8c3-41f5-9ea6-fdefb592a94f?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - - '1262' - content-type: - - 
application/json; charset=utf-8 + - '0' date: - - Thu, 04 Aug 2022 17:20:51 GMT + - Fri, 14 Oct 2022 15:30:40 GMT expires: - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/298bea43-d8c3-41f5-9ea6-fdefb592a94f?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 pragma: - no-cache server: @@ -4997,54 +5748,49 @@ interactions: - Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMScaleSetVM3Min;487,Microsoft.Compute/GetVMScaleSetVM30Min;2450,Microsoft.Compute/VMScaleSetVMViews3Min;4983 + - Microsoft.Compute/DeleteVMScaleSetVM3Min;239,Microsoft.Compute/DeleteVMScaleSetVM30Min;1197,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2987,Microsoft.Compute/VmssQueuedVMOperations;0 + x-ms-ratelimit-remaining-subscription-writes: + - '1199' x-ms-request-charge: - '1' status: - code: 200 - message: OK + code: 202 + message: Accepted - request: body: null headers: Accept: - - application/json + - '*/*' Accept-Encoding: - gzip, deflate CommandName: - vmss deallocate Connection: - keep-alive - Content-Length: - - '0' ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/deallocate?api-version=2022-03-01 + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/298bea43-d8c3-41f5-9ea6-fdefb592a94f?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: '' + string: "{\r\n \"startTime\": \"2022-10-14T15:30:40.8892724+00:00\",\r\n \"status\": + \"InProgress\",\r\n \"name\": \"298bea43-d8c3-41f5-9ea6-fdefb592a94f\"\r\n}" headers: - azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/903b6095-5bc8-4c09-aded-64300145e321?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 cache-control: - no-cache content-length: - - '0' + - '134' + content-type: + - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:20:52 GMT + - Fri, 14 Oct 2022 15:31:10 GMT expires: - '-1' - location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/903b6095-5bc8-4c09-aded-64300145e321?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 pragma: - no-cache server: @@ -5052,17 +5798,17 @@ interactions: - Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - 
Microsoft.Compute/DeleteVMScaleSetVM3Min;239,Microsoft.Compute/DeleteVMScaleSetVM30Min;1194,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2234,Microsoft.Compute/VmssQueuedVMOperations;0 - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - x-ms-request-charge: - - '1' + - Microsoft.Compute/GetOperation3Min;14958,Microsoft.Compute/GetOperation30Min;29820 status: - code: 202 - message: Accepted + code: 200 + message: OK - request: body: null headers: @@ -5077,13 +5823,13 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/903b6095-5bc8-4c09-aded-64300145e321?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/298bea43-d8c3-41f5-9ea6-fdefb592a94f?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:20:53.1312236+00:00\",\r\n \"status\": - \"InProgress\",\r\n \"name\": \"903b6095-5bc8-4c09-aded-64300145e321\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:30:40.8892724+00:00\",\r\n \"status\": + \"InProgress\",\r\n \"name\": \"298bea43-d8c3-41f5-9ea6-fdefb592a94f\"\r\n}" headers: cache-control: - no-cache @@ -5092,7 +5838,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:21:22 GMT + - Fri, 14 Oct 2022 15:31:40 GMT expires: - '-1' pragma: @@ -5109,7 +5855,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14969,Microsoft.Compute/GetOperation30Min;29729 + - Microsoft.Compute/GetOperation3Min;14962,Microsoft.Compute/GetOperation30Min;29816 status: code: 200 message: OK @@ -5127,23 +5873,22 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/903b6095-5bc8-4c09-aded-64300145e321?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/298bea43-d8c3-41f5-9ea6-fdefb592a94f?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:20:53.1312236+00:00\",\r\n \"endTime\": - \"2022-08-04T17:21:40.8028128+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"903b6095-5bc8-4c09-aded-64300145e321\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:30:40.8892724+00:00\",\r\n \"status\": + \"InProgress\",\r\n \"name\": \"298bea43-d8c3-41f5-9ea6-fdefb592a94f\"\r\n}" headers: cache-control: - no-cache content-length: - - '184' + - '134' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 
17:21:53 GMT + - Fri, 14 Oct 2022 15:32:10 GMT expires: - '-1' pragma: @@ -5160,7 +5905,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14974,Microsoft.Compute/GetOperation30Min;29726 + - Microsoft.Compute/GetOperation3Min;14971,Microsoft.Compute/GetOperation30Min;29814 status: code: 200 message: OK @@ -5178,19 +5923,23 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/903b6095-5bc8-4c09-aded-64300145e321?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/298bea43-d8c3-41f5-9ea6-fdefb592a94f?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: '' + string: "{\r\n \"startTime\": \"2022-10-14T15:30:40.8892724+00:00\",\r\n \"endTime\": + \"2022-10-14T15:32:41.4350378+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"298bea43-d8c3-41f5-9ea6-fdefb592a94f\"\r\n}" headers: cache-control: - no-cache content-length: - - '0' + - '184' + content-type: + - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:21:53 GMT + - Fri, 14 Oct 2022 15:32:41 GMT expires: - '-1' pragma: @@ -5200,10 +5949,14 @@ interactions: - Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14973,Microsoft.Compute/GetOperation30Min;29725 + - Microsoft.Compute/GetOperation3Min;14976,Microsoft.Compute/GetOperation30Min;29811 status: code: 200 message: OK @@ -5211,7 +5964,7 @@ interactions: body: null headers: Accept: - - application/json + - '*/*' Accept-Encoding: - gzip, deflate CommandName: @@ -5221,38 +5974,32 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/298bea43-d8c3-41f5-9ea6-fdefb592a94f?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 response: body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + string: '' headers: cache-control: - no-cache content-length: - - '43' - content-type: - - application/json; charset=UTF-8 + - '0' date: - - Thu, 04 Aug 2022 17:21:53 GMT + - Fri, 14 Oct 2022 15:32:41 GMT expires: - '-1' pragma: - no-cache server: - - nginx + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains - transfer-encoding: - 
- chunked - vary: - - Accept-Encoding x-content-type-options: - nosniff - x-frame-options: - - deny + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/GetOperation3Min;14975,Microsoft.Compute/GetOperation30Min;29810 status: code: 200 message: OK @@ -5270,21 +6017,21 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-08-01 response: body: - string: "{\r\n \"placementGroupId\": \"d026ef0f-b482-4884-8583-16ebe50a963b\",\r\n - \ \"platformUpdateDomain\": 1,\r\n \"platformFaultDomain\": 1,\r\n \"disks\": - [\r\n {\r\n \"name\": \"clinqhzpoczvy5m2spufclinqhzpoczvy5m2spufqOS__1_00d110bc7aaf4fbebbd4095a4a610862\",\r\n + string: "{\r\n \"placementGroupId\": \"900064c0-d742-4569-a06c-dcca8072c0c3\",\r\n + \ \"platformUpdateDomain\": 0,\r\n \"platformFaultDomain\": 0,\r\n \"disks\": + [\r\n {\r\n \"name\": \"cliou5z7nax54kj6yubjcliou5z7nax54kj6yubj5OS__1_474db27698dd49ddab2454669aab0333\",\r\n \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:21:40.6465621+00:00\"\r\n + succeeded\",\r\n \"time\": \"2022-10-14T15:32:41.3568721+00:00\"\r\n \ }\r\n ]\r\n }\r\n ],\r\n \"hyperVGeneration\": \"V1\",\r\n \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n - \ \"time\": \"2022-08-04T17:21:40.6621708+00:00\"\r\n },\r\n {\r\n + \ \"time\": \"2022-10-14T15:32:41.3881182+00:00\"\r\n },\r\n {\r\n \ \"code\": \"PowerState/deallocated\",\r\n \"level\": \"Info\",\r\n \ \"displayStatus\": \"VM deallocated\"\r\n }\r\n ]\r\n}" headers: @@ -5295,7 +6042,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:21:53 GMT + - Fri, 14 Oct 2022 15:32:41 GMT expires: - '-1' pragma: @@ -5312,7 +6059,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMScaleSetVM3Min;484,Microsoft.Compute/GetVMScaleSetVM30Min;2446,Microsoft.Compute/VMScaleSetVMViews3Min;4980 + - Microsoft.Compute/GetVMScaleSetVM3Min;483,Microsoft.Compute/GetVMScaleSetVM30Min;2470,Microsoft.Compute/VMScaleSetVMViews3Min;4983 x-ms-request-charge: - '1' status: @@ -5336,25 +6083,27 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: POST - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/start?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/start?api-version=2022-08-01 response: body: string: '' headers: + azure-asyncnotification: + - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/ce4de450-4935-456d-8c81-df3ff06533a9?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/3afa8c95-a423-4601-8aa6-2eed6aef7166?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - '0' date: - - Thu, 04 Aug 2022 17:21:54 GMT + - Fri, 14 Oct 2022 15:32:42 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/ce4de450-4935-456d-8c81-df3ff06533a9?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/3afa8c95-a423-4601-8aa6-2eed6aef7166?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 pragma: - no-cache server: @@ -5365,9 +6114,9 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/VMScaleSetActions3Min;236,Microsoft.Compute/VMScaleSetActions30Min;1177,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2235,Microsoft.Compute/VmssQueuedVMOperations;0 + - Microsoft.Compute/VMScaleSetActions3Min;236,Microsoft.Compute/VMScaleSetActions30Min;1179,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2976,Microsoft.Compute/VmssQueuedVMOperations;0 x-ms-ratelimit-remaining-subscription-writes: - - '1197' + - '1199' x-ms-request-charge: - '1' status: @@ -5387,14 +6136,64 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/3afa8c95-a423-4601-8aa6-2eed6aef7166?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 + response: + body: + string: "{\r\n \"startTime\": \"2022-10-14T15:32:43.2630868+00:00\",\r\n \"status\": + \"InProgress\",\r\n \"name\": \"3afa8c95-a423-4601-8aa6-2eed6aef7166\"\r\n}" + headers: + cache-control: + - no-cache + content-length: + - '134' + content-type: + - application/json; charset=utf-8 + date: + - Fri, 14 Oct 2022 15:33:12 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-resource: + - 
Microsoft.Compute/GetOperation3Min;14978,Microsoft.Compute/GetOperation30Min;29806 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - vmss start + Connection: + - keep-alive + ParameterSetName: + - -g -n --instance-ids + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/ce4de450-4935-456d-8c81-df3ff06533a9?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/3afa8c95-a423-4601-8aa6-2eed6aef7166?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:21:55.0995667+00:00\",\r\n \"endTime\": - \"2022-08-04T17:22:12.6150939+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"ce4de450-4935-456d-8c81-df3ff06533a9\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:32:43.2630868+00:00\",\r\n \"endTime\": + \"2022-10-14T15:33:18.3249612+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"3afa8c95-a423-4601-8aa6-2eed6aef7166\"\r\n}" headers: cache-control: - no-cache @@ -5403,7 +6202,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:22:24 GMT + - Fri, 14 Oct 2022 15:33:43 GMT expires: - '-1' pragma: @@ -5420,7 +6219,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14973,Microsoft.Compute/GetOperation30Min;29720 + - Microsoft.Compute/GetOperation3Min;14982,Microsoft.Compute/GetOperation30Min;29804 status: code: 200 message: OK @@ -5438,9 +6237,9 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/ce4de450-4935-456d-8c81-df3ff06533a9?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/3afa8c95-a423-4601-8aa6-2eed6aef7166?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 response: body: string: '' @@ -5450,7 +6249,7 @@ interactions: content-length: - '0' date: - - Thu, 04 Aug 2022 17:22:24 GMT + - Fri, 14 Oct 2022 15:33:43 GMT expires: - '-1' pragma: @@ -5463,7 +6262,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14971,Microsoft.Compute/GetOperation30Min;29718 + - Microsoft.Compute/GetOperation3Min;14981,Microsoft.Compute/GetOperation30Min;29803 status: code: 200 message: OK @@ -5481,51 +6280,69 @@ interactions: ParameterSetName: - --name --resource-group --set User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 
azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachineScaleSets\",\r\n \"location\": - \"westus2\",\r\n \"tags\": {},\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n - \ \"tier\": \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": - {\r\n \"singlePlacementGroup\": true,\r\n \"upgradePolicy\": {\r\n \"mode\": - \"Manual\",\r\n \"rollingUpgradePolicy\": {\r\n \"maxBatchInstancePercent\": - 20,\r\n \"maxUnhealthyInstancePercent\": 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": - 20,\r\n \"pauseTimeBetweenBatches\": \"PT0S\"\r\n }\r\n },\r\n - \ \"virtualMachineProfile\": {\r\n \"osProfile\": {\r\n \"computerNamePrefix\": - \"clinqc38b\",\r\n \"adminUsername\": \"rhl\",\r\n \"linuxConfiguration\": - {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": - {\r\n \"publicKeys\": [\r\n {\r\n \"path\": - \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": \"ssh-rsa - AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"enableVMAgentPlatformUpdates\": false\r\n },\r\n - \ \"secrets\": [],\r\n \"allowExtensionOperations\": true,\r\n - \ \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": + \"westus2\",\r\n \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n \"tier\": + \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": {\r\n \"singlePlacementGroup\": + true,\r\n \"orchestrationMode\": \"Uniform\",\r\n \"upgradePolicy\": + {\r\n \"mode\": \"Manual\",\r\n \"rollingUpgradePolicy\": {\r\n + \ \"maxBatchInstancePercent\": 20,\r\n \"maxUnhealthyInstancePercent\": + 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": 20,\r\n \"pauseTimeBetweenBatches\": + \"PT0S\"\r\n }\r\n },\r\n \"virtualMachineProfile\": {\r\n \"osProfile\": + {\r\n \"computerNamePrefix\": \"cliouf96e\",\r\n \"adminUsername\": + \"rhoover\",\r\n \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": 
+ true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"enableVMAgentPlatformUpdates\": + false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": + true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": {\r\n \"osDisk\": {\r\n \"osType\": \"Linux\",\r\n \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\"\r\n },\r\n \ \"diskSizeGB\": 30\r\n },\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \ \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\"\r\n - \ }\r\n },\r\n \"networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"clinqc38bNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"clinqc38bIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n + \ }\r\n },\r\n \"networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"cliouf96eNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"disableTcpStateTracking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"cliouf96eIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": false,\r\n \"storageUri\": 
\"https://cli000002.blob.core.windows.net/\"\r\n - \ }\r\n }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n - \ \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": - false,\r\n \"uniqueId\": \"239f9fe3-2e0c-403b-8c3a-55cda6cd7b9a\",\r\n - \ \"timeCreated\": \"2022-08-04T17:10:51.2598866+00:00\"\r\n }\r\n}" + \ }\r\n },\r\n \"extensionProfile\": {\r\n \"extensions\": + [\r\n {\r\n \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Monitor\",\r\n \"type\": \"AzureMonitorLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"1.0\",\r\n \"settings\": + {\"GCS_AUTO_CONFIG\":true}\r\n }\r\n },\r\n {\r\n + \ \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Security.Monitoring\",\r\n \"type\": \"AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.0\",\r\n \"settings\": + {\"enableGenevaUpload\":true,\"enableAutoConfig\":true,\"reportSuccessOnUnsupportedDistro\":true}\r\n + \ }\r\n }\r\n ]\r\n }\r\n },\r\n \"provisioningState\": + \"Succeeded\",\r\n \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": + false,\r\n \"uniqueId\": \"793988fd-7a65-472d-9472-7470271a360c\",\r\n + \ \"timeCreated\": \"2022-10-14T15:18:49.0838434+00:00\"\r\n }\r\n}" headers: cache-control: - no-cache content-length: - - '3596' + - '5446' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:22:26 GMT + - Fri, 14 Oct 2022 15:33:44 GMT expires: - '-1' pragma: @@ -5542,32 +6359,41 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMScaleSet3Min;387,Microsoft.Compute/GetVMScaleSet30Min;2477 + - Microsoft.Compute/GetVMScaleSet3Min;396,Microsoft.Compute/GetVMScaleSet30Min;2503 status: code: 200 message: OK - request: - body: '{"location": "westus2", "tags": {}, "sku": {"name": "Standard_DS1_v2", - "tier": "Standard", "capacity": 2}, "properties": {"upgradePolicy": {"mode": - "Manual", "rollingUpgradePolicy": {"maxBatchInstancePercent": 20, "maxUnhealthyInstancePercent": - 20, "maxUnhealthyUpgradedInstancePercent": 20, "pauseTimeBetweenBatches": "PT0S"}}, - "virtualMachineProfile": {"osProfile": {"computerNamePrefix": "clinqc38b", "adminUsername": - "rhl", "linuxConfiguration": {"disablePasswordAuthentication": true, "ssh": - {"publicKeys": [{"path": "/home/rhl/.ssh/authorized_keys", "keyData": "ssh-rsa - AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5"}]}, - "provisionVMAgent": true}, "secrets": [], "allowExtensionOperations": true}, - "storageProfile": {"osDisk": {"caching": "ReadWrite", "createOption": "FromImage", - "diskSizeGB": 30, "osType": "Linux", "managedDisk": {"storageAccountType": "Premium_LRS"}}}, - "networkProfile": {"networkInterfaceConfigurations": [{"name": "clinqc38bNic", - "properties": {"primary": true, "enableAcceleratedNetworking": false, "dnsSettings": - {"dnsServers": []}, "ipConfigurations": [{"name": "clinqc38bIPConfig", 
"properties": - {"subnet": {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet"}, + body: '{"location": "westus2", "tags": {"azsecpack": "nonprod", "platformsettings.host_environment.service.platform_optedin_for_rootcerts": + "true"}, "sku": {"name": "Standard_DS1_v2", "tier": "Standard", "capacity": + 2}, "identity": {"type": "UserAssigned", "userAssignedIdentities": {"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2": + {}}}, "properties": {"upgradePolicy": {"mode": "Manual", "rollingUpgradePolicy": + {"maxBatchInstancePercent": 20, "maxUnhealthyInstancePercent": 20, "maxUnhealthyUpgradedInstancePercent": + 20, "pauseTimeBetweenBatches": "PT0S"}}, "virtualMachineProfile": {"osProfile": + {"computerNamePrefix": "cliouf96e", "adminUsername": "rhoover", "linuxConfiguration": + {"disablePasswordAuthentication": true, "ssh": {"publicKeys": [{"path": "/home/rhoover/.ssh/authorized_keys", + "keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\n"}]}, "provisionVMAgent": true, "enableVMAgentPlatformUpdates": + false}, "secrets": [], "allowExtensionOperations": true}, "storageProfile": + {"osDisk": {"caching": "ReadWrite", "createOption": "FromImage", "diskSizeGB": + 30, "osType": "Linux", "managedDisk": {"storageAccountType": "Premium_LRS"}}}, + "networkProfile": {"networkInterfaceConfigurations": [{"name": "cliouf96eNic", + "properties": {"primary": true, "enableAcceleratedNetworking": false, "disableTcpStateTracking": + false, "dnsSettings": {"dnsServers": []}, "ipConfigurations": [{"name": "cliouf96eIPConfig", + "properties": {"subnet": {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet"}, "privateIPAddressVersion": "IPv4", "loadBalancerBackendAddressPools": [{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool"}], "loadBalancerInboundNatPools": [{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool"}]}}], "enableIPForwarding": false}}]}, "diagnosticsProfile": {"bootDiagnostics": {"enabled": - true}}}, "overprovision": true, "doNotRunExtensionsOnOverprovisionedVMs": false, - "singlePlacementGroup": true}}' + true}}, "extensionProfile": {"extensions": [{"name": "Microsoft.Azure.Monitor.AzureMonitorLinuxAgent", + "properties": {"publisher": "Microsoft.Azure.Monitor", "type": "AzureMonitorLinuxAgent", + "typeHandlerVersion": "1.0", "autoUpgradeMinorVersion": true, "enableAutomaticUpgrade": + true, 
"settings": {"GCS_AUTO_CONFIG": true}}}, {"name": "Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent", + "properties": {"publisher": "Microsoft.Azure.Security.Monitoring", "type": "AzureSecurityLinuxAgent", + "typeHandlerVersion": "2.0", "autoUpgradeMinorVersion": true, "enableAutomaticUpgrade": + true, "settings": {"enableGenevaUpload": true, "enableAutoConfig": true, "reportSuccessOnUnsupportedDistro": + true}}}]}}, "overprovision": true, "doNotRunExtensionsOnOverprovisionedVMs": + false, "singlePlacementGroup": true, "orchestrationMode": "Uniform"}}' headers: Accept: - application/json @@ -5578,60 +6404,78 @@ interactions: Connection: - keep-alive Content-Length: - - '2357' + - '3724' Content-Type: - application/json ParameterSetName: - --name --resource-group --set User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachineScaleSets\",\r\n \"location\": - \"westus2\",\r\n \"tags\": {},\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n - \ \"tier\": \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": - {\r\n \"singlePlacementGroup\": true,\r\n \"upgradePolicy\": {\r\n \"mode\": - \"Manual\",\r\n \"rollingUpgradePolicy\": {\r\n \"maxBatchInstancePercent\": - 20,\r\n \"maxUnhealthyInstancePercent\": 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": - 20,\r\n \"pauseTimeBetweenBatches\": \"PT0S\"\r\n }\r\n },\r\n - \ \"virtualMachineProfile\": {\r\n \"osProfile\": {\r\n \"computerNamePrefix\": - \"clinqc38b\",\r\n \"adminUsername\": \"rhl\",\r\n \"linuxConfiguration\": - {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": - {\r\n \"publicKeys\": [\r\n {\r\n \"path\": - \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": \"ssh-rsa - AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"enableVMAgentPlatformUpdates\": false\r\n },\r\n - \ \"secrets\": [],\r\n \"allowExtensionOperations\": true,\r\n - \ \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": + \"westus2\",\r\n \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n 
\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n \"tier\": + \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": {\r\n \"singlePlacementGroup\": + true,\r\n \"orchestrationMode\": \"Uniform\",\r\n \"upgradePolicy\": + {\r\n \"mode\": \"Manual\",\r\n \"rollingUpgradePolicy\": {\r\n + \ \"maxBatchInstancePercent\": 20,\r\n \"maxUnhealthyInstancePercent\": + 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": 20,\r\n \"pauseTimeBetweenBatches\": + \"PT0S\"\r\n }\r\n },\r\n \"virtualMachineProfile\": {\r\n \"osProfile\": + {\r\n \"computerNamePrefix\": \"cliouf96e\",\r\n \"adminUsername\": + \"rhoover\",\r\n \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": + true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"enableVMAgentPlatformUpdates\": + false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": + true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": {\r\n \"osDisk\": {\r\n \"osType\": \"Linux\",\r\n \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\"\r\n },\r\n \ \"diskSizeGB\": 30\r\n },\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \ \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\"\r\n - \ }\r\n },\r\n \"networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"clinqc38bNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"clinqc38bIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n + \ }\r\n },\r\n \"networkProfile\": 
{\"networkInterfaceConfigurations\":[{\"name\":\"cliouf96eNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"disableTcpStateTracking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"cliouf96eIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": - true\r\n }\r\n }\r\n },\r\n \"provisioningState\": \"Updating\",\r\n - \ \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": - false,\r\n \"uniqueId\": \"239f9fe3-2e0c-403b-8c3a-55cda6cd7b9a\",\r\n - \ \"timeCreated\": \"2022-08-04T17:10:51.2598866+00:00\"\r\n }\r\n}" + true\r\n }\r\n },\r\n \"extensionProfile\": {\r\n \"extensions\": + [\r\n {\r\n \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Monitor\",\r\n \"type\": \"AzureMonitorLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"1.0\",\r\n \"settings\": + {\"GCS_AUTO_CONFIG\":true}\r\n }\r\n },\r\n {\r\n + \ \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Security.Monitoring\",\r\n \"type\": \"AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.0\",\r\n \"settings\": + {\"enableGenevaUpload\":true,\"enableAutoConfig\":true,\"reportSuccessOnUnsupportedDistro\":true}\r\n + \ }\r\n }\r\n ]\r\n }\r\n },\r\n \"provisioningState\": + \"Updating\",\r\n \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": + false,\r\n \"uniqueId\": \"793988fd-7a65-472d-9472-7470271a360c\",\r\n + \ \"timeCreated\": \"2022-10-14T15:18:49.0838434+00:00\"\r\n }\r\n}" headers: azure-asyncnotification: - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/d54e9cdd-9671-44b5-9599-b2bb63d31fc0?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/09c9bcf9-6244-4cc6-a6f2-21e58cb0b4e4?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - - '3525' + - '5375' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:22:28 GMT + - Fri, 14 Oct 2022 15:33:49 GMT expires: - '-1' pragma: @@ -5648,9 +6492,9 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - 
Microsoft.Compute/CreateVMScaleSet3Min;110,Microsoft.Compute/CreateVMScaleSet30Min;540,Microsoft.Compute/VmssQueuedVMOperations;0 + - Microsoft.Compute/CreateVMScaleSet3Min;147,Microsoft.Compute/CreateVMScaleSet30Min;726,Microsoft.Compute/VmssQueuedVMOperations;0 x-ms-ratelimit-remaining-subscription-writes: - - '1198' + - '1199' x-ms-request-charge: - '0' status: @@ -5670,14 +6514,14 @@ interactions: ParameterSetName: - --name --resource-group --set User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/d54e9cdd-9671-44b5-9599-b2bb63d31fc0?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/09c9bcf9-6244-4cc6-a6f2-21e58cb0b4e4?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:22:28.0525063+00:00\",\r\n \"endTime\": - \"2022-08-04T17:22:28.1778164+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"d54e9cdd-9671-44b5-9599-b2bb63d31fc0\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:33:49.1842555+00:00\",\r\n \"endTime\": + \"2022-10-14T15:33:49.4968109+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"09c9bcf9-6244-4cc6-a6f2-21e58cb0b4e4\"\r\n}" headers: cache-control: - no-cache @@ -5686,7 +6530,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:22:38 GMT + - Fri, 14 Oct 2022 15:33:59 GMT expires: - '-1' pragma: @@ -5703,7 +6547,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14975,Microsoft.Compute/GetOperation30Min;29715 + - Microsoft.Compute/GetOperation3Min;14983,Microsoft.Compute/GetOperation30Min;29802 status: code: 200 message: OK @@ -5721,50 +6565,68 @@ interactions: ParameterSetName: - --name --resource-group --set User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003?api-version=2022-08-01 response: body: string: "{\r\n \"name\": \"cli000003\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003\",\r\n \ \"type\": \"Microsoft.Compute/virtualMachineScaleSets\",\r\n \"location\": - \"westus2\",\r\n \"tags\": {},\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n - \ \"tier\": \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": - {\r\n \"singlePlacementGroup\": true,\r\n \"upgradePolicy\": {\r\n \"mode\": - \"Manual\",\r\n 
\"rollingUpgradePolicy\": {\r\n \"maxBatchInstancePercent\": - 20,\r\n \"maxUnhealthyInstancePercent\": 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": - 20,\r\n \"pauseTimeBetweenBatches\": \"PT0S\"\r\n }\r\n },\r\n - \ \"virtualMachineProfile\": {\r\n \"osProfile\": {\r\n \"computerNamePrefix\": - \"clinqc38b\",\r\n \"adminUsername\": \"rhl\",\r\n \"linuxConfiguration\": - {\r\n \"disablePasswordAuthentication\": true,\r\n \"ssh\": - {\r\n \"publicKeys\": [\r\n {\r\n \"path\": - \"/home/rhl/.ssh/authorized_keys\",\r\n \"keyData\": \"ssh-rsa - AAAAB3NzaC1yc2EAAAADAQABAAABAQCnShRFbeM5I8ZqZWutEqZmnfgEoQQwC4Gd+oiy/XiTTalYwRjUWC0nYLWx8QZROPCyD+GyU7Mm3KyKtWSU5yDBjAxSEegJxqc93oPTbVX8i0IuXZ1DtmdI6JBTQRvliInSnkiY2UXTE+R058LEZRiOeMkGcaLcGzKBYQe/xHzH8dbDMK9Jx1RQSKWqslb5u0YSM8aIdMlDJ2u1hRkp054yHcKdyMCR9lUYa9I6BEpZqEbk8m7Wy4jtbyCgNP7Y1AFcQUcZFtm+wplrYJf4M20umLkK6c04j7NPrAMydprprKgU4Wg7vyhIgQF9VQx6bzBmSKUgkzAYmsZpm4cDCtb5\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"enableVMAgentPlatformUpdates\": false\r\n },\r\n - \ \"secrets\": [],\r\n \"allowExtensionOperations\": true,\r\n - \ \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": + \"westus2\",\r\n \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"platformsettings.host_environment.service.platform_optedin_for_rootcerts\": + \"true\"\r\n },\r\n \"identity\": {\r\n \"type\": \"UserAssigned\",\r\n + \ \"userAssignedIdentities\": {\r\n \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/AzSecPackAutoConfigRG/providers/Microsoft.ManagedIdentity/userAssignedIdentities/AzSecPackAutoConfigUA-westus2\": + {\r\n \"principalId\": \"684d55e2-8922-4966-a660-2d38ca4a1711\",\r\n + \ \"clientId\": \"6d45cf55-f311-4228-97b0-c22ae418aad6\"\r\n }\r\n + \ }\r\n },\r\n \"sku\": {\r\n \"name\": \"Standard_DS1_v2\",\r\n \"tier\": + \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\": {\r\n \"singlePlacementGroup\": + true,\r\n \"orchestrationMode\": \"Uniform\",\r\n \"upgradePolicy\": + {\r\n \"mode\": \"Manual\",\r\n \"rollingUpgradePolicy\": {\r\n + \ \"maxBatchInstancePercent\": 20,\r\n \"maxUnhealthyInstancePercent\": + 20,\r\n \"maxUnhealthyUpgradedInstancePercent\": 20,\r\n \"pauseTimeBetweenBatches\": + \"PT0S\"\r\n }\r\n },\r\n \"virtualMachineProfile\": {\r\n \"osProfile\": + {\r\n \"computerNamePrefix\": \"cliouf96e\",\r\n \"adminUsername\": + \"rhoover\",\r\n \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": + true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n + \ \"path\": \"/home/rhoover/.ssh/authorized_keys\",\r\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDu8zk/7tyHC3VcDKnMTGglN/OICHI6zYTBNyjdzh9xf5Xb6geCw2wmwGa2D7z1u4qwqlaPN2axVbs8/C7v/HQpmgirNjXZIJMB35olsFgs5vLQswHqL+mXjqV5o+puM3bvGTNGnmJBDSD3K+JkLOkahpc3r6W1bVUeKRnuoJOsQ/Fbss/y7BZMeX31mzIwfjOVkmEVU8mvTE0n1BOnRtELKVxbhvE16xaBI54J777Ns34HTNmhuFY3PyhHbfX5UOyEKqPXKGcnbRye/pq9j9+8Pyg1Vh4ZxycEQ6KxOYhfOpdrn+NR0z9dYVeal3cXQc5hHBpK38JE7nwPrKywp3v6dRxNcJUAjib06vs1Ept3+dTLW5FcBb/IK54HVSp4SEkq8xGj60HQebAAqf7HGIllngCnNsVABBD/06FoNaKxZSk3zFCBsWkSB5gh0R6DVH/yw6Ydru6cHqXAIIl3FCgbXrnfB9xngF34Em5P/rCRLjdZlXFofgPP6x+YPJmdNh8= + rhoover@microsoft.com\\n\"\r\n }\r\n ]\r\n },\r\n + \ \"provisionVMAgent\": true,\r\n \"enableVMAgentPlatformUpdates\": + false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": + true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"storageProfile\": {\r\n \"osDisk\": {\r\n \"osType\": \"Linux\",\r\n \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n \"managedDisk\": 
{\r\n \"storageAccountType\": \"Premium_LRS\"\r\n },\r\n \ \"diskSizeGB\": 30\r\n },\r\n \"imageReference\": {\r\n \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \ \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\"\r\n - \ }\r\n },\r\n \"networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"clinqc38bNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"clinqc38bIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n + \ }\r\n },\r\n \"networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"cliouf96eNic\",\"properties\":{\"primary\":true,\"enableAcceleratedNetworking\":false,\"disableTcpStateTracking\":false,\"dnsSettings\":{\"dnsServers\":[]},\"enableIPForwarding\":false,\"ipConfigurations\":[{\"name\":\"cliouf96eIPConfig\",\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/virtualNetworks/cli000003VNET/subnets/cli000003Subnet\"},\"privateIPAddressVersion\":\"IPv4\",\"loadBalancerBackendAddressPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/backendAddressPools/cli000003LBBEPool\"}],\"loadBalancerInboundNatPools\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Network/loadBalancers/cli000003LB/inboundNatPools/cli000003LBNatPool\"}]}}]}}]},\r\n \ \"diagnosticsProfile\": {\r\n \"bootDiagnostics\": {\r\n \"enabled\": - true\r\n }\r\n }\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n - \ \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": - false,\r\n \"uniqueId\": \"239f9fe3-2e0c-403b-8c3a-55cda6cd7b9a\",\r\n - \ \"timeCreated\": \"2022-08-04T17:10:51.2598866+00:00\"\r\n }\r\n}" + true\r\n }\r\n },\r\n \"extensionProfile\": {\r\n \"extensions\": + [\r\n {\r\n \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Monitor\",\r\n \"type\": \"AzureMonitorLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"1.0\",\r\n \"settings\": + {\"GCS_AUTO_CONFIG\":true}\r\n }\r\n },\r\n {\r\n + \ \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"properties\": {\r\n \"autoUpgradeMinorVersion\": + true,\r\n \"enableAutomaticUpgrade\": true,\r\n \"publisher\": + \"Microsoft.Azure.Security.Monitoring\",\r\n \"type\": \"AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.0\",\r\n \"settings\": + 
{\"enableGenevaUpload\":true,\"enableAutoConfig\":true,\"reportSuccessOnUnsupportedDistro\":true}\r\n + \ }\r\n }\r\n ]\r\n }\r\n },\r\n \"provisioningState\": + \"Succeeded\",\r\n \"overprovision\": true,\r\n \"doNotRunExtensionsOnOverprovisionedVMs\": + false,\r\n \"uniqueId\": \"793988fd-7a65-472d-9472-7470271a360c\",\r\n + \ \"timeCreated\": \"2022-10-14T15:18:49.0838434+00:00\"\r\n }\r\n}" headers: cache-control: - no-cache content-length: - - '3526' + - '5376' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:22:38 GMT + - Fri, 14 Oct 2022 15:33:59 GMT expires: - '-1' pragma: @@ -5781,56 +6643,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMScaleSet3Min;386,Microsoft.Compute/GetVMScaleSet30Min;2474 - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - vmss update - Connection: - - keep-alive - ParameterSetName: - - --name --resource-group --set - User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 - response: - body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" - headers: - cache-control: - - no-cache - content-length: - - '43' - content-type: - - application/json; charset=UTF-8 - date: - - Thu, 04 Aug 2022 17:22:38 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - nginx - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-frame-options: - - deny + - Microsoft.Compute/GetVMScaleSet3Min;393,Microsoft.Compute/GetVMScaleSet30Min;2499 status: code: 200 message: OK @@ -5848,38 +6661,59 @@ interactions: ParameterSetName: - --name --resource-group --set User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-08-01 response: body: - string: "{\r\n \"placementGroupId\": \"d026ef0f-b482-4884-8583-16ebe50a963b\",\r\n - \ \"platformUpdateDomain\": 1,\r\n \"platformFaultDomain\": 1,\r\n \"computerName\": - \"clinqc38b000003\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n - \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.7.3.0\",\r\n \"statuses\": + string: "{\r\n \"placementGroupId\": \"900064c0-d742-4569-a06c-dcca8072c0c3\",\r\n + \ \"platformUpdateDomain\": 0,\r\n \"platformFaultDomain\": 0,\r\n \"computerName\": + \"cliouf96e000003\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n + \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.8.0.11\",\r\n \"statuses\": 
[\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": - \"Guest Agent is running\",\r\n \"time\": \"2022-08-04T17:22:30+00:00\"\r\n - \ }\r\n ],\r\n \"extensionHandlers\": []\r\n },\r\n \"disks\": - [\r\n {\r\n \"name\": \"clinqhzpoczvy5m2spufclinqhzpoczvy5m2spufqOS__1_00d110bc7aaf4fbebbd4095a4a610862\",\r\n + \"Guest Agent is running\",\r\n \"time\": \"2022-10-14T15:33:17+00:00\"\r\n + \ }\r\n ],\r\n \"extensionHandlers\": [\r\n {\r\n \"type\": + \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": + \"1.22.2\",\r\n \"status\": {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n + \ \"message\": \"Plugin enabled\"\r\n }\r\n },\r\n {\r\n + \ \"type\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.20.58\",\r\n \"status\": {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": + \"Plugin enabled\"\r\n }\r\n }\r\n ]\r\n },\r\n \"disks\": + [\r\n {\r\n \"name\": \"cliou5z7nax54kj6yubjcliou5z7nax54kj6yubj5OS__1_474db27698dd49ddab2454669aab0333\",\r\n \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:21:55.7245758+00:00\"\r\n - \ }\r\n ]\r\n }\r\n ],\r\n \"hyperVGeneration\": \"V1\",\r\n - \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n - \ \"time\": \"2022-08-04T17:22:12.599487+00:00\"\r\n },\r\n {\r\n - \ \"code\": \"PowerState/running\",\r\n \"level\": \"Info\",\r\n - \ \"displayStatus\": \"VM running\"\r\n }\r\n ]\r\n}" + succeeded\",\r\n \"time\": \"2022-10-14T15:32:44.1224345+00:00\"\r\n + \ }\r\n ]\r\n }\r\n ],\r\n \"extensions\": [\r\n {\r\n + \ \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"type\": + \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": + \"1.22.2\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"message\": \"Enable succeeded\"\r\n }\r\n + \ ]\r\n },\r\n {\r\n \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.20.58\",\r\n \"statuses\": [\r\n {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n + \ \"message\": \"Enable ASM succeeded\"\r\n }\r\n ]\r\n + \ }\r\n ],\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n + \ {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n \"time\": + \"2022-10-14T15:33:18.2780922+00:00\"\r\n },\r\n {\r\n \"code\": + \"PowerState/running\",\r\n \"level\": \"Info\",\r\n \"displayStatus\": + \"VM running\"\r\n }\r\n ]\r\n}" headers: cache-control: - no-cache content-length: - - '1261' + - '2781' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:22:38 GMT + - Fri, 14 Oct 2022 15:34:00 GMT expires: - '-1' pragma: @@ -5896,7 +6730,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - 
Microsoft.Compute/GetVMScaleSetVM3Min;490,Microsoft.Compute/GetVMScaleSetVM30Min;2445,Microsoft.Compute/VMScaleSetVMViews3Min;4990 + - Microsoft.Compute/GetVMScaleSetVM3Min;496,Microsoft.Compute/GetVMScaleSetVM30Min;2469,Microsoft.Compute/VMScaleSetVMViews3Min;4996 x-ms-request-charge: - '1' status: @@ -5920,25 +6754,27 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/manualupgrade?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/manualupgrade?api-version=2022-08-01 response: body: string: '' headers: + azure-asyncnotification: + - Enabled azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/f6b82fdb-efc7-4b6d-88ec-6fd59c4654e9?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/13eb1b46-5d20-4eff-8562-a7b9d15c3721?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 cache-control: - no-cache content-length: - '0' date: - - Thu, 04 Aug 2022 17:22:39 GMT + - Fri, 14 Oct 2022 15:34:01 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/f6b82fdb-efc7-4b6d-88ec-6fd59c4654e9?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/13eb1b46-5d20-4eff-8562-a7b9d15c3721?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 pragma: - no-cache server: @@ -5949,9 +6785,9 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/VMScaleSetActions3Min;237,Microsoft.Compute/VMScaleSetActions30Min;1176,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2238,Microsoft.Compute/VmssQueuedVMOperations;0 + - Microsoft.Compute/VMScaleSetActions3Min;238,Microsoft.Compute/VMScaleSetActions30Min;1178,Microsoft.Compute/VMScaleSetBatchedVMRequests5Min;2957,Microsoft.Compute/VmssQueuedVMOperations;0 x-ms-ratelimit-remaining-subscription-writes: - - '1196' + - '1199' x-ms-request-charge: - '1' status: @@ -5971,23 +6807,23 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/f6b82fdb-efc7-4b6d-88ec-6fd59c4654e9?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-03-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/13eb1b46-5d20-4eff-8562-a7b9d15c3721?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&api-version=2022-08-01 response: body: - string: "{\r\n \"startTime\": \"2022-08-04T17:22:39.9587222+00:00\",\r\n \"endTime\": - \"2022-08-04T17:22:48.099251+00:00\",\r\n \"status\": \"Succeeded\",\r\n - \ \"name\": \"f6b82fdb-efc7-4b6d-88ec-6fd59c4654e9\"\r\n}" + string: "{\r\n \"startTime\": \"2022-10-14T15:34:01.8561004+00:00\",\r\n \"endTime\": + \"2022-10-14T15:34:12.3872976+00:00\",\r\n \"status\": \"Succeeded\",\r\n + \ \"name\": \"13eb1b46-5d20-4eff-8562-a7b9d15c3721\"\r\n}" headers: cache-control: - no-cache content-length: - - '183' + - '184' content-type: - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:23:09 GMT + - Fri, 14 Oct 2022 15:34:32 GMT expires: - '-1' pragma: @@ -6004,7 +6840,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14978,Microsoft.Compute/GetOperation30Min;29713 + - Microsoft.Compute/GetOperation3Min;14984,Microsoft.Compute/GetOperation30Min;29799 status: code: 200 message: OK @@ -6022,9 +6858,9 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/f6b82fdb-efc7-4b6d-88ec-6fd59c4654e9?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus2/operations/13eb1b46-5d20-4eff-8562-a7b9d15c3721?p=c49d4c35-fce9-4992-b8a6-5d4f3bc79110&monitor=true&api-version=2022-08-01 response: body: string: '' @@ -6034,7 +6870,7 @@ interactions: content-length: - '0' date: - - Thu, 04 Aug 2022 17:23:09 GMT + - Fri, 14 Oct 2022 15:34:32 GMT expires: - '-1' pragma: @@ -6047,7 +6883,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetOperation3Min;14977,Microsoft.Compute/GetOperation30Min;29712 + - Microsoft.Compute/GetOperation3Min;14983,Microsoft.Compute/GetOperation30Min;29798 status: code: 200 message: OK @@ -6065,28 +6901,66 @@ interactions: ParameterSetName: - -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-08-01 response: body: - string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" + string: "{\r\n \"placementGroupId\": \"900064c0-d742-4569-a06c-dcca8072c0c3\",\r\n + \ \"platformUpdateDomain\": 0,\r\n 
\"platformFaultDomain\": 0,\r\n \"computerName\": + \"cliouf96e000003\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n + \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.8.0.11\",\r\n \"statuses\": + [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": + \"Guest Agent is running\",\r\n \"time\": \"2022-10-14T15:34:17+00:00\"\r\n + \ }\r\n ],\r\n \"extensionHandlers\": [\r\n {\r\n \"type\": + \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": + \"1.22.2\",\r\n \"status\": {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n + \ \"message\": \"Plugin enabled\"\r\n }\r\n },\r\n {\r\n + \ \"type\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.20.58\",\r\n \"status\": {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": + \"Plugin enabled\"\r\n }\r\n }\r\n ]\r\n },\r\n \"disks\": + [\r\n {\r\n \"name\": \"cliou5z7nax54kj6yubjcliou5z7nax54kj6yubj5OS__1_474db27698dd49ddab2454669aab0333\",\r\n + \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"time\": \"2022-10-14T15:34:02.8873439+00:00\"\r\n + \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": {},\r\n \"extensions\": + [\r\n {\r\n \"name\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Azure.Monitor.AzureMonitorLinuxAgent\",\r\n \"typeHandlerVersion\": + \"1.22.2\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n + \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning + succeeded\",\r\n \"message\": \"Enable succeeded\"\r\n }\r\n + \ ]\r\n },\r\n {\r\n \"name\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"type\": \"Microsoft.Azure.Security.Monitoring.AzureSecurityLinuxAgent\",\r\n + \ \"typeHandlerVersion\": \"2.20.58\",\r\n \"statuses\": [\r\n {\r\n + \ \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n + \ \"message\": \"Enable ASM succeeded\"\r\n }\r\n ]\r\n + \ }\r\n ],\r\n \"hyperVGeneration\": \"V1\",\r\n \"statuses\": [\r\n + \ {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": + \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n \"time\": + \"2022-10-14T15:34:12.3247354+00:00\"\r\n },\r\n {\r\n \"code\": + \"PowerState/running\",\r\n \"level\": \"Info\",\r\n \"displayStatus\": + \"VM running\"\r\n }\r\n ]\r\n}" headers: cache-control: - no-cache content-length: - - '43' + - '2807' content-type: - - application/json; charset=UTF-8 + - application/json; charset=utf-8 date: - - Thu, 04 Aug 2022 17:23:10 GMT + - Fri, 14 Oct 2022 15:34:32 GMT expires: - '-1' pragma: - no-cache server: - - nginx + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -6095,8 +6969,10 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-frame-options: - - deny + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/GetVMScaleSetVM3Min;497,Microsoft.Compute/GetVMScaleSetVM30Min;2468,Microsoft.Compute/VMScaleSetVMViews3Min;4997 + x-ms-request-charge: + - '1' status: code: 200 message: OK @@ -6114,45 +6990,28 @@ interactions: ParameterSetName: 
- -g -n --instance-ids User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.13 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_serialconsole000001/providers/Microsoft.Compute/virtualMachineScaleSets/cli000003/virtualMachines/3/instanceView?api-version=2022-03-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default?api-version=2018-05-01 response: body: - string: "{\r\n \"placementGroupId\": \"d026ef0f-b482-4884-8583-16ebe50a963b\",\r\n - \ \"platformUpdateDomain\": 1,\r\n \"platformFaultDomain\": 1,\r\n \"computerName\": - \"clinqc38b000003\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n - \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.7.3.0\",\r\n \"statuses\": - [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n \"level\": - \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \"message\": - \"Guest Agent is running\",\r\n \"time\": \"2022-08-04T17:22:48+00:00\"\r\n - \ }\r\n ],\r\n \"extensionHandlers\": []\r\n },\r\n \"disks\": - [\r\n {\r\n \"name\": \"clinqhzpoczvy5m2spufclinqhzpoczvy5m2spufqOS__1_00d110bc7aaf4fbebbd4095a4a610862\",\r\n - \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-04T17:22:40.8024299+00:00\"\r\n - \ }\r\n ]\r\n }\r\n ],\r\n \"bootDiagnostics\": {},\r\n \"hyperVGeneration\": - \"V1\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning succeeded\",\r\n - \ \"time\": \"2022-08-04T17:22:48.0523799+00:00\"\r\n },\r\n {\r\n - \ \"code\": \"PowerState/running\",\r\n \"level\": \"Info\",\r\n - \ \"displayStatus\": \"VM running\"\r\n }\r\n ]\r\n}" + string: "{\n \"properties\": {\n \"disabled\": false\n }\n}" headers: cache-control: - no-cache content-length: - - '1288' + - '43' content-type: - - application/json; charset=utf-8 + - application/json; charset=UTF-8 date: - - Thu, 04 Aug 2022 17:23:10 GMT + - Fri, 14 Oct 2022 15:34:32 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 + - nginx strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -6161,10 +7020,8 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMScaleSetVM3Min;490,Microsoft.Compute/GetVMScaleSetVM30Min;2444,Microsoft.Compute/VMScaleSetVMViews3Min;4990 - x-ms-request-charge: - - '1' + x-frame-options: + - deny status: code: 200 message: OK diff --git a/src/serial-console/azext_serialconsole/tests/latest/recordings/test_enable_disable.yaml b/src/serial-console/azext_serialconsole/tests/latest/recordings/test_enable_disable.yaml index bc7c3673474..b03a58dc383 100644 --- a/src/serial-console/azext_serialconsole/tests/latest/recordings/test_enable_disable.yaml +++ b/src/serial-console/azext_serialconsole/tests/latest/recordings/test_enable_disable.yaml @@ -15,8 +15,8 @@ interactions: Content-Type: - application/json User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - 
(Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) method: POST uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default/disableConsole?api-version=2018-05-01 response: @@ -30,7 +30,7 @@ interactions: content-type: - application/json; charset=UTF-8 date: - - Thu, 04 Aug 2022 17:10:07 GMT + - Fri, 14 Oct 2022 15:18:05 GMT expires: - '-1' pragma: @@ -48,7 +48,7 @@ interactions: x-frame-options: - deny x-ms-ratelimit-remaining-subscription-writes: - - '1198' + - '1199' status: code: 200 message: OK @@ -68,8 +68,8 @@ interactions: Content-Type: - application/json User-Agent: - - AZURECLI/2.39.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.8.13 - (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.35) + - AZURECLI/2.41.0 azsdk-python-microsoftserialconsoleclient/unknown Python/3.10.5 + (Windows-10-10.0.22000-SP0) method: POST uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.SerialConsole/consoleServices/default/enableConsole?api-version=2018-05-01 response: @@ -83,7 +83,7 @@ interactions: content-type: - application/json; charset=UTF-8 date: - - Thu, 04 Aug 2022 17:10:07 GMT + - Fri, 14 Oct 2022 15:18:04 GMT expires: - '-1' pragma: @@ -101,7 +101,7 @@ interactions: x-frame-options: - deny x-ms-ratelimit-remaining-subscription-writes: - - '1198' + - '1199' status: code: 200 message: OK diff --git a/src/serial-console/azext_serialconsole/vendored_sdks/serialconsole/_microsoft_serial_console_client.py b/src/serial-console/azext_serialconsole/vendored_sdks/serialconsole/_microsoft_serial_console_client.py index 3176195fef4..680ea7ca70c 100644 --- a/src/serial-console/azext_serialconsole/vendored_sdks/serialconsole/_microsoft_serial_console_client.py +++ b/src/serial-console/azext_serialconsole/vendored_sdks/serialconsole/_microsoft_serial_console_client.py @@ -36,15 +36,20 @@ class MicrosoftSerialConsoleClient(MicrosoftSerialConsoleClientOperationsMixin): """ def __init__( - self, - credential, # type: "TokenCredential" - subscription_id, # type: str - base_url=None, # type: Optional[str] - **kwargs # type: Any + self, + credential, # type: "TokenCredential" + subscription_id, # type: str + base_url=None, # type: Optional[str] + **kwargs # type: Any ): # type: (...) -> None + + if len(kwargs) > 0 and kwargs.get('storage_account_region') is not None: + base_url = 'https://{}.management.azure.com'.format(kwargs['storage_account_region']) + if not base_url: base_url = 'https://management.azure.com' + self._config = MicrosoftSerialConsoleClientConfiguration(credential, subscription_id, **kwargs) self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) diff --git a/src/serial-console/setup.py b/src/serial-console/setup.py index 3bded74de4a..f0d69cf113e 100644 --- a/src/serial-console/setup.py +++ b/src/serial-console/setup.py @@ -16,7 +16,7 @@ # TODO: Confirm this is the right version number you want and it matches your # HISTORY.rst entry. 
-VERSION = '0.1.2' +VERSION = '0.1.3' # The full list of classifiers is available at # https://pypi.python.org/pypi?%3Aaction=list_classifiers From 224ecb0981c842f918d378533126e75fdc42a685 Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Fri, 21 Oct 2022 02:19:03 +0000 Subject: [PATCH 17/85] [Release] Update index.json for extension [ serial-console ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=10355&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/ec69d7790d605643c251d23733ff24a604105c7a --- src/index.json | 51 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/src/index.json b/src/index.json index 32071e5a7de..2faa2bee768 100644 --- a/src/index.json +++ b/src/index.json @@ -35204,6 +35204,57 @@ "version": "0.1.2" }, "sha256Digest": "51493fe5617f90d06f5e4493b81a85b5d664f80c264941d4e6f5ca5f3473dc57" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/serial_console-0.1.3-py3-none-any.whl", + "filename": "serial_console-0.1.3-py3-none-any.whl", + "metadata": { + "azext.isPreview": true, + "azext.minCliCoreVersion": "2.15.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/serialconsole" + } + } + }, + "extras": [], + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "serial-console", + "run_requires": [ + { + "requires": [ + "websocket-client (~=0.56.0)" + ] + } + ], + "summary": "Microsoft Azure Command-Line Tools for Serial Console Extension", + "version": "0.1.3" + }, + "sha256Digest": "66ed2625bac7ab1e73300a37eb76d856f25673e398ccdbe6939f0b0d10e4770e" } ], "spring": [ From c6a57d50aa54a8fdcab0c0cad1b7f794c640b4ff Mon Sep 17 00:00:00 2001 From: Vivian Thiebaut <81188381+vthiebaut10@users.noreply.github.com> Date: Thu, 20 Oct 2022 22:42:50 -0400 Subject: [PATCH 18/85] [ssh] Bug Fix: Change how ssh vm reads the ssh client logs (#5366) --- src/ssh/HISTORY.md | 4 + src/ssh/azext_ssh/custom.py | 2 +- src/ssh/azext_ssh/rdp_utils.py | 8 +- src/ssh/azext_ssh/ssh_utils.py | 269 ++++++++---------- .../azext_ssh/tests/latest/test_ssh_utils.py | 121 ++++++-- 5 files changed, 223 insertions(+), 181 deletions(-) diff --git a/src/ssh/HISTORY.md b/src/ssh/HISTORY.md index 4ecef1cb840..6e51f477016 100644 --- a/src/ssh/HISTORY.md +++ b/src/ssh/HISTORY.md @@ -1,5 +1,9 @@ Release History =============== +1.1.3 +----- +* [bug fix] SSH Banners are printed before authentication. 
+ 1.1.2 ----- * Remove dependency to cryptography (Az CLI core alredy has cryptography) diff --git a/src/ssh/azext_ssh/custom.py b/src/ssh/azext_ssh/custom.py index e396fcefcaa..234ee452b96 100644 --- a/src/ssh/azext_ssh/custom.py +++ b/src/ssh/azext_ssh/custom.py @@ -190,7 +190,7 @@ def _do_ssh_op(cmd, op_info, op_call): op_info.private_key_file + ', ' if delete_keys else "", op_info.public_key_file + ', ' if delete_keys else "", op_info.cert_file if delete_cert else "") - ssh_utils.do_cleanup(delete_keys, delete_cert, op_info.cert_file, + ssh_utils.do_cleanup(delete_keys, delete_cert, op_info.delete_credentials, op_info.cert_file, op_info.private_key_file, op_info.public_key_file) raise e diff --git a/src/ssh/azext_ssh/rdp_utils.py b/src/ssh/azext_ssh/rdp_utils.py index e5387b0a852..7e1960d05f7 100644 --- a/src/ssh/azext_ssh/rdp_utils.py +++ b/src/ssh/azext_ssh/rdp_utils.py @@ -43,8 +43,8 @@ def start_rdp_connection(ssh_info, delete_keys, delete_cert): ssh_process, print_ssh_logs = start_ssh_tunnel(ssh_info) ssh_connection_t0 = time.time() ssh_success, log_list = wait_for_ssh_connection(ssh_process, print_ssh_logs) - ssh_utils.do_cleanup(delete_keys, delete_cert, ssh_info.cert_file, ssh_info.private_key_file, - ssh_info.public_key_file) + ssh_utils.do_cleanup(delete_keys, delete_cert, ssh_info.delete_credentials, ssh_info.cert_file, + ssh_info.private_key_file, ssh_info.public_key_file) if ssh_success and ssh_process.poll() is None: call_rdp(local_port) @@ -56,8 +56,8 @@ def start_rdp_connection(ssh_info, delete_keys, delete_cert): telemetry.add_extension_event('ssh', ssh_connection_data) terminate_ssh(ssh_process, log_list, print_ssh_logs) - ssh_utils.do_cleanup(delete_keys, delete_cert, ssh_info.cert_file, ssh_info.private_key_file, - ssh_info.public_key_file) + ssh_utils.do_cleanup(delete_keys, delete_cert, ssh_info.delete_credentials, ssh_info.cert_file, + ssh_info.private_key_file, ssh_info.public_key_file) if delete_keys: # This is only true if keys were generated, so they must be in a temp folder. temp_dir = os.path.dirname(ssh_info.cert_file) diff --git a/src/ssh/azext_ssh/ssh_utils.py b/src/ssh/azext_ssh/ssh_utils.py index 56633acba62..b889ee7cbc6 100644 --- a/src/ssh/azext_ssh/ssh_utils.py +++ b/src/ssh/azext_ssh/ssh_utils.py @@ -5,10 +5,10 @@ import os import platform import subprocess -import multiprocessing as mp import time import datetime import re +import sys import colorama from knack import log @@ -25,16 +25,23 @@ def start_ssh_connection(op_info, delete_keys, delete_cert): try: - # Initialize these so that if something fails in the try block before these - # are initialized, then the finally block won't fail. - cleanup_process = None - log_file = None - connection_status = None - ssh_arg_list = [] if op_info.ssh_args: ssh_arg_list = op_info.ssh_args + # Redirecting stderr: + # 1. Read SSH logs to determine if authentication was successful so credentials can be deleted + # 2. Read SSHProxy error messages to print friendly error messages for well known errors. + # On Linux when connecting to a local user on a host with a banner, output gets messed up if stderr redirected. + # If user expects logs to be printed, do not redirect logs. In some ocasions output gets messed up. 
+ is_local_user_on_linux = (platform.system() != 'Windows' and not delete_cert) + redirect_stderr = set(['-v', '-vv', '-vvv']).isdisjoint(ssh_arg_list) and \ + (op_info.is_arc or delete_cert or op_info.delete_credentials) and \ + not is_local_user_on_linux + + if redirect_stderr: + ssh_arg_list = ['-v'] + ssh_arg_list + env = os.environ.copy() if op_info.is_arc(): env['SSHPROXY_RELAY_INFO'] = connectivity_utils.format_relay_info_string(op_info.relay_info) @@ -42,26 +49,19 @@ def start_ssh_connection(op_info, delete_keys, delete_cert): # Get ssh client before starting the clean up process in case there is an error in getting client. command = [get_ssh_client_path('ssh', op_info.ssh_client_folder), op_info.get_host(), "-l", op_info.local_user] - if not op_info.cert_file and not op_info.private_key_file: - # In this case, even if delete_credentials is true, there is nothing to clean-up. - op_info.delete_credentials = False - - log_file, ssh_arg_list, cleanup_process = _start_cleanup(op_info.cert_file, op_info.private_key_file, - op_info.public_key_file, op_info.delete_credentials, - delete_keys, delete_cert, ssh_arg_list) command = command + op_info.build_args() + ssh_arg_list connection_duration = time.time() logger.debug("Running ssh command %s", ' '.join(command)) - # pylint: disable=subprocess-run-check try: - if set(['-v', '-vv', '-vvv']).isdisjoint(ssh_arg_list) or log_file: - connection_status = subprocess.run(command, shell=platform.system() == 'Windows', env=env, - stderr=subprocess.PIPE, encoding='utf-8') + # pylint: disable=consider-using-with + if redirect_stderr: + ssh_process = subprocess.Popen(command, stderr=subprocess.PIPE, env=env, encoding='utf-8') + _read_ssh_logs(ssh_process, op_info, delete_cert, delete_keys) else: - # Logs are sent to stderr. In that case, we shouldn't capture stderr. - connection_status = subprocess.run(command, shell=platform.system() == 'Windows', env=env) + ssh_process = subprocess.Popen(command, env=env, encoding='utf-8') + _wait_to_delete_credentials(ssh_process, op_info, delete_cert, delete_keys) except OSError as e: colorama.init() raise azclierror.BadRequestError(f"Failed to run ssh command with error: {str(e)}.", @@ -69,15 +69,15 @@ def start_ssh_connection(op_info, delete_keys, delete_cert): connection_duration = (time.time() - connection_duration) / 60 ssh_connection_data = {'Context.Default.AzureCLI.SSHConnectionDurationInMinutes': connection_duration} - if connection_status and connection_status.returncode == 0: + if ssh_process.poll() == 0: ssh_connection_data['Context.Default.AzureCLI.SSHConnectionStatus'] = "Success" telemetry.add_extension_event('ssh', ssh_connection_data) finally: # Even if something fails between the creation of the credentials and the end of the ssh connection, we - # want to make sure that all credentials are cleaned up, and that the clean up process is terminated. - _terminate_cleanup(delete_keys, delete_cert, op_info.delete_credentials, cleanup_process, op_info.cert_file, - op_info.private_key_file, op_info.public_key_file, log_file, connection_status) + # want to make sure that all credentials are cleaned up. 
+ do_cleanup(delete_keys, delete_cert, op_info.delete_credentials, + op_info.cert_file, op_info.private_key_file, op_info.public_key_file) def write_ssh_config(config_info, delete_keys, delete_cert): @@ -94,6 +94,51 @@ def write_ssh_config(config_info, delete_keys, delete_cert): f.write('\n'.join(config_text)) +def _read_ssh_logs(ssh_sub, op_info, delete_cert, delete_keys): + log_list = [] + connection_established = False + t0 = time.time() + + next_line = ssh_sub.stderr.readline() + while next_line: + log_list.append(next_line) + if not next_line.startswith("debug1:") and \ + not next_line.startswith("debug2:") and \ + not next_line.startswith("debug3:") and \ + not next_line.startswith("Authenticated "): + sys.stderr.write(next_line) + _check_for_known_errors(next_line, delete_cert, log_list) + + if "debug1: Entering interactive session." in next_line: + connection_established = True + do_cleanup(delete_keys, delete_cert, op_info.delete_credentials, + op_info.cert_file, op_info.private_key_file, op_info.public_key_file) + + if not connection_established and \ + time.time() - t0 > const.CLEANUP_TOTAL_TIME_LIMIT_IN_SECONDS: + do_cleanup(delete_keys, delete_cert, op_info.delete_credentials, + op_info.cert_file, op_info.private_key_file, op_info.public_key_file) + + next_line = ssh_sub.stderr.readline() + + ssh_sub.wait() + + +def _wait_to_delete_credentials(ssh_sub, op_info, delete_cert, delete_keys): + # wait for 2 minutes. If the process isn't closed until then, delete credentials. + if delete_cert or op_info.delete_credentials: + t0 = time.time() + while (time.time() - t0) < const.CLEANUP_TOTAL_TIME_LIMIT_IN_SECONDS: + if ssh_sub.poll() is not None: + break + time.sleep(1) + + do_cleanup(delete_keys, delete_cert, op_info.delete_credentials, + op_info.cert_file, op_info.private_key_file, op_info.public_key_file) + + ssh_sub.wait() + + def create_ssh_keyfile(private_key_file, ssh_client_folder=None): sshkeygen_path = get_ssh_client_path("ssh-keygen", ssh_client_folder) command = [sshkeygen_path, "-f", private_key_file, "-t", "rsa", "-q", "-N", ""] @@ -162,51 +207,44 @@ def get_ssh_cert_principals(cert_file, ssh_client_folder=None): return principals -def _print_error_messages_from_ssh_log(log_file, connection_status, delete_cert): - with open(log_file, 'r', encoding='utf-8') as ssh_log: - log_text = ssh_log.read() - log_lines = log_text.splitlines() - if ("debug1: Authentication succeeded" not in log_text and - not re.search("^Authenticated to .*\n", log_text, re.MULTILINE)) \ - or (connection_status and connection_status.returncode): - for line in log_lines: - if "debug1:" not in line: - print(line) - - # This connection fails when using our generated certificates. - # Only throw error if conection fails with AAD login. - if "Permission denied (publickey)." 
in log_text and delete_cert: - # pylint: disable=bare-except - # pylint: disable=too-many-boolean-expressions - # Check if OpenSSH client and server versions are incompatible - try: - regex = 'OpenSSH.*_([0-9]+)\\.([0-9]+)' - local_major, local_minor = re.findall(regex, log_lines[0])[0] - remote_major, remote_minor = re.findall(regex, - file_utils.get_line_that_contains("remote software version", - log_lines))[0] - local_major = int(local_major) - local_minor = int(local_minor) - remote_major = int(remote_major) - remote_minor = int(remote_minor) - except: - ssh_log.close() - return - - if (remote_major < 7 or (remote_major == 7 and remote_minor < 8)) and \ - (local_major > 8 or (local_major == 8 and local_minor >= 8)): - logger.warning("The OpenSSH server version in the target VM %d.%d is too old. " - "Version incompatible with OpenSSH client version %d.%d. " - "Refer to https://bugzilla.mindrot.org/show_bug.cgi?id=3351 for more information.", - remote_major, remote_minor, local_major, local_minor) - - elif (local_major < 7 or (local_major == 7 and local_minor < 8)) and \ - (remote_major > 8 or (remote_major == 8 and remote_minor >= 8)): - logger.warning("The OpenSSH client version %d.%d is too old. " - "Version incompatible with OpenSSH server version %d.%d in the target VM. " - "Refer to https://bugzilla.mindrot.org/show_bug.cgi?id=3351 for more information.", - local_major, local_minor, remote_major, remote_minor) - ssh_log.close() +def _check_for_known_errors(error_message, delete_cert, log_lines): + # This connection fails when using our generated certificates. + # Only throw error if conection fails with AAD login. + if "Permission denied (publickey)." in error_message and delete_cert: + # pylint: disable=bare-except + # pylint: disable=too-many-boolean-expressions + # Check if OpenSSH client and server versions are incompatible + try: + regex = 'OpenSSH.*_([0-9]+)\\.([0-9]+)' + local_major, local_minor = re.findall(regex, log_lines[0])[0] + remote_version_line = file_utils.get_line_that_contains("remote software version", log_lines) + remote_major, remote_minor = re.findall(regex, remote_version_line)[0] + local_major = int(local_major) + local_minor = int(local_minor) + remote_major = int(remote_major) + remote_minor = int(remote_minor) + except: + return + + if (remote_major < 7 or (remote_major == 7 and remote_minor < 8)) and \ + (local_major > 8 or (local_major == 8 and local_minor >= 8)): + logger.warning("The OpenSSH server version in the target VM %d.%d is too old. " + "Version incompatible with OpenSSH client version %d.%d. " + "Refer to https://bugzilla.mindrot.org/show_bug.cgi?id=3351 for more information.", + remote_major, remote_minor, local_major, local_minor) + + elif (local_major < 7 or (local_major == 7 and local_minor < 8)) and \ + (remote_major > 8 or (remote_major == 8 and remote_minor >= 8)): + logger.warning("The OpenSSH client version %d.%d is too old. " + "Version incompatible with OpenSSH server version %d.%d in the target VM. " + "Refer to https://bugzilla.mindrot.org/show_bug.cgi?id=3351 for more information.", + local_major, local_minor, remote_major, remote_minor) + + regex = ("{\"level\":\"fatal\",\"msg\":\"sshproxy: error copying information from the connection: " + ".*\",\"time\":\".*\"}.*") + if re.search(regex, error_message): + logger.error("Please make sure SSH port is allowed using \"azcmagent config list\" in the target " + "Arc Server. 
Ensure SSHD is running on the target machine.\n") def get_ssh_client_path(ssh_command="ssh", ssh_client_folder=None): @@ -263,96 +301,17 @@ def get_ssh_client_path(ssh_command="ssh", ssh_client_folder=None): return ssh_path -def do_cleanup(delete_keys, delete_cert, cert_file, private_key, public_key, log_file=None, wait=False): - if log_file: - t0 = time.time() - match = False - while (time.time() - t0) < const.CLEANUP_TOTAL_TIME_LIMIT_IN_SECONDS and not match: - time.sleep(const.CLEANUP_TIME_INTERVAL_IN_SECONDS) - # pylint: disable=bare-except - # pylint: disable=anomalous-backslash-in-string - try: - with open(log_file, 'r', encoding='utf-8') as ssh_client_log: - log_text = ssh_client_log.read() - # The "debug1:..." message doesn't seems to exist in OpenSSH 3.9 - match = ("debug1: Authentication succeeded" in log_text or - re.search("^Authenticated to .*\n", log_text, re.MULTILINE)) - ssh_client_log.close() - except: - # If there is an exception, wait for a little bit and try again - time.sleep(const.CLEANUP_TIME_INTERVAL_IN_SECONDS) - - elif wait: - # if we are not checking the logs, but still want to wait for connection before deleting files - time.sleep(const.CLEANUP_TOTAL_TIME_LIMIT_IN_SECONDS) - - if delete_keys and private_key: +def do_cleanup(delete_keys, delete_cert, delete_credentials, cert_file, private_key, public_key): + if (delete_keys or delete_credentials) and private_key: file_utils.delete_file(private_key, f"Couldn't delete private key {private_key}. ", True) if delete_keys and public_key: file_utils.delete_file(public_key, f"Couldn't delete public key {public_key}. ", True) - if delete_cert and cert_file: + if (delete_cert or delete_credentials) and cert_file: file_utils.delete_file(cert_file, f"Couldn't delete certificate {cert_file}. ", True) - - -def _start_cleanup(cert_file, private_key_file, public_key_file, delete_credentials, delete_keys, - delete_cert, ssh_arg_list): - log_file = None - cleanup_process = None - if delete_keys or delete_cert or delete_credentials: - if '-E' not in ssh_arg_list and set(['-v', '-vv', '-vvv']).isdisjoint(ssh_arg_list): - # If the user either provides his own client log file (-E) or - # wants the client log messages to be printed to the console (-vvv/-vv/-v), - # we should not use the log files to check for connection success. - if cert_file: - log_dir = os.path.dirname(cert_file) - elif private_key_file: - log_dir = os.path.dirname(private_key_file) - log_file_name = 'ssh_client_log_' + str(os.getpid()) - log_file = os.path.join(log_dir, log_file_name) - ssh_arg_list = ['-E', log_file, '-v'] + ssh_arg_list - # Create a new process that will wait until the connection is established and then delete keys. 
- cleanup_process = mp.Process(target=do_cleanup, args=(delete_keys or delete_credentials, - delete_cert or delete_credentials, - cert_file, private_key_file, public_key_file, - log_file, True)) - cleanup_process.start() - - return log_file, ssh_arg_list, cleanup_process - - -def _terminate_cleanup(delete_keys, delete_cert, delete_credentials, cleanup_process, cert_file, - private_key_file, public_key_file, log_file, connection_status): - try: - if connection_status and connection_status.stderr: - if connection_status.returncode != 0: - # Check if stderr is a proxy error - regex = ("{\"level\":\"fatal\",\"msg\":\"sshproxy: error copying information from the connection: " - ".*\",\"time\":\".*\"}.*") - if re.search(regex, connection_status.stderr): - logger.error("Please make sure SSH port is allowed using \"azcmagent config list\" in the target " - "Arc Server. Ensure SSHD is running on the target machine.") - print(connection_status.stderr) - finally: - if delete_keys or delete_cert or delete_credentials: - if cleanup_process and cleanup_process.is_alive(): - cleanup_process.terminate() - # wait for process to terminate - t0 = time.time() - while cleanup_process.is_alive() and (time.time() - t0) < const.CLEANUP_AWAIT_TERMINATION_IN_SECONDS: - time.sleep(1) - - if log_file and os.path.isfile(log_file): - _print_error_messages_from_ssh_log(log_file, connection_status, delete_cert) - - # Make sure all files have been properly removed. - do_cleanup(delete_keys or delete_credentials, delete_cert or delete_credentials, - cert_file, private_key_file, public_key_file) - if log_file: - file_utils.delete_file(log_file, f"Couldn't delete temporary log file {log_file}. ", True) - if delete_keys: - # This is only true if keys were generated, so they must be in a temp folder. - temp_dir = os.path.dirname(cert_file) - file_utils.delete_folder(temp_dir, f"Couldn't delete temporary folder {temp_dir}", True) + if delete_keys and cert_file: + # This is only true if keys were generated, so they must be in a temp folder. 
+ temp_dir = os.path.dirname(cert_file) + file_utils.delete_folder(temp_dir, f"Couldn't delete temporary folder {temp_dir}", True) def _issue_config_cleanup_warning(delete_cert, delete_keys, is_arc, cert_file, relay_info_path, ssh_client_folder): diff --git a/src/ssh/azext_ssh/tests/latest/test_ssh_utils.py b/src/ssh/azext_ssh/tests/latest/test_ssh_utils.py index dc37b42e864..27c9f45e54d 100644 --- a/src/ssh/azext_ssh/tests/latest/test_ssh_utils.py +++ b/src/ssh/azext_ssh/tests/latest/test_ssh_utils.py @@ -12,43 +12,81 @@ from azext_ssh import ssh_utils from azext_ssh import ssh_info -class SSHUtilsTests(unittest.TestCase): - @mock.patch.object(ssh_utils, '_start_cleanup') - @mock.patch.object(ssh_utils, '_terminate_cleanup') + +class SSHUtilsTests(unittest.TestCase): + @mock.patch.object(ssh_utils, 'do_cleanup') + @mock.patch.object(ssh_utils, '_read_ssh_logs') @mock.patch.object(ssh_utils, 'get_ssh_client_path') - @mock.patch('subprocess.run') + @mock.patch('subprocess.Popen') @mock.patch('os.environ.copy') @mock.patch('platform.system') - def test_start_ssh_connection_compute(self, mock_system, mock_copy_env, mock_call, mock_path, mock_terminatecleanup, mock_startcleanup): + def test_start_ssh_connection_compute_aad_windows(self, mock_system, mock_copy_env, mock_call, mock_path, mock_read, mock_cleanup): - op_info = ssh_info.SSHSession("rg", "vm", "ip", None, None, False, "user", None, "port", None, ['arg1', 'arg2', 'arg3'], False, "Microsof.Compute", None, None, False) + op_info = ssh_info.SSHSession("rg", "vm", "ip", None, None, False, "user", None, "port", None, ['arg1', 'arg2', 'arg3'], False, "Microsof.Compute/virtualMachines", None, None, False) op_info.public_key_file = "pub" op_info.private_key_file = "priv" op_info.cert_file = "cert" op_info.ssh_client_folder = "client" + ssh_process = mock.Mock() + ssh_process.poll.return_value = 0 + mock_system.return_value = 'Windows' mock_call.return_value = 0 mock_path.return_value = 'ssh' + mock_call.return_value = ssh_process mock_copy_env.return_value = {'var1':'value1', 'var2':'value2', 'var3':'value3'} - mock_startcleanup.return_value = 'log', ['arg1', 'arg2', 'arg3', '-E', 'log', '-v'], 'cleanup process' - expected_command = ['ssh', 'ip', '-l', 'user', '-i', 'priv', '-o', 'CertificateFile=\"cert\"', '-p', 'port', 'arg1', 'arg2', 'arg3', '-E', 'log', '-v'] + expected_command = ['ssh', 'ip', '-l', 'user', '-i', 'priv', '-o', 'CertificateFile=\"cert\"', '-p', 'port', '-v', 'arg1', 'arg2', 'arg3'] expected_env = {'var1':'value1', 'var2':'value2', 'var3':'value3'} ssh_utils.start_ssh_connection(op_info, True, True) mock_path.assert_called_once_with('ssh', 'client') - mock_startcleanup.assert_called_with('cert', 'priv', 'pub', False, True, True, ['arg1', 'arg2', 'arg3']) - mock_call.assert_called_once_with(expected_command, shell=True, env=expected_env, stderr=mock.ANY, encoding='utf-8') - mock_terminatecleanup.assert_called_once_with(True, True, False, 'cleanup process', 'cert', 'priv', 'pub', 'log', 0) - - @mock.patch.object(ssh_utils, '_terminate_cleanup') + mock_call.assert_called_once_with(expected_command, stderr=mock.ANY, env=expected_env, encoding='utf-8') + mock_read.assert_called_once_with(ssh_process, op_info, True, True) + mock_cleanup.assert_called_once_with(True, True, False, 'cert', 'priv', 'pub') + + @mock.patch.object(ssh_utils, 'do_cleanup') + @mock.patch.object(ssh_utils, '_wait_to_delete_credentials') + @mock.patch.object(ssh_utils, 'get_ssh_client_path') + @mock.patch('subprocess.Popen') @mock.patch('os.environ.copy') 
+ @mock.patch('platform.system') + def test_start_ssh_connection_compute_local_linux(self, mock_system, mock_copy_env, mock_call, mock_path, mock_wait, mock_cleanup): + + op_info = ssh_info.SSHSession("rg", "vm", "ip", None, None, False, "user", None, "port", None, ['arg1', 'arg2', 'arg3'], False, "Microsof.Compute", None, None, False) + op_info.public_key_file = "pub" + op_info.private_key_file = "priv" + op_info.cert_file = "cert" + op_info.ssh_client_folder = "client" + + ssh_process = mock.Mock() + ssh_process.poll.return_value = 0 + + mock_system.return_value = 'Linux' + mock_call.return_value = 0 + mock_path.return_value = 'ssh' + mock_call.return_value = ssh_process + mock_copy_env.return_value = {'var1':'value1', 'var2':'value2', 'var3':'value3'} + expected_command = ['ssh', 'ip', '-l', 'user', '-i', 'priv', '-o', 'CertificateFile=\"cert\"', '-p', 'port', 'arg1', 'arg2', 'arg3'] + expected_env = {'var1':'value1', 'var2':'value2', 'var3':'value3'} + + ssh_utils.start_ssh_connection(op_info, False, False) + + mock_path.assert_called_once_with('ssh', 'client') + mock_call.assert_called_once_with(expected_command, env=expected_env, encoding='utf-8') + mock_wait.assert_called_once_with(ssh_process, op_info, False, False) + mock_cleanup.assert_called_once_with(False, False, False, 'cert', 'priv', 'pub') + + + @mock.patch.object(ssh_utils, 'do_cleanup') + @mock.patch.object(ssh_utils, '_read_ssh_logs') @mock.patch.object(ssh_utils, 'get_ssh_client_path') - @mock.patch('subprocess.run') + @mock.patch('os.environ.copy') + @mock.patch('subprocess.Popen') @mock.patch('azext_ssh.custom.connectivity_utils.format_relay_info_string') @mock.patch('platform.system') - def test_start_ssh_connection_arc(self, mock_system, mock_relay_str, mock_call, mock_path, mock_copy_env, mock_terminatecleanup): + def test_start_ssh_connection_arc_aad_windows(self, mock_platform, mock_relay_str, mock_call, mock_copy_env, mock_path, mock_read, mock_cleanup): op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, "user", None, "port", None, ['arg1'], False, "Microsoft.HybridCompute", None, None, False) op_info.public_key_file = "pub" @@ -57,9 +95,49 @@ def test_start_ssh_connection_arc(self, mock_system, mock_relay_str, mock_call, op_info.ssh_client_folder = "client" op_info.proxy_path = "proxy" op_info.relay_info = "relay" + + ssh_process = mock.Mock() + ssh_process.poll.return_value = 0 - mock_system.return_value = 'Linux' - mock_call.return_value = 0 + mock_platform.return_value = 'Windows' + mock_call.return_value = ssh_process + mock_relay_str.return_value = 'relay_string' + mock_copy_env.return_value = {'var1':'value1', 'var2':'value2', 'var3':'value3'} + mock_path.return_value = 'ssh' + expected_command = ['ssh', 'vm', '-l', 'user', '-o', 'ProxyCommand=\"proxy\" -p port', '-i', 'priv', '-o', 'CertificateFile=\"cert\"', '-v', 'arg1'] + expected_env = {'var1':'value1', 'var2':'value2', 'var3':'value3', 'SSHPROXY_RELAY_INFO':'relay_string'} + + ssh_utils.start_ssh_connection(op_info, True, True) + + mock_relay_str.assert_called_once_with('relay') + mock_path.assert_called_once_with('ssh', 'client') + mock_call.assert_called_once_with(expected_command, stderr=mock.ANY, env=expected_env, encoding='utf-8') + mock_cleanup.assert_called_once_with(True, True, False, 'cert', 'priv', 'pub') + mock_read.assert_called_once_with(ssh_process, op_info, True, True) + + + @mock.patch.object(ssh_utils, 'do_cleanup') + @mock.patch.object(ssh_utils, '_wait_to_delete_credentials') + @mock.patch.object(ssh_utils, 
'get_ssh_client_path') + @mock.patch('os.environ.copy') + @mock.patch('subprocess.Popen') + @mock.patch('azext_ssh.custom.connectivity_utils.format_relay_info_string') + @mock.patch('platform.system') + def test_start_ssh_connection_arc_local_linux(self, mock_platform, mock_relay_str, mock_call, mock_copy_env, mock_path, mock_wait, mock_cleanup): + + op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, "user", None, "port", None, ['arg1'], False, "Microsoft.HybridCompute", None, None, False) + op_info.public_key_file = "pub" + op_info.private_key_file = "priv" + op_info.cert_file = "cert" + op_info.ssh_client_folder = "client" + op_info.proxy_path = "proxy" + op_info.relay_info = "relay" + + ssh_process = mock.Mock() + ssh_process.poll.return_value = 0 + + mock_platform.return_value = 'Linux' + mock_call.return_value = ssh_process mock_relay_str.return_value = 'relay_string' mock_copy_env.return_value = {'var1':'value1', 'var2':'value2', 'var3':'value3'} mock_path.return_value = 'ssh' @@ -70,10 +148,11 @@ def test_start_ssh_connection_arc(self, mock_system, mock_relay_str, mock_call, mock_relay_str.assert_called_once_with('relay') mock_path.assert_called_once_with('ssh', 'client') - mock_call.assert_called_once_with(expected_command, shell=False, env=expected_env, stderr=mock.ANY, encoding='utf-8') - mock_terminatecleanup.assert_called_once_with(False, False, False, None, 'cert', 'priv', 'pub', None, 0) - - + mock_call.assert_called_once_with(expected_command, env=expected_env, encoding='utf-8') + mock_cleanup.assert_called_once_with(False, False, False, 'cert', 'priv', 'pub') + mock_wait.assert_called_once_with(ssh_process, op_info, False, False) + + @mock.patch.object(ssh_utils, '_issue_config_cleanup_warning') @mock.patch('os.path.abspath') def test_write_ssh_config_ip_and_vm_compute_append(self, mock_abspath, mock_warning): From 94bd54abb089e62ea33b41463b3e06bd92868bfe Mon Sep 17 00:00:00 2001 From: Vivian Thiebaut <81188381+vthiebaut10@users.noreply.github.com> Date: Fri, 21 Oct 2022 01:18:15 -0400 Subject: [PATCH 19/85] [ssh] Add support to "Microsoft.ConnectedVMwarevSphere/virtualMachines" resource type (#5367) --- src/ssh/HISTORY.md | 2 + src/ssh/azext_ssh/_client_factory.py | 10 + src/ssh/azext_ssh/_params.py | 18 +- src/ssh/azext_ssh/connectivity_utils.py | 18 +- src/ssh/azext_ssh/constants.py | 32 + src/ssh/azext_ssh/custom.py | 148 +- src/ssh/azext_ssh/resource_type_utils.py | 78 + src/ssh/azext_ssh/ssh_info.py | 8 +- src/ssh/azext_ssh/target_os_utils.py | 88 + src/ssh/azext_ssh/tests/latest/test_custom.py | 100 +- .../azext_ssh/tests/latest/test_rdp_utils.py | 4 +- .../tests/latest/test_resource_type_utils.py | 110 + .../azext_ssh/tests/latest/test_ssh_info.py | 10 +- .../azext_ssh/tests/latest/test_ssh_utils.py | 13 +- .../vendored_sdks/connectedvmware/__init__.py | 23 + ...azure_arc_vmware_management_service_api.py | 127 + .../connectedvmware/_configuration.py | 73 + .../vendored_sdks/connectedvmware/_patch.py | 19 + .../vendored_sdks/connectedvmware/_vendor.py | 27 + .../vendored_sdks/connectedvmware/_version.py | 9 + .../connectedvmware/aio/__init__.py | 20 + ...azure_arc_vmware_management_service_api.py | 125 + .../connectedvmware/aio/_configuration.py | 72 + .../connectedvmware/aio/_patch.py | 19 + .../aio/operations/__init__.py | 20 + .../aio/operations/_operations.py | 122 + .../connectedvmware/aio/operations/_patch.py | 19 + .../_virtual_machines_operations.py | 111 + .../connectedvmware/models/__init__.py | 235 + 
...arc_vmware_management_service_api_enums.py | 230 + .../connectedvmware/models/_models_py3.py | 5263 +++++++++++++++++ .../connectedvmware/models/_patch.py | 19 + .../connectedvmware/operations/__init__.py | 20 + .../connectedvmware/operations/_operations.py | 151 + .../connectedvmware/operations/_patch.py | 19 + .../_virtual_machines_operations.py | 154 + .../vendored_sdks/connectedvmware/py.typed | 1 + .../operations/_endpoints_operations.py | 8 +- 38 files changed, 7293 insertions(+), 232 deletions(-) create mode 100644 src/ssh/azext_ssh/resource_type_utils.py create mode 100644 src/ssh/azext_ssh/target_os_utils.py create mode 100644 src/ssh/azext_ssh/tests/latest/test_resource_type_utils.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/__init__.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/_azure_arc_vmware_management_service_api.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/_configuration.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/_patch.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/_vendor.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/_version.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/__init__.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/_azure_arc_vmware_management_service_api.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/_configuration.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/_patch.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/operations/__init__.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/operations/_operations.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/operations/_patch.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/operations/_virtual_machines_operations.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/models/__init__.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/models/_azure_arc_vmware_management_service_api_enums.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/models/_models_py3.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/models/_patch.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/operations/__init__.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/operations/_operations.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/operations/_patch.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/operations/_virtual_machines_operations.py create mode 100644 src/ssh/azext_ssh/vendored_sdks/connectedvmware/py.typed diff --git a/src/ssh/HISTORY.md b/src/ssh/HISTORY.md index 6e51f477016..fb6548dacc6 100644 --- a/src/ssh/HISTORY.md +++ b/src/ssh/HISTORY.md @@ -2,6 +2,8 @@ Release History =============== 1.1.3 ----- +* Add support to Microsoft.ConnectedVMwarevSphere/virtualMachines Resource Type. +* Correct the format of expected input for --resource-type parameter. From Resource Provider name (e.g. "Microsoft.HybridCompute") to Resource Type name (e.g. "Microsoft.HybridCompute/machines"). * [bug fix] SSH Banners are printed before authentication. 
1.1.2 diff --git a/src/ssh/azext_ssh/_client_factory.py b/src/ssh/azext_ssh/_client_factory.py index e91e3a9b8bc..8c33c6f3ccb 100644 --- a/src/ssh/azext_ssh/_client_factory.py +++ b/src/ssh/azext_ssh/_client_factory.py @@ -29,3 +29,13 @@ def cf_connectedmachine_cl(cli_ctx, *_): def cf_machine(cli_ctx, *_): return cf_connectedmachine_cl(cli_ctx).machines + + +def cf_connectedvmware_cl(cli_ctx, *_): + from azext_ssh.vendored_sdks.connectedvmware import AzureArcVMwareManagementServiceAPI + return get_mgmt_service_client(cli_ctx, + AzureArcVMwareManagementServiceAPI) + + +def cf_vmware(cli_ctx, *_): + return cf_connectedvmware_cl(cli_ctx).virtual_machines diff --git a/src/ssh/azext_ssh/_params.py b/src/ssh/azext_ssh/_params.py index 59718ccd9ee..b4d65666328 100644 --- a/src/ssh/azext_ssh/_params.py +++ b/src/ssh/azext_ssh/_params.py @@ -20,8 +20,11 @@ def load_arguments(self, _): help='Path to a certificate file used for authentication when using local user credentials.') c.argument('port', options_list=['--port'], help='SSH port') c.argument('resource_type', options_list=['--resource-type'], - help='Resource type should be either Microsoft.Compute or Microsoft.HybridCompute', - completer=["Microsoft.HybridCompute", "Microsoft.Compute"]) + help=('Resource type should be either Microsoft.Compute/virtualMachines, ' + 'Microsoft.HybridCompute/machines, ' + 'or Microsoft.ConnectedVMwareSphere/virtualMachines.'), + completer=['Microsoft.Compute/virtualMachines', 'Microsoft.HybridCompute/machines', + 'Microsoft.ConnectedVMwareSphere/virtualMachines']) c.argument('ssh_client_folder', options_list=['--ssh-client-folder'], help='Folder path that contains ssh executables (ssh.exe, ssh-keygen.exe, etc). ' 'Default to ssh pre-installed if not provided.') @@ -52,7 +55,11 @@ def load_arguments(self, _): help='Folder where new generated keys will be stored.') c.argument('port', options_list=['--port'], help='SSH Port') c.argument('resource_type', options_list=['--resource-type'], - help='Resource type should be either Microsoft.Compute or Microsoft.HybridCompute') + help=('Resource type should be either Microsoft.Compute/virtualMachines, ' + 'Microsoft.HybridCompute/machines, ' + 'or Microsoft.ConnectedVMwareSphere/virtualMachines.'), + completer=['Microsoft.Compute/virtualMachines', 'Microsoft.HybridCompute/machines', + 'Microsoft.ConnectedVMwareSphere/virtualMachines']) c.argument('cert_file', options_list=['--certificate-file', '-c'], help='Path to certificate file') c.argument('ssh_proxy_folder', options_list=['--ssh-proxy-folder'], help=('Path to the folder where the ssh proxy should be saved. ' @@ -79,6 +86,11 @@ def load_arguments(self, _): help='The username for a local user') c.argument('cert_file', options_list=['--certificate-file', '-c'], help='Path to certificate file') c.argument('port', options_list=['--port'], help='Port to connect to on the remote host.') + c.argument('resource_type', options_list=['--resource-type'], + help=('Resource type should be either Microsoft.HybridCompute/machines ' + 'or Microsoft.ConnectedVMwareSphere/virtualMachines.'), + completer=['Microsoft.HybridCompute/machines', + 'Microsoft.ConnectedVMwareSphere/virtualMachines']) c.argument('ssh_client_folder', options_list=['--ssh-client-folder'], help='Folder path that contains ssh executables (ssh.exe, ssh-keygen.exe, etc). 
' 'Default to ssh pre-installed if not provided.') diff --git a/src/ssh/azext_ssh/connectivity_utils.py b/src/ssh/azext_ssh/connectivity_utils.py index fd38dab6281..9e99a026f46 100644 --- a/src/ssh/azext_ssh/connectivity_utils.py +++ b/src/ssh/azext_ssh/connectivity_utils.py @@ -28,7 +28,7 @@ # Get the Access Details to connect to Arc Connectivity platform from the HybridConnectivity RP -def get_relay_information(cmd, resource_group, vm_name, certificate_validity_in_seconds): +def get_relay_information(cmd, resource_group, vm_name, resource_type, certificate_validity_in_seconds): from azext_ssh._client_factory import cf_endpoint client = cf_endpoint(cmd.cli_ctx) @@ -39,16 +39,18 @@ def get_relay_information(cmd, resource_group, vm_name, certificate_validity_in_ try: t0 = time.time() result = client.list_credentials(resource_group_name=resource_group, machine_name=vm_name, - endpoint_name="default", expiresin=certificate_validity_in_seconds) + resource_type=resource_type, endpoint_name="default", + expiresin=certificate_validity_in_seconds) time_elapsed = time.time() - t0 telemetry.add_extension_event('ssh', {'Context.Default.AzureCLI.SSHListCredentialsTime': time_elapsed}) except ResourceNotFoundError: logger.debug("Default Endpoint couldn't be found. Trying to create Default Endpoint.") - _create_default_endpoint(cmd, resource_group, vm_name, client) + _create_default_endpoint(cmd, resource_group, vm_name, resource_type, client) try: t0 = time.time() result = client.list_credentials(resource_group_name=resource_group, machine_name=vm_name, - endpoint_name="default", expiresin=certificate_validity_in_seconds) + resource_type=resource_type, endpoint_name="default", + expiresin=certificate_validity_in_seconds) time_elapsed = time.time() - t0 telemetry.add_extension_event('ssh', {'Context.Default.AzureCLI.SSHListCredentialsTime': time_elapsed}) except Exception as e: @@ -58,12 +60,14 @@ def get_relay_information(cmd, resource_group, vm_name, certificate_validity_in_ return result -def _create_default_endpoint(cmd, resource_group, vm_name, client): +def _create_default_endpoint(cmd, resource_group, vm_name, resource_type, client): + namespace = resource_type.split('/', 1)[0] + arc_type = resource_type.split('/', 1)[1] az_resource_id = resource_id(subscription=get_subscription_id(cmd.cli_ctx), resource_group=resource_group, - namespace="Microsoft.HybridCompute", type="machines", name=vm_name) + namespace=namespace, type=arc_type, name=vm_name) endpoint_resource = {"id": az_resource_id, "type_properties_type": "default"} try: - client.create_or_update(resource_group, vm_name, "default", endpoint_resource) + client.create_or_update(resource_group, vm_name, resource_type, "default", endpoint_resource) except Exception as e: colorama.init() raise azclierror.UnauthorizedError(f"Unable to create Default Endpoint for {vm_name} in {resource_group}." diff --git a/src/ssh/azext_ssh/constants.py b/src/ssh/azext_ssh/constants.py index 4917411aeb6..62a6575f2df 100644 --- a/src/ssh/azext_ssh/constants.py +++ b/src/ssh/azext_ssh/constants.py @@ -18,3 +18,35 @@ RECOMMENDATION_RESOURCE_NOT_FOUND = (Fore.YELLOW + "Please ensure the active subscription is set properly " "and resource exists." 
+ Style.RESET_ALL) RDP_TERMINATE_SSH_WAIT_TIME_IN_SECONDS = 30 + +ARC_RESOURCE_TYPE_PLACEHOLDER = "arc_resource_type_placeholder" + +SUPPORTED_RESOURCE_TYPES = ["microsoft.hybridcompute/machines", + "microsoft.compute/virtualmachines", + "microsoft.connectedvmwarevsphere/virtualmachines", + "microsoft.scvmm/virtualmachines", + "microsoft.azurestackhci/virtualmachines"] + +# Old version incorrectly used resource providers instead of resource type. +# Will continue to support to avoid breaking backwards compatibility. +LEGACY_SUPPORTED_RESOURCE_TYPES = ["microsoft.hybridcompute", + "microsoft.compute", + "microsoft.connectedvmwarevsphere", + "microsoft.scvmm", + "microsoft.azurestackhci"] + +RESOURCE_PROVIDER_TO_RESOURCE_TYPE = { + "microsoft.hybridcompute": "Microsoft.HybridCompute/machines", + "microsoft.compute": "Microsoft.Compute/virtualMachines", + "microsoft.connectedvmwarevsphere": "Microsoft.ConnectedVMwarevSphere/virtualMachines", + "microsoft.azurestackhci": "Microsoft.AzureStackHCI/virtualMachines", + "microsoft.scvmm": "Microsoft.ScVmm/virtualMachines" +} + +RESOURCE_TYPE_LOWER_CASE_TO_CORRECT_CASE = { + "microsoft.hybridcompute/machines": "Microsoft.HybridCompute/machines", + "microsoft.compute/virtualmachines": "Microsoft.Compute/virtualMachines", + "microsoft.connectedvmwarevsphere/virtualmachines": "Microsoft.ConnectedVMwarevSphere/virtualMachines", + "microsoft.scvmm/virtualmachines": "Microsoft.ScVmm/virtualMachines", + "microsoft.azurestackhci/virtualmachines": "Microsoft.AzureStackHCI/virtualMachines" +} diff --git a/src/ssh/azext_ssh/custom.py b/src/ssh/azext_ssh/custom.py index 234ee452b96..39df248a006 100644 --- a/src/ssh/azext_ssh/custom.py +++ b/src/ssh/azext_ssh/custom.py @@ -11,12 +11,9 @@ import platform import oschmod -import colorama - from knack import log from azure.cli.core import azclierror from azure.cli.core import telemetry -from azure.core.exceptions import ResourceNotFoundError, HttpResponseError from azure.cli.core.style import Style, print_styled_text from . import ip_utils @@ -27,6 +24,8 @@ from . import ssh_info from . import file_utils from . import constants as const +from . import resource_type_utils +from . import target_os_utils logger = log.get_logger(__name__) @@ -61,7 +60,9 @@ def ssh_vm(cmd, resource_group_name=None, vm_name=None, ssh_ip=None, public_key_ private_key_file, use_private_ip, local_user, cert_file, port, ssh_client_folder, ssh_args, delete_credentials, resource_type, ssh_proxy_folder, credentials_folder, winrdp) - ssh_session.resource_type = _decide_resource_type(cmd, ssh_session) + ssh_session.resource_type = resource_type_utils.decide_resource_type(cmd, ssh_session) + target_os_utils.handle_target_os_type(cmd, ssh_session) + _do_ssh_op(cmd, ssh_session, op_call) @@ -81,7 +82,8 @@ def ssh_config(cmd, config_path, resource_group_name=None, vm_name=None, ssh_ip= resource_type, credentials_folder, ssh_proxy_folder, ssh_client_folder) op_call = ssh_utils.write_ssh_config - config_session.resource_type = _decide_resource_type(cmd, config_session) + config_session.resource_type = resource_type_utils.decide_resource_type(cmd, config_session) + target_os_utils.handle_target_os_type(cmd, config_session) # if the folder doesn't exist, this extension won't create a new one. 
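For context, the two lookup tables added to constants.py above work in tandem: a legacy value that names only the resource provider is first expanded to a full resource type, and the lower-cased result is then mapped back to its canonical casing. A minimal standalone sketch of that normalization follows (the helper name normalize_resource_type is illustrative and not part of the patch; the tables are abridged to three of the five supported types):

    # Sketch only: mirrors RESOURCE_PROVIDER_TO_RESOURCE_TYPE and
    # RESOURCE_TYPE_LOWER_CASE_TO_CORRECT_CASE from constants.py above (abridged).
    PROVIDER_TO_TYPE = {
        "microsoft.hybridcompute": "Microsoft.HybridCompute/machines",
        "microsoft.compute": "Microsoft.Compute/virtualMachines",
        "microsoft.connectedvmwarevsphere": "Microsoft.ConnectedVMwarevSphere/virtualMachines",
    }
    LOWER_TO_CANONICAL = {v.lower(): v for v in PROVIDER_TO_TYPE.values()}

    def normalize_resource_type(value):
        lowered = value.lower()
        # Legacy callers passed only the provider namespace; expand it to a full type first.
        if lowered in PROVIDER_TO_TYPE:
            lowered = PROVIDER_TO_TYPE[lowered].lower()
        # Then restore the canonical casing the rest of the extension expects.
        return LOWER_TO_CANONICAL[lowered]

    assert normalize_resource_type("Microsoft.HybridCompute") == "Microsoft.HybridCompute/machines"
    assert normalize_resource_type("microsoft.compute/virtualmachines") == "Microsoft.Compute/virtualMachines"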
config_folder = os.path.dirname(config_session.config_path) @@ -139,11 +141,14 @@ def ssh_cert(cmd, cert_path=None, public_key_file=None, ssh_client_folder=None): def ssh_arc(cmd, resource_group_name=None, vm_name=None, public_key_file=None, private_key_file=None, - local_user=None, cert_file=None, port=None, ssh_client_folder=None, delete_credentials=False, - ssh_proxy_folder=None, winrdp=False, ssh_args=None): + local_user=None, cert_file=None, port=None, resource_type=None, ssh_client_folder=None, + delete_credentials=False, ssh_proxy_folder=None, winrdp=False, ssh_args=None): + + if not resource_type: + resource_type = const.ARC_RESOURCE_TYPE_PLACEHOLDER ssh_vm(cmd, resource_group_name, vm_name, None, public_key_file, private_key_file, False, local_user, cert_file, - port, ssh_client_folder, delete_credentials, "Microsoft.HybridCompute", ssh_proxy_folder, winrdp, ssh_args) + port, ssh_client_folder, delete_credentials, resource_type, ssh_proxy_folder, winrdp, ssh_args) def _do_ssh_op(cmd, op_info, op_call): @@ -183,7 +188,8 @@ def _do_ssh_op(cmd, op_info, op_call): if op_info.is_arc(): op_info.proxy_path = connectivity_utils.get_client_side_proxy(op_info.ssh_proxy_folder) op_info.relay_info = connectivity_utils.get_relay_information(cmd, op_info.resource_group_name, - op_info.vm_name, cert_lifetime) + op_info.vm_name, op_info.resource_type, + cert_lifetime) except Exception as e: if delete_keys or delete_cert: logger.debug("An error occured before operation concluded. Deleting generated keys: %s %s %s", @@ -260,10 +266,15 @@ def _prepare_jwk_data(public_key_file): def _assert_args(resource_group, vm_name, ssh_ip, resource_type, cert_file, username): - if resource_type and resource_type.lower() != "microsoft.compute" \ - and resource_type.lower() != "microsoft.hybridcompute": - raise azclierror.InvalidArgumentValueError("--resource-type must be either \"Microsoft.Compute\" " - "for Azure VMs or \"Microsoft.HybridCompute\" for Arc Servers.") + + if resource_type and \ + resource_type.lower() not in const.SUPPORTED_RESOURCE_TYPES and \ + resource_type.lower() not in const.LEGACY_SUPPORTED_RESOURCE_TYPES and \ + resource_type != const.ARC_RESOURCE_TYPE_PLACEHOLDER: + raise azclierror.InvalidArgumentValueError("--resource-type must be either " + "\"Microsoft.Compute/virtualMachines\", " + "\"Microsoft.HybridCompute/machines\", " + "or \"Microsoft.ConnectedVMwareSphere/virtualMachines\".") if not (resource_group or vm_name or ssh_ip): raise azclierror.RequiredArgumentMissingError( @@ -352,114 +363,3 @@ def _get_modulus_exponent(public_key_file): exponent = parser.exponent return modulus, exponent - - -def _decide_resource_type(cmd, op_info): - # If the user provides an IP address the target will be treated as an Azure VM even if it is an - # Arc Server. Which just means that the Connectivity Proxy won't be used to establish connection. 
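With these changes, `az ssh arc` no longer hard-codes Microsoft.HybridCompute: when --resource-type is omitted it forwards the ARC_RESOURCE_TYPE_PLACEHOLDER sentinel (which the later resolution step treats as "any supported Arc type, excluding plain Azure VMs"), and _assert_args accepts both the full resource types and the legacy provider-only spellings. A small sketch of that acceptance check under those assumptions (validate_resource_type is an illustrative name, not a helper in the patch; the sets are abridged, and the patch raises InvalidArgumentValueError instead of returning False):

    # Sketch only: which --resource-type values pass _assert_args after this change.
    SUPPORTED = {"microsoft.hybridcompute/machines",
                 "microsoft.compute/virtualmachines",
                 "microsoft.connectedvmwarevsphere/virtualmachines"}   # ScVmm/AzureStackHCI omitted here
    LEGACY = {"microsoft.hybridcompute", "microsoft.compute", "microsoft.connectedvmwarevsphere"}
    PLACEHOLDER = "arc_resource_type_placeholder"

    def validate_resource_type(value):
        return (value is None
                or value == PLACEHOLDER
                or value.lower() in SUPPORTED
                or value.lower() in LEGACY)

    assert validate_resource_type("Microsoft.HybridCompute")            # legacy spelling still accepted
    assert validate_resource_type("Microsoft.Compute/virtualMachines")
    assert not validate_resource_type("Microsoft.Network")              # rejected (patch raises an error here)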
- is_arc_server = False - is_azure_vm = False - - if op_info.ip: - is_azure_vm = True - vm = None - - elif op_info.resource_type: - if op_info.resource_type.lower() == "microsoft.hybridcompute": - arc, arc_error, is_arc_server = _check_if_arc_server(cmd, op_info.resource_group_name, op_info.vm_name) - if not is_arc_server: - colorama.init() - if isinstance(arc_error, ResourceNotFoundError): - raise azclierror.ResourceNotFoundError(f"The resource {op_info.vm_name} in the resource group " - f"{op_info.resource_group_name} was not found.", - const.RECOMMENDATION_RESOURCE_NOT_FOUND) - raise azclierror.BadRequestError("Unable to determine that the target machine is an Arc Server. " - f"Error:\n{str(arc_error)}", const.RECOMMENDATION_RESOURCE_NOT_FOUND) - - elif op_info.resource_type.lower() == "microsoft.compute": - vm, vm_error, is_azure_vm = _check_if_azure_vm(cmd, op_info.resource_group_name, op_info.vm_name) - if not is_azure_vm: - colorama.init() - if isinstance(vm_error, ResourceNotFoundError): - raise azclierror.ResourceNotFoundError(f"The resource {op_info.vm_name} in the resource group " - f"{op_info.resource_group_name} was not found.", - const.RECOMMENDATION_RESOURCE_NOT_FOUND) - raise azclierror.BadRequestError("Unable to determine that the target machine is an Azure VM. " - f"Error:\n{str(vm_error)}", const.RECOMMENDATION_RESOURCE_NOT_FOUND) - - else: - vm, vm_error, is_azure_vm = _check_if_azure_vm(cmd, op_info.resource_group_name, op_info.vm_name) - arc, arc_error, is_arc_server = _check_if_arc_server(cmd, op_info.resource_group_name, op_info.vm_name) - - if is_azure_vm and is_arc_server: - colorama.init() - raise azclierror.BadRequestError(f"{op_info.resource_group_name} has Azure VM and Arc Server with the " - f"same name: {op_info.vm_name}.", - colorama.Fore.YELLOW + "Please provide a --resource-type." + - colorama.Style.RESET_ALL) - if not is_azure_vm and not is_arc_server: - colorama.init() - if isinstance(arc_error, ResourceNotFoundError) and isinstance(vm_error, ResourceNotFoundError): - raise azclierror.ResourceNotFoundError(f"The resource {op_info.vm_name} in the resource group " - f"{op_info.resource_group_name} was not found. ", - const.RECOMMENDATION_RESOURCE_NOT_FOUND) - raise azclierror.BadRequestError("Unable to determine the target machine type as Azure VM or " - f"Arc Server. Errors:\n{str(arc_error)}\n{str(vm_error)}", - const.RECOMMENDATION_RESOURCE_NOT_FOUND) - - # Note: We are not able to determine the os of the target if the user only provides an IP address. - os_type = None - if is_azure_vm and vm and vm.storage_profile and vm.storage_profile.os_disk and vm.storage_profile.os_disk.os_type: - os_type = vm.storage_profile.os_disk.os_type - - if is_arc_server and arc and arc.properties and arc.properties and arc.properties.os_name: - os_type = arc.properties.os_name - - if os_type: - telemetry.add_extension_event('ssh', {'Context.Default.AzureCLI.TargetOSType': os_type}) - - # Note 2: This is a temporary check while AAD login is not enabled for Windows. - if os_type and os_type.lower() == 'windows' and not op_info.local_user: - colorama.init() - raise azclierror.RequiredArgumentMissingError("SSH Login using AAD credentials is not currently supported " - "for Windows.", - colorama.Fore.YELLOW + "Please provide --local-user." 
+ - colorama.Style.RESET_ALL) - - target_resource_type = "Microsoft.Compute" - if is_arc_server: - target_resource_type = "Microsoft.HybridCompute" - telemetry.add_extension_event('ssh', {'Context.Default.AzureCLI.TargetResourceType': target_resource_type}) - - return target_resource_type - - -def _check_if_azure_vm(cmd, resource_group_name, vm_name): - from azure.cli.core.commands import client_factory - from azure.cli.core import profiles - vm = None - try: - compute_client = client_factory.get_mgmt_service_client(cmd.cli_ctx, profiles.ResourceType.MGMT_COMPUTE) - vm = compute_client.virtual_machines.get(resource_group_name, vm_name) - except ResourceNotFoundError as e: - return None, e, False - # If user is not authorized to get the VM, it will throw a HttpResponseError - except HttpResponseError as e: - return None, e, False - - return vm, None, True - - -def _check_if_arc_server(cmd, resource_group_name, vm_name): - from azext_ssh._client_factory import cf_machine - client = cf_machine(cmd.cli_ctx) - arc = None - try: - arc = client.get(resource_group_name=resource_group_name, machine_name=vm_name) - except ResourceNotFoundError as e: - return None, e, False - # If user is not authorized to get the arc server, it will throw a HttpResponseError - except HttpResponseError as e: - return None, e, False - - return arc, None, True diff --git a/src/ssh/azext_ssh/resource_type_utils.py b/src/ssh/azext_ssh/resource_type_utils.py new file mode 100644 index 00000000000..b4038c9aca1 --- /dev/null +++ b/src/ssh/azext_ssh/resource_type_utils.py @@ -0,0 +1,78 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +import colorama + +from knack import log +from azure.cli.core import telemetry, azclierror +from azure.mgmt.resource import ResourceManagementClient +from azure.cli.core.commands.client_factory import get_mgmt_service_client + +from . import constants as consts + +logger = log.get_logger(__name__) + + +def _list_types_of_resources_with_provided_name(cmd, op_info): + resource_client = get_mgmt_service_client(cmd.cli_ctx, ResourceManagementClient) + resources = resource_client.resources.list_by_resource_group( + op_info.resource_group_name, + filter=f"name eq '{op_info.vm_name}'") + resource_types_present = set() + + while True: + try: + resource = resources.next() + if resource.type.lower() in consts.SUPPORTED_RESOURCE_TYPES: + resource_types_present.add(resource.type.lower()) + except StopIteration: + break + + return resource_types_present + + +def decide_resource_type(cmd, op_info): + + # If the user provides an IP address the target will be treated as an Azure VM even if it is an + # Arc Server. Which just means that the Connectivity Proxy won't be used to establish connection. 
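The new _list_types_of_resources_with_provided_name drives target discovery: it asks ARM for every resource in the group whose name matches the target and keeps only the types this extension can connect to. The same lookup can be written more compactly as a set comprehension over the pager; a standalone sketch under that assumption (matching_resource_types is an illustrative name, not part of the patch):

    # Sketch only: equivalent form of the name-filtered lookup defined above.
    from azure.cli.core.commands.client_factory import get_mgmt_service_client
    from azure.mgmt.resource import ResourceManagementClient

    def matching_resource_types(cli_ctx, resource_group, name, supported_types):
        client = get_mgmt_service_client(cli_ctx, ResourceManagementClient)
        pager = client.resources.list_by_resource_group(
            resource_group, filter=f"name eq '{name}'")
        # The pager is a plain iterable, so a comprehension can replace the
        # explicit next()/StopIteration loop used in the patch.
        return {r.type.lower() for r in pager if r.type.lower() in supported_types}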
+ if op_info.ip: + return "Microsoft.Compute/virtualMachines" + + # Set of resource types in target resource group of resources that match vm_name + types_in_rg = _list_types_of_resources_with_provided_name(cmd, op_info) + target_resource_type = None + + if op_info.resource_type and op_info.resource_type != consts.ARC_RESOURCE_TYPE_PLACEHOLDER: + if op_info.resource_type.lower() in consts.LEGACY_SUPPORTED_RESOURCE_TYPES: + op_info.resource_type = consts.RESOURCE_PROVIDER_TO_RESOURCE_TYPE[op_info.resource_type.lower()] + if op_info.resource_type.lower() in types_in_rg: + target_resource_type = consts.RESOURCE_TYPE_LOWER_CASE_TO_CORRECT_CASE[op_info.resource_type.lower()] + else: + raise azclierror.ResourceNotFoundError( + f"Unable to find resource {op_info.vm_name} of type " + f"{consts.RESOURCE_TYPE_LOWER_CASE_TO_CORRECT_CASE[op_info.resource_type.lower()]} " + f"under the resource group {op_info.resource_group_name}", + consts.RECOMMENDATION_RESOURCE_NOT_FOUND) + + else: + if op_info.resource_type == consts.ARC_RESOURCE_TYPE_PLACEHOLDER: + types_in_rg.discard("microsoft.compute/virtualmachines") + + if len(types_in_rg) > 1: + raise azclierror.BadRequestError(f"{op_info.resource_group_name} has more than one valid target with the " + f"same name: {op_info.vm_name}.", + colorama.Fore.YELLOW + "Please provide a --resource-type." + + colorama.Style.RESET_ALL) + + if len(types_in_rg) < 1: + raise azclierror.ResourceNotFoundError(f"A valid resource {op_info.vm_name} in the resource group " + f"{op_info.resource_group_name} was not found. ", + consts.RECOMMENDATION_RESOURCE_NOT_FOUND) + + target_resource_type = consts.RESOURCE_TYPE_LOWER_CASE_TO_CORRECT_CASE[types_in_rg.pop().lower()] + + telemetry.add_extension_event('ssh', {'Context.Default.AzureCLI.TargetResourceType': target_resource_type}) + logger.debug("Target Resource Type: %s", target_resource_type) + return target_resource_type diff --git a/src/ssh/azext_ssh/ssh_info.py b/src/ssh/azext_ssh/ssh_info.py index 722da8f3292..b0010186057 100644 --- a/src/ssh/azext_ssh/ssh_info.py +++ b/src/ssh/azext_ssh/ssh_info.py @@ -41,7 +41,10 @@ def __init__(self, resource_group_name, vm_name, ssh_ip, public_key_file, privat self.credentials_folder = os.path.abspath(credentials_folder) if credentials_folder else None def is_arc(self): - if self.resource_type == "Microsoft.HybridCompute": + if self.resource_type in ["Microsoft.HybridCompute/machines", + "Microsoft.ConnectedVMwarevSphere/virtualMachines", + "Microsoft.ScVmm/virtualMachines", + "Microsoft.AzureStackHCI/virtualMachines"]: return True return False @@ -100,7 +103,8 @@ def __init__(self, config_path, resource_group_name, vm_name, ssh_ip, public_key self.credentials_folder = os.path.abspath(credentials_folder) if credentials_folder else None def is_arc(self): - if self.resource_type == "Microsoft.HybridCompute": + if self.resource_type in ["Microsoft.HybridCompute/machines", + "Microsoft.ConnectedVMwarevSphere/virtualMachines"]: return True return False diff --git a/src/ssh/azext_ssh/target_os_utils.py b/src/ssh/azext_ssh/target_os_utils.py new file mode 100644 index 00000000000..81a2333fce8 --- /dev/null +++ b/src/ssh/azext_ssh/target_os_utils.py @@ -0,0 +1,88 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
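Stripped of the Azure calls, decide_resource_type reduces to a small decision over (a) the set of supported types found in the resource group and (b) the optional type the user asked for. A runnable sketch of that flow (pick_target_type and its arguments are illustrative names; the map is abridged, legacy provider values are assumed to be expanded beforehand as in the patch, and the azclierror exception types are simplified to builtins):

    # Sketch only: the decision flow of resource_type_utils.decide_resource_type.
    CANONICAL = {
        "microsoft.hybridcompute/machines": "Microsoft.HybridCompute/machines",
        "microsoft.compute/virtualmachines": "Microsoft.Compute/virtualMachines",
        "microsoft.connectedvmwarevsphere/virtualmachines": "Microsoft.ConnectedVMwarevSphere/virtualMachines",
    }

    def pick_target_type(types_in_rg, requested=None, arc_only=False):
        if requested:                                   # explicit --resource-type wins, if present in the group
            if requested.lower() not in types_in_rg:
                raise LookupError("requested type not found in the resource group")
            return CANONICAL[requested.lower()]
        if arc_only:                                    # 'az ssh arc' path: never pick a plain Azure VM
            types_in_rg = types_in_rg - {"microsoft.compute/virtualmachines"}
        if len(types_in_rg) > 1:
            raise ValueError("more than one valid target with that name; pass --resource-type")
        if not types_in_rg:
            raise LookupError("no valid target found")
        return CANONICAL[types_in_rg.pop()]

    print(pick_target_type({"microsoft.hybridcompute/machines"}))
    # Microsoft.HybridCompute/machines
    print(pick_target_type({"microsoft.compute/virtualmachines",
                            "microsoft.hybridcompute/machines"}, arc_only=True))
    # Microsoft.HybridCompute/machines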
+# -------------------------------------------------------------------------------------------- + +import colorama + +from azure.cli.core import telemetry +from azure.cli.core import azclierror +from knack import log + +logger = log.get_logger(__name__) + + +# Send target OS type telemetry and check if authentication options are valid for that OS. +def handle_target_os_type(cmd, op_info): + + os_type = None + + if op_info.resource_type.lower() == "microsoft.compute/virtualmachines": + os_type = _get_azure_vm_os(cmd, op_info.resource_group_name, op_info.vm_name) + elif op_info.resource_type.lower() == "microsoft.hybridcompute/machines": + os_type = _get_arc_server_os(cmd, op_info.resource_group_name, op_info.vm_name) + elif op_info.resource_type.lower() == "microsoft.connectedvmwarevsphere/virtualmachines": + os_type = _get_connected_vmware_os(cmd, op_info.resource_group_name, op_info.vm_name) + + if os_type: + logger.debug("Target OS Type: %s", os_type) + telemetry.add_extension_event('ssh', {'Context.Default.AzureCLI.TargetOSType': os_type}) + + # Note 2: This is a temporary check while AAD login is not enabled for Windows. + if os_type and os_type.lower() == 'windows' and not op_info.local_user: + colorama.init() + error_message = "SSH Login using AAD credentials is not currently supported for Windows." + recommendation = colorama.Fore.YELLOW + "Please provide --local-user." + colorama.Style.RESET_ALL + raise azclierror.RequiredArgumentMissingError(error_message, recommendation) + + +def _get_azure_vm_os(cmd, resource_group_name, vm_name): + from azure.cli.core.commands import client_factory + from azure.cli.core import profiles + vm = None + os_type = None + # pylint: disable=broad-except + try: + compute_client = client_factory.get_mgmt_service_client(cmd.cli_ctx, profiles.ResourceType.MGMT_COMPUTE) + vm = compute_client.virtual_machines.get(resource_group_name, vm_name) + except Exception: + return None + + if vm and vm.storage_profile and vm.storage_profile.os_disk and vm.storage_profile.os_disk.os_type: + os_type = vm.storage_profile.os_disk.os_type + + return os_type + + +def _get_arc_server_os(cmd, resource_group_name, vm_name): + from azext_ssh._client_factory import cf_machine + client = cf_machine(cmd.cli_ctx) + arc = None + os_type = None + # pylint: disable=broad-except + try: + arc = client.get(resource_group_name=resource_group_name, machine_name=vm_name) + except Exception: + return None + + if arc and arc.properties and arc.properties and arc.properties.os_name: + os_type = arc.properties.os_name + + return os_type + + +def _get_connected_vmware_os(cmd, resource_group_name, vm_name): + from azext_ssh._client_factory import cf_vmware + client = cf_vmware(cmd.cli_ctx) + vmware = None + os_type = None + # pylint: disable=broad-except + try: + vmware = client.get(resource_group_name=resource_group_name, virtual_machine_name=vm_name) + except Exception: + return None + + if vmware and vmware.os_profile and vmware.os_profile.os_type: + os_type = vmware.os_profile.os_type + + return os_type diff --git a/src/ssh/azext_ssh/tests/latest/test_custom.py b/src/ssh/azext_ssh/tests/latest/test_custom.py index 69e1b6f3c2c..7c99a1cfd18 100644 --- a/src/ssh/azext_ssh/tests/latest/test_custom.py +++ b/src/ssh/azext_ssh/tests/latest/test_custom.py @@ -11,7 +11,6 @@ from azext_ssh import ssh_info from azure.cli.core import azclierror -from azure.core.exceptions import ResourceNotFoundError class SshCustomCommandTest(unittest.TestCase): @@ -19,7 +18,7 @@ class 
SshCustomCommandTest(unittest.TestCase): @mock.patch('azext_ssh.custom._do_ssh_op') @mock.patch('azext_ssh.custom._assert_args') @mock.patch('azext_ssh.ssh_info.SSHSession') - @mock.patch('azext_ssh.custom._decide_resource_type') + @mock.patch('azext_ssh.resource_type_utils.decide_resource_type') def test_ssh_vm(self, mock_type, mock_info, mock_assert, mock_do_op): cmd = mock.Mock() ssh_info = mock.Mock() @@ -37,7 +36,7 @@ def test_ssh_vm(self, mock_type, mock_info, mock_assert, mock_do_op): @mock.patch('azext_ssh.custom._do_ssh_op') @mock.patch('azext_ssh.custom._assert_args') @mock.patch('azext_ssh.ssh_info.SSHSession') - @mock.patch('azext_ssh.custom._decide_resource_type') + @mock.patch('azext_ssh.resource_type_utils.decide_resource_type') @mock.patch('platform.system') def test_ssh_vm_rdp(self, mock_sys, mock_type, mock_info, mock_assert, mock_do_op): cmd = mock.Mock() @@ -57,7 +56,7 @@ def test_ssh_vm_rdp(self, mock_sys, mock_type, mock_info, mock_assert, mock_do_o @mock.patch('azext_ssh.custom._do_ssh_op') @mock.patch('azext_ssh.custom._assert_args') @mock.patch('azext_ssh.ssh_info.SSHSession') - @mock.patch('azext_ssh.custom._decide_resource_type') + @mock.patch('azext_ssh.resource_type_utils.decide_resource_type') def test_ssh_vm_debug(self, mock_type, mock_info, mock_assert, mock_do_op): cmd = mock.Mock() ssh_info = mock.Mock() @@ -73,7 +72,7 @@ def test_ssh_vm_debug(self, mock_type, mock_info, mock_assert, mock_do_op): mock_do_op.assert_called_once_with(cmd, ssh_info, ssh_utils.start_ssh_connection) @mock.patch('azext_ssh.custom._do_ssh_op') - @mock.patch('azext_ssh.custom._decide_resource_type') + @mock.patch('azext_ssh.resource_type_utils.decide_resource_type') @mock.patch('os.environ.get') @mock.patch('azext_ssh.custom._assert_args') @mock.patch('azext_ssh.ssh_info.SSHSession') @@ -100,7 +99,7 @@ def test_delete_credentials_not_cloudshell(self, mock_getenv): @mock.patch('azext_ssh.custom._assert_args') @mock.patch('azext_ssh.custom._do_ssh_op') - @mock.patch('azext_ssh.custom._decide_resource_type') + @mock.patch('azext_ssh.resource_type_utils.decide_resource_type') @mock.patch('os.path.dirname') @mock.patch('os.path.isdir') @mock.patch('azext_ssh.ssh_info.ConfigSession') @@ -131,10 +130,10 @@ def test_ssh_config_no_cred_folder(self, mock_join, mock_info, mock_isdir, mock_ mock_type.assert_called_once_with(cmd, config_info) mock_assert.assert_called_once_with("rg", "vm", "ip", "type", "cert", "user") mock_do_op.assert_called_once_with(cmd, config_info, ssh_utils.write_ssh_config) - + @mock.patch('azext_ssh.custom._assert_args') @mock.patch('azext_ssh.custom._do_ssh_op') - @mock.patch('azext_ssh.custom._decide_resource_type') + @mock.patch('azext_ssh.resource_type_utils.decide_resource_type') @mock.patch('azext_ssh.ssh_info.ConfigSession') @mock.patch('os.path.isdir') @mock.patch('os.path.dirname') @@ -158,13 +157,13 @@ def test_ssh_config_credentials_folder_and_key(self): self.assertRaises( azclierror.ArgumentUsageError, custom.ssh_config, cmd, 'path', 'rg', 'vm', 'ip', 'pub', 'priv', True, False, 'user', 'cert', 'port', 'type', 'cred', 'proxy', 'client' ) - + @mock.patch('azext_ssh.custom.ssh_vm') def test_ssh_arc(self, mock_vm): cmd = mock.Mock() - custom.ssh_arc(cmd, "rg", "vm", "pub", "priv", "user", "cert", "port", "client", False, "proxy", False, []) + custom.ssh_arc(cmd, "rg", "vm", "pub", "priv", "user", "cert", "port", None, "client", False, "proxy", False, []) - mock_vm.assert_called_once_with(cmd, "rg", "vm", None, "pub", "priv", False, "user", "cert", "port", 
"client", False, "Microsoft.HybridCompute", "proxy", False, []) + mock_vm.assert_called_once_with(cmd, "rg", "vm", None, "pub", "priv", False, "user", "cert", "port", "client", False, 'arc_resource_type_placeholder', "proxy", False, []) def test_ssh_cert_no_args(self): cmd = mock.Mock() @@ -193,10 +192,10 @@ def test_ssh_cert(self, mock_write_cert, mock_get_keys, mock_abspath, mock_isdir mock_get_keys.assert_called_once_with('/pubkey/path', None, None, '/client/path') mock_write_cert.assert_called_once_with(cmd, 'pubkey', '/cert/path', '/client/path') - + def test_assert_args_invalid_resource_type(self): self.assertRaises(azclierror.InvalidArgumentValueError, custom._assert_args, 'rg', 'vm', 'ip', "Microsoft.Network", 'cert', 'user') - + def test_assert_args_no_ip_or_vm(self): self.assertRaises(azclierror.RequiredArgumentMissingError, custom._assert_args, None, None, None, None, None, None) @@ -215,8 +214,9 @@ def test_assert_args_cert_with_no_user(self): @mock.patch('os.path.isfile') def test_assert_args_invalid_cert_filepath(self, mock_is_file): mock_is_file.return_value = False + # Legacy Resource Type shouldn't raise error. self.assertRaises(azclierror.FileOperationError, custom._assert_args, 'rg', 'vm', None, 'Microsoft.HybridCompute', 'cert_path', 'username') - + @mock.patch('azext_ssh.ssh_utils.create_ssh_keyfile') @mock.patch('tempfile.mkdtemp') @mock.patch('os.path.isfile') @@ -360,7 +360,7 @@ def test_do_ssh_op_aad_user_compute(self, mock_write_cert, mock_ssh_creds, mock_ cmd.cli_ctx.cloud = mock.Mock() cmd.cli_ctx.cloud.name = "azurecloud" - op_info = ssh_info.SSHSession(None, None, "1.2.3.4", None, None, False, None, None, None, None, None, None, "Microsoft.Compute", None, None, False) + op_info = ssh_info.SSHSession(None, None, "1.2.3.4", None, None, False, None, None, None, None, None, None, "Microsoft.Compute/virtualMachines", None, None, False) op_info.public_key_file = "publicfile" op_info.private_key_file = "privatefile" op_info.ssh_client_folder = "/client/folder" @@ -389,7 +389,7 @@ def test_do_ssh_op_local_user_compute(self, mock_ip, mock_check_files): mock_op = mock.Mock() mock_ip.return_value = "1.2.3.4" - op_info = ssh_info.ConfigSession("config", "rg", "vm", None, None, None, False, False, "username", None, None, "Microsoft.Compute", None, None, None) + op_info = ssh_info.ConfigSession("config", "rg", "vm", None, None, None, False, False, "username", None, None, "Microsoft.Compute/virtualMachines", None, None, None) op_info.public_key_file = "publicfile" op_info.private_key_file = "privatefile" op_info.cert_file = "cert" @@ -408,7 +408,7 @@ def test_do_ssh_op_no_public_ip(self, mock_ip, mock_check_files): mock_op = mock.Mock() mock_ip.return_value = None - op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, None, None, None, None, None, None, "Microsoft.Compute", None, None, False) + op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, None, None, None, None, None, None, "Microsoft.Compute/virtualMachines", None, None, False) self.assertRaises( azclierror.ResourceNotFoundError, custom._do_ssh_op, cmd, op_info, mock_op) @@ -426,7 +426,7 @@ def test_do_ssh_op_arc_local_user(self, mock_get_cert, mock_check_keys, mock_sta cmd = mock.Mock() mock_op = mock.Mock() - op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, "user", None, "port", None, [], False, "Microsoft.HybridCompute", None, None, False) + op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, "user", None, "port", None, [], False, 
"Microsoft.HybridCompute/machines", None, None, False) op_info.private_key_file = "priv" op_info.cert_file = "cert" op_info.ssh_client_folder = "client" @@ -435,7 +435,7 @@ def test_do_ssh_op_arc_local_user(self, mock_get_cert, mock_check_keys, mock_sta custom._do_ssh_op(cmd, op_info, mock_op) mock_get_proxy.assert_called_once_with('proxy') - mock_get_relay_info.assert_called_once_with(cmd, 'rg', 'vm', None) + mock_get_relay_info.assert_called_once_with(cmd, 'rg', 'vm', 'Microsoft.HybridCompute/machines', None) mock_op.assert_called_once_with(op_info, False, False) mock_get_cert.assert_not_called() mock_check_keys.assert_not_called() @@ -471,7 +471,7 @@ def test_do_ssh_arc_op_aad_user(self, mock_cert_exp, mock_start_ssh, mock_write_ mock_op = mock.Mock() - op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, None, None, "port", None, [], False, "Microsoft.HybridCompute", None, None, False) + op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, None, None, "port", None, [], False, "Microsoft.HybridCompute/machines", None, None, False) op_info.public_key_file = "publicfile" op_info.private_key_file = "privatefile" op_info.ssh_client_folder = "client" @@ -486,66 +486,8 @@ def test_do_ssh_arc_op_aad_user(self, mock_cert_exp, mock_start_ssh, mock_write_ mock_get_mod_exp.assert_called_once_with("public") mock_write_cert.assert_called_once_with("certificate", "public-aadcert.pub") mock_get_proxy.assert_called_once_with('proxy') - mock_get_relay_info.assert_called_once_with(cmd, 'rg', 'vm', 3600) + mock_get_relay_info.assert_called_once_with(cmd, 'rg', 'vm', 'Microsoft.HybridCompute/machines', 3600) mock_op.assert_called_once_with(op_info, False, True) - - def test_decide_resource_type_ip(self): - cmd = mock.Mock() - op_info = ssh_info.SSHSession(None, None, "ip", None, None, False, None, None, None, None, [], False, None, None, None, False) - self.assertEqual(custom._decide_resource_type(cmd, op_info), "Microsoft.Compute") - - @mock.patch('azext_ssh.custom._check_if_arc_server') - def test_decide_resource_type_resourcetype_arc(self, mock_is_arc): - cmd = mock.Mock() - mock_is_arc.return_value = None, None, True - op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, None, None, None, None, [], False, "Microsoft.HybridCompute", None, None, False) - self.assertEqual(custom._decide_resource_type(cmd, op_info), "Microsoft.HybridCompute") - - @mock.patch('azext_ssh.custom._check_if_azure_vm') - def test_decide_resource_type_resourcetype_arc(self, mock_is_vm): - cmd = mock.Mock() - mock_is_vm.return_value = None, None, True - op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, None, None, None, None, [], False, "Microsoft.Compute", None, None, False) - self.assertEqual(custom._decide_resource_type(cmd, op_info), "Microsoft.Compute") - - @mock.patch('azext_ssh.custom._check_if_azure_vm') - @mock.patch('azext_ssh.custom._check_if_arc_server') - def test_decide_resource_type_rg_vm_both(self, mock_is_arc, mock_is_vm): - cmd = mock.Mock() - mock_is_vm.return_value = None, None, True - mock_is_arc.return_value = None, None, True - op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, None, None, None, None, [], False, None, None, None, False) - self.assertRaises( - azclierror.BadRequestError, custom._decide_resource_type, cmd, op_info) - - @mock.patch('azext_ssh.custom._check_if_azure_vm') - @mock.patch('azext_ssh.custom._check_if_arc_server') - def test_decide_resource_type_rg_vm_neither(self, mock_is_arc, mock_is_vm): - cmd = mock.Mock() 
- mock_is_vm.return_value = None, ResourceNotFoundError(), False - mock_is_arc.return_value = None, ResourceNotFoundError(), False - op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, None, None, None, None, [], False, None, None, None, False) - self.assertRaises( - azclierror.ResourceNotFoundError, custom._decide_resource_type, cmd, op_info) - - @mock.patch('azext_ssh.custom._check_if_azure_vm') - @mock.patch('azext_ssh.custom._check_if_arc_server') - def test_decide_resource_type_rg_vm_arc(self, mock_is_arc, mock_is_vm): - cmd = mock.Mock() - mock_is_vm.return_value = None, ResourceNotFoundError(), False - mock_is_arc.return_value = None, None, True - op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, None, None, None, None, [], False, None, None, None, False) - self.assertEqual(custom._decide_resource_type(cmd, op_info), "Microsoft.HybridCompute") - - @mock.patch('azext_ssh.custom._check_if_azure_vm') - @mock.patch('azext_ssh.custom._check_if_arc_server') - def test_decide_resource_type_rg_vm_arc(self, mock_is_arc, mock_is_vm): - cmd = mock.Mock() - mock_is_vm.return_value = None, None, True - mock_is_arc.return_value = None, ResourceNotFoundError(), False - op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, None, None, None, None, [], False, None, None, None, False) - self.assertEqual(custom._decide_resource_type(cmd, op_info), "Microsoft.Compute") - if __name__ == '__main__': unittest.main() diff --git a/src/ssh/azext_ssh/tests/latest/test_rdp_utils.py b/src/ssh/azext_ssh/tests/latest/test_rdp_utils.py index 92e96b39091..0bda844c41c 100644 --- a/src/ssh/azext_ssh/tests/latest/test_rdp_utils.py +++ b/src/ssh/azext_ssh/tests/latest/test_rdp_utils.py @@ -16,7 +16,7 @@ class RDPUtilsTest(unittest.TestCase): @mock.patch('azext_ssh.custom.connectivity_utils.format_relay_info_string') @mock.patch("subprocess.Popen") def test_start_ssh_tunnel(self, mock_popen, mock_relay, mock_path, mock_env): - op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, "user", None, "port", None, ['arg1', 'arg2', '-v'], False, "Microsoft.HybridCompute", None, None, True) + op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, "user", None, "port", None, ['arg1', 'arg2', '-v'], False, "Microsoft.HybridCompute/machines", None, None, True) op_info.public_key_file = "pub" op_info.private_key_file = "priv" op_info.cert_file = "cert" @@ -70,7 +70,7 @@ def test_get_rdp_path(self, mock_isfile, mock_join, mock_env, mock_arch, mock_sy @mock.patch.object(rdp_utils, 'call_rdp') @mock.patch.object(rdp_utils, 'terminate_ssh') def test_start_rdp_connection(self, mock_terminate, mock_rdp, mock_wait, mock_tunnel, mock_isopen, mock_getport): - op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, "user", None, "port", None, ['arg1', 'arg2'], False, "Microsoft.HybridCompute", None, None, True) + op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, "user", None, "port", None, ['arg1', 'arg2'], False, "Microsoft.HybridCompute/machines", None, None, True) op_info.public_key_file = "pub" op_info.private_key_file = "priv" op_info.cert_file = "cert" diff --git a/src/ssh/azext_ssh/tests/latest/test_resource_type_utils.py b/src/ssh/azext_ssh/tests/latest/test_resource_type_utils.py new file mode 100644 index 00000000000..a73df6e8915 --- /dev/null +++ b/src/ssh/azext_ssh/tests/latest/test_resource_type_utils.py @@ -0,0 +1,110 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) 
Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- +import io +import unittest +from unittest import mock +from azext_ssh import resource_type_utils +from azext_ssh import rdp_utils +from azext_ssh import ssh_utils +from azext_ssh import ssh_info + +from azure.cli.core import azclierror + +class SshResourceTypeUtilsCommandTest(unittest.TestCase): + + @mock.patch('azext_ssh.resource_type_utils._list_types_of_resources_with_provided_name') + def test_decide_resource_type_ip(self, mock_list_types): + cmd = mock.Mock() + op_info = ssh_info.SSHSession(None, None, "ip", None, None, False, None, None, None, None, [], False, None, None, None, False) + self.assertEqual(resource_type_utils.decide_resource_type(cmd, op_info), "Microsoft.Compute/virtualMachines") + mock_list_types.assert_not_called() + + ################################ Test Resource Type Provided ############################## + + @mock.patch('azext_ssh.resource_type_utils._list_types_of_resources_with_provided_name') + def test_decide_resource_type_resourcetype_arc(self, mock_list_types): + cmd = mock.Mock() + mock_list_types.return_value = {'microsoft.hybridcompute/machines', 'microsoft.compute/virtualmachines'} + op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, None, None, None, None, [], False, "Microsoft.HybridCompute/machines", None, None, False) + self.assertEqual(resource_type_utils.decide_resource_type(cmd, op_info), "Microsoft.HybridCompute/machines") + + @mock.patch('azext_ssh.resource_type_utils._list_types_of_resources_with_provided_name') + def test_decide_resource_type_resourcetype_compute(self, mock_list_types): + cmd = mock.Mock() + mock_list_types.return_value = {'microsoft.hybridcompute/machines', 'microsoft.compute/virtualmachines', 'microsoft.connectedvmwarevsphere/virtualmachines'} + op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, None, None, None, None, [], False, "Microsoft.Compute/virtualMachines", None, None, False) + self.assertEqual(resource_type_utils.decide_resource_type(cmd, op_info), "Microsoft.Compute/virtualMachines") + + @mock.patch('azext_ssh.resource_type_utils._list_types_of_resources_with_provided_name') + def test_decide_resource_type_resourcetype_vmware(self, mock_list_types): + cmd = mock.Mock() + mock_list_types.return_value = {'microsoft.hybridcompute/machines', 'microsoft.compute/virtualmachines', 'microsoft.connectedvmwarevsphere/virtualmachines'} + op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, None, None, None, None, [], False, "Microsoft.connectedvmwarevsphere/virtualMachines", None, None, False) + self.assertEqual(resource_type_utils.decide_resource_type(cmd, op_info), "Microsoft.ConnectedVMwarevSphere/virtualMachines") + + + ############################ Test Legacy Resource Type (Resource Provider) ########################## + @mock.patch('azext_ssh.resource_type_utils._list_types_of_resources_with_provided_name') + def test_decide_resource_type_resourcetype_arc_legacy(self, mock_list_types): + cmd = mock.Mock() + mock_list_types.return_value = {'microsoft.hybridcompute/machines', 'microsoft.compute/virtualmachines'} + op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, None, None, None, None, [], False, "Microsoft.HybridCompute", None, None, False) + self.assertEqual(resource_type_utils.decide_resource_type(cmd, op_info), 
"Microsoft.HybridCompute/machines") + + @mock.patch('azext_ssh.resource_type_utils._list_types_of_resources_with_provided_name') + def test_decide_resource_type_resourcetype_compute_legacy(self, mock_list_types): + cmd = mock.Mock() + mock_list_types.return_value = {'microsoft.hybridcompute/machines', 'microsoft.compute/virtualmachines', 'microsoft.connectedvmwarevsphere/virtualmachines'} + op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, None, None, None, None, [], False, "Microsoft.Compute", None, None, False) + self.assertEqual(resource_type_utils.decide_resource_type(cmd, op_info), "Microsoft.Compute/virtualMachines") + + @mock.patch('azext_ssh.resource_type_utils._list_types_of_resources_with_provided_name') + def test_decide_resource_type_resourcetype_vmware_legacy(self, mock_list_types): + cmd = mock.Mock() + mock_list_types.return_value = {'microsoft.hybridcompute/machines', 'microsoft.compute/virtualmachines', 'microsoft.connectedvmwarevsphere/virtualmachines'} + op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, None, None, None, None, [], False, "Microsoft.connectedvmwarevsphere", None, None, False) + self.assertEqual(resource_type_utils.decide_resource_type(cmd, op_info), "Microsoft.ConnectedVMwarevSphere/virtualMachines") + + ############################# Test No Resource Type Provided ################################### + @mock.patch('azext_ssh.resource_type_utils._list_types_of_resources_with_provided_name') + def test_decide_resource_type_more_than_one(self, mock_list_types): + cmd = mock.Mock() + mock_list_types.return_value = {'microsoft.hybridcompute/machines', 'microsoft.compute/virtualmachines', 'microsoft.connectedvmwarevsphere/virtualmachines'} + op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, None, None, None, None, [], False, None, None, None, False) + self.assertRaises( + azclierror.BadRequestError, resource_type_utils.decide_resource_type, cmd, op_info) + + @mock.patch('azext_ssh.resource_type_utils._list_types_of_resources_with_provided_name') + def test_decide_resource_type_rg_vm_neither(self, mock_list_types): + cmd = mock.Mock() + mock_list_types.return_value = {} + op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, None, None, None, None, [], False, None, None, None, False) + self.assertRaises( + azclierror.ResourceNotFoundError, resource_type_utils.decide_resource_type, cmd, op_info) + + @mock.patch('azext_ssh.resource_type_utils._list_types_of_resources_with_provided_name') + def test_decide_resource_type_rg_arc(self, mock_list_types): + cmd = mock.Mock() + mock_list_types.return_value = {"microsoft.hybridcompute/machines"} + op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, None, None, None, None, [], False, None, None, None, False) + self.assertEqual(resource_type_utils.decide_resource_type(cmd, op_info), "Microsoft.HybridCompute/machines") + + @mock.patch('azext_ssh.resource_type_utils._list_types_of_resources_with_provided_name') + def test_decide_resource_type_rg_vm(self, mock_list_types): + cmd = mock.Mock() + mock_list_types.return_value = {"microsoft.compute/virtualmachines"} + op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, None, None, None, None, [], False, None, None, None, False) + self.assertEqual(resource_type_utils.decide_resource_type(cmd, op_info), "Microsoft.Compute/virtualMachines") + + @mock.patch('azext_ssh.resource_type_utils._list_types_of_resources_with_provided_name') + def test_decide_resource_type_rg_vmware(self, 
mock_list_types): + cmd = mock.Mock() + mock_list_types.return_value = {'microsoft.connectedvmwarevsphere/virtualmachines'} + op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, None, None, None, None, [], False, None, None, None, False) + self.assertEqual(resource_type_utils.decide_resource_type(cmd, op_info), "Microsoft.ConnectedVMwarevSphere/virtualMachines") + + + if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/src/ssh/azext_ssh/tests/latest/test_ssh_info.py b/src/ssh/azext_ssh/tests/latest/test_ssh_info.py index 681f77a7088..185b9d36fc9 100644 --- a/src/ssh/azext_ssh/tests/latest/test_ssh_info.py +++ b/src/ssh/azext_ssh/tests/latest/test_ssh_info.py @@ -44,21 +44,21 @@ def test_ssh_session(self, mock_abspath): def test_ssh_session_get_host(self): - session = ssh_info.SSHSession(None, None, "ip", None, None, False, "user", None, None, None, [], False, "Microsoft.Compute", None, None, False) + session = ssh_info.SSHSession(None, None, "ip", None, None, False, "user", None, None, None, [], False, "Microsoft.Compute/virtualMachines", None, None, False) self.assertEqual("ip", session.get_host()) - session = ssh_info.SSHSession("rg", "vm", None, None, None, False, "user", None, None, None, [], False, "Microsoft.HybridCompute", None, None, True) + session = ssh_info.SSHSession("rg", "vm", None, None, None, False, "user", None, None, None, [], False, "Microsoft.HybridCompute/machines", None, None, True) self.assertEqual("vm", session.get_host()) @mock.patch('os.path.abspath') def test_ssh_session_build_args_compute(self, mock_abspath): mock_abspath.side_effect = ["pub_path", "priv_path", "cert_path", "client_path"] - session = ssh_info.SSHSession("rg", "vm", "ip", "pub", "priv", False, "user", "cert", "port", "client/folder", [], None, "Microsoft.Compute", None, None, False) + session = ssh_info.SSHSession("rg", "vm", "ip", "pub", "priv", False, "user", "cert", "port", "client/folder", [], None, "Microsoft.Compute/virtualMachines", None, None, False) self.assertEqual(["-i", "priv_path", "-o", "CertificateFile=\"cert_path\"", "-p", "port"], session.build_args()) @mock.patch('os.path.abspath') def test_ssh_session_build_args_hyvridcompute(self, mock_abspath): mock_abspath.side_effect = ["pub_path", "priv_path", "cert_path", "client_path"] - session = ssh_info.SSHSession("rg", "vm", "ip", "pub", "priv", False, "user", "cert", "port", "client/folder", [], None, "Microsoft.HybridCompute", None, None, True) + session = ssh_info.SSHSession("rg", "vm", "ip", "pub", "priv", False, "user", "cert", "port", "client/folder", [], None, "Microsoft.HybridCompute/machines", None, None, True) session.proxy_path = "proxy_path" self.assertEqual(["-o", "ProxyCommand=\"proxy_path\" -p port", "-i", "priv_path", "-o", "CertificateFile=\"cert_path\""], session.build_args()) @@ -233,7 +233,7 @@ def test_get_config_text_arc(self, create_file, mock_abspath): ] mock_abspath.side_effect = ["config_path", "pub_path", "priv_path", "cert_path", "client_path", "cred_path"] - session = ssh_info.ConfigSession("config", "rg", "vm", None, "pub", "priv", False, False, "user", "cert", None, "Microsoft.HybridCompute", "cred", None, "client/folder") + session = ssh_info.ConfigSession("config", "rg", "vm", None, "pub", "priv", False, False, "user", "cert", None, "Microsoft.HybridCompute/machines", "cred", None, "client/folder") session.proxy_path = "proxy_path" self.assertEqual(session.get_config_text(True), expected_lines_aad) self.assertEqual(session.get_config_text(False), 
expected_lines_local_user) diff --git a/src/ssh/azext_ssh/tests/latest/test_ssh_utils.py b/src/ssh/azext_ssh/tests/latest/test_ssh_utils.py index 27c9f45e54d..0adf24aa446 100644 --- a/src/ssh/azext_ssh/tests/latest/test_ssh_utils.py +++ b/src/ssh/azext_ssh/tests/latest/test_ssh_utils.py @@ -6,9 +6,6 @@ from azure.cli.core import azclierror from unittest import mock import unittest -import platform -import os - from azext_ssh import ssh_utils from azext_ssh import ssh_info @@ -54,7 +51,7 @@ def test_start_ssh_connection_compute_aad_windows(self, mock_system, mock_copy_e @mock.patch('platform.system') def test_start_ssh_connection_compute_local_linux(self, mock_system, mock_copy_env, mock_call, mock_path, mock_wait, mock_cleanup): - op_info = ssh_info.SSHSession("rg", "vm", "ip", None, None, False, "user", None, "port", None, ['arg1', 'arg2', 'arg3'], False, "Microsof.Compute", None, None, False) + op_info = ssh_info.SSHSession("rg", "vm", "ip", None, None, False, "user", None, "port", None, ['arg1', 'arg2', 'arg3'], False, "Microsoft.Compute/virtualMachines", None, None, False) op_info.public_key_file = "pub" op_info.private_key_file = "priv" op_info.cert_file = "cert" @@ -88,7 +85,7 @@ def test_start_ssh_connection_compute_local_linux(self, mock_system, mock_copy_e @mock.patch('platform.system') def test_start_ssh_connection_arc_aad_windows(self, mock_platform, mock_relay_str, mock_call, mock_copy_env, mock_path, mock_read, mock_cleanup): - op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, "user", None, "port", None, ['arg1'], False, "Microsoft.HybridCompute", None, None, False) + op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, "user", None, "port", None, ['arg1'], False, "Microsoft.HybridCompute/machines", None, None, False) op_info.public_key_file = "pub" op_info.private_key_file = "priv" op_info.cert_file = "cert" @@ -125,7 +122,7 @@ def test_start_ssh_connection_arc_aad_windows(self, mock_platform, mock_relay_st @mock.patch('platform.system') def test_start_ssh_connection_arc_local_linux(self, mock_platform, mock_relay_str, mock_call, mock_copy_env, mock_path, mock_wait, mock_cleanup): - op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, "user", None, "port", None, ['arg1'], False, "Microsoft.HybridCompute", None, None, False) + op_info = ssh_info.SSHSession("rg", "vm", None, None, None, False, "user", None, "port", None, ['arg1'], False, "Microsoft.HybridCompute/machines", None, None, False) op_info.public_key_file = "pub" op_info.private_key_file = "priv" op_info.cert_file = "cert" @@ -156,7 +153,7 @@ def test_start_ssh_connection_arc_local_linux(self, mock_platform, mock_relay_st @mock.patch.object(ssh_utils, '_issue_config_cleanup_warning') @mock.patch('os.path.abspath') def test_write_ssh_config_ip_and_vm_compute_append(self, mock_abspath, mock_warning): - op_info = ssh_info.ConfigSession("config", "rg", "vm", "ip", None, None, False, False, "user", None, "port", "Microsoft.Compute", None, None, "client") + op_info = ssh_info.ConfigSession("config", "rg", "vm", "ip", None, None, False, False, "user", None, "port", "Microsoft.Compute/virtualMachines", None, None, "client") op_info.config_path = "config" op_info.ssh_client_folder = "client" op_info.private_key_file = "priv" @@ -191,7 +188,7 @@ def test_write_ssh_config_ip_and_vm_compute_append(self, mock_abspath, mock_warn @mock.patch('os.path.abspath') @mock.patch.object(ssh_info.ConfigSession, '_create_relay_info_file') def test_write_ssh_config_arc_overwrite(self, 
mock_create_file, mock_abspath, mock_warning): - op_info = ssh_info.ConfigSession("config", "rg", "vm", None, None, None, True, False, "user", None, "port", "Microsoft.HybridCompute", None, None, "client") + op_info = ssh_info.ConfigSession("config", "rg", "vm", None, None, None, True, False, "user", None, "port", "Microsoft.HybridCompute/machines", None, None, "client") op_info.config_path = "config" op_info.ssh_client_folder = "client" op_info.private_key_file = "priv" diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/__init__.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/__init__.py new file mode 100644 index 00000000000..7b5c68dfb4d --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/__init__.py @@ -0,0 +1,23 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._azure_arc_vmware_management_service_api import AzureArcVMwareManagementServiceAPI +from ._version import VERSION + +__version__ = VERSION + +try: + from ._patch import __all__ as _patch_all + from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk +__all__ = ['AzureArcVMwareManagementServiceAPI'] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/_azure_arc_vmware_management_service_api.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/_azure_arc_vmware_management_service_api.py new file mode 100644 index 00000000000..8b1d12bc337 --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/_azure_arc_vmware_management_service_api.py @@ -0,0 +1,127 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, TYPE_CHECKING + +from msrest import Deserializer, Serializer + +from azure.core.rest import HttpRequest, HttpResponse +from azure.mgmt.core import ARMPipelineClient + +from . import models +from ._configuration import AzureArcVMwareManagementServiceAPIConfiguration +from .operations import Operations, VirtualMachinesOperations + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials import TokenCredential + +class AzureArcVMwareManagementServiceAPI: # pylint: disable=too-many-instance-attributes + """Self service experience for VMware. 
+ + :ivar operations: Operations operations + :vartype operations: azure.mgmt.connectedvmware.operations.Operations + :ivar virtual_machines: VirtualMachinesOperations operations + :vartype virtual_machines: azure.mgmt.connectedvmware.operations.VirtualMachinesOperations + :ivar resource_pools: ResourcePoolsOperations operations + :vartype resource_pools: azure.mgmt.connectedvmware.operations.ResourcePoolsOperations + :ivar clusters: ClustersOperations operations + :vartype clusters: azure.mgmt.connectedvmware.operations.ClustersOperations + :ivar hosts: HostsOperations operations + :vartype hosts: azure.mgmt.connectedvmware.operations.HostsOperations + :ivar datastores: DatastoresOperations operations + :vartype datastores: azure.mgmt.connectedvmware.operations.DatastoresOperations + :ivar vcenters: VCentersOperations operations + :vartype vcenters: azure.mgmt.connectedvmware.operations.VCentersOperations + :ivar virtual_machine_templates: VirtualMachineTemplatesOperations operations + :vartype virtual_machine_templates: + azure.mgmt.connectedvmware.operations.VirtualMachineTemplatesOperations + :ivar virtual_networks: VirtualNetworksOperations operations + :vartype virtual_networks: azure.mgmt.connectedvmware.operations.VirtualNetworksOperations + :ivar inventory_items: InventoryItemsOperations operations + :vartype inventory_items: azure.mgmt.connectedvmware.operations.InventoryItemsOperations + :ivar hybrid_identity_metadata: HybridIdentityMetadataOperations operations + :vartype hybrid_identity_metadata: + azure.mgmt.connectedvmware.operations.HybridIdentityMetadataOperations + :ivar machine_extensions: MachineExtensionsOperations operations + :vartype machine_extensions: azure.mgmt.connectedvmware.operations.MachineExtensionsOperations + :ivar guest_agents: GuestAgentsOperations operations + :vartype guest_agents: azure.mgmt.connectedvmware.operations.GuestAgentsOperations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: The Subscription ID. + :type subscription_id: str + :param base_url: Service URL. Default value is "https://management.azure.com". + :type base_url: str + :keyword api_version: Api Version. Default value is "2022-01-10-preview". Note that overriding + this default value may result in unsupported behavior. + :paramtype api_version: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + """ + + def __init__( + self, + credential: "TokenCredential", + subscription_id: str, + base_url: str = "https://management.azure.com", + **kwargs: Any + ) -> None: + self._config = AzureArcVMwareManagementServiceAPIConfiguration(credential=credential, subscription_id=subscription_id, **kwargs) + self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + self._serialize.client_side_validation = False + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize + ) + self.virtual_machines = VirtualMachinesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + + def _send_request( + self, + request: HttpRequest, + **kwargs: Any + ) -> HttpResponse: + """Runs the network request through the client's chained policies. 
+ + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.HttpResponse + """ + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, **kwargs) + + def close(self): + # type: () -> None + self._client.close() + + def __enter__(self): + # type: () -> AzureArcVMwareManagementServiceAPI + self._client.__enter__() + return self + + def __exit__(self, *exc_details): + # type: (Any) -> None + self._client.__exit__(*exc_details) diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/_configuration.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/_configuration.py new file mode 100644 index 00000000000..8a8e8f9b946 --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/_configuration.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy + +from ._version import VERSION + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials import TokenCredential + + +class AzureArcVMwareManagementServiceAPIConfiguration(Configuration): # pylint: disable=too-many-instance-attributes + """Configuration for AzureArcVMwareManagementServiceAPI. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: The Subscription ID. + :type subscription_id: str + :keyword api_version: Api Version. Default value is "2022-01-10-preview". Note that overriding + this default value may result in unsupported behavior. 
+ :paramtype api_version: str + """ + + def __init__( + self, + credential: "TokenCredential", + subscription_id: str, + **kwargs: Any + ) -> None: + super(AzureArcVMwareManagementServiceAPIConfiguration, self).__init__(**kwargs) + api_version = kwargs.pop('api_version', "2022-01-10-preview") # type: str + + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + + self.credential = credential + self.subscription_id = subscription_id + self.api_version = api_version + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'mgmt-connectedvmware/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs # type: Any + ): + # type: (...) -> None + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/_patch.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/_patch.py new file mode 100644 index 00000000000..0ad201a8c58 --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/_patch.py @@ -0,0 +1,19 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/_vendor.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/_vendor.py new file mode 100644 index 00000000000..138f663c53a --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/_vendor.py @@ -0,0 +1,27 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.core.pipeline.transport import HttpRequest + +def _convert_request(request, files=None): + data = request.content if not files else None + request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) + if files: + request.set_formdata_body(files) + return request + +def _format_url_section(template, **kwargs): + components = template.split("/") + while components: + try: + return template.format(**kwargs) + except KeyError as key: + formatted_components = template.split("/") + components = [ + c for c in formatted_components if "{}".format(key.args[0]) not in c + ] + template = "/".join(components) diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/_version.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/_version.py new file mode 100644 index 00000000000..dfa6ee022f1 --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +VERSION = "1.0.0b2" diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/__init__.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/__init__.py new file mode 100644 index 00000000000..93ed9911373 --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/__init__.py @@ -0,0 +1,20 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._azure_arc_vmware_management_service_api import AzureArcVMwareManagementServiceAPI + +try: + from ._patch import __all__ as _patch_all + from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk +__all__ = ['AzureArcVMwareManagementServiceAPI'] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/_azure_arc_vmware_management_service_api.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/_azure_arc_vmware_management_service_api.py new file mode 100644 index 00000000000..c959f20fc28 --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/_azure_arc_vmware_management_service_api.py @@ -0,0 +1,125 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, Awaitable, TYPE_CHECKING + +from msrest import Deserializer, Serializer + +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.mgmt.core import AsyncARMPipelineClient + +from .. import models +from ._configuration import AzureArcVMwareManagementServiceAPIConfiguration +from .operations import VirtualMachinesOperations, Operations + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + +class AzureArcVMwareManagementServiceAPI: # pylint: disable=too-many-instance-attributes + """Self service experience for VMware. + + :ivar operations: Operations operations + :vartype operations: azure.mgmt.connectedvmware.aio.operations.Operations + :ivar virtual_machines: VirtualMachinesOperations operations + :vartype virtual_machines: azure.mgmt.connectedvmware.aio.operations.VirtualMachinesOperations + :ivar resource_pools: ResourcePoolsOperations operations + :vartype resource_pools: azure.mgmt.connectedvmware.aio.operations.ResourcePoolsOperations + :ivar clusters: ClustersOperations operations + :vartype clusters: azure.mgmt.connectedvmware.aio.operations.ClustersOperations + :ivar hosts: HostsOperations operations + :vartype hosts: azure.mgmt.connectedvmware.aio.operations.HostsOperations + :ivar datastores: DatastoresOperations operations + :vartype datastores: azure.mgmt.connectedvmware.aio.operations.DatastoresOperations + :ivar vcenters: VCentersOperations operations + :vartype vcenters: azure.mgmt.connectedvmware.aio.operations.VCentersOperations + :ivar virtual_machine_templates: VirtualMachineTemplatesOperations operations + :vartype virtual_machine_templates: + azure.mgmt.connectedvmware.aio.operations.VirtualMachineTemplatesOperations + :ivar virtual_networks: VirtualNetworksOperations operations + :vartype virtual_networks: azure.mgmt.connectedvmware.aio.operations.VirtualNetworksOperations + :ivar inventory_items: InventoryItemsOperations operations + :vartype inventory_items: azure.mgmt.connectedvmware.aio.operations.InventoryItemsOperations + :ivar hybrid_identity_metadata: HybridIdentityMetadataOperations operations + :vartype hybrid_identity_metadata: + azure.mgmt.connectedvmware.aio.operations.HybridIdentityMetadataOperations + :ivar machine_extensions: MachineExtensionsOperations operations + :vartype machine_extensions: + azure.mgmt.connectedvmware.aio.operations.MachineExtensionsOperations + :ivar guest_agents: GuestAgentsOperations operations + :vartype guest_agents: azure.mgmt.connectedvmware.aio.operations.GuestAgentsOperations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The Subscription ID. + :type subscription_id: str + :param base_url: Service URL. Default value is "https://management.azure.com". + :type base_url: str + :keyword api_version: Api Version. Default value is "2022-01-10-preview". Note that overriding + this default value may result in unsupported behavior. + :paramtype api_version: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + base_url: str = "https://management.azure.com", + **kwargs: Any + ) -> None: + self._config = AzureArcVMwareManagementServiceAPIConfiguration(credential=credential, subscription_id=subscription_id, **kwargs) + self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + self._serialize.client_side_validation = False + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize + ) + self.virtual_machines = VirtualMachinesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + + def _send_request( + self, + request: HttpRequest, + **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, **kwargs) + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> "AzureArcVMwareManagementServiceAPI": + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details) -> None: + await self._client.__aexit__(*exc_details) diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/_configuration.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/_configuration.py new file mode 100644 index 00000000000..a3e014206f2 --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/_configuration.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy + +from .._version import VERSION + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + + +class AzureArcVMwareManagementServiceAPIConfiguration(Configuration): # pylint: disable=too-many-instance-attributes + """Configuration for AzureArcVMwareManagementServiceAPI. + + Note that all parameters used to create this instance are saved as instance + attributes. 
+ + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The Subscription ID. + :type subscription_id: str + :keyword api_version: Api Version. Default value is "2022-01-10-preview". Note that overriding + this default value may result in unsupported behavior. + :paramtype api_version: str + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + **kwargs: Any + ) -> None: + super(AzureArcVMwareManagementServiceAPIConfiguration, self).__init__(**kwargs) + api_version = kwargs.pop('api_version', "2022-01-10-preview") # type: str + + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + + self.credential = credential + self.subscription_id = subscription_id + self.api_version = api_version + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'mgmt-connectedvmware/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs: Any + ) -> None: + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/_patch.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/_patch.py new file mode 100644 index 00000000000..0ad201a8c58 --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/_patch.py @@ -0,0 +1,19 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/operations/__init__.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/operations/__init__.py new file mode 100644 index 00000000000..295107adb1c --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/operations/__init__.py @@ -0,0 +1,20 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._operations import Operations +from ._virtual_machines_operations import VirtualMachinesOperations + +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk +__all__ = [ + 'Operations', + 'VirtualMachinesOperations', +] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() \ No newline at end of file diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/operations/_operations.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/operations/_operations.py new file mode 100644 index 00000000000..93099c75710 --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/operations/_operations.py @@ -0,0 +1,122 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._operations import build_list_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class Operations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.connectedvmware.aio.AzureArcVMwareManagementServiceAPI`'s + :attr:`operations` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + **kwargs: Any + ) -> AsyncIterable[_models.OperationsList]: + """Returns list of all operations. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OperationsList or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.connectedvmware.models.OperationsList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-01-10-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.OperationsList] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("OperationsList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/providers/Microsoft.ConnectedVMwarevSphere/operations"} # type: ignore diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/operations/_patch.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/operations/_patch.py new file mode 100644 index 00000000000..0ad201a8c58 --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/operations/_patch.py @@ -0,0 +1,19 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. 
+ +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/operations/_virtual_machines_operations.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/operations/_virtual_machines_operations.py new file mode 100644 index 00000000000..9316d03f374 --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/aio/operations/_virtual_machines_operations.py @@ -0,0 +1,111 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._virtual_machines_operations import build_get_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class VirtualMachinesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.connectedvmware.aio.AzureArcVMwareManagementServiceAPI`'s + :attr:`virtual_machines` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + virtual_machine_name: str, + **kwargs: Any + ) -> _models.VirtualMachine: + """Gets a virtual machine. + + Implements virtual machine GET method. + + :param resource_group_name: The Resource Group Name. 
+ :type resource_group_name: str + :param virtual_machine_name: Name of the virtual machine resource. + :type virtual_machine_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: VirtualMachine, or the result of cls(response) + :rtype: ~azure.mgmt.connectedvmware.models.VirtualMachine + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-01-10-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.VirtualMachine] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + virtual_machine_name=virtual_machine_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('VirtualMachine', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ConnectedVMwarevSphere/virtualMachines/{virtualMachineName}"} # type: ignore diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/models/__init__.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/models/__init__.py new file mode 100644 index 00000000000..89491cbd810 --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/models/__init__.py @@ -0,0 +1,235 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from ._models_py3 import AvailablePatchCountByClassification +from ._models_py3 import Cluster +from ._models_py3 import ClusterInventoryItem +from ._models_py3 import ClustersList +from ._models_py3 import Condition +from ._models_py3 import Datastore +from ._models_py3 import DatastoreInventoryItem +from ._models_py3 import DatastoresList +from ._models_py3 import ErrorDefinition +from ._models_py3 import ErrorDetail +from ._models_py3 import ErrorResponse +from ._models_py3 import ExtendedLocation +from ._models_py3 import GuestAgent +from ._models_py3 import GuestAgentList +from ._models_py3 import GuestAgentProfile +from ._models_py3 import GuestCredential +from ._models_py3 import HardwareProfile +from ._models_py3 import Host +from ._models_py3 import HostInventoryItem +from ._models_py3 import HostsList +from ._models_py3 import HttpProxyConfiguration +from ._models_py3 import HybridIdentityMetadata +from ._models_py3 import HybridIdentityMetadataList +from ._models_py3 import Identity +from ._models_py3 import InventoryItem +from ._models_py3 import InventoryItemDetails +from ._models_py3 import InventoryItemProperties +from ._models_py3 import InventoryItemsList +from ._models_py3 import LinuxParameters +from ._models_py3 import MachineExtension +from ._models_py3 import MachineExtensionInstanceView +from ._models_py3 import MachineExtensionInstanceViewStatus +from ._models_py3 import MachineExtensionPropertiesInstanceView +from ._models_py3 import MachineExtensionUpdate +from ._models_py3 import MachineExtensionsListResult +from ._models_py3 import NetworkInterface +from ._models_py3 import NetworkInterfaceUpdate +from ._models_py3 import NetworkProfile +from ._models_py3 import NetworkProfileUpdate +from ._models_py3 import NicIPAddressSettings +from ._models_py3 import NicIPSettings +from ._models_py3 import Operation +from ._models_py3 import OperationDisplay +from ._models_py3 import OperationsList +from ._models_py3 import OsProfile +from ._models_py3 import OsProfileLinuxConfiguration +from ._models_py3 import OsProfileUpdate +from ._models_py3 import OsProfileUpdateLinuxConfiguration +from ._models_py3 import OsProfileUpdateWindowsConfiguration +from ._models_py3 import OsProfileWindowsConfiguration +from ._models_py3 import PlacementProfile +from ._models_py3 import ProxyResource +from ._models_py3 import Resource +from ._models_py3 import ResourcePatch +from ._models_py3 import ResourcePool +from ._models_py3 import ResourcePoolInventoryItem +from ._models_py3 import ResourcePoolsList +from ._models_py3 import ResourceStatus +from ._models_py3 import SecurityProfile +from ._models_py3 import StopVirtualMachineOptions +from ._models_py3 import StorageProfile +from ._models_py3 import StorageProfileUpdate +from ._models_py3 import SystemData +from ._models_py3 import UefiSettings +from ._models_py3 import VCenter +from ._models_py3 import VCentersList +from ._models_py3 import VICredential +from ._models_py3 import VirtualDisk +from ._models_py3 import VirtualDiskUpdate +from ._models_py3 import VirtualMachine +from ._models_py3 import VirtualMachineAssessPatchesResult +from ._models_py3 import VirtualMachineInstallPatchesParameters +from ._models_py3 import VirtualMachineInstallPatchesResult +from ._models_py3 import VirtualMachineInventoryItem +from ._models_py3 import VirtualMachineTemplate +from ._models_py3 import VirtualMachineTemplateInventoryItem +from ._models_py3 import 
VirtualMachineTemplatesList +from ._models_py3 import VirtualMachineUpdate +from ._models_py3 import VirtualMachinesList +from ._models_py3 import VirtualNetwork +from ._models_py3 import VirtualNetworkInventoryItem +from ._models_py3 import VirtualNetworksList +from ._models_py3 import VirtualSCSIController +from ._models_py3 import WindowsParameters + + +from ._azure_arc_vmware_management_service_api_enums import ( + CreatedByType, + DiskMode, + DiskType, + FirmwareType, + IPAddressAllocationMethod, + IdentityType, + InventoryType, + NICType, + OsType, + OsTypeUM, + PatchOperationStartedBy, + PatchOperationStatus, + PatchServiceUsed, + PowerOnBootOption, + ProvisioningAction, + ProvisioningState, + SCSIControllerType, + StatusLevelTypes, + StatusTypes, + VMGuestPatchClassificationLinux, + VMGuestPatchClassificationWindows, + VMGuestPatchRebootSetting, + VMGuestPatchRebootStatus, + VirtualSCSISharing, +) +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk +__all__ = [ + 'AvailablePatchCountByClassification', + 'Cluster', + 'ClusterInventoryItem', + 'ClustersList', + 'Condition', + 'Datastore', + 'DatastoreInventoryItem', + 'DatastoresList', + 'ErrorDefinition', + 'ErrorDetail', + 'ErrorResponse', + 'ExtendedLocation', + 'GuestAgent', + 'GuestAgentList', + 'GuestAgentProfile', + 'GuestCredential', + 'HardwareProfile', + 'Host', + 'HostInventoryItem', + 'HostsList', + 'HttpProxyConfiguration', + 'HybridIdentityMetadata', + 'HybridIdentityMetadataList', + 'Identity', + 'InventoryItem', + 'InventoryItemDetails', + 'InventoryItemProperties', + 'InventoryItemsList', + 'LinuxParameters', + 'MachineExtension', + 'MachineExtensionInstanceView', + 'MachineExtensionInstanceViewStatus', + 'MachineExtensionPropertiesInstanceView', + 'MachineExtensionUpdate', + 'MachineExtensionsListResult', + 'NetworkInterface', + 'NetworkInterfaceUpdate', + 'NetworkProfile', + 'NetworkProfileUpdate', + 'NicIPAddressSettings', + 'NicIPSettings', + 'Operation', + 'OperationDisplay', + 'OperationsList', + 'OsProfile', + 'OsProfileLinuxConfiguration', + 'OsProfileUpdate', + 'OsProfileUpdateLinuxConfiguration', + 'OsProfileUpdateWindowsConfiguration', + 'OsProfileWindowsConfiguration', + 'PlacementProfile', + 'ProxyResource', + 'Resource', + 'ResourcePatch', + 'ResourcePool', + 'ResourcePoolInventoryItem', + 'ResourcePoolsList', + 'ResourceStatus', + 'SecurityProfile', + 'StopVirtualMachineOptions', + 'StorageProfile', + 'StorageProfileUpdate', + 'SystemData', + 'UefiSettings', + 'VCenter', + 'VCentersList', + 'VICredential', + 'VirtualDisk', + 'VirtualDiskUpdate', + 'VirtualMachine', + 'VirtualMachineAssessPatchesResult', + 'VirtualMachineInstallPatchesParameters', + 'VirtualMachineInstallPatchesResult', + 'VirtualMachineInventoryItem', + 'VirtualMachineTemplate', + 'VirtualMachineTemplateInventoryItem', + 'VirtualMachineTemplatesList', + 'VirtualMachineUpdate', + 'VirtualMachinesList', + 'VirtualNetwork', + 'VirtualNetworkInventoryItem', + 'VirtualNetworksList', + 'VirtualSCSIController', + 'WindowsParameters', + 'CreatedByType', + 'DiskMode', + 'DiskType', + 'FirmwareType', + 'IPAddressAllocationMethod', + 'IdentityType', + 'InventoryType', + 'NICType', + 'OsType', + 'OsTypeUM', + 'PatchOperationStartedBy', + 'PatchOperationStatus', + 'PatchServiceUsed', + 'PowerOnBootOption', + 'ProvisioningAction', + 'ProvisioningState', + 'SCSIControllerType', + 'StatusLevelTypes', + 'StatusTypes', + 
'VMGuestPatchClassificationLinux', + 'VMGuestPatchClassificationWindows', + 'VMGuestPatchRebootSetting', + 'VMGuestPatchRebootStatus', + 'VirtualSCSISharing', +] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() \ No newline at end of file diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/models/_azure_arc_vmware_management_service_api_enums.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/models/_azure_arc_vmware_management_service_api_enums.py new file mode 100644 index 00000000000..56ca49a3352 --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/models/_azure_arc_vmware_management_service_api_enums.py @@ -0,0 +1,230 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum +from azure.core import CaseInsensitiveEnumMeta + + +class CreatedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of identity that created the resource. + """ + + USER = "User" + APPLICATION = "Application" + MANAGED_IDENTITY = "ManagedIdentity" + KEY = "Key" + +class DiskMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Defines the different types of disk modes. + """ + + PERSISTENT = "persistent" + INDEPENDENT_PERSISTENT = "independent_persistent" + INDEPENDENT_NONPERSISTENT = "independent_nonpersistent" + +class DiskType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Defines the different types of disks. + """ + + FLAT = "flat" + PMEM = "pmem" + RAWPHYSICAL = "rawphysical" + RAWVIRTUAL = "rawvirtual" + SPARSE = "sparse" + SESPARSE = "sesparse" + UNKNOWN = "unknown" + +class FirmwareType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Firmware type + """ + + BIOS = "bios" + EFI = "efi" + +class IdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of managed service identity. + """ + + NONE = "None" + SYSTEM_ASSIGNED = "SystemAssigned" + +class InventoryType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The inventory type. + """ + + RESOURCE_POOL = "ResourcePool" + VIRTUAL_MACHINE = "VirtualMachine" + VIRTUAL_MACHINE_TEMPLATE = "VirtualMachineTemplate" + VIRTUAL_NETWORK = "VirtualNetwork" + CLUSTER = "Cluster" + DATASTORE = "Datastore" + HOST = "Host" + +class IPAddressAllocationMethod(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """IP address allocation method. + """ + + UNSET = "unset" + DYNAMIC = "dynamic" + STATIC = "static" + LINKLAYER = "linklayer" + RANDOM = "random" + OTHER = "other" + +class NICType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """NIC type + """ + + VMXNET3 = "vmxnet3" + VMXNET2 = "vmxnet2" + VMXNET = "vmxnet" + E1000 = "e1000" + E1000_E = "e1000e" + PCNET32 = "pcnet32" + +class OsType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Defines the different types of VM guest operating systems. + """ + + WINDOWS = "Windows" + LINUX = "Linux" + OTHER = "Other" + +class OsTypeUM(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The operating system type of the machine. 
+ """ + + WINDOWS = "Windows" + LINUX = "Linux" + +class PatchOperationStartedBy(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Indicates if operation was triggered by user or by platform. + """ + + USER = "User" + PLATFORM = "Platform" + +class PatchOperationStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The overall success or failure status of the operation. It remains "InProgress" until the + operation completes. At that point it will become "Unknown", "Failed", "Succeeded", or + "CompletedWithWarnings." + """ + + UNKNOWN = "Unknown" + IN_PROGRESS = "InProgress" + FAILED = "Failed" + SUCCEEDED = "Succeeded" + COMPLETED_WITH_WARNINGS = "CompletedWithWarnings" + +class PatchServiceUsed(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Specifies the patch service used for the operation. + """ + + UNKNOWN = "Unknown" + WU = "WU" + WU_WSUS = "WU_WSUS" + YUM = "YUM" + APT = "APT" + ZYPPER = "Zypper" + +class PowerOnBootOption(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Defines the options for power on boot. + """ + + ENABLED = "enabled" + DISABLED = "disabled" + +class ProvisioningAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Defines the different types of operations for guest agent. + """ + + INSTALL = "install" + UNINSTALL = "uninstall" + REPAIR = "repair" + +class ProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The current deployment state of resource. + """ + + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + PROVISIONING = "Provisioning" + UPDATING = "Updating" + DELETING = "Deleting" + ACCEPTED = "Accepted" + CREATED = "Created" + +class SCSIControllerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Defines the different types of SCSI controllers. + """ + + LSILOGIC = "lsilogic" + BUSLOGIC = "buslogic" + PVSCSI = "pvscsi" + LSILOGICSAS = "lsilogicsas" + +class StatusLevelTypes(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The level code. + """ + + INFO = "Info" + WARNING = "Warning" + ERROR = "Error" + +class StatusTypes(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The status of the hybrid machine agent. + """ + + CONNECTED = "Connected" + DISCONNECTED = "Disconnected" + ERROR = "Error" + +class VirtualSCSISharing(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Defines the sharing mode for sharing the SCSI bus. + """ + + NO_SHARING = "noSharing" + PHYSICAL_SHARING = "physicalSharing" + VIRTUAL_SHARING = "virtualSharing" + +class VMGuestPatchClassificationLinux(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + CRITICAL = "Critical" + SECURITY = "Security" + OTHER = "Other" + +class VMGuestPatchClassificationWindows(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + CRITICAL = "Critical" + SECURITY = "Security" + UPDATE_ROLL_UP = "UpdateRollUp" + FEATURE_PACK = "FeaturePack" + SERVICE_PACK = "ServicePack" + DEFINITION = "Definition" + TOOLS = "Tools" + UPDATES = "Updates" + +class VMGuestPatchRebootSetting(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Defines when it is acceptable to reboot a VM during a software update operation. + """ + + IF_REQUIRED = "IfRequired" + NEVER = "Never" + ALWAYS = "Always" + +class VMGuestPatchRebootStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The reboot state of the VM following completion of the operation. 
+ """ + + UNKNOWN = "Unknown" + NOT_NEEDED = "NotNeeded" + REQUIRED = "Required" + STARTED = "Started" + FAILED = "Failed" + COMPLETED = "Completed" diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/models/_models_py3.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/models/_models_py3.py new file mode 100644 index 00000000000..ef58bbce0cc --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/models/_models_py3.py @@ -0,0 +1,5263 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +import datetime +from typing import Any, Dict, List, Optional, TYPE_CHECKING, Union + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + import __init__ as _models + + +class AvailablePatchCountByClassification(msrest.serialization.Model): + """Summarization of patches available for installation on the machine by classification. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar security: Number of security patches available for installation. + :vartype security: int + :ivar critical: Number of critical patches available for installation. + :vartype critical: int + :ivar definition: Number of definition patches available for installation. + :vartype definition: int + :ivar update_rollup: Number of update Rollup patches available for installation. + :vartype update_rollup: int + :ivar feature_pack: Number of feature pack patches available for installation. + :vartype feature_pack: int + :ivar service_pack: Number of service pack patches available for installation. + :vartype service_pack: int + :ivar tools: Number of tools patches available for installation. + :vartype tools: int + :ivar updates: Number of updates category patches available for installation. + :vartype updates: int + :ivar other: Number of other patches available for installation. 
+ :vartype other: int + """ + + _validation = { + 'security': {'readonly': True}, + 'critical': {'readonly': True}, + 'definition': {'readonly': True}, + 'update_rollup': {'readonly': True}, + 'feature_pack': {'readonly': True}, + 'service_pack': {'readonly': True}, + 'tools': {'readonly': True}, + 'updates': {'readonly': True}, + 'other': {'readonly': True}, + } + + _attribute_map = { + 'security': {'key': 'security', 'type': 'int'}, + 'critical': {'key': 'critical', 'type': 'int'}, + 'definition': {'key': 'definition', 'type': 'int'}, + 'update_rollup': {'key': 'updateRollup', 'type': 'int'}, + 'feature_pack': {'key': 'featurePack', 'type': 'int'}, + 'service_pack': {'key': 'servicePack', 'type': 'int'}, + 'tools': {'key': 'tools', 'type': 'int'}, + 'updates': {'key': 'updates', 'type': 'int'}, + 'other': {'key': 'other', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(AvailablePatchCountByClassification, self).__init__(**kwargs) + self.security = None + self.critical = None + self.definition = None + self.update_rollup = None + self.feature_pack = None + self.service_pack = None + self.tools = None + self.updates = None + self.other = None + + +class Cluster(msrest.serialization.Model): + """Define the cluster. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar location: Required. Gets or sets the location. + :vartype location: str + :ivar extended_location: Gets or sets the extended location. + :vartype extended_location: ~azure.mgmt.connectedvmware.models.ExtendedLocation + :ivar system_data: The system data. + :vartype system_data: ~azure.mgmt.connectedvmware.models.SystemData + :ivar tags: A set of tags. Gets or sets the Resource tags. + :vartype tags: dict[str, str] + :ivar name: Gets or sets the name. + :vartype name: str + :ivar id: Gets or sets the Id. + :vartype id: str + :ivar type: Gets or sets the type of the resource. + :vartype type: str + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, + the resource provider must validate and persist this value. + :vartype kind: str + :ivar uuid: Gets or sets a unique identifier for this resource. + :vartype uuid: str + :ivar v_center_id: Gets or sets the ARM Id of the vCenter resource in which this cluster + resides. + :vartype v_center_id: str + :ivar mo_ref_id: Gets or sets the vCenter MoRef (Managed Object Reference) ID for the cluster. + :vartype mo_ref_id: str + :ivar inventory_item_id: Gets or sets the inventory Item ID for the cluster. + :vartype inventory_item_id: str + :ivar mo_name: Gets or sets the vCenter Managed Object name for the cluster. + :vartype mo_name: str + :ivar statuses: The resource status information. + :vartype statuses: list[~azure.mgmt.connectedvmware.models.ResourceStatus] + :ivar custom_resource_name: Gets the name of the corresponding resource in Kubernetes. + :vartype custom_resource_name: str + :ivar datastore_ids: Gets or sets the datastore ARM ids. + :vartype datastore_ids: list[str] + :ivar network_ids: Gets or sets the network ARM ids. + :vartype network_ids: list[str] + :ivar provisioning_state: Gets or sets the provisioning state. 
+ :vartype provisioning_state: str + """ + + _validation = { + 'location': {'required': True}, + 'system_data': {'readonly': True}, + 'name': {'readonly': True}, + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'uuid': {'readonly': True}, + 'mo_name': {'readonly': True}, + 'statuses': {'readonly': True}, + 'custom_resource_name': {'readonly': True}, + 'datastore_ids': {'readonly': True}, + 'network_ids': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'extended_location': {'key': 'extendedLocation', 'type': 'ExtendedLocation'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'uuid': {'key': 'properties.uuid', 'type': 'str'}, + 'v_center_id': {'key': 'properties.vCenterId', 'type': 'str'}, + 'mo_ref_id': {'key': 'properties.moRefId', 'type': 'str'}, + 'inventory_item_id': {'key': 'properties.inventoryItemId', 'type': 'str'}, + 'mo_name': {'key': 'properties.moName', 'type': 'str'}, + 'statuses': {'key': 'properties.statuses', 'type': '[ResourceStatus]'}, + 'custom_resource_name': {'key': 'properties.customResourceName', 'type': 'str'}, + 'datastore_ids': {'key': 'properties.datastoreIds', 'type': '[str]'}, + 'network_ids': {'key': 'properties.networkIds', 'type': '[str]'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + } + + def __init__( + self, + *, + location: str, + extended_location: Optional["_models.ExtendedLocation"] = None, + tags: Optional[Dict[str, str]] = None, + kind: Optional[str] = None, + v_center_id: Optional[str] = None, + mo_ref_id: Optional[str] = None, + inventory_item_id: Optional[str] = None, + **kwargs + ): + """ + :keyword location: Required. Gets or sets the location. + :paramtype location: str + :keyword extended_location: Gets or sets the extended location. + :paramtype extended_location: ~azure.mgmt.connectedvmware.models.ExtendedLocation + :keyword tags: A set of tags. Gets or sets the Resource tags. + :paramtype tags: dict[str, str] + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, + the resource provider must validate and persist this value. + :paramtype kind: str + :keyword v_center_id: Gets or sets the ARM Id of the vCenter resource in which this cluster + resides. + :paramtype v_center_id: str + :keyword mo_ref_id: Gets or sets the vCenter MoRef (Managed Object Reference) ID for the + cluster. + :paramtype mo_ref_id: str + :keyword inventory_item_id: Gets or sets the inventory Item ID for the cluster. + :paramtype inventory_item_id: str + """ + super(Cluster, self).__init__(**kwargs) + self.location = location + self.extended_location = extended_location + self.system_data = None + self.tags = tags + self.name = None + self.id = None + self.type = None + self.kind = kind + self.uuid = None + self.v_center_id = v_center_id + self.mo_ref_id = mo_ref_id + self.inventory_item_id = inventory_item_id + self.mo_name = None + self.statuses = None + self.custom_resource_name = None + self.datastore_ids = None + self.network_ids = None + self.provisioning_state = None + + +class InventoryItemProperties(msrest.serialization.Model): + """Defines the resource properties. 
+ + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ClusterInventoryItem, DatastoreInventoryItem, HostInventoryItem, ResourcePoolInventoryItem, VirtualMachineInventoryItem, VirtualMachineTemplateInventoryItem, VirtualNetworkInventoryItem. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar inventory_type: Required. They inventory type.Constant filled by server. Known values + are: "ResourcePool", "VirtualMachine", "VirtualMachineTemplate", "VirtualNetwork", "Cluster", + "Datastore", "Host". + :vartype inventory_type: str or ~azure.mgmt.connectedvmware.models.InventoryType + :ivar managed_resource_id: Gets or sets the tracked resource id corresponding to the inventory + resource. + :vartype managed_resource_id: str + :ivar mo_ref_id: Gets or sets the MoRef (Managed Object Reference) ID for the inventory item. + :vartype mo_ref_id: str + :ivar mo_name: Gets or sets the vCenter Managed Object name for the inventory item. + :vartype mo_name: str + :ivar provisioning_state: Gets or sets the provisioning state. + :vartype provisioning_state: str + """ + + _validation = { + 'inventory_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'inventory_type': {'key': 'inventoryType', 'type': 'str'}, + 'managed_resource_id': {'key': 'managedResourceId', 'type': 'str'}, + 'mo_ref_id': {'key': 'moRefId', 'type': 'str'}, + 'mo_name': {'key': 'moName', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + } + + _subtype_map = { + 'inventory_type': {'Cluster': 'ClusterInventoryItem', 'Datastore': 'DatastoreInventoryItem', 'Host': 'HostInventoryItem', 'ResourcePool': 'ResourcePoolInventoryItem', 'VirtualMachine': 'VirtualMachineInventoryItem', 'VirtualMachineTemplate': 'VirtualMachineTemplateInventoryItem', 'VirtualNetwork': 'VirtualNetworkInventoryItem'} + } + + def __init__( + self, + *, + managed_resource_id: Optional[str] = None, + mo_ref_id: Optional[str] = None, + mo_name: Optional[str] = None, + **kwargs + ): + """ + :keyword managed_resource_id: Gets or sets the tracked resource id corresponding to the + inventory resource. + :paramtype managed_resource_id: str + :keyword mo_ref_id: Gets or sets the MoRef (Managed Object Reference) ID for the inventory + item. + :paramtype mo_ref_id: str + :keyword mo_name: Gets or sets the vCenter Managed Object name for the inventory item. + :paramtype mo_name: str + """ + super(InventoryItemProperties, self).__init__(**kwargs) + self.inventory_type = None # type: Optional[str] + self.managed_resource_id = managed_resource_id + self.mo_ref_id = mo_ref_id + self.mo_name = mo_name + self.provisioning_state = None + + +class ClusterInventoryItem(InventoryItemProperties): + """The cluster inventory item. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar inventory_type: Required. They inventory type.Constant filled by server. Known values + are: "ResourcePool", "VirtualMachine", "VirtualMachineTemplate", "VirtualNetwork", "Cluster", + "Datastore", "Host". + :vartype inventory_type: str or ~azure.mgmt.connectedvmware.models.InventoryType + :ivar managed_resource_id: Gets or sets the tracked resource id corresponding to the inventory + resource. 
+ :vartype managed_resource_id: str + :ivar mo_ref_id: Gets or sets the MoRef (Managed Object Reference) ID for the inventory item. + :vartype mo_ref_id: str + :ivar mo_name: Gets or sets the vCenter Managed Object name for the inventory item. + :vartype mo_name: str + :ivar provisioning_state: Gets or sets the provisioning state. + :vartype provisioning_state: str + """ + + _validation = { + 'inventory_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'inventory_type': {'key': 'inventoryType', 'type': 'str'}, + 'managed_resource_id': {'key': 'managedResourceId', 'type': 'str'}, + 'mo_ref_id': {'key': 'moRefId', 'type': 'str'}, + 'mo_name': {'key': 'moName', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + } + + def __init__( + self, + *, + managed_resource_id: Optional[str] = None, + mo_ref_id: Optional[str] = None, + mo_name: Optional[str] = None, + **kwargs + ): + """ + :keyword managed_resource_id: Gets or sets the tracked resource id corresponding to the + inventory resource. + :paramtype managed_resource_id: str + :keyword mo_ref_id: Gets or sets the MoRef (Managed Object Reference) ID for the inventory + item. + :paramtype mo_ref_id: str + :keyword mo_name: Gets or sets the vCenter Managed Object name for the inventory item. + :paramtype mo_name: str + """ + super(ClusterInventoryItem, self).__init__(managed_resource_id=managed_resource_id, mo_ref_id=mo_ref_id, mo_name=mo_name, **kwargs) + self.inventory_type = 'Cluster' # type: str + + +class ClustersList(msrest.serialization.Model): + """List of Clusters. + + All required parameters must be populated in order to send to Azure. + + :ivar next_link: Url to follow for getting next page of Clusters. + :vartype next_link: str + :ivar value: Required. Array of Clusters. + :vartype value: list[~azure.mgmt.connectedvmware.models.Cluster] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[Cluster]'}, + } + + def __init__( + self, + *, + value: List["_models.Cluster"], + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword next_link: Url to follow for getting next page of Clusters. + :paramtype next_link: str + :keyword value: Required. Array of Clusters. + :paramtype value: list[~azure.mgmt.connectedvmware.models.Cluster] + """ + super(ClustersList, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class Condition(msrest.serialization.Model): + """Condition defines an extension to status. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar status: Status of the condition. + :vartype status: str + :ivar reason: The reason for the condition's status. + :vartype reason: str + :ivar message: A human readable message indicating details about the status. + :vartype message: str + :ivar severity: Severity with which to treat failures of this type of condition. 
+ :vartype severity: str + """ + + _validation = { + 'status': {'readonly': True}, + 'reason': {'readonly': True}, + 'message': {'readonly': True}, + 'severity': {'readonly': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'reason': {'key': 'reason', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'severity': {'key': 'severity', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(Condition, self).__init__(**kwargs) + self.status = None + self.reason = None + self.message = None + self.severity = None + + +class Datastore(msrest.serialization.Model): + """Define the datastore. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar location: Required. Gets or sets the location. + :vartype location: str + :ivar extended_location: Gets or sets the extended location. + :vartype extended_location: ~azure.mgmt.connectedvmware.models.ExtendedLocation + :ivar system_data: The system data. + :vartype system_data: ~azure.mgmt.connectedvmware.models.SystemData + :ivar tags: A set of tags. Gets or sets the Resource tags. + :vartype tags: dict[str, str] + :ivar name: Gets or sets the name. + :vartype name: str + :ivar id: Gets or sets the Id. + :vartype id: str + :ivar type: Gets or sets the type of the resource. + :vartype type: str + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, + the resource provider must validate and persist this value. + :vartype kind: str + :ivar uuid: Gets or sets a unique identifier for this resource. + :vartype uuid: str + :ivar v_center_id: Gets or sets the ARM Id of the vCenter resource in which this datastore + resides. + :vartype v_center_id: str + :ivar mo_ref_id: Gets or sets the vCenter MoRef (Managed Object Reference) ID for the + datastore. + :vartype mo_ref_id: str + :ivar inventory_item_id: Gets or sets the inventory Item ID for the datastore. + :vartype inventory_item_id: str + :ivar mo_name: Gets or sets the vCenter Managed Object name for the datastore. + :vartype mo_name: str + :ivar statuses: The resource status information. + :vartype statuses: list[~azure.mgmt.connectedvmware.models.ResourceStatus] + :ivar custom_resource_name: Gets the name of the corresponding resource in Kubernetes. + :vartype custom_resource_name: str + :ivar provisioning_state: Provisioning state of the resource. Known values are: "Succeeded", + "Failed", "Canceled", "Provisioning", "Updating", "Deleting", "Accepted", "Created". 
+ :vartype provisioning_state: str or ~azure.mgmt.connectedvmware.models.ProvisioningState + """ + + _validation = { + 'location': {'required': True}, + 'system_data': {'readonly': True}, + 'name': {'readonly': True}, + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'uuid': {'readonly': True}, + 'mo_name': {'readonly': True}, + 'statuses': {'readonly': True}, + 'custom_resource_name': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'extended_location': {'key': 'extendedLocation', 'type': 'ExtendedLocation'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'uuid': {'key': 'properties.uuid', 'type': 'str'}, + 'v_center_id': {'key': 'properties.vCenterId', 'type': 'str'}, + 'mo_ref_id': {'key': 'properties.moRefId', 'type': 'str'}, + 'inventory_item_id': {'key': 'properties.inventoryItemId', 'type': 'str'}, + 'mo_name': {'key': 'properties.moName', 'type': 'str'}, + 'statuses': {'key': 'properties.statuses', 'type': '[ResourceStatus]'}, + 'custom_resource_name': {'key': 'properties.customResourceName', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + } + + def __init__( + self, + *, + location: str, + extended_location: Optional["_models.ExtendedLocation"] = None, + tags: Optional[Dict[str, str]] = None, + kind: Optional[str] = None, + v_center_id: Optional[str] = None, + mo_ref_id: Optional[str] = None, + inventory_item_id: Optional[str] = None, + **kwargs + ): + """ + :keyword location: Required. Gets or sets the location. + :paramtype location: str + :keyword extended_location: Gets or sets the extended location. + :paramtype extended_location: ~azure.mgmt.connectedvmware.models.ExtendedLocation + :keyword tags: A set of tags. Gets or sets the Resource tags. + :paramtype tags: dict[str, str] + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, + the resource provider must validate and persist this value. + :paramtype kind: str + :keyword v_center_id: Gets or sets the ARM Id of the vCenter resource in which this datastore + resides. + :paramtype v_center_id: str + :keyword mo_ref_id: Gets or sets the vCenter MoRef (Managed Object Reference) ID for the + datastore. + :paramtype mo_ref_id: str + :keyword inventory_item_id: Gets or sets the inventory Item ID for the datastore. + :paramtype inventory_item_id: str + """ + super(Datastore, self).__init__(**kwargs) + self.location = location + self.extended_location = extended_location + self.system_data = None + self.tags = tags + self.name = None + self.id = None + self.type = None + self.kind = kind + self.uuid = None + self.v_center_id = v_center_id + self.mo_ref_id = mo_ref_id + self.inventory_item_id = inventory_item_id + self.mo_name = None + self.statuses = None + self.custom_resource_name = None + self.provisioning_state = None + + +class DatastoreInventoryItem(InventoryItemProperties): + """The datastore inventory item. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar inventory_type: Required. 
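
(Another illustrative sketch, not from the patch: Datastore is a tracked resource whose vCenter-specific fields are flattened under "properties.*" keys in _attribute_map. The snippet below assumes the same import path as above and msrest's serialize() helper; the resource IDs are placeholders, and ExtendedLocation is defined further down in this same models file.)

    from azure.mgmt.connectedvmware import models as vmware_models

    datastore = vmware_models.Datastore(
        location="eastus",
        extended_location=vmware_models.ExtendedLocation(
            type="CustomLocation",
            name="<custom-location-resource-id>",  # placeholder
        ),
        v_center_id="<vcenter-resource-id>",  # placeholder
        mo_ref_id="datastore-101",
    )

    # serialize() applies the "properties.*" flattening from _attribute_map,
    # so moRefId ends up nested under the top-level "properties" object.
    body = datastore.serialize()
    print(body["properties"]["moRefId"])  # "datastore-101"
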
They inventory type.Constant filled by server. Known values + are: "ResourcePool", "VirtualMachine", "VirtualMachineTemplate", "VirtualNetwork", "Cluster", + "Datastore", "Host". + :vartype inventory_type: str or ~azure.mgmt.connectedvmware.models.InventoryType + :ivar managed_resource_id: Gets or sets the tracked resource id corresponding to the inventory + resource. + :vartype managed_resource_id: str + :ivar mo_ref_id: Gets or sets the MoRef (Managed Object Reference) ID for the inventory item. + :vartype mo_ref_id: str + :ivar mo_name: Gets or sets the vCenter Managed Object name for the inventory item. + :vartype mo_name: str + :ivar provisioning_state: Gets or sets the provisioning state. + :vartype provisioning_state: str + :ivar capacity_gb: Gets or sets Maximum capacity of this datastore, in GBs. + :vartype capacity_gb: long + :ivar free_space_gb: Gets or sets Available space of this datastore, in GBs. + :vartype free_space_gb: long + """ + + _validation = { + 'inventory_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'inventory_type': {'key': 'inventoryType', 'type': 'str'}, + 'managed_resource_id': {'key': 'managedResourceId', 'type': 'str'}, + 'mo_ref_id': {'key': 'moRefId', 'type': 'str'}, + 'mo_name': {'key': 'moName', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'capacity_gb': {'key': 'capacityGB', 'type': 'long'}, + 'free_space_gb': {'key': 'freeSpaceGB', 'type': 'long'}, + } + + def __init__( + self, + *, + managed_resource_id: Optional[str] = None, + mo_ref_id: Optional[str] = None, + mo_name: Optional[str] = None, + capacity_gb: Optional[int] = None, + free_space_gb: Optional[int] = None, + **kwargs + ): + """ + :keyword managed_resource_id: Gets or sets the tracked resource id corresponding to the + inventory resource. + :paramtype managed_resource_id: str + :keyword mo_ref_id: Gets or sets the MoRef (Managed Object Reference) ID for the inventory + item. + :paramtype mo_ref_id: str + :keyword mo_name: Gets or sets the vCenter Managed Object name for the inventory item. + :paramtype mo_name: str + :keyword capacity_gb: Gets or sets Maximum capacity of this datastore, in GBs. + :paramtype capacity_gb: long + :keyword free_space_gb: Gets or sets Available space of this datastore, in GBs. + :paramtype free_space_gb: long + """ + super(DatastoreInventoryItem, self).__init__(managed_resource_id=managed_resource_id, mo_ref_id=mo_ref_id, mo_name=mo_name, **kwargs) + self.inventory_type = 'Datastore' # type: str + self.capacity_gb = capacity_gb + self.free_space_gb = free_space_gb + + +class DatastoresList(msrest.serialization.Model): + """List of Datastores. + + All required parameters must be populated in order to send to Azure. + + :ivar next_link: Url to follow for getting next page of Datastores. + :vartype next_link: str + :ivar value: Required. Array of Datastores. + :vartype value: list[~azure.mgmt.connectedvmware.models.Datastore] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[Datastore]'}, + } + + def __init__( + self, + *, + value: List["_models.Datastore"], + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword next_link: Url to follow for getting next page of Datastores. + :paramtype next_link: str + :keyword value: Required. Array of Datastores. 
+ :paramtype value: list[~azure.mgmt.connectedvmware.models.Datastore] + """ + super(DatastoresList, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class ErrorDefinition(msrest.serialization.Model): + """Error definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: Service specific error code which serves as the substatus for the HTTP error code. + :vartype code: str + :ivar message: Description of the error. + :vartype message: str + :ivar details: Internal error details. + :vartype details: list[~azure.mgmt.connectedvmware.models.ErrorDefinition] + """ + + _validation = { + 'code': {'readonly': True}, + 'message': {'readonly': True}, + 'details': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorDefinition]'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ErrorDefinition, self).__init__(**kwargs) + self.code = None + self.message = None + self.details = None + + +class ErrorDetail(msrest.serialization.Model): + """Error details. + + All required parameters must be populated in order to send to Azure. + + :ivar code: Required. The error's code. + :vartype code: str + :ivar message: Required. A human readable error message. + :vartype message: str + :ivar target: Indicates which property in the request is responsible for the error. + :vartype target: str + :ivar details: Additional error details. + :vartype details: list[~azure.mgmt.connectedvmware.models.ErrorDetail] + """ + + _validation = { + 'code': {'required': True}, + 'message': {'required': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorDetail]'}, + } + + def __init__( + self, + *, + code: str, + message: str, + target: Optional[str] = None, + details: Optional[List["_models.ErrorDetail"]] = None, + **kwargs + ): + """ + :keyword code: Required. The error's code. + :paramtype code: str + :keyword message: Required. A human readable error message. + :paramtype message: str + :keyword target: Indicates which property in the request is responsible for the error. + :paramtype target: str + :keyword details: Additional error details. + :paramtype details: list[~azure.mgmt.connectedvmware.models.ErrorDetail] + """ + super(ErrorDetail, self).__init__(**kwargs) + self.code = code + self.message = message + self.target = target + self.details = details + + +class ErrorResponse(msrest.serialization.Model): + """Error response. + + :ivar error: The error details. + :vartype error: ~azure.mgmt.connectedvmware.models.ErrorDefinition + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorDefinition'}, + } + + def __init__( + self, + *, + error: Optional["_models.ErrorDefinition"] = None, + **kwargs + ): + """ + :keyword error: The error details. + :paramtype error: ~azure.mgmt.connectedvmware.models.ErrorDefinition + """ + super(ErrorResponse, self).__init__(**kwargs) + self.error = error + + +class ExtendedLocation(msrest.serialization.Model): + """The extended location. + + :ivar type: The extended location type. + :vartype type: str + :ivar name: The extended location name. 
+ :vartype name: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Optional[str] = None, + name: Optional[str] = None, + **kwargs + ): + """ + :keyword type: The extended location type. + :paramtype type: str + :keyword name: The extended location name. + :paramtype name: str + """ + super(ExtendedLocation, self).__init__(**kwargs) + self.type = type + self.name = name + + +class Resource(msrest.serialization.Model): + """Common fields that are returned in the response for all Azure Resource Manager resources. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + + +class ProxyResource(Resource): + """The resource model definition for a Azure Resource Manager proxy resource. It will not have tags and a location. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ProxyResource, self).__init__(**kwargs) + + +class GuestAgent(ProxyResource): + """Defines the GuestAgent. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: The system data. + :vartype system_data: ~azure.mgmt.connectedvmware.models.SystemData + :ivar uuid: Gets or sets a unique identifier for this resource. + :vartype uuid: str + :ivar credentials: Username / Password Credentials to provision guest agent. 
+ :vartype credentials: ~azure.mgmt.connectedvmware.models.GuestCredential + :ivar http_proxy_config: HTTP Proxy configuration for the VM. + :vartype http_proxy_config: ~azure.mgmt.connectedvmware.models.HttpProxyConfiguration + :ivar provisioning_action: Gets or sets the guest agent provisioning action. Known values are: + "install", "uninstall", "repair". + :vartype provisioning_action: str or ~azure.mgmt.connectedvmware.models.ProvisioningAction + :ivar status: Gets or sets the guest agent status. + :vartype status: str + :ivar custom_resource_name: Gets the name of the corresponding resource in Kubernetes. + :vartype custom_resource_name: str + :ivar statuses: The resource status information. + :vartype statuses: list[~azure.mgmt.connectedvmware.models.ResourceStatus] + :ivar provisioning_state: Gets or sets the provisioning state. + :vartype provisioning_state: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'uuid': {'readonly': True}, + 'status': {'readonly': True}, + 'custom_resource_name': {'readonly': True}, + 'statuses': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'uuid': {'key': 'properties.uuid', 'type': 'str'}, + 'credentials': {'key': 'properties.credentials', 'type': 'GuestCredential'}, + 'http_proxy_config': {'key': 'properties.httpProxyConfig', 'type': 'HttpProxyConfiguration'}, + 'provisioning_action': {'key': 'properties.provisioningAction', 'type': 'str'}, + 'status': {'key': 'properties.status', 'type': 'str'}, + 'custom_resource_name': {'key': 'properties.customResourceName', 'type': 'str'}, + 'statuses': {'key': 'properties.statuses', 'type': '[ResourceStatus]'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + } + + def __init__( + self, + *, + credentials: Optional["_models.GuestCredential"] = None, + http_proxy_config: Optional["_models.HttpProxyConfiguration"] = None, + provisioning_action: Optional[Union[str, "_models.ProvisioningAction"]] = None, + **kwargs + ): + """ + :keyword credentials: Username / Password Credentials to provision guest agent. + :paramtype credentials: ~azure.mgmt.connectedvmware.models.GuestCredential + :keyword http_proxy_config: HTTP Proxy configuration for the VM. + :paramtype http_proxy_config: ~azure.mgmt.connectedvmware.models.HttpProxyConfiguration + :keyword provisioning_action: Gets or sets the guest agent provisioning action. Known values + are: "install", "uninstall", "repair". + :paramtype provisioning_action: str or ~azure.mgmt.connectedvmware.models.ProvisioningAction + """ + super(GuestAgent, self).__init__(**kwargs) + self.system_data = None + self.uuid = None + self.credentials = credentials + self.http_proxy_config = http_proxy_config + self.provisioning_action = provisioning_action + self.status = None + self.custom_resource_name = None + self.statuses = None + self.provisioning_state = None + + +class GuestAgentList(msrest.serialization.Model): + """List of GuestAgent. + + All required parameters must be populated in order to send to Azure. + + :ivar next_link: Url to follow for getting next page of GuestAgent. + :vartype next_link: str + :ivar value: Required. Array of GuestAgent. 
+ :vartype value: list[~azure.mgmt.connectedvmware.models.GuestAgent] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[GuestAgent]'}, + } + + def __init__( + self, + *, + value: List["_models.GuestAgent"], + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword next_link: Url to follow for getting next page of GuestAgent. + :paramtype next_link: str + :keyword value: Required. Array of GuestAgent. + :paramtype value: list[~azure.mgmt.connectedvmware.models.GuestAgent] + """ + super(GuestAgentList, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class GuestAgentProfile(msrest.serialization.Model): + """Defines the resource properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar vm_uuid: Specifies the VM's unique SMBIOS ID. + :vartype vm_uuid: str + :ivar status: The status of the hybrid machine agent. Known values are: "Connected", + "Disconnected", "Error". + :vartype status: str or ~azure.mgmt.connectedvmware.models.StatusTypes + :ivar last_status_change: The time of the last status change. + :vartype last_status_change: ~datetime.datetime + :ivar agent_version: The hybrid machine agent full version. + :vartype agent_version: str + :ivar error_details: Details about the error state. + :vartype error_details: list[~azure.mgmt.connectedvmware.models.ErrorDetail] + """ + + _validation = { + 'vm_uuid': {'readonly': True}, + 'status': {'readonly': True}, + 'last_status_change': {'readonly': True}, + 'agent_version': {'readonly': True}, + 'error_details': {'readonly': True}, + } + + _attribute_map = { + 'vm_uuid': {'key': 'vmUuid', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'last_status_change': {'key': 'lastStatusChange', 'type': 'iso-8601'}, + 'agent_version': {'key': 'agentVersion', 'type': 'str'}, + 'error_details': {'key': 'errorDetails', 'type': '[ErrorDetail]'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(GuestAgentProfile, self).__init__(**kwargs) + self.vm_uuid = None + self.status = None + self.last_status_change = None + self.agent_version = None + self.error_details = None + + +class GuestCredential(msrest.serialization.Model): + """Username / Password Credentials to connect to guest. + + :ivar username: Gets or sets username to connect with the guest. + :vartype username: str + :ivar password: Gets or sets the password to connect with the guest. + :vartype password: str + """ + + _attribute_map = { + 'username': {'key': 'username', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'str'}, + } + + def __init__( + self, + *, + username: Optional[str] = None, + password: Optional[str] = None, + **kwargs + ): + """ + :keyword username: Gets or sets username to connect with the guest. + :paramtype username: str + :keyword password: Gets or sets the password to connect with the guest. + :paramtype password: str + """ + super(GuestCredential, self).__init__(**kwargs) + self.username = username + self.password = password + + +class HardwareProfile(msrest.serialization.Model): + """Defines the resource properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar memory_size_mb: Gets or sets memory size in MBs for the vm. + :vartype memory_size_mb: int + :ivar num_cp_us: Gets or sets the number of vCPUs for the vm. 
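
(Usage sketch, illustrative only: provisioning the guest agent pairs a GuestAgent resource with GuestCredential, and provisioning_action takes the "install"/"uninstall"/"repair" values listed above. Same import assumption as before.)

    from azure.mgmt.connectedvmware import models as vmware_models

    guest_agent = vmware_models.GuestAgent(
        credentials=vmware_models.GuestCredential(
            username="Administrator",
            password="<retrieved-from-a-secret-store>",  # placeholder, never hard-code
        ),
        provisioning_action="install",  # one of the ProvisioningAction known values
    )

    # Read-only fields such as status and provisioning_state stay None until
    # the service populates them in a response.
    print(guest_agent.provisioning_action)
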
+ :vartype num_cp_us: int + :ivar num_cores_per_socket: Gets or sets the number of cores per socket for the vm. Defaults to + 1 if unspecified. + :vartype num_cores_per_socket: int + :ivar cpu_hot_add_enabled: Gets or sets a value indicating whether virtual processors can be + added while this virtual machine is running. + :vartype cpu_hot_add_enabled: bool + :ivar cpu_hot_remove_enabled: Gets or sets a value indicating whether virtual processors can be + removed while this virtual machine is running. + :vartype cpu_hot_remove_enabled: bool + :ivar memory_hot_add_enabled: Gets or sets a value indicating whether memory can be added while + this virtual machine is running. + :vartype memory_hot_add_enabled: bool + """ + + _validation = { + 'cpu_hot_add_enabled': {'readonly': True}, + 'cpu_hot_remove_enabled': {'readonly': True}, + 'memory_hot_add_enabled': {'readonly': True}, + } + + _attribute_map = { + 'memory_size_mb': {'key': 'memorySizeMB', 'type': 'int'}, + 'num_cp_us': {'key': 'numCPUs', 'type': 'int'}, + 'num_cores_per_socket': {'key': 'numCoresPerSocket', 'type': 'int'}, + 'cpu_hot_add_enabled': {'key': 'cpuHotAddEnabled', 'type': 'bool'}, + 'cpu_hot_remove_enabled': {'key': 'cpuHotRemoveEnabled', 'type': 'bool'}, + 'memory_hot_add_enabled': {'key': 'memoryHotAddEnabled', 'type': 'bool'}, + } + + def __init__( + self, + *, + memory_size_mb: Optional[int] = None, + num_cp_us: Optional[int] = None, + num_cores_per_socket: Optional[int] = None, + **kwargs + ): + """ + :keyword memory_size_mb: Gets or sets memory size in MBs for the vm. + :paramtype memory_size_mb: int + :keyword num_cp_us: Gets or sets the number of vCPUs for the vm. + :paramtype num_cp_us: int + :keyword num_cores_per_socket: Gets or sets the number of cores per socket for the vm. Defaults + to 1 if unspecified. + :paramtype num_cores_per_socket: int + """ + super(HardwareProfile, self).__init__(**kwargs) + self.memory_size_mb = memory_size_mb + self.num_cp_us = num_cp_us + self.num_cores_per_socket = num_cores_per_socket + self.cpu_hot_add_enabled = None + self.cpu_hot_remove_enabled = None + self.memory_hot_add_enabled = None + + +class Host(msrest.serialization.Model): + """Define the host. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar location: Required. Gets or sets the location. + :vartype location: str + :ivar extended_location: Gets or sets the extended location. + :vartype extended_location: ~azure.mgmt.connectedvmware.models.ExtendedLocation + :ivar system_data: The system data. + :vartype system_data: ~azure.mgmt.connectedvmware.models.SystemData + :ivar tags: A set of tags. Gets or sets the Resource tags. + :vartype tags: dict[str, str] + :ivar name: Gets or sets the name. + :vartype name: str + :ivar id: Gets or sets the Id. + :vartype id: str + :ivar type: Gets or sets the type of the resource. + :vartype type: str + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, + the resource provider must validate and persist this value. + :vartype kind: str + :ivar uuid: Gets or sets a unique identifier for this resource. + :vartype uuid: str + :ivar v_center_id: Gets or sets the ARM Id of the vCenter resource in which this host resides. 
+ :vartype v_center_id: str + :ivar mo_ref_id: Gets or sets the vCenter MoRef (Managed Object Reference) ID for the host. + :vartype mo_ref_id: str + :ivar inventory_item_id: Gets or sets the inventory Item ID for the host. + :vartype inventory_item_id: str + :ivar mo_name: Gets or sets the vCenter Managed Object name for the host. + :vartype mo_name: str + :ivar statuses: The resource status information. + :vartype statuses: list[~azure.mgmt.connectedvmware.models.ResourceStatus] + :ivar custom_resource_name: Gets the name of the corresponding resource in Kubernetes. + :vartype custom_resource_name: str + :ivar provisioning_state: Gets or sets the provisioning state. + :vartype provisioning_state: str + """ + + _validation = { + 'location': {'required': True}, + 'system_data': {'readonly': True}, + 'name': {'readonly': True}, + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'uuid': {'readonly': True}, + 'mo_name': {'readonly': True}, + 'statuses': {'readonly': True}, + 'custom_resource_name': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'extended_location': {'key': 'extendedLocation', 'type': 'ExtendedLocation'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'uuid': {'key': 'properties.uuid', 'type': 'str'}, + 'v_center_id': {'key': 'properties.vCenterId', 'type': 'str'}, + 'mo_ref_id': {'key': 'properties.moRefId', 'type': 'str'}, + 'inventory_item_id': {'key': 'properties.inventoryItemId', 'type': 'str'}, + 'mo_name': {'key': 'properties.moName', 'type': 'str'}, + 'statuses': {'key': 'properties.statuses', 'type': '[ResourceStatus]'}, + 'custom_resource_name': {'key': 'properties.customResourceName', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + } + + def __init__( + self, + *, + location: str, + extended_location: Optional["_models.ExtendedLocation"] = None, + tags: Optional[Dict[str, str]] = None, + kind: Optional[str] = None, + v_center_id: Optional[str] = None, + mo_ref_id: Optional[str] = None, + inventory_item_id: Optional[str] = None, + **kwargs + ): + """ + :keyword location: Required. Gets or sets the location. + :paramtype location: str + :keyword extended_location: Gets or sets the extended location. + :paramtype extended_location: ~azure.mgmt.connectedvmware.models.ExtendedLocation + :keyword tags: A set of tags. Gets or sets the Resource tags. + :paramtype tags: dict[str, str] + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, + the resource provider must validate and persist this value. + :paramtype kind: str + :keyword v_center_id: Gets or sets the ARM Id of the vCenter resource in which this host + resides. + :paramtype v_center_id: str + :keyword mo_ref_id: Gets or sets the vCenter MoRef (Managed Object Reference) ID for the host. + :paramtype mo_ref_id: str + :keyword inventory_item_id: Gets or sets the inventory Item ID for the host. 
+ :paramtype inventory_item_id: str + """ + super(Host, self).__init__(**kwargs) + self.location = location + self.extended_location = extended_location + self.system_data = None + self.tags = tags + self.name = None + self.id = None + self.type = None + self.kind = kind + self.uuid = None + self.v_center_id = v_center_id + self.mo_ref_id = mo_ref_id + self.inventory_item_id = inventory_item_id + self.mo_name = None + self.statuses = None + self.custom_resource_name = None + self.provisioning_state = None + + +class HostInventoryItem(InventoryItemProperties): + """The host inventory item. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar inventory_type: Required. They inventory type.Constant filled by server. Known values + are: "ResourcePool", "VirtualMachine", "VirtualMachineTemplate", "VirtualNetwork", "Cluster", + "Datastore", "Host". + :vartype inventory_type: str or ~azure.mgmt.connectedvmware.models.InventoryType + :ivar managed_resource_id: Gets or sets the tracked resource id corresponding to the inventory + resource. + :vartype managed_resource_id: str + :ivar mo_ref_id: Gets or sets the MoRef (Managed Object Reference) ID for the inventory item. + :vartype mo_ref_id: str + :ivar mo_name: Gets or sets the vCenter Managed Object name for the inventory item. + :vartype mo_name: str + :ivar provisioning_state: Gets or sets the provisioning state. + :vartype provisioning_state: str + :ivar parent: Parent host inventory resource details. + :vartype parent: ~azure.mgmt.connectedvmware.models.InventoryItemDetails + """ + + _validation = { + 'inventory_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'inventory_type': {'key': 'inventoryType', 'type': 'str'}, + 'managed_resource_id': {'key': 'managedResourceId', 'type': 'str'}, + 'mo_ref_id': {'key': 'moRefId', 'type': 'str'}, + 'mo_name': {'key': 'moName', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'parent': {'key': 'parent', 'type': 'InventoryItemDetails'}, + } + + def __init__( + self, + *, + managed_resource_id: Optional[str] = None, + mo_ref_id: Optional[str] = None, + mo_name: Optional[str] = None, + parent: Optional["_models.InventoryItemDetails"] = None, + **kwargs + ): + """ + :keyword managed_resource_id: Gets or sets the tracked resource id corresponding to the + inventory resource. + :paramtype managed_resource_id: str + :keyword mo_ref_id: Gets or sets the MoRef (Managed Object Reference) ID for the inventory + item. + :paramtype mo_ref_id: str + :keyword mo_name: Gets or sets the vCenter Managed Object name for the inventory item. + :paramtype mo_name: str + :keyword parent: Parent host inventory resource details. + :paramtype parent: ~azure.mgmt.connectedvmware.models.InventoryItemDetails + """ + super(HostInventoryItem, self).__init__(managed_resource_id=managed_resource_id, mo_ref_id=mo_ref_id, mo_name=mo_name, **kwargs) + self.inventory_type = 'Host' # type: str + self.parent = parent + + +class HostsList(msrest.serialization.Model): + """List of Hosts. + + All required parameters must be populated in order to send to Azure. + + :ivar next_link: Url to follow for getting next page of Hosts. + :vartype next_link: str + :ivar value: Required. Array of Hosts. 
+ :vartype value: list[~azure.mgmt.connectedvmware.models.Host] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[Host]'}, + } + + def __init__( + self, + *, + value: List["_models.Host"], + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword next_link: Url to follow for getting next page of Hosts. + :paramtype next_link: str + :keyword value: Required. Array of Hosts. + :paramtype value: list[~azure.mgmt.connectedvmware.models.Host] + """ + super(HostsList, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class HttpProxyConfiguration(msrest.serialization.Model): + """HTTP Proxy configuration for the VM. + + :ivar https_proxy: Gets or sets httpsProxy url. + :vartype https_proxy: str + """ + + _attribute_map = { + 'https_proxy': {'key': 'httpsProxy', 'type': 'str'}, + } + + def __init__( + self, + *, + https_proxy: Optional[str] = None, + **kwargs + ): + """ + :keyword https_proxy: Gets or sets httpsProxy url. + :paramtype https_proxy: str + """ + super(HttpProxyConfiguration, self).__init__(**kwargs) + self.https_proxy = https_proxy + + +class HybridIdentityMetadata(ProxyResource): + """Defines the HybridIdentityMetadata. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: The system data. + :vartype system_data: ~azure.mgmt.connectedvmware.models.SystemData + :ivar vm_id: Gets or sets the Vm Id. + :vartype vm_id: str + :ivar public_key: Gets or sets the Public Key. + :vartype public_key: str + :ivar identity: The identity of the resource. + :vartype identity: ~azure.mgmt.connectedvmware.models.Identity + :ivar provisioning_state: Gets or sets the provisioning state. + :vartype provisioning_state: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'identity': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'vm_id': {'key': 'properties.vmId', 'type': 'str'}, + 'public_key': {'key': 'properties.publicKey', 'type': 'str'}, + 'identity': {'key': 'properties.identity', 'type': 'Identity'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + } + + def __init__( + self, + *, + vm_id: Optional[str] = None, + public_key: Optional[str] = None, + **kwargs + ): + """ + :keyword vm_id: Gets or sets the Vm Id. + :paramtype vm_id: str + :keyword public_key: Gets or sets the Public Key. 
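
(One more illustrative note: every *sList model in this file follows the same value/next_link shape, which the generated operation classes normally wrap in a pager. A hand-rolled sketch of walking that shape, with a hypothetical fetch_page callable standing in for the HTTP call.)

    from azure.mgmt.connectedvmware import models as vmware_models

    def iter_hosts(fetch_page):
        """Yield Host models across pages.

        fetch_page is a hypothetical callable: it takes a next_link
        (or None for the first page) and returns a HostsList.
        """
        next_link = None
        while True:
            page: vmware_models.HostsList = fetch_page(next_link)
            for host in page.value:
                yield host
            next_link = page.next_link
            if not next_link:
                break
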
+ :paramtype public_key: str + """ + super(HybridIdentityMetadata, self).__init__(**kwargs) + self.system_data = None + self.vm_id = vm_id + self.public_key = public_key + self.identity = None + self.provisioning_state = None + + +class HybridIdentityMetadataList(msrest.serialization.Model): + """List of HybridIdentityMetadata. + + All required parameters must be populated in order to send to Azure. + + :ivar next_link: Url to follow for getting next page of HybridIdentityMetadata. + :vartype next_link: str + :ivar value: Required. Array of HybridIdentityMetadata. + :vartype value: list[~azure.mgmt.connectedvmware.models.HybridIdentityMetadata] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[HybridIdentityMetadata]'}, + } + + def __init__( + self, + *, + value: List["_models.HybridIdentityMetadata"], + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword next_link: Url to follow for getting next page of HybridIdentityMetadata. + :paramtype next_link: str + :keyword value: Required. Array of HybridIdentityMetadata. + :paramtype value: list[~azure.mgmt.connectedvmware.models.HybridIdentityMetadata] + """ + super(HybridIdentityMetadataList, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class Identity(msrest.serialization.Model): + """Managed service identity. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar principal_id: The principal id of managed service identity. + :vartype principal_id: str + :ivar tenant_id: The tenant of managed service identity. + :vartype tenant_id: str + :ivar type: Required. The type of managed service identity. Known values are: "None", + "SystemAssigned". + :vartype type: str or ~azure.mgmt.connectedvmware.models.IdentityType + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Union[str, "_models.IdentityType"], + **kwargs + ): + """ + :keyword type: Required. The type of managed service identity. Known values are: "None", + "SystemAssigned". + :paramtype type: str or ~azure.mgmt.connectedvmware.models.IdentityType + """ + super(Identity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = type + + +class InventoryItem(ProxyResource): + """Defines the inventory item. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: The system data. 
+ :vartype system_data: ~azure.mgmt.connectedvmware.models.SystemData + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, + the resource provider must validate and persist this value. + :vartype kind: str + :ivar inventory_type: Required. They inventory type.Constant filled by server. Known values + are: "ResourcePool", "VirtualMachine", "VirtualMachineTemplate", "VirtualNetwork", "Cluster", + "Datastore", "Host". + :vartype inventory_type: str or ~azure.mgmt.connectedvmware.models.InventoryType + :ivar managed_resource_id: Gets or sets the tracked resource id corresponding to the inventory + resource. + :vartype managed_resource_id: str + :ivar mo_ref_id: Gets or sets the MoRef (Managed Object Reference) ID for the inventory item. + :vartype mo_ref_id: str + :ivar mo_name: Gets or sets the vCenter Managed Object name for the inventory item. + :vartype mo_name: str + :ivar provisioning_state: Gets or sets the provisioning state. + :vartype provisioning_state: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'inventory_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'inventory_type': {'key': 'properties.inventoryType', 'type': 'str'}, + 'managed_resource_id': {'key': 'properties.managedResourceId', 'type': 'str'}, + 'mo_ref_id': {'key': 'properties.moRefId', 'type': 'str'}, + 'mo_name': {'key': 'properties.moName', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + } + + def __init__( + self, + *, + kind: Optional[str] = None, + managed_resource_id: Optional[str] = None, + mo_ref_id: Optional[str] = None, + mo_name: Optional[str] = None, + **kwargs + ): + """ + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, + the resource provider must validate and persist this value. + :paramtype kind: str + :keyword managed_resource_id: Gets or sets the tracked resource id corresponding to the + inventory resource. + :paramtype managed_resource_id: str + :keyword mo_ref_id: Gets or sets the MoRef (Managed Object Reference) ID for the inventory + item. + :paramtype mo_ref_id: str + :keyword mo_name: Gets or sets the vCenter Managed Object name for the inventory item. + :paramtype mo_name: str + """ + super(InventoryItem, self).__init__(**kwargs) + self.system_data = None + self.kind = kind + self.inventory_type = None # type: Optional[str] + self.managed_resource_id = managed_resource_id + self.mo_ref_id = mo_ref_id + self.mo_name = mo_name + self.provisioning_state = None + + +class InventoryItemDetails(msrest.serialization.Model): + """Defines the resource properties. + + :ivar inventory_item_id: Gets or sets the inventory Item ID for the resource. + :vartype inventory_item_id: str + :ivar mo_name: Gets or sets the vCenter Managed Object name for the resource. 
+ :vartype mo_name: str + """ + + _attribute_map = { + 'inventory_item_id': {'key': 'inventoryItemId', 'type': 'str'}, + 'mo_name': {'key': 'moName', 'type': 'str'}, + } + + def __init__( + self, + *, + inventory_item_id: Optional[str] = None, + mo_name: Optional[str] = None, + **kwargs + ): + """ + :keyword inventory_item_id: Gets or sets the inventory Item ID for the resource. + :paramtype inventory_item_id: str + :keyword mo_name: Gets or sets the vCenter Managed Object name for the resource. + :paramtype mo_name: str + """ + super(InventoryItemDetails, self).__init__(**kwargs) + self.inventory_item_id = inventory_item_id + self.mo_name = mo_name + + +class InventoryItemsList(msrest.serialization.Model): + """List of InventoryItems. + + All required parameters must be populated in order to send to Azure. + + :ivar next_link: Url to follow for getting next page of InventoryItems. + :vartype next_link: str + :ivar value: Required. Array of InventoryItems. + :vartype value: list[~azure.mgmt.connectedvmware.models.InventoryItem] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[InventoryItem]'}, + } + + def __init__( + self, + *, + value: List["_models.InventoryItem"], + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword next_link: Url to follow for getting next page of InventoryItems. + :paramtype next_link: str + :keyword value: Required. Array of InventoryItems. + :paramtype value: list[~azure.mgmt.connectedvmware.models.InventoryItem] + """ + super(InventoryItemsList, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class LinuxParameters(msrest.serialization.Model): + """Input for InstallPatches on a Linux VM, as directly received by the API. + + :ivar classifications_to_include: The update classifications to select when installing patches + for Linux. + :vartype classifications_to_include: list[str or + ~azure.mgmt.connectedvmware.models.VMGuestPatchClassificationLinux] + :ivar package_name_masks_to_include: packages to include in the patch operation. Format: + packageName_packageVersion. + :vartype package_name_masks_to_include: list[str] + :ivar package_name_masks_to_exclude: packages to exclude in the patch operation. Format: + packageName_packageVersion. + :vartype package_name_masks_to_exclude: list[str] + """ + + _attribute_map = { + 'classifications_to_include': {'key': 'classificationsToInclude', 'type': '[str]'}, + 'package_name_masks_to_include': {'key': 'packageNameMasksToInclude', 'type': '[str]'}, + 'package_name_masks_to_exclude': {'key': 'packageNameMasksToExclude', 'type': '[str]'}, + } + + def __init__( + self, + *, + classifications_to_include: Optional[List[Union[str, "_models.VMGuestPatchClassificationLinux"]]] = None, + package_name_masks_to_include: Optional[List[str]] = None, + package_name_masks_to_exclude: Optional[List[str]] = None, + **kwargs + ): + """ + :keyword classifications_to_include: The update classifications to select when installing + patches for Linux. + :paramtype classifications_to_include: list[str or + ~azure.mgmt.connectedvmware.models.VMGuestPatchClassificationLinux] + :keyword package_name_masks_to_include: packages to include in the patch operation. Format: + packageName_packageVersion. + :paramtype package_name_masks_to_include: list[str] + :keyword package_name_masks_to_exclude: packages to exclude in the patch operation. Format: + packageName_packageVersion. 
+ :paramtype package_name_masks_to_exclude: list[str] + """ + super(LinuxParameters, self).__init__(**kwargs) + self.classifications_to_include = classifications_to_include + self.package_name_masks_to_include = package_name_masks_to_include + self.package_name_masks_to_exclude = package_name_masks_to_exclude + + +class MachineExtension(msrest.serialization.Model): + """Describes a Machine Extension. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar location: Gets or sets the location. + :vartype location: str + :ivar system_data: The system data. + :vartype system_data: ~azure.mgmt.connectedvmware.models.SystemData + :ivar tags: A set of tags. Gets or sets the Resource tags. + :vartype tags: dict[str, str] + :ivar name: Gets or sets the name. + :vartype name: str + :ivar id: Gets or sets the Id. + :vartype id: str + :ivar type: Gets or sets the type of the resource. + :vartype type: str + :ivar force_update_tag: How the extension handler should be forced to update even if the + extension configuration has not changed. + :vartype force_update_tag: str + :ivar publisher: The name of the extension handler publisher. + :vartype publisher: str + :ivar type_properties_type: Specifies the type of the extension; an example is + "CustomScriptExtension". + :vartype type_properties_type: str + :ivar type_handler_version: Specifies the version of the script handler. + :vartype type_handler_version: str + :ivar enable_automatic_upgrade: Indicates whether the extension should be automatically + upgraded by the platform if there is a newer version available. + :vartype enable_automatic_upgrade: bool + :ivar auto_upgrade_minor_version: Indicates whether the extension should use a newer minor + version if one is available at deployment time. Once deployed, however, the extension will not + upgrade minor versions unless redeployed, even with this property set to true. + :vartype auto_upgrade_minor_version: bool + :ivar settings: Json formatted public settings for the extension. + :vartype settings: any + :ivar protected_settings: The extension can contain either protectedSettings or + protectedSettingsFromKeyVault or no protected settings at all. + :vartype protected_settings: any + :ivar provisioning_state: The provisioning state, which only appears in the response. + :vartype provisioning_state: str + :ivar instance_view: The machine extension instance view. 
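
(Brief illustrative sketch, not part of the patch: LinuxParameters carries the patch-selection input for a Linux InstallPatches call, so the classifications and package masks are plain lists of strings.)

    from azure.mgmt.connectedvmware import models as vmware_models

    linux_params = vmware_models.LinuxParameters(
        # Plain strings are accepted alongside the VMGuestPatchClassificationLinux enum.
        classifications_to_include=["Critical", "Security"],
        # Masks follow the documented packageName_packageVersion format.
        package_name_masks_to_include=["openssl_1.1.1k"],
    )
    print(linux_params.as_dict())
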
+ :vartype instance_view: + ~azure.mgmt.connectedvmware.models.MachineExtensionPropertiesInstanceView + """ + + _validation = { + 'system_data': {'readonly': True}, + 'name': {'readonly': True}, + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'force_update_tag': {'key': 'properties.forceUpdateTag', 'type': 'str'}, + 'publisher': {'key': 'properties.publisher', 'type': 'str'}, + 'type_properties_type': {'key': 'properties.type', 'type': 'str'}, + 'type_handler_version': {'key': 'properties.typeHandlerVersion', 'type': 'str'}, + 'enable_automatic_upgrade': {'key': 'properties.enableAutomaticUpgrade', 'type': 'bool'}, + 'auto_upgrade_minor_version': {'key': 'properties.autoUpgradeMinorVersion', 'type': 'bool'}, + 'settings': {'key': 'properties.settings', 'type': 'object'}, + 'protected_settings': {'key': 'properties.protectedSettings', 'type': 'object'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'instance_view': {'key': 'properties.instanceView', 'type': 'MachineExtensionPropertiesInstanceView'}, + } + + def __init__( + self, + *, + location: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + force_update_tag: Optional[str] = None, + publisher: Optional[str] = None, + type_properties_type: Optional[str] = None, + type_handler_version: Optional[str] = None, + enable_automatic_upgrade: Optional[bool] = None, + auto_upgrade_minor_version: Optional[bool] = None, + settings: Optional[Any] = None, + protected_settings: Optional[Any] = None, + instance_view: Optional["_models.MachineExtensionPropertiesInstanceView"] = None, + **kwargs + ): + """ + :keyword location: Gets or sets the location. + :paramtype location: str + :keyword tags: A set of tags. Gets or sets the Resource tags. + :paramtype tags: dict[str, str] + :keyword force_update_tag: How the extension handler should be forced to update even if the + extension configuration has not changed. + :paramtype force_update_tag: str + :keyword publisher: The name of the extension handler publisher. + :paramtype publisher: str + :keyword type_properties_type: Specifies the type of the extension; an example is + "CustomScriptExtension". + :paramtype type_properties_type: str + :keyword type_handler_version: Specifies the version of the script handler. + :paramtype type_handler_version: str + :keyword enable_automatic_upgrade: Indicates whether the extension should be automatically + upgraded by the platform if there is a newer version available. + :paramtype enable_automatic_upgrade: bool + :keyword auto_upgrade_minor_version: Indicates whether the extension should use a newer minor + version if one is available at deployment time. Once deployed, however, the extension will not + upgrade minor versions unless redeployed, even with this property set to true. + :paramtype auto_upgrade_minor_version: bool + :keyword settings: Json formatted public settings for the extension. + :paramtype settings: any + :keyword protected_settings: The extension can contain either protectedSettings or + protectedSettingsFromKeyVault or no protected settings at all. 
+ :paramtype protected_settings: any + :keyword instance_view: The machine extension instance view. + :paramtype instance_view: + ~azure.mgmt.connectedvmware.models.MachineExtensionPropertiesInstanceView + """ + super(MachineExtension, self).__init__(**kwargs) + self.location = location + self.system_data = None + self.tags = tags + self.name = None + self.id = None + self.type = None + self.force_update_tag = force_update_tag + self.publisher = publisher + self.type_properties_type = type_properties_type + self.type_handler_version = type_handler_version + self.enable_automatic_upgrade = enable_automatic_upgrade + self.auto_upgrade_minor_version = auto_upgrade_minor_version + self.settings = settings + self.protected_settings = protected_settings + self.provisioning_state = None + self.instance_view = instance_view + + +class MachineExtensionInstanceView(msrest.serialization.Model): + """Describes the Machine Extension Instance View. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: The machine extension name. + :vartype name: str + :ivar type: Specifies the type of the extension; an example is "CustomScriptExtension". + :vartype type: str + :ivar type_handler_version: Specifies the version of the script handler. + :vartype type_handler_version: str + :ivar status: Instance view status. + :vartype status: ~azure.mgmt.connectedvmware.models.MachineExtensionInstanceViewStatus + """ + + _validation = { + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'type_handler_version': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'type_handler_version': {'key': 'typeHandlerVersion', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'MachineExtensionInstanceViewStatus'}, + } + + def __init__( + self, + *, + status: Optional["_models.MachineExtensionInstanceViewStatus"] = None, + **kwargs + ): + """ + :keyword status: Instance view status. + :paramtype status: ~azure.mgmt.connectedvmware.models.MachineExtensionInstanceViewStatus + """ + super(MachineExtensionInstanceView, self).__init__(**kwargs) + self.name = None + self.type = None + self.type_handler_version = None + self.status = status + + +class MachineExtensionInstanceViewStatus(msrest.serialization.Model): + """Instance view status. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: The status code. + :vartype code: str + :ivar level: The level code. Known values are: "Info", "Warning", "Error". + :vartype level: str or ~azure.mgmt.connectedvmware.models.StatusLevelTypes + :ivar display_status: The short localizable label for the status. + :vartype display_status: str + :ivar message: The detailed status message, including for alerts and error messages. + :vartype message: str + :ivar time: The time of the status. 
+ :vartype time: ~datetime.datetime + """ + + _validation = { + 'code': {'readonly': True}, + 'level': {'readonly': True}, + 'display_status': {'readonly': True}, + 'message': {'readonly': True}, + 'time': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'str'}, + 'display_status': {'key': 'displayStatus', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'time': {'key': 'time', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(MachineExtensionInstanceViewStatus, self).__init__(**kwargs) + self.code = None + self.level = None + self.display_status = None + self.message = None + self.time = None + + +class MachineExtensionPropertiesInstanceView(MachineExtensionInstanceView): + """The machine extension instance view. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: The machine extension name. + :vartype name: str + :ivar type: Specifies the type of the extension; an example is "CustomScriptExtension". + :vartype type: str + :ivar type_handler_version: Specifies the version of the script handler. + :vartype type_handler_version: str + :ivar status: Instance view status. + :vartype status: ~azure.mgmt.connectedvmware.models.MachineExtensionInstanceViewStatus + """ + + _validation = { + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'type_handler_version': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'type_handler_version': {'key': 'typeHandlerVersion', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'MachineExtensionInstanceViewStatus'}, + } + + def __init__( + self, + *, + status: Optional["_models.MachineExtensionInstanceViewStatus"] = None, + **kwargs + ): + """ + :keyword status: Instance view status. + :paramtype status: ~azure.mgmt.connectedvmware.models.MachineExtensionInstanceViewStatus + """ + super(MachineExtensionPropertiesInstanceView, self).__init__(status=status, **kwargs) + + +class MachineExtensionsListResult(msrest.serialization.Model): + """Describes the Machine Extensions List Result. + + :ivar value: The list of extensions. + :vartype value: list[~azure.mgmt.connectedvmware.models.MachineExtension] + :ivar next_link: The uri to fetch the next page of machine extensions. Call ListNext() with + this to fetch the next page of extensions. + :vartype next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[MachineExtension]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["_models.MachineExtension"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword value: The list of extensions. + :paramtype value: list[~azure.mgmt.connectedvmware.models.MachineExtension] + :keyword next_link: The uri to fetch the next page of machine extensions. Call ListNext() with + this to fetch the next page of extensions. + :paramtype next_link: str + """ + super(MachineExtensionsListResult, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class ResourcePatch(msrest.serialization.Model): + """Object containing updates for patch operations. + + :ivar tags: A set of tags. Resource tags. 
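
(To make the extension shape concrete, illustrative only: a CustomScriptExtension-style request body populates publisher, type_properties_type and the free-form settings object, with anything secret going into protected_settings, mirroring the docstrings above. Same import assumption as the earlier sketches.)

    from azure.mgmt.connectedvmware import models as vmware_models

    extension = vmware_models.MachineExtension(
        location="eastus",
        publisher="Microsoft.Compute",
        type_properties_type="CustomScriptExtension",
        type_handler_version="1.10",
        auto_upgrade_minor_version=True,
        settings={"commandToExecute": "hostname"},
        # protected_settings would carry secrets (e.g. storage keys) and is
        # never returned by the service.
    )

    # type_properties_type maps to "properties.type" in _attribute_map.
    body = extension.serialize()
    print(body["properties"]["type"])  # "CustomScriptExtension"
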
+ :vartype tags: dict[str, str] + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__( + self, + *, + tags: Optional[Dict[str, str]] = None, + **kwargs + ): + """ + :keyword tags: A set of tags. Resource tags. + :paramtype tags: dict[str, str] + """ + super(ResourcePatch, self).__init__(**kwargs) + self.tags = tags + + +class MachineExtensionUpdate(ResourcePatch): + """Describes a Machine Extension Update. + + :ivar tags: A set of tags. Resource tags. + :vartype tags: dict[str, str] + :ivar force_update_tag: How the extension handler should be forced to update even if the + extension configuration has not changed. + :vartype force_update_tag: str + :ivar publisher: The name of the extension handler publisher. + :vartype publisher: str + :ivar type: Specifies the type of the extension; an example is "CustomScriptExtension". + :vartype type: str + :ivar type_handler_version: Specifies the version of the script handler. + :vartype type_handler_version: str + :ivar enable_automatic_upgrade: Indicates whether the extension should be automatically + upgraded by the platform if there is a newer version available. + :vartype enable_automatic_upgrade: bool + :ivar auto_upgrade_minor_version: Indicates whether the extension should use a newer minor + version if one is available at deployment time. Once deployed, however, the extension will not + upgrade minor versions unless redeployed, even with this property set to true. + :vartype auto_upgrade_minor_version: bool + :ivar settings: Json formatted public settings for the extension. + :vartype settings: any + :ivar protected_settings: The extension can contain either protectedSettings or + protectedSettingsFromKeyVault or no protected settings at all. + :vartype protected_settings: any + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + 'force_update_tag': {'key': 'properties.forceUpdateTag', 'type': 'str'}, + 'publisher': {'key': 'properties.publisher', 'type': 'str'}, + 'type': {'key': 'properties.type', 'type': 'str'}, + 'type_handler_version': {'key': 'properties.typeHandlerVersion', 'type': 'str'}, + 'enable_automatic_upgrade': {'key': 'properties.enableAutomaticUpgrade', 'type': 'bool'}, + 'auto_upgrade_minor_version': {'key': 'properties.autoUpgradeMinorVersion', 'type': 'bool'}, + 'settings': {'key': 'properties.settings', 'type': 'object'}, + 'protected_settings': {'key': 'properties.protectedSettings', 'type': 'object'}, + } + + def __init__( + self, + *, + tags: Optional[Dict[str, str]] = None, + force_update_tag: Optional[str] = None, + publisher: Optional[str] = None, + type: Optional[str] = None, + type_handler_version: Optional[str] = None, + enable_automatic_upgrade: Optional[bool] = None, + auto_upgrade_minor_version: Optional[bool] = None, + settings: Optional[Any] = None, + protected_settings: Optional[Any] = None, + **kwargs + ): + """ + :keyword tags: A set of tags. Resource tags. + :paramtype tags: dict[str, str] + :keyword force_update_tag: How the extension handler should be forced to update even if the + extension configuration has not changed. + :paramtype force_update_tag: str + :keyword publisher: The name of the extension handler publisher. + :paramtype publisher: str + :keyword type: Specifies the type of the extension; an example is "CustomScriptExtension". + :paramtype type: str + :keyword type_handler_version: Specifies the version of the script handler. 
+ :paramtype type_handler_version: str + :keyword enable_automatic_upgrade: Indicates whether the extension should be automatically + upgraded by the platform if there is a newer version available. + :paramtype enable_automatic_upgrade: bool + :keyword auto_upgrade_minor_version: Indicates whether the extension should use a newer minor + version if one is available at deployment time. Once deployed, however, the extension will not + upgrade minor versions unless redeployed, even with this property set to true. + :paramtype auto_upgrade_minor_version: bool + :keyword settings: Json formatted public settings for the extension. + :paramtype settings: any + :keyword protected_settings: The extension can contain either protectedSettings or + protectedSettingsFromKeyVault or no protected settings at all. + :paramtype protected_settings: any + """ + super(MachineExtensionUpdate, self).__init__(tags=tags, **kwargs) + self.force_update_tag = force_update_tag + self.publisher = publisher + self.type = type + self.type_handler_version = type_handler_version + self.enable_automatic_upgrade = enable_automatic_upgrade + self.auto_upgrade_minor_version = auto_upgrade_minor_version + self.settings = settings + self.protected_settings = protected_settings + + +class NetworkInterface(msrest.serialization.Model): + """Network Interface model. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Gets or sets the name of the network interface. + :vartype name: str + :ivar label: Gets or sets the label of the virtual network in vCenter that the nic is connected + to. + :vartype label: str + :ivar ip_addresses: Gets or sets the nic ip addresses. + :vartype ip_addresses: list[str] + :ivar mac_address: Gets or sets the NIC MAC address. + :vartype mac_address: str + :ivar network_id: Gets or sets the ARM Id of the network resource to connect the virtual + machine. + :vartype network_id: str + :ivar nic_type: NIC type. Known values are: "vmxnet3", "vmxnet2", "vmxnet", "e1000", "e1000e", + "pcnet32". + :vartype nic_type: str or ~azure.mgmt.connectedvmware.models.NICType + :ivar power_on_boot: Gets or sets the power on boot. Known values are: "enabled", "disabled". + :vartype power_on_boot: str or ~azure.mgmt.connectedvmware.models.PowerOnBootOption + :ivar network_mo_ref_id: Gets or sets the vCenter MoRef (Managed Object Reference) ID of the + virtual network + that the nic is connected to. + :vartype network_mo_ref_id: str + :ivar network_mo_name: Gets or sets the name of the virtual network in vCenter that the nic is + connected to. + :vartype network_mo_name: str + :ivar device_key: Gets or sets the device key value. + :vartype device_key: int + :ivar ip_settings: Gets or sets the ipsettings. 
+ :vartype ip_settings: ~azure.mgmt.connectedvmware.models.NicIPSettings + """ + + _validation = { + 'label': {'readonly': True}, + 'ip_addresses': {'readonly': True}, + 'mac_address': {'readonly': True}, + 'network_mo_ref_id': {'readonly': True}, + 'network_mo_name': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'label': {'key': 'label', 'type': 'str'}, + 'ip_addresses': {'key': 'ipAddresses', 'type': '[str]'}, + 'mac_address': {'key': 'macAddress', 'type': 'str'}, + 'network_id': {'key': 'networkId', 'type': 'str'}, + 'nic_type': {'key': 'nicType', 'type': 'str'}, + 'power_on_boot': {'key': 'powerOnBoot', 'type': 'str'}, + 'network_mo_ref_id': {'key': 'networkMoRefId', 'type': 'str'}, + 'network_mo_name': {'key': 'networkMoName', 'type': 'str'}, + 'device_key': {'key': 'deviceKey', 'type': 'int'}, + 'ip_settings': {'key': 'ipSettings', 'type': 'NicIPSettings'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + network_id: Optional[str] = None, + nic_type: Optional[Union[str, "_models.NICType"]] = None, + power_on_boot: Optional[Union[str, "_models.PowerOnBootOption"]] = None, + device_key: Optional[int] = None, + ip_settings: Optional["_models.NicIPSettings"] = None, + **kwargs + ): + """ + :keyword name: Gets or sets the name of the network interface. + :paramtype name: str + :keyword network_id: Gets or sets the ARM Id of the network resource to connect the virtual + machine. + :paramtype network_id: str + :keyword nic_type: NIC type. Known values are: "vmxnet3", "vmxnet2", "vmxnet", "e1000", + "e1000e", "pcnet32". + :paramtype nic_type: str or ~azure.mgmt.connectedvmware.models.NICType + :keyword power_on_boot: Gets or sets the power on boot. Known values are: "enabled", + "disabled". + :paramtype power_on_boot: str or ~azure.mgmt.connectedvmware.models.PowerOnBootOption + :keyword device_key: Gets or sets the device key value. + :paramtype device_key: int + :keyword ip_settings: Gets or sets the ipsettings. + :paramtype ip_settings: ~azure.mgmt.connectedvmware.models.NicIPSettings + """ + super(NetworkInterface, self).__init__(**kwargs) + self.name = name + self.label = None + self.ip_addresses = None + self.mac_address = None + self.network_id = network_id + self.nic_type = nic_type + self.power_on_boot = power_on_boot + self.network_mo_ref_id = None + self.network_mo_name = None + self.device_key = device_key + self.ip_settings = ip_settings + + +class NetworkInterfaceUpdate(msrest.serialization.Model): + """Defines the network interface update. + + :ivar name: Gets or sets the name of the network interface. + :vartype name: str + :ivar network_id: Gets or sets the ARM Id of the network resource to connect the virtual + machine. + :vartype network_id: str + :ivar nic_type: NIC type. Known values are: "vmxnet3", "vmxnet2", "vmxnet", "e1000", "e1000e", + "pcnet32". + :vartype nic_type: str or ~azure.mgmt.connectedvmware.models.NICType + :ivar power_on_boot: Gets or sets the power on boot. Known values are: "enabled", "disabled". + :vartype power_on_boot: str or ~azure.mgmt.connectedvmware.models.PowerOnBootOption + :ivar device_key: Gets or sets the device key value. 
+ :vartype device_key: int + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'network_id': {'key': 'networkId', 'type': 'str'}, + 'nic_type': {'key': 'nicType', 'type': 'str'}, + 'power_on_boot': {'key': 'powerOnBoot', 'type': 'str'}, + 'device_key': {'key': 'deviceKey', 'type': 'int'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + network_id: Optional[str] = None, + nic_type: Optional[Union[str, "_models.NICType"]] = None, + power_on_boot: Optional[Union[str, "_models.PowerOnBootOption"]] = None, + device_key: Optional[int] = None, + **kwargs + ): + """ + :keyword name: Gets or sets the name of the network interface. + :paramtype name: str + :keyword network_id: Gets or sets the ARM Id of the network resource to connect the virtual + machine. + :paramtype network_id: str + :keyword nic_type: NIC type. Known values are: "vmxnet3", "vmxnet2", "vmxnet", "e1000", + "e1000e", "pcnet32". + :paramtype nic_type: str or ~azure.mgmt.connectedvmware.models.NICType + :keyword power_on_boot: Gets or sets the power on boot. Known values are: "enabled", + "disabled". + :paramtype power_on_boot: str or ~azure.mgmt.connectedvmware.models.PowerOnBootOption + :keyword device_key: Gets or sets the device key value. + :paramtype device_key: int + """ + super(NetworkInterfaceUpdate, self).__init__(**kwargs) + self.name = name + self.network_id = network_id + self.nic_type = nic_type + self.power_on_boot = power_on_boot + self.device_key = device_key + + +class NetworkProfile(msrest.serialization.Model): + """Defines the resource properties. + + :ivar network_interfaces: Gets or sets the list of network interfaces associated with the + virtual machine. + :vartype network_interfaces: list[~azure.mgmt.connectedvmware.models.NetworkInterface] + """ + + _attribute_map = { + 'network_interfaces': {'key': 'networkInterfaces', 'type': '[NetworkInterface]'}, + } + + def __init__( + self, + *, + network_interfaces: Optional[List["_models.NetworkInterface"]] = None, + **kwargs + ): + """ + :keyword network_interfaces: Gets or sets the list of network interfaces associated with the + virtual machine. + :paramtype network_interfaces: list[~azure.mgmt.connectedvmware.models.NetworkInterface] + """ + super(NetworkProfile, self).__init__(**kwargs) + self.network_interfaces = network_interfaces + + +class NetworkProfileUpdate(msrest.serialization.Model): + """Defines the update resource properties. + + :ivar network_interfaces: Gets or sets the list of network interfaces associated with the + virtual machine. + :vartype network_interfaces: list[~azure.mgmt.connectedvmware.models.NetworkInterfaceUpdate] + """ + + _attribute_map = { + 'network_interfaces': {'key': 'networkInterfaces', 'type': '[NetworkInterfaceUpdate]'}, + } + + def __init__( + self, + *, + network_interfaces: Optional[List["_models.NetworkInterfaceUpdate"]] = None, + **kwargs + ): + """ + :keyword network_interfaces: Gets or sets the list of network interfaces associated with the + virtual machine. + :paramtype network_interfaces: list[~azure.mgmt.connectedvmware.models.NetworkInterfaceUpdate] + """ + super(NetworkProfileUpdate, self).__init__(**kwargs) + self.network_interfaces = network_interfaces + + +class NicIPAddressSettings(msrest.serialization.Model): + """IP address information for a virtual network adapter reported by the fabric. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar allocation_method: Gets the ip address allocation method. 
+ :vartype allocation_method: str + :ivar ip_address: Gets the ip address for the nic. + :vartype ip_address: str + :ivar subnet_mask: Gets the mask. + :vartype subnet_mask: str + """ + + _validation = { + 'allocation_method': {'readonly': True}, + 'ip_address': {'readonly': True}, + 'subnet_mask': {'readonly': True}, + } + + _attribute_map = { + 'allocation_method': {'key': 'allocationMethod', 'type': 'str'}, + 'ip_address': {'key': 'ipAddress', 'type': 'str'}, + 'subnet_mask': {'key': 'subnetMask', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(NicIPAddressSettings, self).__init__(**kwargs) + self.allocation_method = None + self.ip_address = None + self.subnet_mask = None + + +class NicIPSettings(msrest.serialization.Model): + """Defines the network interface ip settings. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar allocation_method: Gets or sets the nic allocation method. Known values are: "unset", + "dynamic", "static", "linklayer", "random", "other". + :vartype allocation_method: str or ~azure.mgmt.connectedvmware.models.IPAddressAllocationMethod + :ivar dns_servers: Gets or sets the dns servers. + :vartype dns_servers: list[str] + :ivar gateway: Gets or sets the gateway. + :vartype gateway: list[str] + :ivar ip_address: Gets or sets the ip address for the nic. + :vartype ip_address: str + :ivar subnet_mask: Gets or sets the mask. + :vartype subnet_mask: str + :ivar primary_wins_server: Gets or sets the primary server. + :vartype primary_wins_server: str + :ivar secondary_wins_server: Gets or sets the secondary server. + :vartype secondary_wins_server: str + :ivar ip_address_info: Gets or sets the IP address information being reported for this NIC. + This contains the same IPv4 information above plus IPV6 information. + :vartype ip_address_info: list[~azure.mgmt.connectedvmware.models.NicIPAddressSettings] + """ + + _validation = { + 'primary_wins_server': {'readonly': True}, + 'secondary_wins_server': {'readonly': True}, + 'ip_address_info': {'readonly': True}, + } + + _attribute_map = { + 'allocation_method': {'key': 'allocationMethod', 'type': 'str'}, + 'dns_servers': {'key': 'dnsServers', 'type': '[str]'}, + 'gateway': {'key': 'gateway', 'type': '[str]'}, + 'ip_address': {'key': 'ipAddress', 'type': 'str'}, + 'subnet_mask': {'key': 'subnetMask', 'type': 'str'}, + 'primary_wins_server': {'key': 'primaryWinsServer', 'type': 'str'}, + 'secondary_wins_server': {'key': 'secondaryWinsServer', 'type': 'str'}, + 'ip_address_info': {'key': 'ipAddressInfo', 'type': '[NicIPAddressSettings]'}, + } + + def __init__( + self, + *, + allocation_method: Optional[Union[str, "_models.IPAddressAllocationMethod"]] = None, + dns_servers: Optional[List[str]] = None, + gateway: Optional[List[str]] = None, + ip_address: Optional[str] = None, + subnet_mask: Optional[str] = None, + **kwargs + ): + """ + :keyword allocation_method: Gets or sets the nic allocation method. Known values are: "unset", + "dynamic", "static", "linklayer", "random", "other". + :paramtype allocation_method: str or + ~azure.mgmt.connectedvmware.models.IPAddressAllocationMethod + :keyword dns_servers: Gets or sets the dns servers. + :paramtype dns_servers: list[str] + :keyword gateway: Gets or sets the gateway. + :paramtype gateway: list[str] + :keyword ip_address: Gets or sets the ip address for the nic. + :paramtype ip_address: str + :keyword subnet_mask: Gets or sets the mask. 
+ :paramtype subnet_mask: str + """ + super(NicIPSettings, self).__init__(**kwargs) + self.allocation_method = allocation_method + self.dns_servers = dns_servers + self.gateway = gateway + self.ip_address = ip_address + self.subnet_mask = subnet_mask + self.primary_wins_server = None + self.secondary_wins_server = None + self.ip_address_info = None + + +class Operation(msrest.serialization.Model): + """Operation provided by provider. + + :ivar name: Name of the operation. + :vartype name: str + :ivar is_data_action: Indicates whether the operation is data action or not. + :vartype is_data_action: bool + :ivar display: Properties of the operation. + :vartype display: ~azure.mgmt.connectedvmware.models.OperationDisplay + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'is_data_action': {'key': 'isDataAction', 'type': 'bool'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + is_data_action: Optional[bool] = None, + display: Optional["_models.OperationDisplay"] = None, + **kwargs + ): + """ + :keyword name: Name of the operation. + :paramtype name: str + :keyword is_data_action: Indicates whether the operation is data action or not. + :paramtype is_data_action: bool + :keyword display: Properties of the operation. + :paramtype display: ~azure.mgmt.connectedvmware.models.OperationDisplay + """ + super(Operation, self).__init__(**kwargs) + self.name = name + self.is_data_action = is_data_action + self.display = display + + +class OperationDisplay(msrest.serialization.Model): + """Properties of the operation. + + :ivar provider: Provider name. + :vartype provider: str + :ivar resource: Resource name. + :vartype resource: str + :ivar operation: Operation name. + :vartype operation: str + :ivar description: Description of the operation. + :vartype description: str + """ + + _attribute_map = { + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + *, + provider: Optional[str] = None, + resource: Optional[str] = None, + operation: Optional[str] = None, + description: Optional[str] = None, + **kwargs + ): + """ + :keyword provider: Provider name. + :paramtype provider: str + :keyword resource: Resource name. + :paramtype resource: str + :keyword operation: Operation name. + :paramtype operation: str + :keyword description: Description of the operation. + :paramtype description: str + """ + super(OperationDisplay, self).__init__(**kwargs) + self.provider = provider + self.resource = resource + self.operation = operation + self.description = description + + +class OperationsList(msrest.serialization.Model): + """Lists the operations available. + + All required parameters must be populated in order to send to Azure. + + :ivar next_link: Url to follow for getting next page of operations. + :vartype next_link: str + :ivar value: Required. Array of operations. + :vartype value: list[~azure.mgmt.connectedvmware.models.Operation] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[Operation]'}, + } + + def __init__( + self, + *, + value: List["_models.Operation"], + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword next_link: Url to follow for getting next page of operations. 
+ :paramtype next_link: str + :keyword value: Required. Array of operations. + :paramtype value: list[~azure.mgmt.connectedvmware.models.Operation] + """ + super(OperationsList, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class OsProfile(msrest.serialization.Model): + """Defines the resource properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar computer_name: Gets or sets computer name. + :vartype computer_name: str + :ivar admin_username: Gets or sets administrator username. + :vartype admin_username: str + :ivar admin_password: Gets or sets administrator password. + :vartype admin_password: str + :ivar guest_id: Gets or sets the guestId. + :vartype guest_id: str + :ivar allow_extension_operations: Gets or sets a value indicating whether the VM is ready for + extension operations. + :vartype allow_extension_operations: bool + :ivar os_type: Gets or sets the type of the os. Known values are: "Windows", "Linux", "Other". + :vartype os_type: str or ~azure.mgmt.connectedvmware.models.OsType + :ivar os_name: Gets or sets os name. + :vartype os_name: str + :ivar tools_running_status: Gets or sets the current running status of VMware Tools running in + the guest operating system. + :vartype tools_running_status: str + :ivar tools_version_status: Gets or sets the current version status of VMware Tools installed + in the guest operating system. + :vartype tools_version_status: str + :ivar tools_version: Gets or sets the current version of VMware Tools. + :vartype tools_version: str + :ivar windows_configuration: Specifies the windows configuration for update management. + :vartype windows_configuration: + ~azure.mgmt.connectedvmware.models.OsProfileWindowsConfiguration + :ivar linux_configuration: Specifies the linux configuration for update management. + :vartype linux_configuration: ~azure.mgmt.connectedvmware.models.OsProfileLinuxConfiguration + """ + + _validation = { + 'allow_extension_operations': {'readonly': True}, + 'os_name': {'readonly': True}, + 'tools_running_status': {'readonly': True}, + 'tools_version_status': {'readonly': True}, + 'tools_version': {'readonly': True}, + } + + _attribute_map = { + 'computer_name': {'key': 'computerName', 'type': 'str'}, + 'admin_username': {'key': 'adminUsername', 'type': 'str'}, + 'admin_password': {'key': 'adminPassword', 'type': 'str'}, + 'guest_id': {'key': 'guestId', 'type': 'str'}, + 'allow_extension_operations': {'key': 'allowExtensionOperations', 'type': 'bool'}, + 'os_type': {'key': 'osType', 'type': 'str'}, + 'os_name': {'key': 'osName', 'type': 'str'}, + 'tools_running_status': {'key': 'toolsRunningStatus', 'type': 'str'}, + 'tools_version_status': {'key': 'toolsVersionStatus', 'type': 'str'}, + 'tools_version': {'key': 'toolsVersion', 'type': 'str'}, + 'windows_configuration': {'key': 'windowsConfiguration', 'type': 'OsProfileWindowsConfiguration'}, + 'linux_configuration': {'key': 'linuxConfiguration', 'type': 'OsProfileLinuxConfiguration'}, + } + + def __init__( + self, + *, + computer_name: Optional[str] = None, + admin_username: Optional[str] = None, + admin_password: Optional[str] = None, + guest_id: Optional[str] = None, + os_type: Optional[Union[str, "_models.OsType"]] = None, + windows_configuration: Optional["_models.OsProfileWindowsConfiguration"] = None, + linux_configuration: Optional["_models.OsProfileLinuxConfiguration"] = None, + **kwargs + ): + """ + :keyword computer_name: Gets or sets computer name. 
+ :paramtype computer_name: str + :keyword admin_username: Gets or sets administrator username. + :paramtype admin_username: str + :keyword admin_password: Gets or sets administrator password. + :paramtype admin_password: str + :keyword guest_id: Gets or sets the guestId. + :paramtype guest_id: str + :keyword os_type: Gets or sets the type of the os. Known values are: "Windows", "Linux", + "Other". + :paramtype os_type: str or ~azure.mgmt.connectedvmware.models.OsType + :keyword windows_configuration: Specifies the windows configuration for update management. + :paramtype windows_configuration: + ~azure.mgmt.connectedvmware.models.OsProfileWindowsConfiguration + :keyword linux_configuration: Specifies the linux configuration for update management. + :paramtype linux_configuration: ~azure.mgmt.connectedvmware.models.OsProfileLinuxConfiguration + """ + super(OsProfile, self).__init__(**kwargs) + self.computer_name = computer_name + self.admin_username = admin_username + self.admin_password = admin_password + self.guest_id = guest_id + self.allow_extension_operations = None + self.os_type = os_type + self.os_name = None + self.tools_running_status = None + self.tools_version_status = None + self.tools_version = None + self.windows_configuration = windows_configuration + self.linux_configuration = linux_configuration + + +class OsProfileLinuxConfiguration(msrest.serialization.Model): + """Specifies the linux configuration for update management. + + :ivar assessment_mode: Specifies the assessment mode. + :vartype assessment_mode: str + :ivar patch_mode: Specifies the patch mode. + :vartype patch_mode: str + """ + + _attribute_map = { + 'assessment_mode': {'key': 'patchSettings.assessmentMode', 'type': 'str'}, + 'patch_mode': {'key': 'patchSettings.patchMode', 'type': 'str'}, + } + + def __init__( + self, + *, + assessment_mode: Optional[str] = None, + patch_mode: Optional[str] = None, + **kwargs + ): + """ + :keyword assessment_mode: Specifies the assessment mode. + :paramtype assessment_mode: str + :keyword patch_mode: Specifies the patch mode. + :paramtype patch_mode: str + """ + super(OsProfileLinuxConfiguration, self).__init__(**kwargs) + self.assessment_mode = assessment_mode + self.patch_mode = patch_mode + + +class OsProfileUpdate(msrest.serialization.Model): + """Defines the os update properties. + + :ivar windows_configuration: Specifies the windows configuration for update management. + :vartype windows_configuration: + ~azure.mgmt.connectedvmware.models.OsProfileUpdateWindowsConfiguration + :ivar linux_configuration: Specifies the linux configuration for update management. + :vartype linux_configuration: + ~azure.mgmt.connectedvmware.models.OsProfileUpdateLinuxConfiguration + """ + + _attribute_map = { + 'windows_configuration': {'key': 'windowsConfiguration', 'type': 'OsProfileUpdateWindowsConfiguration'}, + 'linux_configuration': {'key': 'linuxConfiguration', 'type': 'OsProfileUpdateLinuxConfiguration'}, + } + + def __init__( + self, + *, + windows_configuration: Optional["_models.OsProfileUpdateWindowsConfiguration"] = None, + linux_configuration: Optional["_models.OsProfileUpdateLinuxConfiguration"] = None, + **kwargs + ): + """ + :keyword windows_configuration: Specifies the windows configuration for update management. + :paramtype windows_configuration: + ~azure.mgmt.connectedvmware.models.OsProfileUpdateWindowsConfiguration + :keyword linux_configuration: Specifies the linux configuration for update management. 
+ :paramtype linux_configuration: + ~azure.mgmt.connectedvmware.models.OsProfileUpdateLinuxConfiguration + """ + super(OsProfileUpdate, self).__init__(**kwargs) + self.windows_configuration = windows_configuration + self.linux_configuration = linux_configuration + + +class OsProfileUpdateLinuxConfiguration(msrest.serialization.Model): + """Specifies the linux configuration for update management. + + :ivar assessment_mode: Specifies the assessment mode. + :vartype assessment_mode: str + :ivar patch_mode: Specifies the patch mode. + :vartype patch_mode: str + """ + + _attribute_map = { + 'assessment_mode': {'key': 'patchSettings.assessmentMode', 'type': 'str'}, + 'patch_mode': {'key': 'patchSettings.patchMode', 'type': 'str'}, + } + + def __init__( + self, + *, + assessment_mode: Optional[str] = None, + patch_mode: Optional[str] = None, + **kwargs + ): + """ + :keyword assessment_mode: Specifies the assessment mode. + :paramtype assessment_mode: str + :keyword patch_mode: Specifies the patch mode. + :paramtype patch_mode: str + """ + super(OsProfileUpdateLinuxConfiguration, self).__init__(**kwargs) + self.assessment_mode = assessment_mode + self.patch_mode = patch_mode + + +class OsProfileUpdateWindowsConfiguration(msrest.serialization.Model): + """Specifies the windows configuration for update management. + + :ivar assessment_mode: Specifies the assessment mode. + :vartype assessment_mode: str + :ivar patch_mode: Specifies the patch mode. + :vartype patch_mode: str + """ + + _attribute_map = { + 'assessment_mode': {'key': 'patchSettings.assessmentMode', 'type': 'str'}, + 'patch_mode': {'key': 'patchSettings.patchMode', 'type': 'str'}, + } + + def __init__( + self, + *, + assessment_mode: Optional[str] = None, + patch_mode: Optional[str] = None, + **kwargs + ): + """ + :keyword assessment_mode: Specifies the assessment mode. + :paramtype assessment_mode: str + :keyword patch_mode: Specifies the patch mode. + :paramtype patch_mode: str + """ + super(OsProfileUpdateWindowsConfiguration, self).__init__(**kwargs) + self.assessment_mode = assessment_mode + self.patch_mode = patch_mode + + +class OsProfileWindowsConfiguration(msrest.serialization.Model): + """Specifies the windows configuration for update management. + + :ivar assessment_mode: Specifies the assessment mode. + :vartype assessment_mode: str + :ivar patch_mode: Specifies the patch mode. + :vartype patch_mode: str + """ + + _attribute_map = { + 'assessment_mode': {'key': 'patchSettings.assessmentMode', 'type': 'str'}, + 'patch_mode': {'key': 'patchSettings.patchMode', 'type': 'str'}, + } + + def __init__( + self, + *, + assessment_mode: Optional[str] = None, + patch_mode: Optional[str] = None, + **kwargs + ): + """ + :keyword assessment_mode: Specifies the assessment mode. + :paramtype assessment_mode: str + :keyword patch_mode: Specifies the patch mode. + :paramtype patch_mode: str + """ + super(OsProfileWindowsConfiguration, self).__init__(**kwargs) + self.assessment_mode = assessment_mode + self.patch_mode = patch_mode + + +class PlacementProfile(msrest.serialization.Model): + """Defines the resource properties. + + :ivar resource_pool_id: Gets or sets the ARM Id of the resourcePool resource on which this + virtual machine will deploy. + :vartype resource_pool_id: str + :ivar cluster_id: Gets or sets the ARM Id of the cluster resource on which this virtual machine + will deploy. + :vartype cluster_id: str + :ivar host_id: Gets or sets the ARM Id of the host resource on which this virtual machine will + deploy. 
+ :vartype host_id: str + :ivar datastore_id: Gets or sets the ARM Id of the datastore resource on which the data for the + virtual machine will be kept. + :vartype datastore_id: str + """ + + _attribute_map = { + 'resource_pool_id': {'key': 'resourcePoolId', 'type': 'str'}, + 'cluster_id': {'key': 'clusterId', 'type': 'str'}, + 'host_id': {'key': 'hostId', 'type': 'str'}, + 'datastore_id': {'key': 'datastoreId', 'type': 'str'}, + } + + def __init__( + self, + *, + resource_pool_id: Optional[str] = None, + cluster_id: Optional[str] = None, + host_id: Optional[str] = None, + datastore_id: Optional[str] = None, + **kwargs + ): + """ + :keyword resource_pool_id: Gets or sets the ARM Id of the resourcePool resource on which this + virtual machine will deploy. + :paramtype resource_pool_id: str + :keyword cluster_id: Gets or sets the ARM Id of the cluster resource on which this virtual + machine will deploy. + :paramtype cluster_id: str + :keyword host_id: Gets or sets the ARM Id of the host resource on which this virtual machine + will deploy. + :paramtype host_id: str + :keyword datastore_id: Gets or sets the ARM Id of the datastore resource on which the data for + the virtual machine will be kept. + :paramtype datastore_id: str + """ + super(PlacementProfile, self).__init__(**kwargs) + self.resource_pool_id = resource_pool_id + self.cluster_id = cluster_id + self.host_id = host_id + self.datastore_id = datastore_id + + +class ResourcePool(msrest.serialization.Model): + """Define the resourcePool. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar location: Required. Gets or sets the location. + :vartype location: str + :ivar extended_location: Gets or sets the extended location. + :vartype extended_location: ~azure.mgmt.connectedvmware.models.ExtendedLocation + :ivar system_data: The system data. + :vartype system_data: ~azure.mgmt.connectedvmware.models.SystemData + :ivar tags: A set of tags. Gets or sets the Resource tags. + :vartype tags: dict[str, str] + :ivar name: Gets or sets the name. + :vartype name: str + :ivar id: Gets or sets the Id. + :vartype id: str + :ivar type: Gets or sets the type of the resource. + :vartype type: str + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, + the resource provider must validate and persist this value. + :vartype kind: str + :ivar uuid: Gets or sets a unique identifier for this resource. + :vartype uuid: str + :ivar v_center_id: Gets or sets the ARM Id of the vCenter resource in which this resource pool + resides. + :vartype v_center_id: str + :ivar mo_ref_id: Gets or sets the vCenter MoRef (Managed Object Reference) ID for the resource + pool. + :vartype mo_ref_id: str + :ivar inventory_item_id: Gets or sets the inventory Item ID for the resource pool. + :vartype inventory_item_id: str + :ivar mo_name: Gets or sets the vCenter Managed Object name for the resource pool. + :vartype mo_name: str + :ivar cpu_shares_level: Gets or sets CPUSharesLevel which specifies the CPU allocation level + for this pool. + This property is used in relative allocation between resource consumers. + :vartype cpu_shares_level: str + :ivar cpu_reservation_m_hz: Gets or sets CPUReservationMHz which specifies the CPU size in MHz + that is guaranteed + to be available. 
+ :vartype cpu_reservation_m_hz: long
+ :ivar cpu_limit_m_hz: Gets or sets CPULimitMHz which specifies a CPU usage limit in MHz.
+ Utilization will not exceed this limit even if there are available resources.
+ :vartype cpu_limit_m_hz: long
+ :ivar mem_shares_level: Gets or sets MemSharesLevel which specifies the memory allocation level
+ for this pool.
+ This property is used in relative allocation between resource consumers.
+ :vartype mem_shares_level: str
+ :ivar mem_reservation_mb: Gets or sets MemReservationMB which specifies the guaranteed
+ available memory in
+ megabytes.
+ :vartype mem_reservation_mb: long
+ :ivar mem_limit_mb: Gets or sets MemLimitMB specifies a memory usage limit in megabytes.
+ Utilization will not exceed the specified limit even if there are available resources.
+ :vartype mem_limit_mb: long
+ :ivar custom_resource_name: Gets the name of the corresponding resource in Kubernetes.
+ :vartype custom_resource_name: str
+ :ivar statuses: The resource status information.
+ :vartype statuses: list[~azure.mgmt.connectedvmware.models.ResourceStatus]
+ :ivar provisioning_state: Gets or sets the provisioning state.
+ :vartype provisioning_state: str
+ """
+
+ _validation = {
+ 'location': {'required': True},
+ 'system_data': {'readonly': True},
+ 'name': {'readonly': True},
+ 'id': {'readonly': True},
+ 'type': {'readonly': True},
+ 'uuid': {'readonly': True},
+ 'mo_name': {'readonly': True},
+ 'cpu_shares_level': {'readonly': True},
+ 'cpu_reservation_m_hz': {'readonly': True},
+ 'cpu_limit_m_hz': {'readonly': True},
+ 'mem_shares_level': {'readonly': True},
+ 'mem_reservation_mb': {'readonly': True},
+ 'mem_limit_mb': {'readonly': True},
+ 'custom_resource_name': {'readonly': True},
+ 'statuses': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'location': {'key': 'location', 'type': 'str'},
+ 'extended_location': {'key': 'extendedLocation', 'type': 'ExtendedLocation'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'id': {'key': 'id', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'kind': {'key': 'kind', 'type': 'str'},
+ 'uuid': {'key': 'properties.uuid', 'type': 'str'},
+ 'v_center_id': {'key': 'properties.vCenterId', 'type': 'str'},
+ 'mo_ref_id': {'key': 'properties.moRefId', 'type': 'str'},
+ 'inventory_item_id': {'key': 'properties.inventoryItemId', 'type': 'str'},
+ 'mo_name': {'key': 'properties.moName', 'type': 'str'},
+ 'cpu_shares_level': {'key': 'properties.cpuSharesLevel', 'type': 'str'},
+ 'cpu_reservation_m_hz': {'key': 'properties.cpuReservationMHz', 'type': 'long'},
+ 'cpu_limit_m_hz': {'key': 'properties.cpuLimitMHz', 'type': 'long'},
+ 'mem_shares_level': {'key': 'properties.memSharesLevel', 'type': 'str'},
+ 'mem_reservation_mb': {'key': 'properties.memReservationMB', 'type': 'long'},
+ 'mem_limit_mb': {'key': 'properties.memLimitMB', 'type': 'long'},
+ 'custom_resource_name': {'key': 'properties.customResourceName', 'type': 'str'},
+ 'statuses': {'key': 'properties.statuses', 'type': '[ResourceStatus]'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ location: str,
+ extended_location: Optional["_models.ExtendedLocation"] = None,
+ tags: Optional[Dict[str, str]] = None,
+ kind: Optional[str] = None,
+ v_center_id: Optional[str] = None,
+ mo_ref_id: Optional[str] = None,
+ inventory_item_id: Optional[str] = None,
+ **kwargs
+ ):
+ """
+ :keyword location: Required. Gets or sets the location.
+ :paramtype location: str
+ :keyword extended_location: Gets or sets the extended location.
+ :paramtype extended_location: ~azure.mgmt.connectedvmware.models.ExtendedLocation
+ :keyword tags: A set of tags. Gets or sets the Resource tags.
+ :paramtype tags: dict[str, str]
+ :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for
+ resources of the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported,
+ the resource provider must validate and persist this value.
+ :paramtype kind: str
+ :keyword v_center_id: Gets or sets the ARM Id of the vCenter resource in which this resource
+ pool resides.
+ :paramtype v_center_id: str
+ :keyword mo_ref_id: Gets or sets the vCenter MoRef (Managed Object Reference) ID for the
+ resource pool.
+ :paramtype mo_ref_id: str
+ :keyword inventory_item_id: Gets or sets the inventory Item ID for the resource pool.
+ :paramtype inventory_item_id: str
+ """
+ super(ResourcePool, self).__init__(**kwargs)
+ self.location = location
+ self.extended_location = extended_location
+ self.system_data = None
+ self.tags = tags
+ self.name = None
+ self.id = None
+ self.type = None
+ self.kind = kind
+ self.uuid = None
+ self.v_center_id = v_center_id
+ self.mo_ref_id = mo_ref_id
+ self.inventory_item_id = inventory_item_id
+ self.mo_name = None
+ self.cpu_shares_level = None
+ self.cpu_reservation_m_hz = None
+ self.cpu_limit_m_hz = None
+ self.mem_shares_level = None
+ self.mem_reservation_mb = None
+ self.mem_limit_mb = None
+ self.custom_resource_name = None
+ self.statuses = None
+ self.provisioning_state = None
+
+
+class ResourcePoolInventoryItem(InventoryItemProperties):
+ """The resource pool inventory item.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar inventory_type: Required. The inventory type. Constant filled by server. Known values
+ are: "ResourcePool", "VirtualMachine", "VirtualMachineTemplate", "VirtualNetwork", "Cluster",
+ "Datastore", "Host".
+ :vartype inventory_type: str or ~azure.mgmt.connectedvmware.models.InventoryType
+ :ivar managed_resource_id: Gets or sets the tracked resource id corresponding to the inventory
+ resource.
+ :vartype managed_resource_id: str
+ :ivar mo_ref_id: Gets or sets the MoRef (Managed Object Reference) ID for the inventory item.
+ :vartype mo_ref_id: str
+ :ivar mo_name: Gets or sets the vCenter Managed Object name for the inventory item.
+ :vartype mo_name: str
+ :ivar provisioning_state: Gets or sets the provisioning state.
+ :vartype provisioning_state: str
+ :ivar parent: Parent resourcePool inventory resource details.
+ :vartype parent: ~azure.mgmt.connectedvmware.models.InventoryItemDetails + """ + + _validation = { + 'inventory_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'inventory_type': {'key': 'inventoryType', 'type': 'str'}, + 'managed_resource_id': {'key': 'managedResourceId', 'type': 'str'}, + 'mo_ref_id': {'key': 'moRefId', 'type': 'str'}, + 'mo_name': {'key': 'moName', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'parent': {'key': 'parent', 'type': 'InventoryItemDetails'}, + } + + def __init__( + self, + *, + managed_resource_id: Optional[str] = None, + mo_ref_id: Optional[str] = None, + mo_name: Optional[str] = None, + parent: Optional["_models.InventoryItemDetails"] = None, + **kwargs + ): + """ + :keyword managed_resource_id: Gets or sets the tracked resource id corresponding to the + inventory resource. + :paramtype managed_resource_id: str + :keyword mo_ref_id: Gets or sets the MoRef (Managed Object Reference) ID for the inventory + item. + :paramtype mo_ref_id: str + :keyword mo_name: Gets or sets the vCenter Managed Object name for the inventory item. + :paramtype mo_name: str + :keyword parent: Parent resourcePool inventory resource details. + :paramtype parent: ~azure.mgmt.connectedvmware.models.InventoryItemDetails + """ + super(ResourcePoolInventoryItem, self).__init__(managed_resource_id=managed_resource_id, mo_ref_id=mo_ref_id, mo_name=mo_name, **kwargs) + self.inventory_type = 'ResourcePool' # type: str + self.parent = parent + + +class ResourcePoolsList(msrest.serialization.Model): + """List of ResourcePools. + + All required parameters must be populated in order to send to Azure. + + :ivar next_link: Url to follow for getting next page of ResourcePools. + :vartype next_link: str + :ivar value: Required. Array of ResourcePools. + :vartype value: list[~azure.mgmt.connectedvmware.models.ResourcePool] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[ResourcePool]'}, + } + + def __init__( + self, + *, + value: List["_models.ResourcePool"], + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword next_link: Url to follow for getting next page of ResourcePools. + :paramtype next_link: str + :keyword value: Required. Array of ResourcePools. + :paramtype value: list[~azure.mgmt.connectedvmware.models.ResourcePool] + """ + super(ResourcePoolsList, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class ResourceStatus(msrest.serialization.Model): + """The resource status information. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar type: The type of the condition. + :vartype type: str + :ivar status: Status of the condition. + :vartype status: str + :ivar reason: The reason for the condition's status. + :vartype reason: str + :ivar message: A human readable message indicating details about the status. + :vartype message: str + :ivar severity: Severity with which to treat failures of this type of condition. + :vartype severity: str + :ivar last_updated_at: The last update time for this condition. 
+ :vartype last_updated_at: ~datetime.datetime + """ + + _validation = { + 'type': {'readonly': True}, + 'status': {'readonly': True}, + 'reason': {'readonly': True}, + 'message': {'readonly': True}, + 'severity': {'readonly': True}, + 'last_updated_at': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'reason': {'key': 'reason', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'severity': {'key': 'severity', 'type': 'str'}, + 'last_updated_at': {'key': 'lastUpdatedAt', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ResourceStatus, self).__init__(**kwargs) + self.type = None + self.status = None + self.reason = None + self.message = None + self.severity = None + self.last_updated_at = None + + +class SecurityProfile(msrest.serialization.Model): + """Specifies the Security profile settings for the virtual machine. + + :ivar uefi_settings: Specifies the security settings like secure boot used while creating the + virtual machine. + :vartype uefi_settings: ~azure.mgmt.connectedvmware.models.UefiSettings + """ + + _attribute_map = { + 'uefi_settings': {'key': 'uefiSettings', 'type': 'UefiSettings'}, + } + + def __init__( + self, + *, + uefi_settings: Optional["_models.UefiSettings"] = None, + **kwargs + ): + """ + :keyword uefi_settings: Specifies the security settings like secure boot used while creating + the virtual machine. + :paramtype uefi_settings: ~azure.mgmt.connectedvmware.models.UefiSettings + """ + super(SecurityProfile, self).__init__(**kwargs) + self.uefi_settings = uefi_settings + + +class StopVirtualMachineOptions(msrest.serialization.Model): + """Defines the stop action properties. + + :ivar skip_shutdown: Gets or sets a value indicating whether to request non-graceful VM + shutdown. True value for this flag indicates non-graceful shutdown whereas false indicates + otherwise. Defaults to false. + :vartype skip_shutdown: bool + """ + + _attribute_map = { + 'skip_shutdown': {'key': 'skipShutdown', 'type': 'bool'}, + } + + def __init__( + self, + *, + skip_shutdown: Optional[bool] = False, + **kwargs + ): + """ + :keyword skip_shutdown: Gets or sets a value indicating whether to request non-graceful VM + shutdown. True value for this flag indicates non-graceful shutdown whereas false indicates + otherwise. Defaults to false. + :paramtype skip_shutdown: bool + """ + super(StopVirtualMachineOptions, self).__init__(**kwargs) + self.skip_shutdown = skip_shutdown + + +class StorageProfile(msrest.serialization.Model): + """Defines the resource properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar disks: Gets or sets the list of virtual disks associated with the virtual machine. + :vartype disks: list[~azure.mgmt.connectedvmware.models.VirtualDisk] + :ivar scsi_controllers: Gets or sets the list of virtual SCSI controllers associated with the + virtual machine. + :vartype scsi_controllers: list[~azure.mgmt.connectedvmware.models.VirtualSCSIController] + """ + + _validation = { + 'scsi_controllers': {'readonly': True}, + } + + _attribute_map = { + 'disks': {'key': 'disks', 'type': '[VirtualDisk]'}, + 'scsi_controllers': {'key': 'scsiControllers', 'type': '[VirtualSCSIController]'}, + } + + def __init__( + self, + *, + disks: Optional[List["_models.VirtualDisk"]] = None, + **kwargs + ): + """ + :keyword disks: Gets or sets the list of virtual disks associated with the virtual machine. 
+ :paramtype disks: list[~azure.mgmt.connectedvmware.models.VirtualDisk] + """ + super(StorageProfile, self).__init__(**kwargs) + self.disks = disks + self.scsi_controllers = None + + +class StorageProfileUpdate(msrest.serialization.Model): + """Defines the resource update properties. + + :ivar disks: Gets or sets the list of virtual disks associated with the virtual machine. + :vartype disks: list[~azure.mgmt.connectedvmware.models.VirtualDiskUpdate] + """ + + _attribute_map = { + 'disks': {'key': 'disks', 'type': '[VirtualDiskUpdate]'}, + } + + def __init__( + self, + *, + disks: Optional[List["_models.VirtualDiskUpdate"]] = None, + **kwargs + ): + """ + :keyword disks: Gets or sets the list of virtual disks associated with the virtual machine. + :paramtype disks: list[~azure.mgmt.connectedvmware.models.VirtualDiskUpdate] + """ + super(StorageProfileUpdate, self).__init__(**kwargs) + self.disks = disks + + +class SystemData(msrest.serialization.Model): + """Metadata pertaining to creation and last modification of the resource. + + :ivar created_by: The identity that created the resource. + :vartype created_by: str + :ivar created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", "Key". + :vartype created_by_type: str or ~azure.mgmt.connectedvmware.models.CreatedByType + :ivar created_at: The timestamp of resource creation (UTC). + :vartype created_at: ~datetime.datetime + :ivar last_modified_by: The identity that last modified the resource. + :vartype last_modified_by: str + :ivar last_modified_by_type: The type of identity that last modified the resource. Known values + are: "User", "Application", "ManagedIdentity", "Key". + :vartype last_modified_by_type: str or ~azure.mgmt.connectedvmware.models.CreatedByType + :ivar last_modified_at: The timestamp of resource last modification (UTC). + :vartype last_modified_at: ~datetime.datetime + """ + + _attribute_map = { + 'created_by': {'key': 'createdBy', 'type': 'str'}, + 'created_by_type': {'key': 'createdByType', 'type': 'str'}, + 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, + 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, + 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, + 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + } + + def __init__( + self, + *, + created_by: Optional[str] = None, + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, + created_at: Optional[datetime.datetime] = None, + last_modified_by: Optional[str] = None, + last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, + last_modified_at: Optional[datetime.datetime] = None, + **kwargs + ): + """ + :keyword created_by: The identity that created the resource. + :paramtype created_by: str + :keyword created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", "Key". + :paramtype created_by_type: str or ~azure.mgmt.connectedvmware.models.CreatedByType + :keyword created_at: The timestamp of resource creation (UTC). + :paramtype created_at: ~datetime.datetime + :keyword last_modified_by: The identity that last modified the resource. + :paramtype last_modified_by: str + :keyword last_modified_by_type: The type of identity that last modified the resource. Known + values are: "User", "Application", "ManagedIdentity", "Key". 
+ :paramtype last_modified_by_type: str or ~azure.mgmt.connectedvmware.models.CreatedByType + :keyword last_modified_at: The timestamp of resource last modification (UTC). + :paramtype last_modified_at: ~datetime.datetime + """ + super(SystemData, self).__init__(**kwargs) + self.created_by = created_by + self.created_by_type = created_by_type + self.created_at = created_at + self.last_modified_by = last_modified_by + self.last_modified_by_type = last_modified_by_type + self.last_modified_at = last_modified_at + + +class UefiSettings(msrest.serialization.Model): + """Specifies the security settings like secure boot used while creating the virtual machine. + + :ivar secure_boot_enabled: Specifies whether secure boot should be enabled on the virtual + machine. + :vartype secure_boot_enabled: bool + """ + + _attribute_map = { + 'secure_boot_enabled': {'key': 'secureBootEnabled', 'type': 'bool'}, + } + + def __init__( + self, + *, + secure_boot_enabled: Optional[bool] = None, + **kwargs + ): + """ + :keyword secure_boot_enabled: Specifies whether secure boot should be enabled on the virtual + machine. + :paramtype secure_boot_enabled: bool + """ + super(UefiSettings, self).__init__(**kwargs) + self.secure_boot_enabled = secure_boot_enabled + + +class VCenter(msrest.serialization.Model): + """Defines the vCenter. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar location: Required. Gets or sets the location. + :vartype location: str + :ivar extended_location: Gets or sets the extended location. + :vartype extended_location: ~azure.mgmt.connectedvmware.models.ExtendedLocation + :ivar system_data: The system data. + :vartype system_data: ~azure.mgmt.connectedvmware.models.SystemData + :ivar tags: A set of tags. Gets or sets the Resource tags. + :vartype tags: dict[str, str] + :ivar name: Gets or sets the name. + :vartype name: str + :ivar id: Gets or sets the Id. + :vartype id: str + :ivar type: Gets or sets the type of the resource. + :vartype type: str + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, + the resource provider must validate and persist this value. + :vartype kind: str + :ivar uuid: Gets or sets a unique identifier for this resource. + :vartype uuid: str + :ivar fqdn: Required. Gets or sets the FQDN/IPAddress of the vCenter. + :vartype fqdn: str + :ivar port: Gets or sets the port of the vCenter. + :vartype port: int + :ivar version: Gets or sets the version of the vCenter. + :vartype version: str + :ivar instance_uuid: Gets or sets the instance UUID of the vCenter. + :vartype instance_uuid: str + :ivar connection_status: Gets or sets the connection status to the vCenter. + :vartype connection_status: str + :ivar custom_resource_name: Gets the name of the corresponding resource in Kubernetes. + :vartype custom_resource_name: str + :ivar credentials: Username / Password Credentials to connect to vcenter. + :vartype credentials: ~azure.mgmt.connectedvmware.models.VICredential + :ivar statuses: The resource status information. + :vartype statuses: list[~azure.mgmt.connectedvmware.models.ResourceStatus] + :ivar provisioning_state: Gets or sets the provisioning state. 
+ :vartype provisioning_state: str + """ + + _validation = { + 'location': {'required': True}, + 'system_data': {'readonly': True}, + 'name': {'readonly': True}, + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'uuid': {'readonly': True}, + 'fqdn': {'required': True}, + 'port': {'maximum': 65535, 'minimum': 1}, + 'version': {'readonly': True}, + 'instance_uuid': {'readonly': True}, + 'connection_status': {'readonly': True}, + 'custom_resource_name': {'readonly': True}, + 'statuses': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'extended_location': {'key': 'extendedLocation', 'type': 'ExtendedLocation'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'uuid': {'key': 'properties.uuid', 'type': 'str'}, + 'fqdn': {'key': 'properties.fqdn', 'type': 'str'}, + 'port': {'key': 'properties.port', 'type': 'int'}, + 'version': {'key': 'properties.version', 'type': 'str'}, + 'instance_uuid': {'key': 'properties.instanceUuid', 'type': 'str'}, + 'connection_status': {'key': 'properties.connectionStatus', 'type': 'str'}, + 'custom_resource_name': {'key': 'properties.customResourceName', 'type': 'str'}, + 'credentials': {'key': 'properties.credentials', 'type': 'VICredential'}, + 'statuses': {'key': 'properties.statuses', 'type': '[ResourceStatus]'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + } + + def __init__( + self, + *, + location: str, + fqdn: str, + extended_location: Optional["_models.ExtendedLocation"] = None, + tags: Optional[Dict[str, str]] = None, + kind: Optional[str] = None, + port: Optional[int] = None, + credentials: Optional["_models.VICredential"] = None, + **kwargs + ): + """ + :keyword location: Required. Gets or sets the location. + :paramtype location: str + :keyword extended_location: Gets or sets the extended location. + :paramtype extended_location: ~azure.mgmt.connectedvmware.models.ExtendedLocation + :keyword tags: A set of tags. Gets or sets the Resource tags. + :paramtype tags: dict[str, str] + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, + the resource provider must validate and persist this value. + :paramtype kind: str + :keyword fqdn: Required. Gets or sets the FQDN/IPAddress of the vCenter. + :paramtype fqdn: str + :keyword port: Gets or sets the port of the vCenter. + :paramtype port: int + :keyword credentials: Username / Password Credentials to connect to vcenter. + :paramtype credentials: ~azure.mgmt.connectedvmware.models.VICredential + """ + super(VCenter, self).__init__(**kwargs) + self.location = location + self.extended_location = extended_location + self.system_data = None + self.tags = tags + self.name = None + self.id = None + self.type = None + self.kind = kind + self.uuid = None + self.fqdn = fqdn + self.port = port + self.version = None + self.instance_uuid = None + self.connection_status = None + self.custom_resource_name = None + self.credentials = credentials + self.statuses = None + self.provisioning_state = None + + +class VCentersList(msrest.serialization.Model): + """List of VCenters. 
+ + All required parameters must be populated in order to send to Azure. + + :ivar next_link: Url to follow for getting next page of VCenters. + :vartype next_link: str + :ivar value: Required. Array of VCenters. + :vartype value: list[~azure.mgmt.connectedvmware.models.VCenter] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[VCenter]'}, + } + + def __init__( + self, + *, + value: List["_models.VCenter"], + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword next_link: Url to follow for getting next page of VCenters. + :paramtype next_link: str + :keyword value: Required. Array of VCenters. + :paramtype value: list[~azure.mgmt.connectedvmware.models.VCenter] + """ + super(VCentersList, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class VICredential(msrest.serialization.Model): + """Username / Password Credentials to connect to vcenter. + + :ivar username: Gets or sets username to connect with the vCenter. + :vartype username: str + :ivar password: Gets or sets the password to connect with the vCenter. + :vartype password: str + """ + + _attribute_map = { + 'username': {'key': 'username', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'str'}, + } + + def __init__( + self, + *, + username: Optional[str] = None, + password: Optional[str] = None, + **kwargs + ): + """ + :keyword username: Gets or sets username to connect with the vCenter. + :paramtype username: str + :keyword password: Gets or sets the password to connect with the vCenter. + :paramtype password: str + """ + super(VICredential, self).__init__(**kwargs) + self.username = username + self.password = password + + +class VirtualDisk(msrest.serialization.Model): + """Virtual disk model. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Gets or sets the name of the virtual disk. + :vartype name: str + :ivar label: Gets or sets the label of the virtual disk in vCenter. + :vartype label: str + :ivar disk_object_id: Gets or sets the disk object id. + :vartype disk_object_id: str + :ivar disk_size_gb: Gets or sets the disk total size. + :vartype disk_size_gb: int + :ivar device_key: Gets or sets the device key value. + :vartype device_key: int + :ivar disk_mode: Gets or sets the disk mode. Known values are: "persistent", + "independent_persistent", "independent_nonpersistent". + :vartype disk_mode: str or ~azure.mgmt.connectedvmware.models.DiskMode + :ivar controller_key: Gets or sets the controller id. + :vartype controller_key: int + :ivar unit_number: Gets or sets the unit number of the disk on the controller. + :vartype unit_number: int + :ivar device_name: Gets or sets the device name. + :vartype device_name: str + :ivar disk_type: Gets or sets the disk backing type. Known values are: "flat", "pmem", + "rawphysical", "rawvirtual", "sparse", "sesparse", "unknown". 
+ :vartype disk_type: str or ~azure.mgmt.connectedvmware.models.DiskType + """ + + _validation = { + 'label': {'readonly': True}, + 'disk_object_id': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'label': {'key': 'label', 'type': 'str'}, + 'disk_object_id': {'key': 'diskObjectId', 'type': 'str'}, + 'disk_size_gb': {'key': 'diskSizeGB', 'type': 'int'}, + 'device_key': {'key': 'deviceKey', 'type': 'int'}, + 'disk_mode': {'key': 'diskMode', 'type': 'str'}, + 'controller_key': {'key': 'controllerKey', 'type': 'int'}, + 'unit_number': {'key': 'unitNumber', 'type': 'int'}, + 'device_name': {'key': 'deviceName', 'type': 'str'}, + 'disk_type': {'key': 'diskType', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + disk_size_gb: Optional[int] = None, + device_key: Optional[int] = None, + disk_mode: Optional[Union[str, "_models.DiskMode"]] = None, + controller_key: Optional[int] = None, + unit_number: Optional[int] = None, + device_name: Optional[str] = None, + disk_type: Optional[Union[str, "_models.DiskType"]] = None, + **kwargs + ): + """ + :keyword name: Gets or sets the name of the virtual disk. + :paramtype name: str + :keyword disk_size_gb: Gets or sets the disk total size. + :paramtype disk_size_gb: int + :keyword device_key: Gets or sets the device key value. + :paramtype device_key: int + :keyword disk_mode: Gets or sets the disk mode. Known values are: "persistent", + "independent_persistent", "independent_nonpersistent". + :paramtype disk_mode: str or ~azure.mgmt.connectedvmware.models.DiskMode + :keyword controller_key: Gets or sets the controller id. + :paramtype controller_key: int + :keyword unit_number: Gets or sets the unit number of the disk on the controller. + :paramtype unit_number: int + :keyword device_name: Gets or sets the device name. + :paramtype device_name: str + :keyword disk_type: Gets or sets the disk backing type. Known values are: "flat", "pmem", + "rawphysical", "rawvirtual", "sparse", "sesparse", "unknown". + :paramtype disk_type: str or ~azure.mgmt.connectedvmware.models.DiskType + """ + super(VirtualDisk, self).__init__(**kwargs) + self.name = name + self.label = None + self.disk_object_id = None + self.disk_size_gb = disk_size_gb + self.device_key = device_key + self.disk_mode = disk_mode + self.controller_key = controller_key + self.unit_number = unit_number + self.device_name = device_name + self.disk_type = disk_type + + +class VirtualDiskUpdate(msrest.serialization.Model): + """Defines the virtual disk update. + + :ivar name: Gets or sets the name of the virtual disk. + :vartype name: str + :ivar disk_size_gb: Gets or sets the disk total size. + :vartype disk_size_gb: int + :ivar device_key: Gets or sets the device key value. + :vartype device_key: int + :ivar disk_mode: Gets or sets the disk mode. Known values are: "persistent", + "independent_persistent", "independent_nonpersistent". + :vartype disk_mode: str or ~azure.mgmt.connectedvmware.models.DiskMode + :ivar controller_key: Gets or sets the controller id. + :vartype controller_key: int + :ivar unit_number: Gets or sets the unit number of the disk on the controller. + :vartype unit_number: int + :ivar device_name: Gets or sets the device name. + :vartype device_name: str + :ivar disk_type: Gets or sets the disk backing type. Known values are: "flat", "pmem", + "rawphysical", "rawvirtual", "sparse", "sesparse", "unknown". 
+ :vartype disk_type: str or ~azure.mgmt.connectedvmware.models.DiskType + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'disk_size_gb': {'key': 'diskSizeGB', 'type': 'int'}, + 'device_key': {'key': 'deviceKey', 'type': 'int'}, + 'disk_mode': {'key': 'diskMode', 'type': 'str'}, + 'controller_key': {'key': 'controllerKey', 'type': 'int'}, + 'unit_number': {'key': 'unitNumber', 'type': 'int'}, + 'device_name': {'key': 'deviceName', 'type': 'str'}, + 'disk_type': {'key': 'diskType', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + disk_size_gb: Optional[int] = None, + device_key: Optional[int] = None, + disk_mode: Optional[Union[str, "_models.DiskMode"]] = None, + controller_key: Optional[int] = None, + unit_number: Optional[int] = None, + device_name: Optional[str] = None, + disk_type: Optional[Union[str, "_models.DiskType"]] = None, + **kwargs + ): + """ + :keyword name: Gets or sets the name of the virtual disk. + :paramtype name: str + :keyword disk_size_gb: Gets or sets the disk total size. + :paramtype disk_size_gb: int + :keyword device_key: Gets or sets the device key value. + :paramtype device_key: int + :keyword disk_mode: Gets or sets the disk mode. Known values are: "persistent", + "independent_persistent", "independent_nonpersistent". + :paramtype disk_mode: str or ~azure.mgmt.connectedvmware.models.DiskMode + :keyword controller_key: Gets or sets the controller id. + :paramtype controller_key: int + :keyword unit_number: Gets or sets the unit number of the disk on the controller. + :paramtype unit_number: int + :keyword device_name: Gets or sets the device name. + :paramtype device_name: str + :keyword disk_type: Gets or sets the disk backing type. Known values are: "flat", "pmem", + "rawphysical", "rawvirtual", "sparse", "sesparse", "unknown". + :paramtype disk_type: str or ~azure.mgmt.connectedvmware.models.DiskType + """ + super(VirtualDiskUpdate, self).__init__(**kwargs) + self.name = name + self.disk_size_gb = disk_size_gb + self.device_key = device_key + self.disk_mode = disk_mode + self.controller_key = controller_key + self.unit_number = unit_number + self.device_name = device_name + self.disk_type = disk_type + + +class VirtualMachine(msrest.serialization.Model): + """Define the virtualMachine. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar location: Required. Gets or sets the location. + :vartype location: str + :ivar extended_location: Gets or sets the extended location. + :vartype extended_location: ~azure.mgmt.connectedvmware.models.ExtendedLocation + :ivar system_data: The system data. + :vartype system_data: ~azure.mgmt.connectedvmware.models.SystemData + :ivar tags: A set of tags. Gets or sets the Resource tags. + :vartype tags: dict[str, str] + :ivar name: Gets or sets the name. + :vartype name: str + :ivar id: Gets or sets the Id. + :vartype id: str + :ivar type: Gets or sets the type of the resource. + :vartype type: str + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, + the resource provider must validate and persist this value. + :vartype kind: str + :ivar identity: The identity of the resource. 
+ :vartype identity: ~azure.mgmt.connectedvmware.models.Identity + :ivar resource_pool_id: Gets or sets the ARM Id of the resourcePool resource on which this + virtual machine will + deploy. + :vartype resource_pool_id: str + :ivar template_id: Gets or sets the ARM Id of the template resource to deploy the virtual + machine. + :vartype template_id: str + :ivar v_center_id: Gets or sets the ARM Id of the vCenter resource in which this resource pool + resides. + :vartype v_center_id: str + :ivar placement_profile: Placement properties. + :vartype placement_profile: ~azure.mgmt.connectedvmware.models.PlacementProfile + :ivar os_profile: OS properties. + :vartype os_profile: ~azure.mgmt.connectedvmware.models.OsProfile + :ivar hardware_profile: Hardware properties. + :vartype hardware_profile: ~azure.mgmt.connectedvmware.models.HardwareProfile + :ivar network_profile: Network properties. + :vartype network_profile: ~azure.mgmt.connectedvmware.models.NetworkProfile + :ivar storage_profile: Storage properties. + :vartype storage_profile: ~azure.mgmt.connectedvmware.models.StorageProfile + :ivar guest_agent_profile: Guest agent status properties. + :vartype guest_agent_profile: ~azure.mgmt.connectedvmware.models.GuestAgentProfile + :ivar security_profile: Gets the security profile. + :vartype security_profile: ~azure.mgmt.connectedvmware.models.SecurityProfile + :ivar mo_ref_id: Gets or sets the vCenter MoRef (Managed Object Reference) ID for the virtual + machine. + :vartype mo_ref_id: str + :ivar inventory_item_id: Gets or sets the inventory Item ID for the virtual machine. + :vartype inventory_item_id: str + :ivar mo_name: Gets or sets the vCenter Managed Object name for the virtual machine. + :vartype mo_name: str + :ivar folder_path: Gets or sets the folder path of the vm. + :vartype folder_path: str + :ivar instance_uuid: Gets or sets the instance uuid of the vm. + :vartype instance_uuid: str + :ivar smbios_uuid: Gets or sets the SMBIOS UUID of the vm. + :vartype smbios_uuid: str + :ivar firmware_type: Firmware type. Known values are: "bios", "efi". + :vartype firmware_type: str or ~azure.mgmt.connectedvmware.models.FirmwareType + :ivar power_state: Gets the power state of the virtual machine. + :vartype power_state: str + :ivar custom_resource_name: Gets the name of the corresponding resource in Kubernetes. + :vartype custom_resource_name: str + :ivar uuid: Gets or sets a unique identifier for this resource. + :vartype uuid: str + :ivar statuses: The resource status information. + :vartype statuses: list[~azure.mgmt.connectedvmware.models.ResourceStatus] + :ivar provisioning_state: Gets or sets the provisioning state. + :vartype provisioning_state: str + :ivar vm_id: Gets or sets a unique identifier for the vm resource. 
+ :vartype vm_id: str + """ + + _validation = { + 'location': {'required': True}, + 'system_data': {'readonly': True}, + 'name': {'readonly': True}, + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'mo_name': {'readonly': True}, + 'folder_path': {'readonly': True}, + 'instance_uuid': {'readonly': True}, + 'power_state': {'readonly': True}, + 'custom_resource_name': {'readonly': True}, + 'uuid': {'readonly': True}, + 'statuses': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'vm_id': {'readonly': True}, + } + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'extended_location': {'key': 'extendedLocation', 'type': 'ExtendedLocation'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'resource_pool_id': {'key': 'properties.resourcePoolId', 'type': 'str'}, + 'template_id': {'key': 'properties.templateId', 'type': 'str'}, + 'v_center_id': {'key': 'properties.vCenterId', 'type': 'str'}, + 'placement_profile': {'key': 'properties.placementProfile', 'type': 'PlacementProfile'}, + 'os_profile': {'key': 'properties.osProfile', 'type': 'OsProfile'}, + 'hardware_profile': {'key': 'properties.hardwareProfile', 'type': 'HardwareProfile'}, + 'network_profile': {'key': 'properties.networkProfile', 'type': 'NetworkProfile'}, + 'storage_profile': {'key': 'properties.storageProfile', 'type': 'StorageProfile'}, + 'guest_agent_profile': {'key': 'properties.guestAgentProfile', 'type': 'GuestAgentProfile'}, + 'security_profile': {'key': 'properties.securityProfile', 'type': 'SecurityProfile'}, + 'mo_ref_id': {'key': 'properties.moRefId', 'type': 'str'}, + 'inventory_item_id': {'key': 'properties.inventoryItemId', 'type': 'str'}, + 'mo_name': {'key': 'properties.moName', 'type': 'str'}, + 'folder_path': {'key': 'properties.folderPath', 'type': 'str'}, + 'instance_uuid': {'key': 'properties.instanceUuid', 'type': 'str'}, + 'smbios_uuid': {'key': 'properties.smbiosUuid', 'type': 'str'}, + 'firmware_type': {'key': 'properties.firmwareType', 'type': 'str'}, + 'power_state': {'key': 'properties.powerState', 'type': 'str'}, + 'custom_resource_name': {'key': 'properties.customResourceName', 'type': 'str'}, + 'uuid': {'key': 'properties.uuid', 'type': 'str'}, + 'statuses': {'key': 'properties.statuses', 'type': '[ResourceStatus]'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'vm_id': {'key': 'properties.vmId', 'type': 'str'}, + } + + def __init__( + self, + *, + location: str, + extended_location: Optional["_models.ExtendedLocation"] = None, + tags: Optional[Dict[str, str]] = None, + kind: Optional[str] = None, + identity: Optional["_models.Identity"] = None, + resource_pool_id: Optional[str] = None, + template_id: Optional[str] = None, + v_center_id: Optional[str] = None, + placement_profile: Optional["_models.PlacementProfile"] = None, + os_profile: Optional["_models.OsProfile"] = None, + hardware_profile: Optional["_models.HardwareProfile"] = None, + network_profile: Optional["_models.NetworkProfile"] = None, + storage_profile: Optional["_models.StorageProfile"] = None, + guest_agent_profile: Optional["_models.GuestAgentProfile"] = None, + security_profile: Optional["_models.SecurityProfile"] = None, + mo_ref_id: Optional[str] = None, + inventory_item_id: 
Optional[str] = None, + smbios_uuid: Optional[str] = None, + firmware_type: Optional[Union[str, "_models.FirmwareType"]] = None, + **kwargs + ): + """ + :keyword location: Required. Gets or sets the location. + :paramtype location: str + :keyword extended_location: Gets or sets the extended location. + :paramtype extended_location: ~azure.mgmt.connectedvmware.models.ExtendedLocation + :keyword tags: A set of tags. Gets or sets the Resource tags. + :paramtype tags: dict[str, str] + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, + the resource provider must validate and persist this value. + :paramtype kind: str + :keyword identity: The identity of the resource. + :paramtype identity: ~azure.mgmt.connectedvmware.models.Identity + :keyword resource_pool_id: Gets or sets the ARM Id of the resourcePool resource on which this + virtual machine will + deploy. + :paramtype resource_pool_id: str + :keyword template_id: Gets or sets the ARM Id of the template resource to deploy the virtual + machine. + :paramtype template_id: str + :keyword v_center_id: Gets or sets the ARM Id of the vCenter resource in which this resource + pool resides. + :paramtype v_center_id: str + :keyword placement_profile: Placement properties. + :paramtype placement_profile: ~azure.mgmt.connectedvmware.models.PlacementProfile + :keyword os_profile: OS properties. + :paramtype os_profile: ~azure.mgmt.connectedvmware.models.OsProfile + :keyword hardware_profile: Hardware properties. + :paramtype hardware_profile: ~azure.mgmt.connectedvmware.models.HardwareProfile + :keyword network_profile: Network properties. + :paramtype network_profile: ~azure.mgmt.connectedvmware.models.NetworkProfile + :keyword storage_profile: Storage properties. + :paramtype storage_profile: ~azure.mgmt.connectedvmware.models.StorageProfile + :keyword guest_agent_profile: Guest agent status properties. + :paramtype guest_agent_profile: ~azure.mgmt.connectedvmware.models.GuestAgentProfile + :keyword security_profile: Gets the security profile. + :paramtype security_profile: ~azure.mgmt.connectedvmware.models.SecurityProfile + :keyword mo_ref_id: Gets or sets the vCenter MoRef (Managed Object Reference) ID for the + virtual machine. + :paramtype mo_ref_id: str + :keyword inventory_item_id: Gets or sets the inventory Item ID for the virtual machine. + :paramtype inventory_item_id: str + :keyword smbios_uuid: Gets or sets the SMBIOS UUID of the vm. + :paramtype smbios_uuid: str + :keyword firmware_type: Firmware type. Known values are: "bios", "efi". 
+ :paramtype firmware_type: str or ~azure.mgmt.connectedvmware.models.FirmwareType + """ + super(VirtualMachine, self).__init__(**kwargs) + self.location = location + self.extended_location = extended_location + self.system_data = None + self.tags = tags + self.name = None + self.id = None + self.type = None + self.kind = kind + self.identity = identity + self.resource_pool_id = resource_pool_id + self.template_id = template_id + self.v_center_id = v_center_id + self.placement_profile = placement_profile + self.os_profile = os_profile + self.hardware_profile = hardware_profile + self.network_profile = network_profile + self.storage_profile = storage_profile + self.guest_agent_profile = guest_agent_profile + self.security_profile = security_profile + self.mo_ref_id = mo_ref_id + self.inventory_item_id = inventory_item_id + self.mo_name = None + self.folder_path = None + self.instance_uuid = None + self.smbios_uuid = smbios_uuid + self.firmware_type = firmware_type + self.power_state = None + self.custom_resource_name = None + self.uuid = None + self.statuses = None + self.provisioning_state = None + self.vm_id = None + + +class VirtualMachineAssessPatchesResult(msrest.serialization.Model): + """Describes the properties of an AssessPatches result. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar status: The overall success or failure status of the operation. It remains "InProgress" + until the operation completes. At that point it will become "Unknown", "Failed", "Succeeded", + or "CompletedWithWarnings.". Known values are: "Unknown", "InProgress", "Failed", "Succeeded", + "CompletedWithWarnings". + :vartype status: str or ~azure.mgmt.connectedvmware.models.PatchOperationStatus + :ivar assessment_activity_id: The activity ID of the operation that produced this result. + :vartype assessment_activity_id: str + :ivar reboot_pending: The overall reboot status of the VM. It will be true when partially + installed patches require a reboot to complete installation but the reboot has not yet + occurred. + :vartype reboot_pending: bool + :ivar available_patch_count_by_classification: Summarization of patches available for + installation on the machine by classification. + :vartype available_patch_count_by_classification: + ~azure.mgmt.connectedvmware.models.AvailablePatchCountByClassification + :ivar start_date_time: The UTC timestamp when the operation began. + :vartype start_date_time: ~datetime.datetime + :ivar last_modified_date_time: The UTC timestamp when the operation finished. + :vartype last_modified_date_time: ~datetime.datetime + :ivar started_by: Indicates if operation was triggered by user or by platform. Known values + are: "User", "Platform". + :vartype started_by: str or ~azure.mgmt.connectedvmware.models.PatchOperationStartedBy + :ivar patch_service_used: Specifies the patch service used for the operation. Known values are: + "Unknown", "WU", "WU_WSUS", "YUM", "APT", "Zypper". + :vartype patch_service_used: str or ~azure.mgmt.connectedvmware.models.PatchServiceUsed + :ivar os_type: The operating system type of the machine. Known values are: "Windows", "Linux". + :vartype os_type: str or ~azure.mgmt.connectedvmware.models.OsTypeUM + :ivar error_details: The errors that were encountered during execution of the operation. The + details array contains the list of them. 
+ :vartype error_details: ~azure.mgmt.connectedvmware.models.ErrorDetail + """ + + _validation = { + 'status': {'readonly': True}, + 'assessment_activity_id': {'readonly': True}, + 'reboot_pending': {'readonly': True}, + 'start_date_time': {'readonly': True}, + 'last_modified_date_time': {'readonly': True}, + 'started_by': {'readonly': True}, + 'patch_service_used': {'readonly': True}, + 'os_type': {'readonly': True}, + 'error_details': {'readonly': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'assessment_activity_id': {'key': 'assessmentActivityId', 'type': 'str'}, + 'reboot_pending': {'key': 'rebootPending', 'type': 'bool'}, + 'available_patch_count_by_classification': {'key': 'availablePatchCountByClassification', 'type': 'AvailablePatchCountByClassification'}, + 'start_date_time': {'key': 'startDateTime', 'type': 'iso-8601'}, + 'last_modified_date_time': {'key': 'lastModifiedDateTime', 'type': 'iso-8601'}, + 'started_by': {'key': 'startedBy', 'type': 'str'}, + 'patch_service_used': {'key': 'patchServiceUsed', 'type': 'str'}, + 'os_type': {'key': 'osType', 'type': 'str'}, + 'error_details': {'key': 'errorDetails', 'type': 'ErrorDetail'}, + } + + def __init__( + self, + *, + available_patch_count_by_classification: Optional["_models.AvailablePatchCountByClassification"] = None, + **kwargs + ): + """ + :keyword available_patch_count_by_classification: Summarization of patches available for + installation on the machine by classification. + :paramtype available_patch_count_by_classification: + ~azure.mgmt.connectedvmware.models.AvailablePatchCountByClassification + """ + super(VirtualMachineAssessPatchesResult, self).__init__(**kwargs) + self.status = None + self.assessment_activity_id = None + self.reboot_pending = None + self.available_patch_count_by_classification = available_patch_count_by_classification + self.start_date_time = None + self.last_modified_date_time = None + self.started_by = None + self.patch_service_used = None + self.os_type = None + self.error_details = None + + +class VirtualMachineInstallPatchesParameters(msrest.serialization.Model): + """Input for InstallPatches as directly received by the API. + + All required parameters must be populated in order to send to Azure. + + :ivar maximum_duration: Required. Specifies the maximum amount of time that the operation will + run. It must be an ISO 8601-compliant duration string such as PT4H (4 hours). + :vartype maximum_duration: str + :ivar reboot_setting: Required. Defines when it is acceptable to reboot a VM during a software + update operation. Known values are: "IfRequired", "Never", "Always". + :vartype reboot_setting: str or ~azure.mgmt.connectedvmware.models.VMGuestPatchRebootSetting + :ivar windows_parameters: Input for InstallPatches on a Windows VM, as directly received by the + API. + :vartype windows_parameters: ~azure.mgmt.connectedvmware.models.WindowsParameters + :ivar linux_parameters: Input for InstallPatches on a Linux VM, as directly received by the + API. 
+ :vartype linux_parameters: ~azure.mgmt.connectedvmware.models.LinuxParameters + """ + + _validation = { + 'maximum_duration': {'required': True}, + 'reboot_setting': {'required': True}, + } + + _attribute_map = { + 'maximum_duration': {'key': 'maximumDuration', 'type': 'str'}, + 'reboot_setting': {'key': 'rebootSetting', 'type': 'str'}, + 'windows_parameters': {'key': 'windowsParameters', 'type': 'WindowsParameters'}, + 'linux_parameters': {'key': 'linuxParameters', 'type': 'LinuxParameters'}, + } + + def __init__( + self, + *, + maximum_duration: str, + reboot_setting: Union[str, "_models.VMGuestPatchRebootSetting"], + windows_parameters: Optional["_models.WindowsParameters"] = None, + linux_parameters: Optional["_models.LinuxParameters"] = None, + **kwargs + ): + """ + :keyword maximum_duration: Required. Specifies the maximum amount of time that the operation + will run. It must be an ISO 8601-compliant duration string such as PT4H (4 hours). + :paramtype maximum_duration: str + :keyword reboot_setting: Required. Defines when it is acceptable to reboot a VM during a + software update operation. Known values are: "IfRequired", "Never", "Always". + :paramtype reboot_setting: str or ~azure.mgmt.connectedvmware.models.VMGuestPatchRebootSetting + :keyword windows_parameters: Input for InstallPatches on a Windows VM, as directly received by + the API. + :paramtype windows_parameters: ~azure.mgmt.connectedvmware.models.WindowsParameters + :keyword linux_parameters: Input for InstallPatches on a Linux VM, as directly received by the + API. + :paramtype linux_parameters: ~azure.mgmt.connectedvmware.models.LinuxParameters + """ + super(VirtualMachineInstallPatchesParameters, self).__init__(**kwargs) + self.maximum_duration = maximum_duration + self.reboot_setting = reboot_setting + self.windows_parameters = windows_parameters + self.linux_parameters = linux_parameters + + +class VirtualMachineInstallPatchesResult(msrest.serialization.Model): + """The result summary of an installation operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar status: The overall success or failure status of the operation. It remains "InProgress" + until the operation completes. At that point it will become "Failed", "Succeeded", "Unknown" or + "CompletedWithWarnings.". Known values are: "Unknown", "InProgress", "Failed", "Succeeded", + "CompletedWithWarnings". + :vartype status: str or ~azure.mgmt.connectedvmware.models.PatchOperationStatus + :ivar installation_activity_id: The activity ID of the operation that produced this result. + :vartype installation_activity_id: str + :ivar reboot_status: The reboot state of the VM following completion of the operation. Known + values are: "Unknown", "NotNeeded", "Required", "Started", "Failed", "Completed". + :vartype reboot_status: str or ~azure.mgmt.connectedvmware.models.VMGuestPatchRebootStatus + :ivar maintenance_window_exceeded: Whether the operation ran out of time before it completed + all its intended actions. + :vartype maintenance_window_exceeded: bool + :ivar excluded_patch_count: The number of patches that were not installed due to the user + blocking their installation. + :vartype excluded_patch_count: int + :ivar not_selected_patch_count: The number of patches that were detected as available for + install, but did not meet the operation's criteria. 
+ :vartype not_selected_patch_count: int + :ivar pending_patch_count: The number of patches that were identified as meeting the + installation criteria, but were not able to be installed. Typically this happens when + maintenanceWindowExceeded == true. + :vartype pending_patch_count: int + :ivar installed_patch_count: The number of patches successfully installed. + :vartype installed_patch_count: int + :ivar failed_patch_count: The number of patches that could not be installed due to some issue. + See errors for details. + :vartype failed_patch_count: int + :ivar start_date_time: The UTC timestamp when the operation began. + :vartype start_date_time: ~datetime.datetime + :ivar last_modified_date_time: The UTC timestamp when the operation finished. + :vartype last_modified_date_time: ~datetime.datetime + :ivar started_by: Indicates if operation was triggered by user or by platform. Known values + are: "User", "Platform". + :vartype started_by: str or ~azure.mgmt.connectedvmware.models.PatchOperationStartedBy + :ivar patch_service_used: Specifies the patch service used for the operation. Known values are: + "Unknown", "WU", "WU_WSUS", "YUM", "APT", "Zypper". + :vartype patch_service_used: str or ~azure.mgmt.connectedvmware.models.PatchServiceUsed + :ivar os_type: The operating system type of the machine. Known values are: "Windows", "Linux". + :vartype os_type: str or ~azure.mgmt.connectedvmware.models.OsTypeUM + :ivar error_details: The errors that were encountered during execution of the operation. The + details array contains the list of them. + :vartype error_details: ~azure.mgmt.connectedvmware.models.ErrorDetail + """ + + _validation = { + 'status': {'readonly': True}, + 'installation_activity_id': {'readonly': True}, + 'reboot_status': {'readonly': True}, + 'maintenance_window_exceeded': {'readonly': True}, + 'excluded_patch_count': {'readonly': True}, + 'not_selected_patch_count': {'readonly': True}, + 'pending_patch_count': {'readonly': True}, + 'installed_patch_count': {'readonly': True}, + 'failed_patch_count': {'readonly': True}, + 'start_date_time': {'readonly': True}, + 'last_modified_date_time': {'readonly': True}, + 'started_by': {'readonly': True}, + 'patch_service_used': {'readonly': True}, + 'os_type': {'readonly': True}, + 'error_details': {'readonly': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'installation_activity_id': {'key': 'installationActivityId', 'type': 'str'}, + 'reboot_status': {'key': 'rebootStatus', 'type': 'str'}, + 'maintenance_window_exceeded': {'key': 'maintenanceWindowExceeded', 'type': 'bool'}, + 'excluded_patch_count': {'key': 'excludedPatchCount', 'type': 'int'}, + 'not_selected_patch_count': {'key': 'notSelectedPatchCount', 'type': 'int'}, + 'pending_patch_count': {'key': 'pendingPatchCount', 'type': 'int'}, + 'installed_patch_count': {'key': 'installedPatchCount', 'type': 'int'}, + 'failed_patch_count': {'key': 'failedPatchCount', 'type': 'int'}, + 'start_date_time': {'key': 'startDateTime', 'type': 'iso-8601'}, + 'last_modified_date_time': {'key': 'lastModifiedDateTime', 'type': 'iso-8601'}, + 'started_by': {'key': 'startedBy', 'type': 'str'}, + 'patch_service_used': {'key': 'patchServiceUsed', 'type': 'str'}, + 'os_type': {'key': 'osType', 'type': 'str'}, + 'error_details': {'key': 'errorDetails', 'type': 'ErrorDetail'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(VirtualMachineInstallPatchesResult, self).__init__(**kwargs) + self.status = None + self.installation_activity_id = None + 
self.reboot_status = None
+ self.maintenance_window_exceeded = None
+ self.excluded_patch_count = None
+ self.not_selected_patch_count = None
+ self.pending_patch_count = None
+ self.installed_patch_count = None
+ self.failed_patch_count = None
+ self.start_date_time = None
+ self.last_modified_date_time = None
+ self.started_by = None
+ self.patch_service_used = None
+ self.os_type = None
+ self.error_details = None
+
+
+class VirtualMachineInventoryItem(InventoryItemProperties):
+ """The VM inventory item.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar inventory_type: Required. The inventory type. Constant filled by server. Known values
+ are: "ResourcePool", "VirtualMachine", "VirtualMachineTemplate", "VirtualNetwork", "Cluster",
+ "Datastore", "Host".
+ :vartype inventory_type: str or ~azure.mgmt.connectedvmware.models.InventoryType
+ :ivar managed_resource_id: Gets or sets the tracked resource id corresponding to the inventory
+ resource.
+ :vartype managed_resource_id: str
+ :ivar mo_ref_id: Gets or sets the MoRef (Managed Object Reference) ID for the inventory item.
+ :vartype mo_ref_id: str
+ :ivar mo_name: Gets or sets the vCenter Managed Object name for the inventory item.
+ :vartype mo_name: str
+ :ivar provisioning_state: Gets or sets the provisioning state.
+ :vartype provisioning_state: str
+ :ivar os_type: Gets or sets the type of the os. Known values are: "Windows", "Linux", "Other".
+ :vartype os_type: str or ~azure.mgmt.connectedvmware.models.OsType
+ :ivar os_name: Gets or sets os name.
+ :vartype os_name: str
+ :ivar ip_addresses: Gets or sets the nic ip addresses.
+ :vartype ip_addresses: list[str]
+ :ivar folder_path: Gets or sets the folder path of the vm.
+ :vartype folder_path: str
+ :ivar host: Host inventory resource details.
+ :vartype host: ~azure.mgmt.connectedvmware.models.InventoryItemDetails
+ :ivar resource_pool: ResourcePool inventory resource details.
+ :vartype resource_pool: ~azure.mgmt.connectedvmware.models.InventoryItemDetails
+ :ivar instance_uuid: Gets or sets the instance uuid of the vm.
+ :vartype instance_uuid: str
+ :ivar smbios_uuid: Gets or sets the SMBIOS UUID of the vm.
+ :vartype smbios_uuid: str
+ :ivar power_state: Gets the power state of the virtual machine.
+ :vartype power_state: str
+ :ivar tools_running_status: Gets or sets the current running status of VMware Tools running in
+ the guest operating system.
+ :vartype tools_running_status: str
+ :ivar tools_version_status: Gets or sets the current version status of VMware Tools installed
+ in the guest operating system.
+ :vartype tools_version_status: str
+ :ivar tools_version: Gets or sets the current version of VMware Tools.
+ :vartype tools_version: str + """ + + _validation = { + 'inventory_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'power_state': {'readonly': True}, + 'tools_running_status': {'readonly': True}, + 'tools_version_status': {'readonly': True}, + 'tools_version': {'readonly': True}, + } + + _attribute_map = { + 'inventory_type': {'key': 'inventoryType', 'type': 'str'}, + 'managed_resource_id': {'key': 'managedResourceId', 'type': 'str'}, + 'mo_ref_id': {'key': 'moRefId', 'type': 'str'}, + 'mo_name': {'key': 'moName', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'os_type': {'key': 'osType', 'type': 'str'}, + 'os_name': {'key': 'osName', 'type': 'str'}, + 'ip_addresses': {'key': 'ipAddresses', 'type': '[str]'}, + 'folder_path': {'key': 'folderPath', 'type': 'str'}, + 'host': {'key': 'host', 'type': 'InventoryItemDetails'}, + 'resource_pool': {'key': 'resourcePool', 'type': 'InventoryItemDetails'}, + 'instance_uuid': {'key': 'instanceUuid', 'type': 'str'}, + 'smbios_uuid': {'key': 'smbiosUuid', 'type': 'str'}, + 'power_state': {'key': 'powerState', 'type': 'str'}, + 'tools_running_status': {'key': 'toolsRunningStatus', 'type': 'str'}, + 'tools_version_status': {'key': 'toolsVersionStatus', 'type': 'str'}, + 'tools_version': {'key': 'toolsVersion', 'type': 'str'}, + } + + def __init__( + self, + *, + managed_resource_id: Optional[str] = None, + mo_ref_id: Optional[str] = None, + mo_name: Optional[str] = None, + os_type: Optional[Union[str, "_models.OsType"]] = None, + os_name: Optional[str] = None, + ip_addresses: Optional[List[str]] = None, + folder_path: Optional[str] = None, + host: Optional["_models.InventoryItemDetails"] = None, + resource_pool: Optional["_models.InventoryItemDetails"] = None, + instance_uuid: Optional[str] = None, + smbios_uuid: Optional[str] = None, + **kwargs + ): + """ + :keyword managed_resource_id: Gets or sets the tracked resource id corresponding to the + inventory resource. + :paramtype managed_resource_id: str + :keyword mo_ref_id: Gets or sets the MoRef (Managed Object Reference) ID for the inventory + item. + :paramtype mo_ref_id: str + :keyword mo_name: Gets or sets the vCenter Managed Object name for the inventory item. + :paramtype mo_name: str + :keyword os_type: Gets or sets the type of the os. Known values are: "Windows", "Linux", + "Other". + :paramtype os_type: str or ~azure.mgmt.connectedvmware.models.OsType + :keyword os_name: Gets or sets os name. + :paramtype os_name: str + :keyword ip_addresses: Gets or sets the nic ip addresses. + :paramtype ip_addresses: list[str] + :keyword folder_path: Gets or sets the folder path of the vm. + :paramtype folder_path: str + :keyword host: Host inventory resource details. + :paramtype host: ~azure.mgmt.connectedvmware.models.InventoryItemDetails + :keyword resource_pool: ResourcePool inventory resource details. + :paramtype resource_pool: ~azure.mgmt.connectedvmware.models.InventoryItemDetails + :keyword instance_uuid: Gets or sets the instance uuid of the vm. + :paramtype instance_uuid: str + :keyword smbios_uuid: Gets or sets the SMBIOS UUID of the vm. 
+ :paramtype smbios_uuid: str + """ + super(VirtualMachineInventoryItem, self).__init__(managed_resource_id=managed_resource_id, mo_ref_id=mo_ref_id, mo_name=mo_name, **kwargs) + self.inventory_type = 'VirtualMachine' # type: str + self.os_type = os_type + self.os_name = os_name + self.ip_addresses = ip_addresses + self.folder_path = folder_path + self.host = host + self.resource_pool = resource_pool + self.instance_uuid = instance_uuid + self.smbios_uuid = smbios_uuid + self.power_state = None + self.tools_running_status = None + self.tools_version_status = None + self.tools_version = None + + +class VirtualMachinesList(msrest.serialization.Model): + """List of VirtualMachines. + + All required parameters must be populated in order to send to Azure. + + :ivar next_link: Url to follow for getting next page of VirtualMachines. + :vartype next_link: str + :ivar value: Required. Array of VirtualMachines. + :vartype value: list[~azure.mgmt.connectedvmware.models.VirtualMachine] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[VirtualMachine]'}, + } + + def __init__( + self, + *, + value: List["_models.VirtualMachine"], + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword next_link: Url to follow for getting next page of VirtualMachines. + :paramtype next_link: str + :keyword value: Required. Array of VirtualMachines. + :paramtype value: list[~azure.mgmt.connectedvmware.models.VirtualMachine] + """ + super(VirtualMachinesList, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class VirtualMachineTemplate(msrest.serialization.Model): + """Define the virtualMachineTemplate. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar location: Required. Gets or sets the location. + :vartype location: str + :ivar extended_location: Gets or sets the extended location. + :vartype extended_location: ~azure.mgmt.connectedvmware.models.ExtendedLocation + :ivar system_data: The system data. + :vartype system_data: ~azure.mgmt.connectedvmware.models.SystemData + :ivar tags: A set of tags. Gets or sets the Resource tags. + :vartype tags: dict[str, str] + :ivar name: Gets or sets the name. + :vartype name: str + :ivar id: Gets or sets the Id. + :vartype id: str + :ivar type: Gets or sets the type of the resource. + :vartype type: str + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, + the resource provider must validate and persist this value. + :vartype kind: str + :ivar uuid: Gets or sets a unique identifier for this resource. + :vartype uuid: str + :ivar v_center_id: Gets or sets the ARM Id of the vCenter resource in which this template + resides. + :vartype v_center_id: str + :ivar mo_ref_id: Gets or sets the vCenter MoRef (Managed Object Reference) ID for the virtual + machine + template. + :vartype mo_ref_id: str + :ivar inventory_item_id: Gets or sets the inventory Item ID for the virtual machine template. + :vartype inventory_item_id: str + :ivar mo_name: Gets or sets the vCenter Managed Object name for the virtual machine template. + :vartype mo_name: str + :ivar memory_size_mb: Gets or sets memory size in MBs for the template. 
+ :vartype memory_size_mb: int + :ivar num_cp_us: Gets or sets the number of vCPUs for the template. + :vartype num_cp_us: int + :ivar num_cores_per_socket: Gets or sets the number of cores per socket for the template. + Defaults to 1 if unspecified. + :vartype num_cores_per_socket: int + :ivar os_type: Gets or sets the type of the os. Known values are: "Windows", "Linux", "Other". + :vartype os_type: str or ~azure.mgmt.connectedvmware.models.OsType + :ivar os_name: Gets or sets os name. + :vartype os_name: str + :ivar folder_path: Gets or sets the folder path of the template. + :vartype folder_path: str + :ivar network_interfaces: Gets or sets the network interfaces of the template. + :vartype network_interfaces: list[~azure.mgmt.connectedvmware.models.NetworkInterface] + :ivar disks: Gets or sets the disks the template. + :vartype disks: list[~azure.mgmt.connectedvmware.models.VirtualDisk] + :ivar custom_resource_name: Gets the name of the corresponding resource in Kubernetes. + :vartype custom_resource_name: str + :ivar tools_version_status: Gets or sets the current version status of VMware Tools installed + in the guest operating system. + :vartype tools_version_status: str + :ivar tools_version: Gets or sets the current version of VMware Tools. + :vartype tools_version: str + :ivar firmware_type: Firmware type. Known values are: "bios", "efi". + :vartype firmware_type: str or ~azure.mgmt.connectedvmware.models.FirmwareType + :ivar statuses: The resource status information. + :vartype statuses: list[~azure.mgmt.connectedvmware.models.ResourceStatus] + :ivar provisioning_state: Gets or sets the provisioning state. + :vartype provisioning_state: str + """ + + _validation = { + 'location': {'required': True}, + 'system_data': {'readonly': True}, + 'name': {'readonly': True}, + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'uuid': {'readonly': True}, + 'mo_name': {'readonly': True}, + 'memory_size_mb': {'readonly': True}, + 'num_cp_us': {'readonly': True}, + 'num_cores_per_socket': {'readonly': True}, + 'os_type': {'readonly': True}, + 'os_name': {'readonly': True}, + 'folder_path': {'readonly': True}, + 'network_interfaces': {'readonly': True}, + 'disks': {'readonly': True}, + 'custom_resource_name': {'readonly': True}, + 'tools_version_status': {'readonly': True}, + 'tools_version': {'readonly': True}, + 'firmware_type': {'readonly': True}, + 'statuses': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'extended_location': {'key': 'extendedLocation', 'type': 'ExtendedLocation'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'uuid': {'key': 'properties.uuid', 'type': 'str'}, + 'v_center_id': {'key': 'properties.vCenterId', 'type': 'str'}, + 'mo_ref_id': {'key': 'properties.moRefId', 'type': 'str'}, + 'inventory_item_id': {'key': 'properties.inventoryItemId', 'type': 'str'}, + 'mo_name': {'key': 'properties.moName', 'type': 'str'}, + 'memory_size_mb': {'key': 'properties.memorySizeMB', 'type': 'int'}, + 'num_cp_us': {'key': 'properties.numCPUs', 'type': 'int'}, + 'num_cores_per_socket': {'key': 'properties.numCoresPerSocket', 'type': 'int'}, + 'os_type': {'key': 'properties.osType', 'type': 'str'}, + 'os_name': {'key': 'properties.osName', 'type': 'str'}, + 
'folder_path': {'key': 'properties.folderPath', 'type': 'str'},
+ 'network_interfaces': {'key': 'properties.networkInterfaces', 'type': '[NetworkInterface]'},
+ 'disks': {'key': 'properties.disks', 'type': '[VirtualDisk]'},
+ 'custom_resource_name': {'key': 'properties.customResourceName', 'type': 'str'},
+ 'tools_version_status': {'key': 'properties.toolsVersionStatus', 'type': 'str'},
+ 'tools_version': {'key': 'properties.toolsVersion', 'type': 'str'},
+ 'firmware_type': {'key': 'properties.firmwareType', 'type': 'str'},
+ 'statuses': {'key': 'properties.statuses', 'type': '[ResourceStatus]'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ location: str,
+ extended_location: Optional["_models.ExtendedLocation"] = None,
+ tags: Optional[Dict[str, str]] = None,
+ kind: Optional[str] = None,
+ v_center_id: Optional[str] = None,
+ mo_ref_id: Optional[str] = None,
+ inventory_item_id: Optional[str] = None,
+ **kwargs
+ ):
+ """
+ :keyword location: Required. Gets or sets the location.
+ :paramtype location: str
+ :keyword extended_location: Gets or sets the extended location.
+ :paramtype extended_location: ~azure.mgmt.connectedvmware.models.ExtendedLocation
+ :keyword tags: A set of tags. Gets or sets the Resource tags.
+ :paramtype tags: dict[str, str]
+ :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for
+ resources of the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported,
+ the resource provider must validate and persist this value.
+ :paramtype kind: str
+ :keyword v_center_id: Gets or sets the ARM Id of the vCenter resource in which this template
+ resides.
+ :paramtype v_center_id: str
+ :keyword mo_ref_id: Gets or sets the vCenter MoRef (Managed Object Reference) ID for the
+ virtual machine
+ template.
+ :paramtype mo_ref_id: str
+ :keyword inventory_item_id: Gets or sets the inventory Item ID for the virtual machine
+ template.
+ :paramtype inventory_item_id: str
+ """
+ super(VirtualMachineTemplate, self).__init__(**kwargs)
+ self.location = location
+ self.extended_location = extended_location
+ self.system_data = None
+ self.tags = tags
+ self.name = None
+ self.id = None
+ self.type = None
+ self.kind = kind
+ self.uuid = None
+ self.v_center_id = v_center_id
+ self.mo_ref_id = mo_ref_id
+ self.inventory_item_id = inventory_item_id
+ self.mo_name = None
+ self.memory_size_mb = None
+ self.num_cp_us = None
+ self.num_cores_per_socket = None
+ self.os_type = None
+ self.os_name = None
+ self.folder_path = None
+ self.network_interfaces = None
+ self.disks = None
+ self.custom_resource_name = None
+ self.tools_version_status = None
+ self.tools_version = None
+ self.firmware_type = None
+ self.statuses = None
+ self.provisioning_state = None
+
+
+class VirtualMachineTemplateInventoryItem(InventoryItemProperties):
+ """The VM Template inventory item.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar inventory_type: Required. The inventory type. Constant filled by server. Known values
+ are: "ResourcePool", "VirtualMachine", "VirtualMachineTemplate", "VirtualNetwork", "Cluster",
+ "Datastore", "Host".
+ :vartype inventory_type: str or ~azure.mgmt.connectedvmware.models.InventoryType
+ :ivar managed_resource_id: Gets or sets the tracked resource id corresponding to the inventory
+ resource.
+ :vartype managed_resource_id: str + :ivar mo_ref_id: Gets or sets the MoRef (Managed Object Reference) ID for the inventory item. + :vartype mo_ref_id: str + :ivar mo_name: Gets or sets the vCenter Managed Object name for the inventory item. + :vartype mo_name: str + :ivar provisioning_state: Gets or sets the provisioning state. + :vartype provisioning_state: str + :ivar memory_size_mb: Gets or sets memory size in MBs for the template. + :vartype memory_size_mb: int + :ivar num_cp_us: Gets or sets the number of vCPUs for the template. + :vartype num_cp_us: int + :ivar num_cores_per_socket: Gets or sets the number of cores per socket for the template. + Defaults to 1 if unspecified. + :vartype num_cores_per_socket: int + :ivar os_type: Gets or sets the type of the os. Known values are: "Windows", "Linux", "Other". + :vartype os_type: str or ~azure.mgmt.connectedvmware.models.OsType + :ivar os_name: Gets or sets os name. + :vartype os_name: str + :ivar folder_path: Gets or sets the folder path of the template. + :vartype folder_path: str + """ + + _validation = { + 'inventory_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'inventory_type': {'key': 'inventoryType', 'type': 'str'}, + 'managed_resource_id': {'key': 'managedResourceId', 'type': 'str'}, + 'mo_ref_id': {'key': 'moRefId', 'type': 'str'}, + 'mo_name': {'key': 'moName', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'memory_size_mb': {'key': 'memorySizeMB', 'type': 'int'}, + 'num_cp_us': {'key': 'numCPUs', 'type': 'int'}, + 'num_cores_per_socket': {'key': 'numCoresPerSocket', 'type': 'int'}, + 'os_type': {'key': 'osType', 'type': 'str'}, + 'os_name': {'key': 'osName', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'str'}, + } + + def __init__( + self, + *, + managed_resource_id: Optional[str] = None, + mo_ref_id: Optional[str] = None, + mo_name: Optional[str] = None, + memory_size_mb: Optional[int] = None, + num_cp_us: Optional[int] = None, + num_cores_per_socket: Optional[int] = None, + os_type: Optional[Union[str, "_models.OsType"]] = None, + os_name: Optional[str] = None, + folder_path: Optional[str] = None, + **kwargs + ): + """ + :keyword managed_resource_id: Gets or sets the tracked resource id corresponding to the + inventory resource. + :paramtype managed_resource_id: str + :keyword mo_ref_id: Gets or sets the MoRef (Managed Object Reference) ID for the inventory + item. + :paramtype mo_ref_id: str + :keyword mo_name: Gets or sets the vCenter Managed Object name for the inventory item. + :paramtype mo_name: str + :keyword memory_size_mb: Gets or sets memory size in MBs for the template. + :paramtype memory_size_mb: int + :keyword num_cp_us: Gets or sets the number of vCPUs for the template. + :paramtype num_cp_us: int + :keyword num_cores_per_socket: Gets or sets the number of cores per socket for the template. + Defaults to 1 if unspecified. + :paramtype num_cores_per_socket: int + :keyword os_type: Gets or sets the type of the os. Known values are: "Windows", "Linux", + "Other". + :paramtype os_type: str or ~azure.mgmt.connectedvmware.models.OsType + :keyword os_name: Gets or sets os name. + :paramtype os_name: str + :keyword folder_path: Gets or sets the folder path of the template. 
+ :paramtype folder_path: str + """ + super(VirtualMachineTemplateInventoryItem, self).__init__(managed_resource_id=managed_resource_id, mo_ref_id=mo_ref_id, mo_name=mo_name, **kwargs) + self.inventory_type = 'VirtualMachineTemplate' # type: str + self.memory_size_mb = memory_size_mb + self.num_cp_us = num_cp_us + self.num_cores_per_socket = num_cores_per_socket + self.os_type = os_type + self.os_name = os_name + self.folder_path = folder_path + + +class VirtualMachineTemplatesList(msrest.serialization.Model): + """List of VirtualMachineTemplates. + + All required parameters must be populated in order to send to Azure. + + :ivar next_link: Url to follow for getting next page of VirtualMachineTemplates. + :vartype next_link: str + :ivar value: Required. Array of VirtualMachineTemplates. + :vartype value: list[~azure.mgmt.connectedvmware.models.VirtualMachineTemplate] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[VirtualMachineTemplate]'}, + } + + def __init__( + self, + *, + value: List["_models.VirtualMachineTemplate"], + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword next_link: Url to follow for getting next page of VirtualMachineTemplates. + :paramtype next_link: str + :keyword value: Required. Array of VirtualMachineTemplates. + :paramtype value: list[~azure.mgmt.connectedvmware.models.VirtualMachineTemplate] + """ + super(VirtualMachineTemplatesList, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class VirtualMachineUpdate(msrest.serialization.Model): + """Defines the virtualMachineUpdate. + + :ivar tags: A set of tags. Gets or sets the Resource tags. + :vartype tags: dict[str, str] + :ivar identity: The identity of the resource. + :vartype identity: ~azure.mgmt.connectedvmware.models.Identity + :ivar hardware_profile: Defines the resource properties. + :vartype hardware_profile: ~azure.mgmt.connectedvmware.models.HardwareProfile + :ivar os_profile: OS properties. + :vartype os_profile: ~azure.mgmt.connectedvmware.models.OsProfileUpdate + :ivar storage_profile: Defines the resource update properties. + :vartype storage_profile: ~azure.mgmt.connectedvmware.models.StorageProfileUpdate + :ivar network_profile: Defines the update resource properties. + :vartype network_profile: ~azure.mgmt.connectedvmware.models.NetworkProfileUpdate + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'hardware_profile': {'key': 'properties.hardwareProfile', 'type': 'HardwareProfile'}, + 'os_profile': {'key': 'properties.osProfile', 'type': 'OsProfileUpdate'}, + 'storage_profile': {'key': 'properties.storageProfile', 'type': 'StorageProfileUpdate'}, + 'network_profile': {'key': 'properties.networkProfile', 'type': 'NetworkProfileUpdate'}, + } + + def __init__( + self, + *, + tags: Optional[Dict[str, str]] = None, + identity: Optional["_models.Identity"] = None, + hardware_profile: Optional["_models.HardwareProfile"] = None, + os_profile: Optional["_models.OsProfileUpdate"] = None, + storage_profile: Optional["_models.StorageProfileUpdate"] = None, + network_profile: Optional["_models.NetworkProfileUpdate"] = None, + **kwargs + ): + """ + :keyword tags: A set of tags. Gets or sets the Resource tags. + :paramtype tags: dict[str, str] + :keyword identity: The identity of the resource. 
+ :paramtype identity: ~azure.mgmt.connectedvmware.models.Identity + :keyword hardware_profile: Defines the resource properties. + :paramtype hardware_profile: ~azure.mgmt.connectedvmware.models.HardwareProfile + :keyword os_profile: OS properties. + :paramtype os_profile: ~azure.mgmt.connectedvmware.models.OsProfileUpdate + :keyword storage_profile: Defines the resource update properties. + :paramtype storage_profile: ~azure.mgmt.connectedvmware.models.StorageProfileUpdate + :keyword network_profile: Defines the update resource properties. + :paramtype network_profile: ~azure.mgmt.connectedvmware.models.NetworkProfileUpdate + """ + super(VirtualMachineUpdate, self).__init__(**kwargs) + self.tags = tags + self.identity = identity + self.hardware_profile = hardware_profile + self.os_profile = os_profile + self.storage_profile = storage_profile + self.network_profile = network_profile + + +class VirtualNetwork(msrest.serialization.Model): + """Define the virtualNetwork. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar location: Required. Gets or sets the location. + :vartype location: str + :ivar extended_location: Gets or sets the extended location. + :vartype extended_location: ~azure.mgmt.connectedvmware.models.ExtendedLocation + :ivar system_data: The system data. + :vartype system_data: ~azure.mgmt.connectedvmware.models.SystemData + :ivar tags: A set of tags. Gets or sets the Resource tags. + :vartype tags: dict[str, str] + :ivar name: Gets or sets the name. + :vartype name: str + :ivar id: Gets or sets the Id. + :vartype id: str + :ivar type: Gets or sets the type of the resource. + :vartype type: str + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, + the resource provider must validate and persist this value. + :vartype kind: str + :ivar uuid: Gets or sets a unique identifier for this resource. + :vartype uuid: str + :ivar v_center_id: Gets or sets the ARM Id of the vCenter resource in which this template + resides. + :vartype v_center_id: str + :ivar mo_ref_id: Gets or sets the vCenter MoRef (Managed Object Reference) ID for the virtual + network. + :vartype mo_ref_id: str + :ivar inventory_item_id: Gets or sets the inventory Item ID for the virtual network. + :vartype inventory_item_id: str + :ivar mo_name: Gets or sets the vCenter Managed Object name for the virtual network. + :vartype mo_name: str + :ivar custom_resource_name: Gets the name of the corresponding resource in Kubernetes. + :vartype custom_resource_name: str + :ivar statuses: The resource status information. + :vartype statuses: list[~azure.mgmt.connectedvmware.models.ResourceStatus] + :ivar provisioning_state: Gets or sets the provisioning state. 
+ :vartype provisioning_state: str + """ + + _validation = { + 'location': {'required': True}, + 'system_data': {'readonly': True}, + 'name': {'readonly': True}, + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'uuid': {'readonly': True}, + 'mo_name': {'readonly': True}, + 'custom_resource_name': {'readonly': True}, + 'statuses': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'extended_location': {'key': 'extendedLocation', 'type': 'ExtendedLocation'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'uuid': {'key': 'properties.uuid', 'type': 'str'}, + 'v_center_id': {'key': 'properties.vCenterId', 'type': 'str'}, + 'mo_ref_id': {'key': 'properties.moRefId', 'type': 'str'}, + 'inventory_item_id': {'key': 'properties.inventoryItemId', 'type': 'str'}, + 'mo_name': {'key': 'properties.moName', 'type': 'str'}, + 'custom_resource_name': {'key': 'properties.customResourceName', 'type': 'str'}, + 'statuses': {'key': 'properties.statuses', 'type': '[ResourceStatus]'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + } + + def __init__( + self, + *, + location: str, + extended_location: Optional["_models.ExtendedLocation"] = None, + tags: Optional[Dict[str, str]] = None, + kind: Optional[str] = None, + v_center_id: Optional[str] = None, + mo_ref_id: Optional[str] = None, + inventory_item_id: Optional[str] = None, + **kwargs + ): + """ + :keyword location: Required. Gets or sets the location. + :paramtype location: str + :keyword extended_location: Gets or sets the extended location. + :paramtype extended_location: ~azure.mgmt.connectedvmware.models.ExtendedLocation + :keyword tags: A set of tags. Gets or sets the Resource tags. + :paramtype tags: dict[str, str] + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, + the resource provider must validate and persist this value. + :paramtype kind: str + :keyword v_center_id: Gets or sets the ARM Id of the vCenter resource in which this template + resides. + :paramtype v_center_id: str + :keyword mo_ref_id: Gets or sets the vCenter MoRef (Managed Object Reference) ID for the + virtual network. + :paramtype mo_ref_id: str + :keyword inventory_item_id: Gets or sets the inventory Item ID for the virtual network. + :paramtype inventory_item_id: str + """ + super(VirtualNetwork, self).__init__(**kwargs) + self.location = location + self.extended_location = extended_location + self.system_data = None + self.tags = tags + self.name = None + self.id = None + self.type = None + self.kind = kind + self.uuid = None + self.v_center_id = v_center_id + self.mo_ref_id = mo_ref_id + self.inventory_item_id = inventory_item_id + self.mo_name = None + self.custom_resource_name = None + self.statuses = None + self.provisioning_state = None + + +class VirtualNetworkInventoryItem(InventoryItemProperties): + """The Virtual network inventory item. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar inventory_type: Required. 
They inventory type.Constant filled by server. Known values + are: "ResourcePool", "VirtualMachine", "VirtualMachineTemplate", "VirtualNetwork", "Cluster", + "Datastore", "Host". + :vartype inventory_type: str or ~azure.mgmt.connectedvmware.models.InventoryType + :ivar managed_resource_id: Gets or sets the tracked resource id corresponding to the inventory + resource. + :vartype managed_resource_id: str + :ivar mo_ref_id: Gets or sets the MoRef (Managed Object Reference) ID for the inventory item. + :vartype mo_ref_id: str + :ivar mo_name: Gets or sets the vCenter Managed Object name for the inventory item. + :vartype mo_name: str + :ivar provisioning_state: Gets or sets the provisioning state. + :vartype provisioning_state: str + """ + + _validation = { + 'inventory_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'inventory_type': {'key': 'inventoryType', 'type': 'str'}, + 'managed_resource_id': {'key': 'managedResourceId', 'type': 'str'}, + 'mo_ref_id': {'key': 'moRefId', 'type': 'str'}, + 'mo_name': {'key': 'moName', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + } + + def __init__( + self, + *, + managed_resource_id: Optional[str] = None, + mo_ref_id: Optional[str] = None, + mo_name: Optional[str] = None, + **kwargs + ): + """ + :keyword managed_resource_id: Gets or sets the tracked resource id corresponding to the + inventory resource. + :paramtype managed_resource_id: str + :keyword mo_ref_id: Gets or sets the MoRef (Managed Object Reference) ID for the inventory + item. + :paramtype mo_ref_id: str + :keyword mo_name: Gets or sets the vCenter Managed Object name for the inventory item. + :paramtype mo_name: str + """ + super(VirtualNetworkInventoryItem, self).__init__(managed_resource_id=managed_resource_id, mo_ref_id=mo_ref_id, mo_name=mo_name, **kwargs) + self.inventory_type = 'VirtualNetwork' # type: str + + +class VirtualNetworksList(msrest.serialization.Model): + """List of VirtualNetworks. + + All required parameters must be populated in order to send to Azure. + + :ivar next_link: Url to follow for getting next page of VirtualNetworks. + :vartype next_link: str + :ivar value: Required. Array of VirtualNetworks. + :vartype value: list[~azure.mgmt.connectedvmware.models.VirtualNetwork] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[VirtualNetwork]'}, + } + + def __init__( + self, + *, + value: List["_models.VirtualNetwork"], + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword next_link: Url to follow for getting next page of VirtualNetworks. + :paramtype next_link: str + :keyword value: Required. Array of VirtualNetworks. + :paramtype value: list[~azure.mgmt.connectedvmware.models.VirtualNetwork] + """ + super(VirtualNetworksList, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class VirtualSCSIController(msrest.serialization.Model): + """This data object type contains the properties of a SCSI controller device attached to a virtual machine that is reported by the controller. + + :ivar type: Gets or sets the controller type. Known values are: "lsilogic", "buslogic", + "pvscsi", "lsilogicsas". + :vartype type: str or ~azure.mgmt.connectedvmware.models.SCSIControllerType + :ivar controller_key: Gets or sets the key of the controller. 
+ :vartype controller_key: int + :ivar bus_number: Gets or sets the bus number of the controller. + :vartype bus_number: int + :ivar scsi_ctlr_unit_number: Gets or sets the SCSI controller unit number. + :vartype scsi_ctlr_unit_number: int + :ivar sharing: Gets or sets the sharing mode. Known values are: "noSharing", "physicalSharing", + "virtualSharing". + :vartype sharing: str or ~azure.mgmt.connectedvmware.models.VirtualSCSISharing + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'controller_key': {'key': 'controllerKey', 'type': 'int'}, + 'bus_number': {'key': 'busNumber', 'type': 'int'}, + 'scsi_ctlr_unit_number': {'key': 'scsiCtlrUnitNumber', 'type': 'int'}, + 'sharing': {'key': 'sharing', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Optional[Union[str, "_models.SCSIControllerType"]] = None, + controller_key: Optional[int] = None, + bus_number: Optional[int] = None, + scsi_ctlr_unit_number: Optional[int] = None, + sharing: Optional[Union[str, "_models.VirtualSCSISharing"]] = None, + **kwargs + ): + """ + :keyword type: Gets or sets the controller type. Known values are: "lsilogic", "buslogic", + "pvscsi", "lsilogicsas". + :paramtype type: str or ~azure.mgmt.connectedvmware.models.SCSIControllerType + :keyword controller_key: Gets or sets the key of the controller. + :paramtype controller_key: int + :keyword bus_number: Gets or sets the bus number of the controller. + :paramtype bus_number: int + :keyword scsi_ctlr_unit_number: Gets or sets the SCSI controller unit number. + :paramtype scsi_ctlr_unit_number: int + :keyword sharing: Gets or sets the sharing mode. Known values are: "noSharing", + "physicalSharing", "virtualSharing". + :paramtype sharing: str or ~azure.mgmt.connectedvmware.models.VirtualSCSISharing + """ + super(VirtualSCSIController, self).__init__(**kwargs) + self.type = type + self.controller_key = controller_key + self.bus_number = bus_number + self.scsi_ctlr_unit_number = scsi_ctlr_unit_number + self.sharing = sharing + + +class WindowsParameters(msrest.serialization.Model): + """Input for InstallPatches on a Windows VM, as directly received by the API. + + :ivar classifications_to_include: The update classifications to select when installing patches + for Windows. + :vartype classifications_to_include: list[str or + ~azure.mgmt.connectedvmware.models.VMGuestPatchClassificationWindows] + :ivar kb_numbers_to_include: Kbs to include in the patch operation. + :vartype kb_numbers_to_include: list[str] + :ivar kb_numbers_to_exclude: Kbs to exclude in the patch operation. + :vartype kb_numbers_to_exclude: list[str] + :ivar exclude_kbs_requiring_reboot: Filters out Kbs that don't have an + InstallationRebootBehavior of 'NeverReboots' when this is set to true. + :vartype exclude_kbs_requiring_reboot: bool + :ivar max_patch_publish_date: This is used to install patches that were published on or before + this given max published date. 
+ :vartype max_patch_publish_date: ~datetime.datetime + """ + + _attribute_map = { + 'classifications_to_include': {'key': 'classificationsToInclude', 'type': '[str]'}, + 'kb_numbers_to_include': {'key': 'kbNumbersToInclude', 'type': '[str]'}, + 'kb_numbers_to_exclude': {'key': 'kbNumbersToExclude', 'type': '[str]'}, + 'exclude_kbs_requiring_reboot': {'key': 'excludeKbsRequiringReboot', 'type': 'bool'}, + 'max_patch_publish_date': {'key': 'maxPatchPublishDate', 'type': 'iso-8601'}, + } + + def __init__( + self, + *, + classifications_to_include: Optional[List[Union[str, "_models.VMGuestPatchClassificationWindows"]]] = None, + kb_numbers_to_include: Optional[List[str]] = None, + kb_numbers_to_exclude: Optional[List[str]] = None, + exclude_kbs_requiring_reboot: Optional[bool] = None, + max_patch_publish_date: Optional[datetime.datetime] = None, + **kwargs + ): + """ + :keyword classifications_to_include: The update classifications to select when installing + patches for Windows. + :paramtype classifications_to_include: list[str or + ~azure.mgmt.connectedvmware.models.VMGuestPatchClassificationWindows] + :keyword kb_numbers_to_include: Kbs to include in the patch operation. + :paramtype kb_numbers_to_include: list[str] + :keyword kb_numbers_to_exclude: Kbs to exclude in the patch operation. + :paramtype kb_numbers_to_exclude: list[str] + :keyword exclude_kbs_requiring_reboot: Filters out Kbs that don't have an + InstallationRebootBehavior of 'NeverReboots' when this is set to true. + :paramtype exclude_kbs_requiring_reboot: bool + :keyword max_patch_publish_date: This is used to install patches that were published on or + before this given max published date. + :paramtype max_patch_publish_date: ~datetime.datetime + """ + super(WindowsParameters, self).__init__(**kwargs) + self.classifications_to_include = classifications_to_include + self.kb_numbers_to_include = kb_numbers_to_include + self.kb_numbers_to_exclude = kb_numbers_to_exclude + self.exclude_kbs_requiring_reboot = exclude_kbs_requiring_reboot + self.max_patch_publish_date = max_patch_publish_date diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/models/_patch.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/models/_patch.py new file mode 100644 index 00000000000..0ad201a8c58 --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/models/_patch.py @@ -0,0 +1,19 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/operations/__init__.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/operations/__init__.py new file mode 100644 index 00000000000..295107adb1c --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/operations/__init__.py @@ -0,0 +1,20 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._operations import Operations +from ._virtual_machines_operations import VirtualMachinesOperations + +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk +__all__ = [ + 'Operations', + 'VirtualMachinesOperations', +] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() \ No newline at end of file diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/operations/_operations.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/operations/_operations.py new file mode 100644 index 00000000000..2f6f2cb3cb7 --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/operations/_operations.py @@ -0,0 +1,151 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._vendor import _convert_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-01-10-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/providers/Microsoft.ConnectedVMwarevSphere/operations") + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class Operations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.connectedvmware.AzureArcVMwareManagementServiceAPI`'s + :attr:`operations` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + **kwargs: Any + ) -> Iterable[_models.OperationsList]: + """Returns list of all operations. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OperationsList or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.connectedvmware.models.OperationsList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-01-10-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.OperationsList] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("OperationsList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/providers/Microsoft.ConnectedVMwarevSphere/operations"} # type: ignore diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/operations/_patch.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/operations/_patch.py new file mode 100644 index 00000000000..0ad201a8c58 --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/operations/_patch.py @@ -0,0 +1,19 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. 
+ +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/operations/_virtual_machines_operations.py b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/operations/_virtual_machines_operations.py new file mode 100644 index 00000000000..4a13f663fe6 --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/operations/_virtual_machines_operations.py @@ -0,0 +1,154 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_get_request( + subscription_id: str, + resource_group_name: str, + virtual_machine_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-01-10-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ConnectedVMwarevSphere/virtualMachines/{virtualMachineName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'), + "virtualMachineName": _SERIALIZER.url("virtual_machine_name", virtual_machine_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +class VirtualMachinesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.connectedvmware.AzureArcVMwareManagementServiceAPI`'s + :attr:`virtual_machines` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def get( + self, + resource_group_name: str, + virtual_machine_name: str, + **kwargs: Any + ) -> _models.VirtualMachine: + """Gets a virtual machine. + + Implements virtual machine GET method. + + :param resource_group_name: The Resource Group Name. + :type resource_group_name: str + :param virtual_machine_name: Name of the virtual machine resource. 
+ :type virtual_machine_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: VirtualMachine, or the result of cls(response) + :rtype: ~azure.mgmt.connectedvmware.models.VirtualMachine + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-01-10-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.VirtualMachine] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + virtual_machine_name=virtual_machine_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('VirtualMachine', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ConnectedVMwarevSphere/virtualMachines/{virtualMachineName}"} # type: ignore + + diff --git a/src/ssh/azext_ssh/vendored_sdks/connectedvmware/py.typed b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/py.typed new file mode 100644 index 00000000000..e5aff4f83af --- /dev/null +++ b/src/ssh/azext_ssh/vendored_sdks/connectedvmware/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. 
\ No newline at end of file diff --git a/src/ssh/azext_ssh/vendored_sdks/hybridconnectivity/operations/_endpoints_operations.py b/src/ssh/azext_ssh/vendored_sdks/hybridconnectivity/operations/_endpoints_operations.py index d591c41d544..e49a08fe5a9 100644 --- a/src/ssh/azext_ssh/vendored_sdks/hybridconnectivity/operations/_endpoints_operations.py +++ b/src/ssh/azext_ssh/vendored_sdks/hybridconnectivity/operations/_endpoints_operations.py @@ -49,6 +49,7 @@ def create_or_update( self, resource_group_name, # type: str machine_name, # type: str + resource_type, #type: str endpoint_name, # type: str endpoint_resource, # type: "models.EndpointResource" **kwargs # type: Any @@ -83,6 +84,7 @@ def create_or_update( 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), 'machineName': self._serialize.url("machine_name", machine_name, 'str'), + 'resourceType': self._serialize.url("resource_type", resource_type, 'str'), 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', skip_quote=True), } url = self._client.format_url(url, **path_format_arguments) @@ -114,12 +116,13 @@ def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.HybridCompute/machines/{machineName}/providers/Microsoft.HybridConnectivity/endpoints/{endpointName}'} # type: ignore + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceType}/{machineName}/providers/Microsoft.HybridConnectivity/endpoints/{endpointName}'} # type: ignore def list_credentials( self, resource_group_name, # type: str machine_name, # type: str + resource_type, # type: str endpoint_name, # type: str expiresin=10800, # type: Optional[int] **kwargs # type: Any @@ -153,6 +156,7 @@ def list_credentials( 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), 'machineName': self._serialize.url("machine_name", machine_name, 'str'), + 'resourceType': self._serialize.url("resource_type", resource_type, 'str', min_length=1), 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', skip_quote=True), } url = self._client.format_url(url, **path_format_arguments) @@ -182,4 +186,4 @@ def list_credentials( return cls(pipeline_response, deserialized, {}) return deserialized - list_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.HybridCompute/machines/{machineName}/providers/Microsoft.HybridConnectivity/endpoints/{endpointName}/listCredentials'} # type: ignore + list_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceType}/{machineName}/providers/Microsoft.HybridConnectivity/endpoints/{endpointName}/listCredentials'} # type: ignore From 32171dbb8b115363d9a9700cf77555549646f42b Mon Sep 17 00:00:00 2001 From: Alan Enrique Ontiveros Salazar <108426424+alanenriqueo@users.noreply.github.com> Date: Sun, 23 Oct 2022 18:59:27 -0700 Subject: [PATCH 20/85] {rdbms-connect} Update mycli to 1.26.1 and pgcli to 3.4.1 (#5476) 
--- src/rdbms-connect/HISTORY.rst | 4 + .../azext_rdbms_connect/custom.py | 1 + .../test_mysql_flexible_server_connect.yaml | 272 ++++++++---------- ...test_postgres_flexible_server_connect.yaml | 233 +++++++-------- .../latest/test_rdbms-connect_scenario.py | 2 +- src/rdbms-connect/setup.py | 7 +- 6 files changed, 240 insertions(+), 279 deletions(-) diff --git a/src/rdbms-connect/HISTORY.rst b/src/rdbms-connect/HISTORY.rst index 6bd2f82d18f..482af3d10d1 100644 --- a/src/rdbms-connect/HISTORY.rst +++ b/src/rdbms-connect/HISTORY.rst @@ -3,6 +3,10 @@ Release History =============== +1.0.4 +++++++ ++ Update mycli and pgcli versions + 1.0.3 ++++++ + Add support to read UTF-8 files with and without BOM diff --git a/src/rdbms-connect/azext_rdbms_connect/custom.py b/src/rdbms-connect/azext_rdbms_connect/custom.py index 60b68d47573..08f16e1d43d 100644 --- a/src/rdbms-connect/azext_rdbms_connect/custom.py +++ b/src/rdbms-connect/azext_rdbms_connect/custom.py @@ -183,6 +183,7 @@ def _connect_execute_query(server_type, host, server_name, database_name, login_ else: # set ssl param to allow for connection connection_kwargs['ssl'] = {"fake_flag_to_enable_tls": True} + connection_kwargs['autocommit'] = True connection = pymysql.connect(**connection_kwargs) logger.warning('Successfully connected to %s.', server_name) except Exception as e: diff --git a/src/rdbms-connect/azext_rdbms_connect/tests/latest/recordings/test_mysql_flexible_server_connect.yaml b/src/rdbms-connect/azext_rdbms_connect/tests/latest/recordings/test_mysql_flexible_server_connect.yaml index 4813bff19b1..bc02d0cabfd 100644 --- a/src/rdbms-connect/azext_rdbms_connect/tests/latest/recordings/test_mysql_flexible_server_connect.yaml +++ b/src/rdbms-connect/azext_rdbms_connect/tests/latest/recordings/test_mysql_flexible_server_connect.yaml @@ -13,7 +13,7 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: HEAD uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001?api-version=2021-04-01 response: @@ -25,7 +25,7 @@ interactions: content-length: - '0' date: - - Thu, 28 Jul 2022 00:49:37 GMT + - Fri, 21 Oct 2022 09:22:04 GMT expires: - '-1' pragma: @@ -51,21 +51,21 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001?api-version=2021-04-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001","name":"clitest.rg000001","type":"Microsoft.Resources/resourceGroups","location":"westus","tags":{"product":"azurecli","cause":"automation","date":"2022-07-28T00:49:36Z"},"properties":{"provisioningState":"Succeeded"}}' + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001","name":"clitest.rg000001","type":"Microsoft.Resources/resourceGroups","location":"northeurope","tags":{"product":"azurecli","cause":"automation","date":"2022-10-21T09:21:59Z"},"properties":{"provisioningState":"Succeeded"}}' headers: cache-control: - no-cache content-length: - - '310' + - '315' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:49:37 GMT + - Fri, 21 Oct 2022 09:22:04 GMT expires: - '-1' pragma: @@ -97,9 +97,9 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/westus/checkNameAvailability?api-version=2021-12-01-preview + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/northeurope/checkNameAvailability?api-version=2021-12-01-preview response: body: string: '{"nameAvailable":true,"message":""}' @@ -111,7 +111,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:49:37 GMT + - Fri, 21 Oct 2022 09:22:06 GMT expires: - '-1' pragma: @@ -145,21 +145,21 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/westus/capabilities?api-version=2021-12-01-preview + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/northeurope/capabilities?api-version=2021-12-01-preview response: body: - string: 
'{"value":[{"zone":"none","supportedHAMode":["SameZone"],"supportedGeoBackupRegions":["eastus"],"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4"
,"vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]}]}]}]}' + string: 
'{"value":[{"zone":"none","supportedHAMode":["SameZone","ZoneRedundant"],"supportedGeoBackupRegions":["westeurope"],"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name"
:"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]}]}]},{"zone":"1","supportedHAMode":["SameZone","ZoneRedundant"],"supportedGeoBackupRegions":["westeurope"],"supportedFlexibleServerEditions":[{
"name":"Burstable","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"MemoryOptimized","supportedStorageEditions":[{"nam
e":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]}]}]},{"zone":"2","supportedHAMode":["SameZone","ZoneRedundant"],"supportedGeoBackupRegions":["westeurope"],"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupR
etentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name
":"5.7","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]}]}]},{"zone":"3","supportedHAMode":["SameZone","ZoneRedundant"],"supportedGeoBackupRegions":["westeurope"],"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVC
oreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores
":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]}]}]}]}' headers: cache-control: - no-cache content-length: - - '6952' + - '27846' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:49:38 GMT + - Fri, 21 Oct 2022 09:22:07 GMT expires: - '-1' pragma: @@ -191,21 +191,21 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 
(Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/westus/capabilities?api-version=2021-12-01-preview + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/northeurope/capabilities?api-version=2021-12-01-preview response: body: - string: '{"value":[{"zone":"none","supportedHAMode":["SameZone"],"supportedGeoBackupRegions":["eastus"],"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":
4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerV
CoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]}]}]}]}' + string: '{"value":[{"zone":"none","supportedHAMode":["SameZone","ZoneRedundant"],"supportedGeoBackupRegions":["westeurope"],"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"support
edMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standa
rd_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]}]}]},{"zone":"1","supportedHAMode":["SameZone","ZoneRedundant"],"supportedGeoBackupRegions":["westeurope"],"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","v
Cores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supporte
dMemoryPerVCoreMB":7168}]}]}]},{"zone":"2","supportedHAMode":["SameZone","ZoneRedundant"],"supportedGeoBackupRegions":["westeurope"],"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCore
MB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]}]}]},{"zone":"3","supportedHAMode":["SameZone","ZoneRedundant"],"supportedGeoBackupRegions":["westeurope"],"supportedFlexibleS
erverEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"MemoryOptimized","supportedStorag
eEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]}]}]}]}' headers: cache-control: - no-cache content-length: - - '6952' + - '27846' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:49:38 GMT + - Fri, 21 Oct 2022 09:22:09 GMT expires: - '-1' pragma: @@ -224,9 +224,9 @@ interactions: code: 200 
message: OK - request: - body: '{"location": "westus", "sku": {"name": "Standard_B1ms", "tier": "Burstable"}, + body: '{"location": "northeurope", "sku": {"name": "Standard_B1ms", "tier": "Burstable"}, "properties": {"administratorLogin": "cliuser", "administratorLoginPassword": - "HCagL7HbpTKsTE52V_rwag", "version": "5.7", "createMode": "Create", "storage": + "2Z3fClKvaUgdj9pJ_Hkm3g", "version": "5.7", "createMode": "Create", "storage": {"storageSizeGB": 32, "iops": 396, "autoGrow": "Enabled"}, "backup": {"backupRetentionDays": 7, "geoRedundantBackup": "Disabled"}, "highAvailability": {"mode": "Disabled"}, "network": {}}}' @@ -240,33 +240,33 @@ interactions: Connection: - keep-alive Content-Length: - - '423' + - '428' Content-Type: - application/json ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforMySQL/flexibleServers/azuredbclitest-000002?api-version=2021-12-01-preview response: body: - string: '{"operation":"UpsertServerManagementOperationV2","startTime":"2022-07-28T00:49:40.253Z"}' + string: '{"operation":"UpsertServerManagementOperationV2","startTime":"2022-10-21T09:22:13.8Z"}' headers: azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/westus/azureAsyncOperation/d68558b1-7ee7-4abd-a44a-c0e73fd44ac4?api-version=2021-12-01-preview + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/northeurope/azureAsyncOperation/9fbf0757-1fb3-4c11-8445-bc00563f776b?api-version=2021-12-01-preview cache-control: - no-cache content-length: - - '88' + - '86' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:49:39 GMT + - Fri, 21 Oct 2022 09:22:14 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/westus/operationResults/d68558b1-7ee7-4abd-a44a-c0e73fd44ac4?api-version=2021-12-01-preview + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/northeurope/operationResults/9fbf0757-1fb3-4c11-8445-bc00563f776b?api-version=2021-12-01-preview pragma: - no-cache server: @@ -276,7 +276,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - - '1198' + - '1199' status: code: 202 message: Accepted @@ -294,67 +294,21 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/westus/azureAsyncOperation/d68558b1-7ee7-4abd-a44a-c0e73fd44ac4?api-version=2021-12-01-preview - response: - body: - string: '{"name":"d68558b1-7ee7-4abd-a44a-c0e73fd44ac4","status":"InProgress","startTime":"2022-07-28T00:49:40.253Z"}' - headers: - 
cache-control: - - no-cache - content-length: - - '108' - content-type: - - application/json; charset=utf-8 - date: - - Thu, 28 Jul 2022 00:50:40 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - mysql flexible-server create - Connection: - - keep-alive - ParameterSetName: - - -g -n -l --admin-user --storage-size --version --public-access - User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/westus/azureAsyncOperation/d68558b1-7ee7-4abd-a44a-c0e73fd44ac4?api-version=2021-12-01-preview + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/northeurope/azureAsyncOperation/9fbf0757-1fb3-4c11-8445-bc00563f776b?api-version=2021-12-01-preview response: body: - string: '{"name":"d68558b1-7ee7-4abd-a44a-c0e73fd44ac4","status":"InProgress","startTime":"2022-07-28T00:49:40.253Z"}' + string: '{"name":"9fbf0757-1fb3-4c11-8445-bc00563f776b","status":"InProgress","startTime":"2022-10-21T09:22:13.8Z"}' headers: cache-control: - no-cache content-length: - - '108' + - '106' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:51:40 GMT + - Fri, 21 Oct 2022 09:23:14 GMT expires: - '-1' pragma: @@ -386,21 +340,21 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/westus/azureAsyncOperation/d68558b1-7ee7-4abd-a44a-c0e73fd44ac4?api-version=2021-12-01-preview + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/northeurope/azureAsyncOperation/9fbf0757-1fb3-4c11-8445-bc00563f776b?api-version=2021-12-01-preview response: body: - string: '{"name":"d68558b1-7ee7-4abd-a44a-c0e73fd44ac4","status":"InProgress","startTime":"2022-07-28T00:49:40.253Z"}' + string: '{"name":"9fbf0757-1fb3-4c11-8445-bc00563f776b","status":"InProgress","startTime":"2022-10-21T09:22:13.8Z"}' headers: cache-control: - no-cache content-length: - - '108' + - '106' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:52:41 GMT + - Fri, 21 Oct 2022 09:24:15 GMT expires: - '-1' pragma: @@ -432,21 +386,21 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 
Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/westus/azureAsyncOperation/d68558b1-7ee7-4abd-a44a-c0e73fd44ac4?api-version=2021-12-01-preview + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/northeurope/azureAsyncOperation/9fbf0757-1fb3-4c11-8445-bc00563f776b?api-version=2021-12-01-preview response: body: - string: '{"name":"d68558b1-7ee7-4abd-a44a-c0e73fd44ac4","status":"Succeeded","startTime":"2022-07-28T00:49:40.253Z"}' + string: '{"name":"9fbf0757-1fb3-4c11-8445-bc00563f776b","status":"Succeeded","startTime":"2022-10-21T09:22:13.8Z"}' headers: cache-control: - no-cache content-length: - - '107' + - '105' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:53:41 GMT + - Fri, 21 Oct 2022 09:25:15 GMT expires: - '-1' pragma: @@ -478,22 +432,22 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforMySQL/flexibleServers/azuredbclitest-000002?api-version=2021-12-01-preview response: body: - string: '{"sku":{"name":"Standard_B1ms","tier":"Burstable"},"systemData":{"createdAt":"2022-07-28T00:49:41.5358712Z"},"properties":{"administratorLogin":"cliuser","storage":{"storageSizeGB":32,"iops":396,"autoGrow":"Enabled","storageSku":"Premium_LRS"},"version":"5.7","state":"Ready","fullyQualifiedDomainName":"azuredbclitest-000002.mysql.database.azure.com","availabilityZone":"","maintenanceWindow":{"customWindow":"Disabled","dayOfWeek":0,"startHour":0,"startMinute":0},"replicationRole":"None","replicaCapacity":10,"network":{"publicNetworkAccess":"Enabled"},"backup":{"backupRetentionDays":7,"geoRedundantBackup":"Disabled","earliestRestoreDate":"2022-07-28T00:52:30.0625719+00:00"},"highAvailability":{"mode":"Disabled","state":"NotEnabled"}},"location":"West - US","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforMySQL/flexibleServers/azuredbclitest-000002","name":"azuredbclitest-000002","type":"Microsoft.DBforMySQL/flexibleServers"}' + string: '{"sku":{"name":"Standard_B1ms","tier":"Burstable"},"systemData":{"createdAt":"2022-10-21T09:22:15.7843084Z"},"properties":{"administratorLogin":"cliuser","storage":{"storageSizeGB":32,"iops":396,"autoGrow":"Enabled","autoIoScaling":"Disabled","storageSku":"Premium_LRS"},"version":"5.7","state":"Ready","fullyQualifiedDomainName":"azuredbclitest-000002.mysql.database.azure.com","availabilityZone":"1","maintenanceWindow":{"customWindow":"Disabled","dayOfWeek":0,"startHour":0,"startMinute":0},"replicationRole":"None","replicaCapacity":10,"network":{"publicNetworkAccess":"Enabled"},"backup":{"backupRetentionDays":7,"geoRedundantBackup":"Disabled","earliestRestoreDate":"2022-10-21T09:32:15.7843084+00:00"},"highAvailability":{"mode":"Disabled","state":"NotEnabled","standbyAvailabilityZone":""}},"location":"North + 
Europe","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforMySQL/flexibleServers/azuredbclitest-000002","name":"azuredbclitest-000002","type":"Microsoft.DBforMySQL/flexibleServers"}' headers: cache-control: - no-cache content-length: - - '1000' + - '1062' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:53:41 GMT + - Fri, 21 Oct 2022 09:25:16 GMT expires: - '-1' pragma: @@ -529,15 +483,15 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforMySQL/flexibleServers/azuredbclitest-000002/databases/flexibleserverdb?api-version=2021-12-01-preview response: body: - string: '{"operation":"UpsertServerDatabaseManagementOperation","startTime":"2022-07-28T00:53:43.057Z"}' + string: '{"operation":"UpsertServerDatabaseManagementOperation","startTime":"2022-10-21T09:25:18.147Z"}' headers: azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/westus/azureAsyncOperation/196835d2-0039-41c6-a482-599b88cdbaf9?api-version=2021-12-01-preview + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/northeurope/azureAsyncOperation/04cec89c-14e7-4a60-944d-7b2c36115750?api-version=2021-12-01-preview cache-control: - no-cache content-length: @@ -545,11 +499,11 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:53:42 GMT + - Fri, 21 Oct 2022 09:25:18 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/westus/operationResults/196835d2-0039-41c6-a482-599b88cdbaf9?api-version=2021-12-01-preview + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/northeurope/operationResults/04cec89c-14e7-4a60-944d-7b2c36115750?api-version=2021-12-01-preview pragma: - no-cache server: @@ -577,12 +531,12 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/westus/azureAsyncOperation/196835d2-0039-41c6-a482-599b88cdbaf9?api-version=2021-12-01-preview + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/northeurope/azureAsyncOperation/04cec89c-14e7-4a60-944d-7b2c36115750?api-version=2021-12-01-preview response: body: - string: '{"name":"196835d2-0039-41c6-a482-599b88cdbaf9","status":"Succeeded","startTime":"2022-07-28T00:53:43.057Z"}' + string: 
'{"name":"04cec89c-14e7-4a60-944d-7b2c36115750","status":"Succeeded","startTime":"2022-10-21T09:25:18.147Z"}' headers: cache-control: - no-cache @@ -591,7 +545,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:53:57 GMT + - Fri, 21 Oct 2022 09:25:34 GMT expires: - '-1' pragma: @@ -623,7 +577,7 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforMySQL/flexibleServers/azuredbclitest-000002/databases/flexibleserverdb?api-version=2021-12-01-preview response: @@ -637,7 +591,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:53:58 GMT + - Fri, 21 Oct 2022 09:25:35 GMT expires: - '-1' pragma: @@ -669,22 +623,22 @@ interactions: ParameterSetName: - -g -n -p User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforMySQL/flexibleServers/azuredbclitest-000002?api-version=2021-12-01-preview response: body: - string: '{"sku":{"name":"Standard_B1ms","tier":"Burstable"},"systemData":{"createdAt":"2022-07-28T00:49:41.5358712Z"},"properties":{"administratorLogin":"cliuser","storage":{"storageSizeGB":32,"iops":396,"autoGrow":"Enabled","storageSku":"Premium_LRS"},"version":"5.7","state":"Ready","fullyQualifiedDomainName":"azuredbclitest-000002.mysql.database.azure.com","availabilityZone":"","maintenanceWindow":{"customWindow":"Disabled","dayOfWeek":0,"startHour":0,"startMinute":0},"replicationRole":"None","replicaCapacity":10,"network":{"publicNetworkAccess":"Enabled"},"backup":{"backupRetentionDays":7,"geoRedundantBackup":"Disabled","earliestRestoreDate":"2022-07-28T00:52:30.0625719+00:00"},"highAvailability":{"mode":"Disabled","state":"NotEnabled"}},"location":"West - US","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforMySQL/flexibleServers/azuredbclitest-000002","name":"azuredbclitest-000002","type":"Microsoft.DBforMySQL/flexibleServers"}' + string: 
'{"sku":{"name":"Standard_B1ms","tier":"Burstable"},"systemData":{"createdAt":"2022-10-21T09:22:15.7843084Z"},"properties":{"administratorLogin":"cliuser","storage":{"storageSizeGB":32,"iops":396,"autoGrow":"Enabled","autoIoScaling":"Disabled","storageSku":"Premium_LRS"},"version":"5.7","state":"Ready","fullyQualifiedDomainName":"azuredbclitest-000002.mysql.database.azure.com","availabilityZone":"1","maintenanceWindow":{"customWindow":"Disabled","dayOfWeek":0,"startHour":0,"startMinute":0},"replicationRole":"None","replicaCapacity":10,"network":{"publicNetworkAccess":"Enabled"},"backup":{"backupRetentionDays":7,"geoRedundantBackup":"Disabled","earliestRestoreDate":"2022-10-21T09:32:15.7843084+00:00"},"highAvailability":{"mode":"Disabled","state":"NotEnabled","standbyAvailabilityZone":""}},"location":"North + Europe","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforMySQL/flexibleServers/azuredbclitest-000002","name":"azuredbclitest-000002","type":"Microsoft.DBforMySQL/flexibleServers"}' headers: cache-control: - no-cache content-length: - - '1000' + - '1062' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:53:59 GMT + - Fri, 21 Oct 2022 09:25:35 GMT expires: - '-1' pragma: @@ -716,21 +670,21 @@ interactions: ParameterSetName: - -g -n -p User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/westus/capabilities?api-version=2021-12-01-preview + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/northeurope/capabilities?api-version=2021-12-01-preview response: body: - string: 
'{"value":[{"zone":"none","supportedHAMode":["SameZone"],"supportedGeoBackupRegions":["eastus"],"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4"
,"vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]}]}]}]}' + string: 
'{"value":[{"zone":"none","supportedHAMode":["SameZone","ZoneRedundant"],"supportedGeoBackupRegions":["westeurope"],"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name"
:"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]}]}]},{"zone":"1","supportedHAMode":["SameZone","ZoneRedundant"],"supportedGeoBackupRegions":["westeurope"],"supportedFlexibleServerEditions":[{
"name":"Burstable","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"MemoryOptimized","supportedStorageEditions":[{"nam
e":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]}]}]},{"zone":"2","supportedHAMode":["SameZone","ZoneRedundant"],"supportedGeoBackupRegions":["westeurope"],"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupR
etentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name
":"5.7","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]}]}]},{"zone":"3","supportedHAMode":["SameZone","ZoneRedundant"],"supportedGeoBackupRegions":["westeurope"],"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVC
oreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores
":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]}]}]}]}' headers: cache-control: - no-cache content-length: - - '6952' + - '27846' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:53:59 GMT + - Fri, 21 Oct 2022 09:25:37 GMT expires: - '-1' pragma: @@ -762,21 +716,21 @@ interactions: ParameterSetName: - -g -n -p User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/westus/capabilities?api-version=2021-12-01-preview + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/northeurope/capabilities?api-version=2021-12-01-preview response: body: - string: '{"value":[{"zone":"none","supportedHAMode":["SameZone"],"supportedGeoBackupRegions":["eastus"],"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4",
"vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"support
edMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]}]}]}]}' + string: '{"value":[{"zone":"none","supportedHAMode":["SameZone","ZoneRedundant"],"supportedGeoBackupRegions":["westeurope"],"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20
000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"n
ame":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]}]}]},{"zone":"1","supportedHAMode":["SameZone","ZoneRedundant"],"supportedGeoBackupRegions":["westeurope"],"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_
D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]}]}]},{"zone":"2","supportedHAMode":["SameZone","ZoneRedund
ant"],"supportedGeoBackupRegions":["westeurope"],"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMe
moryPerVCoreMB":4096}]}]},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]}]}]},{"zone":"3","supportedHAMode":["SameZone","ZoneRedundant"],"supportedGeoBackupRegions":["westeurope"],"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"Premium","m
inStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_B1s","vCores":1,"supportedIops":400,"supportedMemoryPerVCoreMB":1024},{"name":"Standard_B1ms","vCores":1,"supportedIops":640,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2s","vCores":2,"supportedIops":1280,"supportedMemoryPerVCoreMB":2048},{"name":"Standard_B2ms","vCores":2,"supportedIops":1700,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B4ms","vCores":4,"supportedIops":2400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B8ms","vCores":8,"supportedIops":3100,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B12ms","vCores":12,"supportedIops":3800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B16ms","vCores":16,"supportedIops":4300,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_B20ms","vCores":20,"supportedIops":5000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minBackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_D2ds_v4","vCores":2,"supportedIops":3200,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D4ds_v4","vCores":4,"supportedIops":6400,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D8ds_v4","vCores":8,"supportedIops":12800,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D16ds_v4","vCores":16,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D32ds_v4","vCores":32,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D48ds_v4","vCores":48,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096},{"name":"Standard_D64ds_v4","vCores":64,"supportedIops":20000,"supportedMemoryPerVCoreMB":4096}]}]},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"Premium","minStorageSize":20480,"maxStorageSize":16777216,"minB
ackupRetentionDays":7,"maxBackupRetentionDays":35}],"supportedServerVersions":[{"name":"5.7","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]},{"name":"8.0.21","supportedSkus":[{"name":"Standard_E2ds_v4","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v4","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v4","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v4","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v4","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v4","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v4","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E80ids_v4","vCores":80,"supportedIops":48000,"supportedMemoryPerVCoreMB":6451},{"name":"Standard_E2ds_v5","vCores":2,"supportedIops":5000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E4ds_v5","vCores":4,"supportedIops":10000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E8ds_v5","vCores":8,"supportedIops":18000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E16ds_v5","vCores":16,"supportedIops":28000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E32ds_v5","vCores":32,"supportedIops":38000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E48ds_v5","vCores":48,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E64ds_v5","vCores":64,"supportedIops":48000,"supportedMemoryPerVCoreMB":8192},{"name":"Standard_E96ds_v5","vCores":96,"supportedIops":48000,"supportedMemoryPerVCoreMB":7168}]}]}]}]}' headers: cache-control: - no-cache content-length: - - '6952' + - '27846' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:54:00 GMT + - Fri, 21 Oct 2022 09:25:37 GMT expires: - '-1' pragma: @@ -798,7 +752,7 @@ interactions: body: '{"sku": {"name": "Standard_B1ms", "tier": "Burstable"}, "properties": 
{"administratorLoginPassword": "cliPwd000003", "storage": {"storageSizeGB": 32, "iops": 396, "autoGrow": "Enabled"}, "backup": {"backupRetentionDays": 7, "geoRedundantBackup": "Disabled"}, "highAvailability": - {"mode": "Disabled"}}}' + {"mode": "Disabled", "standbyAvailabilityZone": ""}}}' headers: Accept: - application/json @@ -809,33 +763,33 @@ interactions: Connection: - keep-alive Content-Length: - - '301' + - '332' Content-Type: - application/json ParameterSetName: - -g -n -p User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: PATCH uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforMySQL/flexibleServers/azuredbclitest-000002?api-version=2021-12-01-preview response: body: - string: '{"operation":"UpsertServerManagementOperationV2","startTime":"2022-07-28T00:54:01.15Z"}' + string: '{"operation":"UpsertServerManagementOperationV2","startTime":"2022-10-21T09:25:39.943Z"}' headers: azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/westus/azureAsyncOperation/89c12204-ec97-487f-8f49-0a33391c6a5d?api-version=2021-12-01-preview + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/northeurope/azureAsyncOperation/362ba45f-af25-444b-bb19-557d7e042c66?api-version=2021-12-01-preview cache-control: - no-cache content-length: - - '87' + - '88' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:54:00 GMT + - Fri, 21 Oct 2022 09:25:39 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/westus/operationResults/89c12204-ec97-487f-8f49-0a33391c6a5d?api-version=2021-12-01-preview + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/northeurope/operationResults/362ba45f-af25-444b-bb19-557d7e042c66?api-version=2021-12-01-preview pragma: - no-cache server: @@ -863,21 +817,21 @@ interactions: ParameterSetName: - -g -n -p User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/westus/azureAsyncOperation/89c12204-ec97-487f-8f49-0a33391c6a5d?api-version=2021-12-01-preview + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/northeurope/azureAsyncOperation/362ba45f-af25-444b-bb19-557d7e042c66?api-version=2021-12-01-preview response: body: - string: '{"name":"89c12204-ec97-487f-8f49-0a33391c6a5d","status":"Succeeded","startTime":"2022-07-28T00:54:01.15Z"}' + string: '{"name":"362ba45f-af25-444b-bb19-557d7e042c66","status":"Succeeded","startTime":"2022-10-21T09:25:39.943Z"}' headers: cache-control: - no-cache content-length: - - '106' + - '107' content-type: - application/json; charset=utf-8 date: - - Thu, 
28 Jul 2022 00:55:01 GMT + - Fri, 21 Oct 2022 09:26:40 GMT expires: - '-1' pragma: @@ -909,22 +863,22 @@ interactions: ParameterSetName: - -g -n -p User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforMySQL/flexibleServers/azuredbclitest-000002?api-version=2021-12-01-preview response: body: - string: '{"sku":{"name":"Standard_B1ms","tier":"Burstable"},"systemData":{"createdAt":"2022-07-28T00:49:41.5358712Z"},"properties":{"administratorLogin":"cliuser","storage":{"storageSizeGB":32,"iops":396,"autoGrow":"Enabled","storageSku":"Premium_LRS"},"version":"5.7","state":"Ready","fullyQualifiedDomainName":"azuredbclitest-000002.mysql.database.azure.com","availabilityZone":"","maintenanceWindow":{"customWindow":"Disabled","dayOfWeek":0,"startHour":0,"startMinute":0},"replicationRole":"None","replicaCapacity":10,"network":{"publicNetworkAccess":"Enabled"},"backup":{"backupRetentionDays":7,"geoRedundantBackup":"Disabled","earliestRestoreDate":"2022-07-28T00:52:30.0625719+00:00"},"highAvailability":{"mode":"Disabled","state":"NotEnabled"}},"location":"West - US","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforMySQL/flexibleServers/azuredbclitest-000002","name":"azuredbclitest-000002","type":"Microsoft.DBforMySQL/flexibleServers"}' + string: '{"sku":{"name":"Standard_B1ms","tier":"Burstable"},"systemData":{"createdAt":"2022-10-21T09:22:15.7843084Z"},"properties":{"administratorLogin":"cliuser","storage":{"storageSizeGB":32,"iops":396,"autoGrow":"Enabled","autoIoScaling":"Disabled","storageSku":"Premium_LRS"},"version":"5.7","state":"Ready","fullyQualifiedDomainName":"azuredbclitest-000002.mysql.database.azure.com","availabilityZone":"1","maintenanceWindow":{"customWindow":"Disabled","dayOfWeek":0,"startHour":0,"startMinute":0},"replicationRole":"None","replicaCapacity":10,"network":{"publicNetworkAccess":"Enabled"},"backup":{"backupRetentionDays":7,"geoRedundantBackup":"Disabled","earliestRestoreDate":"2022-10-21T09:32:15.7843084+00:00"},"highAvailability":{"mode":"Disabled","state":"NotEnabled","standbyAvailabilityZone":""}},"location":"North + Europe","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforMySQL/flexibleServers/azuredbclitest-000002","name":"azuredbclitest-000002","type":"Microsoft.DBforMySQL/flexibleServers"}' headers: cache-control: - no-cache content-length: - - '1000' + - '1062' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:55:01 GMT + - Fri, 21 Oct 2022 09:26:40 GMT expires: - '-1' pragma: @@ -956,22 +910,22 @@ interactions: ParameterSetName: - -g -n --rule-name --start-ip-address --end-ip-address User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforMySQL/flexibleServers/azuredbclitest-000002?api-version=2021-12-01-preview response: body: - string: '{"sku":{"name":"Standard_B1ms","tier":"Burstable"},"systemData":{"createdAt":"2022-07-28T00:49:41.5358712Z"},"properties":{"administratorLogin":"cliuser","storage":{"storageSizeGB":32,"iops":396,"autoGrow":"Enabled","storageSku":"Premium_LRS"},"version":"5.7","state":"Ready","fullyQualifiedDomainName":"azuredbclitest-000002.mysql.database.azure.com","availabilityZone":"","maintenanceWindow":{"customWindow":"Disabled","dayOfWeek":0,"startHour":0,"startMinute":0},"replicationRole":"None","replicaCapacity":10,"network":{"publicNetworkAccess":"Enabled"},"backup":{"backupRetentionDays":7,"geoRedundantBackup":"Disabled","earliestRestoreDate":"2022-07-28T00:52:30.0625719+00:00"},"highAvailability":{"mode":"Disabled","state":"NotEnabled"}},"location":"West - US","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforMySQL/flexibleServers/azuredbclitest-000002","name":"azuredbclitest-000002","type":"Microsoft.DBforMySQL/flexibleServers"}' + string: '{"sku":{"name":"Standard_B1ms","tier":"Burstable"},"systemData":{"createdAt":"2022-10-21T09:22:15.7843084Z"},"properties":{"administratorLogin":"cliuser","storage":{"storageSizeGB":32,"iops":396,"autoGrow":"Enabled","autoIoScaling":"Disabled","storageSku":"Premium_LRS"},"version":"5.7","state":"Ready","fullyQualifiedDomainName":"azuredbclitest-000002.mysql.database.azure.com","availabilityZone":"1","maintenanceWindow":{"customWindow":"Disabled","dayOfWeek":0,"startHour":0,"startMinute":0},"replicationRole":"None","replicaCapacity":10,"network":{"publicNetworkAccess":"Enabled"},"backup":{"backupRetentionDays":7,"geoRedundantBackup":"Disabled","earliestRestoreDate":"2022-10-21T09:32:15.7843084+00:00"},"highAvailability":{"mode":"Disabled","state":"NotEnabled","standbyAvailabilityZone":""}},"location":"North + Europe","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforMySQL/flexibleServers/azuredbclitest-000002","name":"azuredbclitest-000002","type":"Microsoft.DBforMySQL/flexibleServers"}' headers: cache-control: - no-cache content-length: - - '1000' + - '1062' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:55:11 GMT + - Fri, 21 Oct 2022 09:26:52 GMT expires: - '-1' pragma: @@ -1007,27 +961,27 @@ interactions: ParameterSetName: - -g -n --rule-name --start-ip-address --end-ip-address User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforMySQL/flexibleServers/azuredbclitest-000002/firewallRules/allIps?api-version=2021-12-01-preview response: body: - string: '{"operation":"UpsertServerFirewallRulesManagementOperation","startTime":"2022-07-28T00:55:12.76Z"}' + string: '{"operation":"UpsertServerFirewallRulesManagementOperation","startTime":"2022-10-21T09:26:55.143Z"}' headers: azure-asyncoperation: - - 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/westus/azureAsyncOperation/351f2d24-83af-4cdd-8ff3-4b91177fb698?api-version=2021-12-01-preview + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/northeurope/azureAsyncOperation/8f95c76f-77eb-4ebe-a86a-e4bbcdbd2a04?api-version=2021-12-01-preview cache-control: - no-cache content-length: - - '98' + - '99' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:55:12 GMT + - Fri, 21 Oct 2022 09:26:54 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/westus/operationResults/351f2d24-83af-4cdd-8ff3-4b91177fb698?api-version=2021-12-01-preview + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/northeurope/operationResults/8f95c76f-77eb-4ebe-a86a-e4bbcdbd2a04?api-version=2021-12-01-preview pragma: - no-cache server: @@ -1055,21 +1009,21 @@ interactions: ParameterSetName: - -g -n --rule-name --start-ip-address --end-ip-address User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/westus/azureAsyncOperation/351f2d24-83af-4cdd-8ff3-4b91177fb698?api-version=2021-12-01-preview + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforMySQL/locations/northeurope/azureAsyncOperation/8f95c76f-77eb-4ebe-a86a-e4bbcdbd2a04?api-version=2021-12-01-preview response: body: - string: '{"name":"351f2d24-83af-4cdd-8ff3-4b91177fb698","status":"Succeeded","startTime":"2022-07-28T00:55:12.76Z"}' + string: '{"name":"8f95c76f-77eb-4ebe-a86a-e4bbcdbd2a04","status":"Succeeded","startTime":"2022-10-21T09:26:55.143Z"}' headers: cache-control: - no-cache content-length: - - '106' + - '107' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:56:12 GMT + - Fri, 21 Oct 2022 09:27:55 GMT expires: - '-1' pragma: @@ -1101,21 +1055,21 @@ interactions: ParameterSetName: - -g -n --rule-name --start-ip-address --end-ip-address User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforMySQL/flexibleServers/azuredbclitest-000002/firewallRules/allIps?api-version=2021-12-01-preview response: body: - string: '{"properties":{"startIpAddress":"0.0.0.0","endIpAddress":"255.255.255.255"},"name":"allIps","type":"Microsoft.DBforMySQL/flexibleServers/firewallRules"}' + string: 
'{"properties":{"startIpAddress":"0.0.0.0","endIpAddress":"255.255.255.255"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforMySQL/flexibleServers/azuredbclitest-000002/firewallRules/allIps","name":"allIps","type":"Microsoft.DBforMySQL/flexibleServers/firewallRules"}' headers: cache-control: - no-cache content-length: - - '152' + - '333' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:56:12 GMT + - Fri, 21 Oct 2022 09:27:56 GMT expires: - '-1' pragma: diff --git a/src/rdbms-connect/azext_rdbms_connect/tests/latest/recordings/test_postgres_flexible_server_connect.yaml b/src/rdbms-connect/azext_rdbms_connect/tests/latest/recordings/test_postgres_flexible_server_connect.yaml index 5dda290e3f3..d1894c950d7 100644 --- a/src/rdbms-connect/azext_rdbms_connect/tests/latest/recordings/test_postgres_flexible_server_connect.yaml +++ b/src/rdbms-connect/azext_rdbms_connect/tests/latest/recordings/test_postgres_flexible_server_connect.yaml @@ -13,7 +13,7 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: HEAD uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001?api-version=2021-04-01 response: @@ -25,7 +25,7 @@ interactions: content-length: - '0' date: - - Thu, 28 Jul 2022 00:49:38 GMT + - Fri, 21 Oct 2022 09:21:54 GMT expires: - '-1' pragma: @@ -51,12 +51,12 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001?api-version=2021-04-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001","name":"clitest.rg000001","type":"Microsoft.Resources/resourceGroups","location":"eastus","tags":{"product":"azurecli","cause":"automation","date":"2022-07-28T00:49:36Z"},"properties":{"provisioningState":"Succeeded"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001","name":"clitest.rg000001","type":"Microsoft.Resources/resourceGroups","location":"eastus","tags":{"product":"azurecli","cause":"automation","date":"2022-10-21T09:21:52Z"},"properties":{"provisioningState":"Succeeded"}}' headers: cache-control: - no-cache @@ -65,7 +65,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:49:38 GMT + - Fri, 21 Oct 2022 09:21:55 GMT expires: - '-1' pragma: @@ -97,9 +97,9 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 
(Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/checkNameAvailability?api-version=2021-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/checkNameAvailability?api-version=2022-01-20-preview response: body: string: '{"name":"azuredbclitest-000002","type":"Microsoft.DBforPostgreSQL/flexibleServers","nameAvailable":true,"message":""}' @@ -111,7 +111,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:49:39 GMT + - Fri, 21 Oct 2022 09:21:56 GMT expires: - '-1' pragma: @@ -145,21 +145,21 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/capabilities?api-version=2021-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/capabilities?api-version=2022-01-20-preview response: body: - string: '{"value":[{"zone":"none","geoBackupSupported":true,"zoneRedundantHaSupported":true,"zoneRedundantHaAndGeoBackupSupported":true,"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status
":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Av
ailable"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":
4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"support
edMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedI
OPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"}],"status":"Available"}],"supportedHyperscaleNodeEditions":[{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":25600,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":51200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":80000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"1
1.0","supportedVcores":[{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":25600,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":51200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":80000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"11.2","supportedVcores":[{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":25600,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":51200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":80000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"11.2.8","supportedVcores":[{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":25600,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":51200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":80000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"}],"supportedNodeTypes":[{"name":"Coordinator","nodeType":"Coordinator","status":"Default"}],"status":"Available"},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":25600,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":51200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":80000,"supportedMemoryPerVcoreMB":8192,"status":"Available"}],"status":"Available"},{"name":"11.0","supportedVcores":[{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"n
ame":"Standard_E16s_v3","vCores":16,"supportedIOPS":25600,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":51200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":80000,"supportedMemoryPerVcoreMB":8192,"status":"Available"}],"status":"Available"},{"name":"11.2","supportedVcores":[{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":25600,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":51200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":80000,"supportedMemoryPerVcoreMB":8192,"status":"Available"}],"status":"Available"},{"name":"11.2.8","supportedVcores":[{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":25600,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":51200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":80000,"supportedMemoryPerVcoreMB":8192,"status":"Available"}],"status":"Available"}],"supportedNodeTypes":[{"name":"Worker","nodeType":"Worker","status":"Default"}],"status":"Available"}],"supportedHAMode":["SameZone","ZoneRedundant"],"status":"Default"},{"zone":"1","geoBackupSupported":true,"zoneRedundantHaSupported":true,"zoneRedundantHaAndGeoBackupSupported":true,"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status
":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcor
eMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":18000,"support
edMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIO
PS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCor
es":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"}],"status":"Available"}],"supportedHyperscaleNodeEditions":[],"supportedHAMode":["SameZone","ZoneRedundant"],"status":"Available"},{"zone":"2","geoBackupSupported":true,"zoneRedundantHaSupported":true,"zoneRedundantHaAndGeoBackupSupported":true,"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status
":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","v
Cores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"St
andard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":2
0000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":3
2,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"}],"status":"Available"}],"supportedHyperscaleNodeEditions":[],"supportedHAMode":["SameZone","ZoneRedundant"],"status":"Available"},{"zone":"3","geoBackupSupported":true,"zoneRedundantHaSupported":true,"zoneRedundantHaAndGeoBackupSupported":true,"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":655
36,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores
":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard
_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Availa
ble"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,
"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemo
ryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"}],"status":"Available"}],"supportedHyperscaleNodeEditions":[],"supportedHAMode":["SameZone","ZoneRedundant"],"status":"Available"}]}' + string: '{"value":[{"zone":"none","geoBackupSupported":true,"zoneRedundantHaSupported":true,"zoneRedundantHaAndGeoBackupSupported":true,"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS
":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"13","supported
Vcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"
storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemory
PerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":32
00,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"}],"status":"Available"}],"supportedHAMode":["SameZone","ZoneRedundant"],"status":"Default"},{"zone":"1","geoBackupSupported":true,"zoneRedundantHaSupported":true,"zoneRedundantHaAndGeoBackupSupported":true,"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65
536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"st
atus":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPer
VcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"nam
e":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status"
:"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"}],"status":"Available"}],"supportedHAMode":["SameZone","ZoneRedundant"],"status":"Available"},{"zone":"2","geoBackupSupported":true,"zoneRedundantHaSupported":true,"zoneRedundantHaAndGeoBackupSupported":true,"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Sta
ndard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_
v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name
":"Standard_D48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":
"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcor
eMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"}],"status":"Available"}],"supportedHAMode":["SameZone","ZoneRedundant"],"status":"Available"},{"zone":"3","geoBackupSupported":true,"zoneRedundantHaSupported":true,"zoneRedundantHaAndGeoBackupSupported":true,"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_B1ms
","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"
name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"stat
us":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vC
ores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"St
andard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"}],"status":"Available"}],"supportedHAMode":["SameZone","ZoneRedundant"],"status":"Available"}]}' headers: cache-control: - no-cache content-length: - - '80537' + - '73712' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:49:41 GMT + - Fri, 21 Oct 2022 09:21:56 GMT expires: - '-1' pragma: @@ -180,9 +180,10 @@ interactions: - request: body: '{"location": "eastus", "sku": {"name": "Standard_D2s_v3", "tier": "GeneralPurpose"}, "properties": {"administratorLogin": "cliuser", "administratorLoginPassword": - "u_ih3h7diO6J8bdlTU24nQ", "version": "12", "storage": {"storageSizeGB": 32}, - "backup": {"backupRetentionDays": 7}, "network": {}, "highAvailability": {"mode": - "Disabled"}, 
"createMode": "Create"}}' + "jkVMOvf8pv8tWuzHMXkxew", "version": "12", "storage": {"storageSizeGB": 32}, + "backup": {"backupRetentionDays": 7, "geoRedundantBackup": "Disabled"}, "network": + {"delegatedSubnetResourceId": "", "privateDnsZoneArmResourceId": ""}, "highAvailability": + {"mode": "Disabled"}, "createMode": "Create"}}' headers: Accept: - application/json @@ -193,21 +194,21 @@ interactions: Connection: - keep-alive Content-Length: - - '359' + - '459' Content-Type: - application/json ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002?api-version=2021-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002?api-version=2022-01-20-preview response: body: - string: '{"operation":"UpsertServerManagementOperationV2","startTime":"2022-07-28T00:49:44.633Z"}' + string: '{"operation":"UpsertServerManagementOperationV2","startTime":"2022-10-21T09:22:00.253Z"}' headers: azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/bcf88328-961f-44ae-b16f-612e0382b7b1?api-version=2021-06-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/df440483-87a8-4a3a-88aa-3322219202a6?api-version=2022-01-20-preview cache-control: - no-cache content-length: @@ -215,11 +216,11 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:49:44 GMT + - Fri, 21 Oct 2022 09:21:59 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/operationResults/bcf88328-961f-44ae-b16f-612e0382b7b1?api-version=2021-06-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/operationResults/df440483-87a8-4a3a-88aa-3322219202a6?api-version=2022-01-20-preview pragma: - no-cache server: @@ -247,12 +248,12 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/bcf88328-961f-44ae-b16f-612e0382b7b1?api-version=2021-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/df440483-87a8-4a3a-88aa-3322219202a6?api-version=2022-01-20-preview response: 
body: - string: '{"name":"bcf88328-961f-44ae-b16f-612e0382b7b1","status":"InProgress","startTime":"2022-07-28T00:49:44.633Z"}' + string: '{"name":"df440483-87a8-4a3a-88aa-3322219202a6","status":"InProgress","startTime":"2022-10-21T09:22:00.253Z"}' headers: cache-control: - no-cache @@ -261,7 +262,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:50:45 GMT + - Fri, 21 Oct 2022 09:23:00 GMT expires: - '-1' pragma: @@ -293,12 +294,12 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/bcf88328-961f-44ae-b16f-612e0382b7b1?api-version=2021-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/df440483-87a8-4a3a-88aa-3322219202a6?api-version=2022-01-20-preview response: body: - string: '{"name":"bcf88328-961f-44ae-b16f-612e0382b7b1","status":"InProgress","startTime":"2022-07-28T00:49:44.633Z"}' + string: '{"name":"df440483-87a8-4a3a-88aa-3322219202a6","status":"InProgress","startTime":"2022-10-21T09:22:00.253Z"}' headers: cache-control: - no-cache @@ -307,7 +308,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:51:44 GMT + - Fri, 21 Oct 2022 09:24:00 GMT expires: - '-1' pragma: @@ -339,12 +340,12 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/bcf88328-961f-44ae-b16f-612e0382b7b1?api-version=2021-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/df440483-87a8-4a3a-88aa-3322219202a6?api-version=2022-01-20-preview response: body: - string: '{"name":"bcf88328-961f-44ae-b16f-612e0382b7b1","status":"InProgress","startTime":"2022-07-28T00:49:44.633Z"}' + string: '{"name":"df440483-87a8-4a3a-88aa-3322219202a6","status":"InProgress","startTime":"2022-10-21T09:22:00.253Z"}' headers: cache-control: - no-cache @@ -353,7 +354,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:52:45 GMT + - Fri, 21 Oct 2022 09:25:01 GMT expires: - '-1' pragma: @@ -385,12 +386,12 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/bcf88328-961f-44ae-b16f-612e0382b7b1?api-version=2021-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/df440483-87a8-4a3a-88aa-3322219202a6?api-version=2022-01-20-preview response: body: - string: '{"name":"bcf88328-961f-44ae-b16f-612e0382b7b1","status":"InProgress","startTime":"2022-07-28T00:49:44.633Z"}' + string: '{"name":"df440483-87a8-4a3a-88aa-3322219202a6","status":"InProgress","startTime":"2022-10-21T09:22:00.253Z"}' headers: cache-control: - no-cache @@ -399,7 +400,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:53:46 GMT + - Fri, 21 Oct 2022 09:26:01 GMT expires: - '-1' pragma: @@ -431,12 +432,12 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/bcf88328-961f-44ae-b16f-612e0382b7b1?api-version=2021-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/df440483-87a8-4a3a-88aa-3322219202a6?api-version=2022-01-20-preview response: body: - string: '{"name":"bcf88328-961f-44ae-b16f-612e0382b7b1","status":"InProgress","startTime":"2022-07-28T00:49:44.633Z"}' + string: '{"name":"df440483-87a8-4a3a-88aa-3322219202a6","status":"InProgress","startTime":"2022-10-21T09:22:00.253Z"}' headers: cache-control: - no-cache @@ -445,7 +446,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:54:46 GMT + - Fri, 21 Oct 2022 09:27:01 GMT expires: - '-1' pragma: @@ -477,12 +478,12 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/bcf88328-961f-44ae-b16f-612e0382b7b1?api-version=2021-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/df440483-87a8-4a3a-88aa-3322219202a6?api-version=2022-01-20-preview response: body: - string: '{"name":"bcf88328-961f-44ae-b16f-612e0382b7b1","status":"InProgress","startTime":"2022-07-28T00:49:44.633Z"}' + string: '{"name":"df440483-87a8-4a3a-88aa-3322219202a6","status":"InProgress","startTime":"2022-10-21T09:22:00.253Z"}' headers: cache-control: - no-cache @@ -491,7 +492,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:55:47 GMT + - Fri, 21 Oct 2022 09:28:02 GMT expires: 
- '-1' pragma: @@ -523,12 +524,12 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/bcf88328-961f-44ae-b16f-612e0382b7b1?api-version=2021-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/df440483-87a8-4a3a-88aa-3322219202a6?api-version=2022-01-20-preview response: body: - string: '{"name":"bcf88328-961f-44ae-b16f-612e0382b7b1","status":"Succeeded","startTime":"2022-07-28T00:49:44.633Z"}' + string: '{"name":"df440483-87a8-4a3a-88aa-3322219202a6","status":"Succeeded","startTime":"2022-10-21T09:22:00.253Z"}' headers: cache-control: - no-cache @@ -537,7 +538,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:56:47 GMT + - Fri, 21 Oct 2022 09:29:03 GMT expires: - '-1' pragma: @@ -569,22 +570,22 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002?api-version=2021-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002?api-version=2022-01-20-preview response: body: - string: '{"sku":{"name":"Standard_D2s_v3","tier":"GeneralPurpose"},"properties":{"fullyQualifiedDomainName":"azuredbclitest-000002.postgres.database.azure.com","version":"12","minorVersion":"11","administratorLogin":"cliuser","state":"Ready","availabilityZone":"1","storage":{"storageSizeGB":32},"backup":{"backupRetentionDays":7,"geoRedundantBackup":"Disabled","earliestRestoreDate":"2022-07-28T00:56:48.6312306+00:00"},"network":{"publicNetworkAccess":"Enabled"},"highAvailability":{"mode":"Disabled","state":"NotEnabled"},"maintenanceWindow":{"customWindow":"Disabled","dayOfWeek":0,"startHour":0,"startMinute":0}},"location":"East + string: '{"sku":{"name":"Standard_D2s_v3","tier":"GeneralPurpose"},"properties":{"fullyQualifiedDomainName":"azuredbclitest-000002.postgres.database.azure.com","version":"12","minorVersion":"11","administratorLogin":"cliuser","state":"Ready","availabilityZone":"1","storage":{"storageSizeGB":32},"backup":{"backupRetentionDays":7,"geoRedundantBackup":"Disabled","earliestRestoreDate":"2022-10-21T09:22:04.9732994+00:00"},"network":{"publicNetworkAccess":"Enabled"},"highAvailability":{"mode":"Disabled","state":"NotEnabled"},"maintenanceWindow":{"customWindow":"Disabled","dayOfWeek":0,"startHour":0,"startMinute":0},"storageAutoGrowth":false},"location":"East 
US","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002","name":"azuredbclitest-000002","type":"Microsoft.DBforPostgreSQL/flexibleServers"}' headers: cache-control: - no-cache content-length: - - '877' + - '903' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:56:48 GMT + - Fri, 21 Oct 2022 09:29:03 GMT expires: - '-1' pragma: @@ -620,27 +621,27 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002/databases/flexibleserverdb?api-version=2021-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002/databases/flexibleserverdb?api-version=2022-01-20-preview response: body: - string: '{"operation":"UpsertServerDatabaseManagementOperation","startTime":"2022-07-28T00:56:49.76Z"}' + string: '{"operation":"UpsertServerDatabaseManagementOperation","startTime":"2022-10-21T09:29:05.167Z"}' headers: azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/86ec4c13-06b2-43bb-9114-4a1159b1fcee?api-version=2021-06-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/34cc7a15-6e9d-4f2c-8926-2a6f1804d4bd?api-version=2022-01-20-preview cache-control: - no-cache content-length: - - '93' + - '94' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:56:49 GMT + - Fri, 21 Oct 2022 09:29:04 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/operationResults/86ec4c13-06b2-43bb-9114-4a1159b1fcee?api-version=2021-06-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/operationResults/34cc7a15-6e9d-4f2c-8926-2a6f1804d4bd?api-version=2022-01-20-preview pragma: - no-cache server: @@ -668,21 +669,21 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/86ec4c13-06b2-43bb-9114-4a1159b1fcee?api-version=2021-06-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/34cc7a15-6e9d-4f2c-8926-2a6f1804d4bd?api-version=2022-01-20-preview response: body: - string: '{"name":"86ec4c13-06b2-43bb-9114-4a1159b1fcee","status":"Succeeded","startTime":"2022-07-28T00:56:49.76Z"}' + string: '{"name":"34cc7a15-6e9d-4f2c-8926-2a6f1804d4bd","status":"Succeeded","startTime":"2022-10-21T09:29:05.167Z"}' headers: cache-control: - no-cache content-length: - - '106' + - '107' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:56:59 GMT + - Fri, 21 Oct 2022 09:29:14 GMT expires: - '-1' pragma: @@ -714,9 +715,9 @@ interactions: ParameterSetName: - -g -n -l --admin-user --storage-size --version --public-access User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002/databases/flexibleserverdb?api-version=2021-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002/databases/flexibleserverdb?api-version=2022-01-20-preview response: body: string: '{"properties":{"charset":"UTF8","collation":"en_US.utf8"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002/databases/flexibleserverdb","name":"flexibleserverdb","type":"Microsoft.DBforPostgreSQL/flexibleServers/databases"}' @@ -728,7 +729,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:56:59 GMT + - Fri, 21 Oct 2022 09:29:15 GMT expires: - '-1' pragma: @@ -760,22 +761,22 @@ interactions: ParameterSetName: - -g -n -p User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002?api-version=2021-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002?api-version=2022-01-20-preview response: body: - string: 
'{"sku":{"name":"Standard_D2s_v3","tier":"GeneralPurpose"},"properties":{"fullyQualifiedDomainName":"azuredbclitest-000002.postgres.database.azure.com","version":"12","minorVersion":"11","administratorLogin":"cliuser","state":"Ready","availabilityZone":"1","storage":{"storageSizeGB":32},"backup":{"backupRetentionDays":7,"geoRedundantBackup":"Disabled","earliestRestoreDate":"2022-07-28T00:57:01.5050447+00:00"},"network":{"publicNetworkAccess":"Enabled"},"highAvailability":{"mode":"Disabled","state":"NotEnabled"},"maintenanceWindow":{"customWindow":"Disabled","dayOfWeek":0,"startHour":0,"startMinute":0}},"location":"East + string: '{"sku":{"name":"Standard_D2s_v3","tier":"GeneralPurpose"},"properties":{"fullyQualifiedDomainName":"azuredbclitest-000002.postgres.database.azure.com","version":"12","minorVersion":"11","administratorLogin":"cliuser","state":"Ready","availabilityZone":"1","storage":{"storageSizeGB":32},"backup":{"backupRetentionDays":7,"geoRedundantBackup":"Disabled","earliestRestoreDate":"2022-10-21T09:22:04.9732994+00:00"},"network":{"publicNetworkAccess":"Enabled"},"highAvailability":{"mode":"Disabled","state":"NotEnabled"},"maintenanceWindow":{"customWindow":"Disabled","dayOfWeek":0,"startHour":0,"startMinute":0},"storageAutoGrowth":false},"location":"East US","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002","name":"azuredbclitest-000002","type":"Microsoft.DBforPostgreSQL/flexibleServers"}' headers: cache-control: - no-cache content-length: - - '877' + - '903' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:57:00 GMT + - Fri, 21 Oct 2022 09:29:15 GMT expires: - '-1' pragma: @@ -807,21 +808,21 @@ interactions: ParameterSetName: - -g -n -p User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/capabilities?api-version=2021-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/capabilities?api-version=2022-01-20-preview response: body: - string: 
'{"value":[{"zone":"none","geoBackupSupported":true,"zoneRedundantHaSupported":true,"zoneRedundantHaAndGeoBackupSupported":true,"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":
"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":40
96,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS"
:5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":
20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":
16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"}],"status":"Available"}],"supportedHyperscaleNodeEditions":[{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":25600,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":51200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":80000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"11.0","supportedVcores":[{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"11.2","supportedVcores":[{"name":"Standard_D4s_v3","vCores":4,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":25600,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":51200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":80000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"11.2.8","supportedVcores":[{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":25600,"supportedMemoryPerVcoreMB":4096,"status":"Avail
able"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":51200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":80000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"}],"supportedNodeTypes":[{"name":"Coordinator","nodeType":"Coordinator","status":"Default"}],"status":"Available"},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":25600,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":51200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":80000,"supportedMemoryPerVcoreMB":8192,"status":"Available"}],"status":"Available"},{"name":"11.0","supportedVcores":[{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":25600,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":51200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":80000,"supportedMemoryPerVcoreMB":8192,"status":"Available"}],"status":"Available"},{"name":"11.2","supportedVcores":[{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":25600,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":51200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":80000,"supportedMemoryPerVcoreMB":8192,"status":"Available"}],"status":"Available"},{"name":"11.2.8","supportedVcores":[{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":25600,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":51200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supp
ortedIOPS":80000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"}],"supportedNodeTypes":[{"name":"Worker","nodeType":"Worker","status":"Default"}],"status":"Available"}],"supportedHAMode":["SameZone","ZoneRedundant"],"status":"Default"},{"zone":"1","geoBackupSupported":true,"zoneRedundantHaSupported":true,"zoneRedundantHaAndGeoBackupSupported":true,"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408
","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supp
ortedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIO
PS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCore
s":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Stand
ard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"}],"status":"Available"}],"supportedHyperscaleNodeEditions":[],"supportedHAMode":["SameZone","ZoneRedundant"],"status":"Available"},{"zone":"2","geoBackupSupported":true,"zoneRedundantHaSupported":true,"zoneRedundantHaAndGeoBackupSupported":true,"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name"
:"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Stand
ard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available
"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"
supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E
8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"}],"status":"Available"}],"supportedHyperscaleNodeEditions":[],"supportedHAMode":["SameZone","ZoneRedundant"],"status":"Available"},{"zone":"3","geoBackupSupported":true,"zoneRedundantHaSupported":true,"zoneRedundantHaAndGeoBackupSupported":true,"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":
"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D
16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"
name":"Standard_D48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"stat
us":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPer
VcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"}],"status":"Available"}],"supportedHyperscaleNodeEditions":[],"supportedHAMode":["SameZone","ZoneRedundant"],"status":"Available"}]}' + string: 
'{"value":[{"zone":"none","geoBackupSupported":true,"zoneRedundantHaSupported":true,"zoneRedundantHaAndGeoBackupSupported":true,"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":
"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":40
96,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS"
:5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":
20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":
16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"}],"status":"Available"}],"supportedHAMode":["SameZone","ZoneRedundant"],"status":"Default"},{"zone":"1","geoBackupSupported":true,"zoneRedundantHaSupported":true,"zoneRedundantHaAndGeoBackupSupported":true,"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSiz
eMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedM
emoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS
":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"stat
us":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPer
VcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"}],"status":"Available"}],"supportedHAMode":["SameZone","ZoneRedundant"],"status":"Available"},{"zone":"2","geoBackupSupported":true,"zoneRedundantHaSupported":true,"zoneRedundantHaAndGeoBackupSupported":true,"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"suppo
rtedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}
,{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"statu
s":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemor
yPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20
000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"}],"status":"Available"}],"supportedHAMode":["SameZone","ZoneRedundant"],"status":"Available"},{"zone":"3","geoBackupSupported":true,"zoneRedundantHaSupported":true,"zoneRedundantHaAndGeoBackupSupported":true,"supportedFlexibleServerEditions":[{"name":"Burstable","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemory
PerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_B1ms","vCores":1,"supportedIOPS":640,"supportedMemoryPerVcoreMB":2048,"status":"Available"},{"name":"Standard_B2s","vCores":2,"supportedIOPS":1280,"supportedMemoryPerVcoreMB":2048,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"GeneralPurpose","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,
"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_D2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8s_v3",
"vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"},{"name":"Standard_D64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":4096,"status":"Available"}],"status":"Available"}],"status":"Available"},{"name":"MemoryOptimized","supportedStorageEditions":[{"name":"ManagedDisk","supportedStorageMB":[{"name":"32768","supportedIOPS":120,"storageSizeMB":32768,"status":"Available"},{"name":"65536","supportedIOPS":240,"storageSizeMB":65536,"status":"Available"},{"name":"131072","supportedIOPS":500,"storageSizeMB":131072,"status":"Available"},{"name":"262144","supportedIOPS":1100,"storageSizeMB":262144,"status":"Available"},{"name":"524288","supportedIOPS":2300,"storageSizeMB":524288,"status":"Available"},{"name":"1048576","supportedIOPS":5000,"storageSizeMB":1048576,"status":"Available"},{"name":"2097152","supportedIOPS":7500,"storageSizeMB":2097152,"status":"Available"},{"name":"4194304","supportedIOPS":7500,"storageSizeMB":4194304,"status":"Available"},{"name":"8388608","supportedIOPS":16000,"storageSizeMB":8388608,"status":"Available"},{"name":"16777216","supportedIOPS":18000,"storageSizeMB":16777216,"status":"Available"},{"name":"33553408","supportedIOPS":20000,"storageSizeMB":33553408,"status":"Available"}],"status":"Default"}],"supportedServerVersions":[{"name":"11","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"S
tandard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"12","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"13","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Ava
ilable"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":20000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"},{"name":"14","supportedVcores":[{"name":"Standard_E2s_v3","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4s_v3","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8s_v3","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16s_v3","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32s_v3","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48s_v3","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64s_v3","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"},{"name":"Standard_E2ds_v4","vCores":2,"supportedIOPS":3200,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E4ds_v4","vCores":4,"supportedIOPS":6400,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E8ds_v4","vCores":8,"supportedIOPS":12800,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E16ds_v4","vCores":16,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E20ds_v4","vCores":20,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E32ds_v4","vCores":32,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E48ds_v4","vCores":48,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":8192,"status":"Available"},{"name":"Standard_E64ds_v4","vCores":64,"supportedIOPS":18000,"supportedMemoryPerVcoreMB":6912,"status":"Available"}],"status":"Available"}],"status":"Available"}],"supportedHAMode":["SameZone","ZoneRedundant"],"status":"Available"}]}' headers: cache-control: - no-cache content-length: - - '80537' + - '73712' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:57:02 GMT + - Fri, 21 Oct 2022 09:29:17 GMT expires: - '-1' pragma: @@ -861,15 +862,15 @@ interactions: ParameterSetName: - -g -n -p User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: PATCH - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002?api-version=2021-06-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002?api-version=2022-01-20-preview response: body: - string: '{"operation":"UpsertServerManagementOperationV2","startTime":"2022-07-28T00:57:04.533Z"}' + string: '{"operation":"UpsertServerManagementOperationV2","startTime":"2022-10-21T09:29:18.877Z"}' headers: azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/0fd605cc-960a-4c02-a6ed-35357b84267f?api-version=2021-06-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/2ee88744-1528-424b-82f5-7e078d37d7da?api-version=2022-01-20-preview cache-control: - no-cache content-length: @@ -877,11 +878,11 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:57:04 GMT + - Fri, 21 Oct 2022 09:29:18 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/operationResults/0fd605cc-960a-4c02-a6ed-35357b84267f?api-version=2021-06-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/operationResults/2ee88744-1528-424b-82f5-7e078d37d7da?api-version=2022-01-20-preview pragma: - no-cache server: @@ -909,12 +910,12 @@ interactions: ParameterSetName: - -g -n -p User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/0fd605cc-960a-4c02-a6ed-35357b84267f?api-version=2021-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/2ee88744-1528-424b-82f5-7e078d37d7da?api-version=2022-01-20-preview response: body: - string: '{"name":"0fd605cc-960a-4c02-a6ed-35357b84267f","status":"Succeeded","startTime":"2022-07-28T00:57:04.533Z"}' + string: '{"name":"2ee88744-1528-424b-82f5-7e078d37d7da","status":"Succeeded","startTime":"2022-10-21T09:29:18.877Z"}' headers: cache-control: - no-cache @@ -923,7 +924,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:58:04 GMT + - Fri, 21 Oct 2022 09:30:18 GMT expires: - '-1' pragma: @@ -955,22 +956,22 @@ interactions: ParameterSetName: - -g -n -p User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002?api-version=2021-06-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002?api-version=2022-01-20-preview response: body: - string: '{"sku":{"name":"Standard_D2s_v3","tier":"GeneralPurpose"},"properties":{"fullyQualifiedDomainName":"azuredbclitest-000002.postgres.database.azure.com","version":"12","minorVersion":"11","administratorLogin":"cliuser","state":"Ready","availabilityZone":"1","storage":{"storageSizeGB":32},"backup":{"backupRetentionDays":7,"geoRedundantBackup":"Disabled","earliestRestoreDate":"2022-07-28T00:58:05.3940841+00:00"},"network":{"publicNetworkAccess":"Enabled"},"highAvailability":{"mode":"Disabled","state":"NotEnabled"},"maintenanceWindow":{"customWindow":"Disabled","dayOfWeek":0,"startHour":0,"startMinute":0}},"location":"East + string: '{"sku":{"name":"Standard_D2s_v3","tier":"GeneralPurpose"},"properties":{"fullyQualifiedDomainName":"azuredbclitest-000002.postgres.database.azure.com","version":"12","minorVersion":"11","administratorLogin":"cliuser","state":"Ready","availabilityZone":"1","storage":{"storageSizeGB":32},"backup":{"backupRetentionDays":7,"geoRedundantBackup":"Disabled","earliestRestoreDate":"2022-10-21T09:29:23.3019011+00:00"},"network":{"publicNetworkAccess":"Enabled"},"highAvailability":{"mode":"Disabled","state":"NotEnabled"},"maintenanceWindow":{"customWindow":"Disabled","dayOfWeek":0,"startHour":0,"startMinute":0},"storageAutoGrowth":false},"location":"East US","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002","name":"azuredbclitest-000002","type":"Microsoft.DBforPostgreSQL/flexibleServers"}' headers: cache-control: - no-cache content-length: - - '877' + - '903' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 00:58:05 GMT + - Fri, 21 Oct 2022 09:30:19 GMT expires: - '-1' pragma: @@ -1002,22 +1003,22 @@ interactions: ParameterSetName: - -g -n --rule-name --start-ip-address --end-ip-address User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002?api-version=2021-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002?api-version=2022-01-20-preview response: body: - string: '{"sku":{"name":"Standard_D2s_v3","tier":"GeneralPurpose"},"properties":{"fullyQualifiedDomainName":"azuredbclitest-000002.postgres.database.azure.com","version":"12","minorVersion":"11","administratorLogin":"cliuser","state":"Ready","availabilityZone":"1","storage":{"storageSizeGB":32},"backup":{"backupRetentionDays":7,"geoRedundantBackup":"Disabled","earliestRestoreDate":"2022-07-28T01:00:17.4066848+00:00"},"network":{"publicNetworkAccess":"Enabled"},"highAvailability":{"mode":"Disabled","state":"NotEnabled"},"maintenanceWindow":{"customWindow":"Disabled","dayOfWeek":0,"startHour":0,"startMinute":0}},"location":"East + string: 
'{"sku":{"name":"Standard_D2s_v3","tier":"GeneralPurpose"},"properties":{"fullyQualifiedDomainName":"azuredbclitest-000002.postgres.database.azure.com","version":"12","minorVersion":"11","administratorLogin":"cliuser","state":"Ready","availabilityZone":"1","storage":{"storageSizeGB":32},"backup":{"backupRetentionDays":7,"geoRedundantBackup":"Disabled","earliestRestoreDate":"2022-10-21T09:29:23.3019011+00:00"},"network":{"publicNetworkAccess":"Enabled"},"highAvailability":{"mode":"Disabled","state":"NotEnabled"},"maintenanceWindow":{"customWindow":"Disabled","dayOfWeek":0,"startHour":0,"startMinute":0},"storageAutoGrowth":false},"location":"East US","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002","name":"azuredbclitest-000002","type":"Microsoft.DBforPostgreSQL/flexibleServers"}' headers: cache-control: - no-cache content-length: - - '877' + - '903' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 01:00:16 GMT + - Fri, 21 Oct 2022 09:32:31 GMT expires: - '-1' pragma: @@ -1053,27 +1054,27 @@ interactions: ParameterSetName: - -g -n --rule-name --start-ip-address --end-ip-address User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002/firewallRules/allIps?api-version=2021-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002/firewallRules/allIps?api-version=2022-01-20-preview response: body: - string: '{"operation":"UpsertServerFirewallRulesManagementOperation","startTime":"2022-07-28T01:00:18.357Z"}' + string: '{"operation":"UpsertServerFirewallRulesManagementOperation","startTime":"2022-10-21T09:32:32.83Z"}' headers: azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/79711917-ddea-47ca-86da-6fb0c20eb91b?api-version=2021-06-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/f13711e4-09e4-43ef-84f7-a33dd166228f?api-version=2022-01-20-preview cache-control: - no-cache content-length: - - '99' + - '98' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 01:00:17 GMT + - Fri, 21 Oct 2022 09:32:32 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/operationResults/79711917-ddea-47ca-86da-6fb0c20eb91b?api-version=2021-06-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/operationResults/f13711e4-09e4-43ef-84f7-a33dd166228f?api-version=2022-01-20-preview pragma: - no-cache server: @@ -1101,21 +1102,21 @@ interactions: ParameterSetName: - -g -n --rule-name --start-ip-address --end-ip-address User-Agent: - - AZURECLI/2.38.0 
azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/79711917-ddea-47ca-86da-6fb0c20eb91b?api-version=2021-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DBforPostgreSQL/locations/eastus/azureAsyncOperation/f13711e4-09e4-43ef-84f7-a33dd166228f?api-version=2022-01-20-preview response: body: - string: '{"name":"79711917-ddea-47ca-86da-6fb0c20eb91b","status":"Succeeded","startTime":"2022-07-28T01:00:18.357Z"}' + string: '{"name":"f13711e4-09e4-43ef-84f7-a33dd166228f","status":"Succeeded","startTime":"2022-10-21T09:32:32.83Z"}' headers: cache-control: - no-cache content-length: - - '107' + - '106' content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 01:01:18 GMT + - Fri, 21 Oct 2022 09:33:32 GMT expires: - '-1' pragma: @@ -1147,9 +1148,9 @@ interactions: ParameterSetName: - -g -n --rule-name --start-ip-address --end-ip-address User-Agent: - - AZURECLI/2.38.0 azsdk-python-mgmt-rdbms/10.2.0b1 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) + - AZURECLI/2.41.0 azsdk-python-mgmt-rdbms/10.2.0b3 Python/3.8.10 (Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002/firewallRules/allIps?api-version=2021-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002/firewallRules/allIps?api-version=2022-01-20-preview response: body: string: '{"properties":{"startIpAddress":"0.0.0.0","endIpAddress":"255.255.255.255"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.DBforPostgreSQL/flexibleServers/azuredbclitest-000002/firewallRules/allIps","name":"allIps","type":"Microsoft.DBforPostgreSQL/flexibleServers/firewallRules"}' @@ -1161,7 +1162,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Thu, 28 Jul 2022 01:01:18 GMT + - Fri, 21 Oct 2022 09:33:33 GMT expires: - '-1' pragma: diff --git a/src/rdbms-connect/azext_rdbms_connect/tests/latest/test_rdbms-connect_scenario.py b/src/rdbms-connect/azext_rdbms_connect/tests/latest/test_rdbms-connect_scenario.py index 5c9c26cb671..dacfc24a837 100644 --- a/src/rdbms-connect/azext_rdbms_connect/tests/latest/test_rdbms-connect_scenario.py +++ b/src/rdbms-connect/azext_rdbms_connect/tests/latest/test_rdbms-connect_scenario.py @@ -24,7 +24,7 @@ class RdbmsConnectMgmtScenarioTest(ScenarioTest): postgres_location = 'eastus' - mysql_location = 'westus' + mysql_location = 'northeurope' @AllowLargeResponse() @ResourceGroupPreparer(location=postgres_location) diff --git a/src/rdbms-connect/setup.py b/src/rdbms-connect/setup.py index 1ad6ea9b905..021eb7e499b 100644 --- a/src/rdbms-connect/setup.py +++ b/src/rdbms-connect/setup.py @@ -16,7 +16,7 @@ # TODO: Confirm this is the right version number you want and it matches your # HISTORY.rst entry. 
-VERSION = '1.0.3' +VERSION = '1.0.4' # The full list of classifiers is available at # https://pypi.python.org/pypi?%3Aaction=list_classifiers @@ -34,8 +34,9 @@ DEPENDENCIES = [ 'setproctitle~=1.2.2', - 'mycli==1.22.2', - 'pgcli==3.0.0' + 'psycopg2==2.9.3', + 'mycli==1.26.1', + 'pgcli==3.4.1' ] with open('README.rst', 'r', encoding='utf-8') as f: From e4d83948c9600c2d0481919f9e08e95907d22f99 Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Mon, 24 Oct 2022 02:05:24 +0000 Subject: [PATCH 21/85] [Release] Update index.json for extension [ rdbms-connect ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=10641&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/32171dbb8b115363d9a9700cf77555549646f42b --- src/index.json | 53 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 53 insertions(+) diff --git a/src/index.json b/src/index.json index 2faa2bee768..db52934ebb2 100644 --- a/src/index.json +++ b/src/index.json @@ -33807,6 +33807,59 @@ "version": "1.0.3" }, "sha256Digest": "fadc2e3b684636448f1f4b6533e54dc30381984c125dc771e605a0410d26dcbe" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/rdbms_connect-1.0.4-py2.py3-none-any.whl", + "filename": "rdbms_connect-1.0.4-py2.py3-none-any.whl", + "metadata": { + "azext.minCliCoreVersion": "2.19.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/rdbms-connect" + } + } + }, + "extras": [], + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "rdbms-connect", + "run_requires": [ + { + "requires": [ + "mycli (==1.26.1)", + "pgcli (==3.4.1)", + "psycopg2 (==2.9.3)", + "setproctitle (~=1.2.2)" + ] + } + ], + "summary": "Support for testing connection to Azure Database for MySQL & PostgreSQL servers.", + "version": "1.0.4" + }, + "sha256Digest": "8dc15a70881c71f21ba000854960b1be851a057b403dc8de156b73883efb543d" } ], "redisenterprise": [ From 2387dfd60c3d4140fb34bf8876bf86ae9a5b6882 Mon Sep 17 00:00:00 2001 From: ZelinWang Date: Mon, 24 Oct 2022 11:12:46 +0800 Subject: [PATCH 22/85] {CI} Update milestone from Oct 2022 (2022-11-01) to Nov 2022 (2022-12-06) (#5480) --- .github/fabricbot.json | 172 ++++++++++++++++++++--------------------- 1 file changed, 86 insertions(+), 86 deletions(-) diff --git a/.github/fabricbot.json b/.github/fabricbot.json index 1fee9bf8e09..5d35b0fa95e 100644 --- a/.github/fabricbot.json +++ b/.github/fabricbot.json @@ -3235,7 +3235,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -3417,7 +3417,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -3644,7 +3644,7 @@ { "name": 
"addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -3802,7 +3802,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -3966,7 +3966,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -4131,7 +4131,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -4219,7 +4219,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -4447,7 +4447,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -4611,7 +4611,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -4780,7 +4780,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -4856,7 +4856,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -5096,7 +5096,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -5242,7 +5242,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -5387,7 +5387,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -5450,7 +5450,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -5513,7 +5513,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -5576,7 +5576,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -5639,7 +5639,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -5702,7 +5702,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -5765,7 +5765,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -5828,7 +5828,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -5891,7 +5891,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -5954,7 +5954,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -6017,7 +6017,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -6080,7 +6080,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 
(2022-12-06)" } } ] @@ -6143,7 +6143,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -6206,7 +6206,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -6269,7 +6269,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -6332,7 +6332,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -6395,7 +6395,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -6458,7 +6458,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -6521,7 +6521,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -6584,7 +6584,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -6647,7 +6647,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -6710,7 +6710,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -6773,7 +6773,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -6836,7 +6836,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -6899,7 +6899,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -6962,7 +6962,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -7025,7 +7025,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -7088,7 +7088,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -7151,7 +7151,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -7214,7 +7214,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -7277,7 +7277,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -7340,7 +7340,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -7403,7 +7403,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -7466,7 +7466,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -7529,7 +7529,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 
(2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -7592,7 +7592,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -7655,7 +7655,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -7718,7 +7718,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -7781,7 +7781,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -7844,7 +7844,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -7907,7 +7907,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -7970,7 +7970,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -8033,7 +8033,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -8096,7 +8096,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -8173,7 +8173,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -8250,7 +8250,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -8415,7 +8415,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -8504,7 +8504,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -8681,7 +8681,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -8866,7 +8866,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -9060,7 +9060,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -9214,7 +9214,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -9378,7 +9378,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -9461,7 +9461,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -9632,7 +9632,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -9797,7 +9797,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -9962,7 +9962,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -10127,7 +10127,7 @@ { "name": 
"addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -10303,7 +10303,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } } ] @@ -10478,7 +10478,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -10637,7 +10637,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -10796,7 +10796,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -10976,7 +10976,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -11205,7 +11205,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -11295,7 +11295,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -11461,7 +11461,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -11627,7 +11627,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -11780,7 +11780,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -11869,7 +11869,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -12022,7 +12022,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -12098,7 +12098,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -12253,7 +12253,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { @@ -12464,7 +12464,7 @@ { "name": "addMilestone", "parameters": { - "milestoneName": "Oct 2022 (2022-11-01)" + "milestoneName": "Nov 2022 (2022-12-06)" } }, { From eb8393144808f6b901f3a86c7568e1a5946c5ce5 Mon Sep 17 00:00:00 2001 From: haagha <64601174+haagha@users.noreply.github.com> Date: Mon, 24 Oct 2022 22:09:34 -0400 Subject: [PATCH 23/85] [Vm-repair] Vm repair bug fixes (#5451) --- src/vm-repair/HISTORY.rst | 4 ++ src/vm-repair/azext_vm_repair/_params.py | 1 + src/vm-repair/azext_vm_repair/_validators.py | 4 +- src/vm-repair/azext_vm_repair/custom.py | 48 ++++++++++--------- src/vm-repair/azext_vm_repair/repair_utils.py | 18 ++++++- .../tests/latest/test_repair_commands.py | 43 +++++++++-------- src/vm-repair/setup.py | 2 +- 7 files changed, 74 insertions(+), 46 deletions(-) diff --git a/src/vm-repair/HISTORY.rst b/src/vm-repair/HISTORY.rst index ee30596bf7e..5724981f511 100644 --- a/src/vm-repair/HISTORY.rst +++ b/src/vm-repair/HISTORY.rst @@ -2,6 +2,10 @@ Release History =============== +0.4.8 +++++++ +Fix for encrypted vm's and fixing test cases + 0.4.7 ++++++ Setting subscription account for reset-nic diff --git a/src/vm-repair/azext_vm_repair/_params.py 
b/src/vm-repair/azext_vm_repair/_params.py
index e6091691ec8..a5e132f2a33 100644
--- a/src/vm-repair/azext_vm_repair/_params.py
+++ b/src/vm-repair/azext_vm_repair/_params.py
@@ -32,6 +32,7 @@ def load_arguments(self, _):
         c.argument('enable_nested', help='enable nested hyperv.')
         c.argument('associate_public_ip', help='Option to create repair vm with public ip')
         c.argument('distro', help='Option to create repair vm from a specific linux distro (rhel7|rhel8|suse12|ubuntu20|centos7|oracle7)')
+        c.argument('yes', help='Option to skip prompt for associating public ip and confirm yes to it in no Tty mode')

     with self.argument_context('vm repair restore') as c:
         c.argument('repair_vm_id', help='Repair VM resource id.')
diff --git a/src/vm-repair/azext_vm_repair/_validators.py b/src/vm-repair/azext_vm_repair/_validators.py
index 22277011c0b..aefc0029881 100644
--- a/src/vm-repair/azext_vm_repair/_validators.py
+++ b/src/vm-repair/azext_vm_repair/_validators.py
@@ -87,7 +87,7 @@ def validate_create(cmd, namespace):
     # Validate vm password
     validate_vm_password(namespace.repair_password, is_linux)
     # Prompt input for public ip usage
-    if not namespace.associate_public_ip:
+    if (not namespace.associate_public_ip) and (not namespace.yes):
         _prompt_public_ip(namespace)


@@ -313,7 +313,7 @@ def fetch_repair_vm(namespace):
     # Find repair VM
     tag = _get_repair_resource_tag(namespace.resource_group_name, namespace.vm_name)
     try:
-        find_repair_command = 'az resource list --tag {tag} --query "[?type==\'microsoft.compute/virtualmachines\']" -o json' \
+        find_repair_command = 'az resource list --tag {tag} --query "[?type==\'microsoft.compute/virtualmachines\' || type==\'Microsoft.Compute/virtualMachines\']" -o json' \
             .format(tag=tag)
         logger.info('Searching for repair-vm within subscription...')
         output = _call_az_command(find_repair_command)
diff --git a/src/vm-repair/azext_vm_repair/custom.py b/src/vm-repair/azext_vm_repair/custom.py
index 751ced9b8c3..09b0ac8685e 100644
--- a/src/vm-repair/azext_vm_repair/custom.py
+++ b/src/vm-repair/azext_vm_repair/custom.py
@@ -14,6 +14,7 @@
 from azure.cli.command_modules.vm.custom import get_vm, _is_linux_os
 from azure.cli.command_modules.storage.storage_url_helpers import StorageResourceIdentifier
 from msrestazure.tools import parse_resource_id
+from .exceptions import SkuDoesNotSupportHyperV

 from .command_helper_class import command_helper
 from .repair_utils import (
@@ -39,13 +40,15 @@
     _select_distro_linux_gen2,
     _set_repair_map_url,
     _is_gen2,
+    _unlock_encrypted_vm_run,
+    _create_repair_vm,
     _check_n_start_vm
 )
 from .exceptions import AzCommandError, SkuNotAvailableError, UnmanagedDiskCopyError, WindowsOsNotAvailableError, RunScriptNotFoundForIdError, SkuDoesNotSupportHyperV, ScriptReturnsError, SupportingResourceNotFoundError, CommandCanceledByUserError

 logger = get_logger(__name__)

-def create(cmd, vm_name, resource_group_name, repair_password=None, repair_username=None, repair_vm_name=None, copy_disk_name=None, repair_group_name=None, unlock_encrypted_vm=False, enable_nested=False, associate_public_ip=False, distro='ubuntu'):
+def create(cmd, vm_name, resource_group_name, repair_password=None, repair_username=None, repair_vm_name=None, copy_disk_name=None, repair_group_name=None, unlock_encrypted_vm=False, enable_nested=False, associate_public_ip=False, distro='ubuntu', yes=False):
     # Init command helper object
     command = command_helper(logger, cmd, 'vm repair create')
     # Main command calling block
@@ -64,13 +67,12 @@ def create(cmd, vm_name, resource_group_name, repair_password=None, repair_usern
         created_resources = []

         # Fetch OS image urn and set OS type for disk create
-        if is_linux:
+        if is_linux and _uses_managed_disk(source_vm):
             # os_image_urn = "UbuntuLTS"
             os_type = 'Linux'
             hyperV_generation_linux = _check_linux_hyperV_gen(source_vm)
             if hyperV_generation_linux == 'V2':
                 logger.info('Generation 2 VM detected, RHEL/Centos/Oracle 6 distros not available to be used for rescue VM ')
-                logger.debug('gen2 machine detected')
                 os_image_urn = _select_distro_linux_gen2(distro)
             else:
                 os_image_urn = _select_distro_linux(distro)
@@ -125,31 +127,31 @@ def create(cmd, vm_name, resource_group_name, repair_password=None, repair_usern

             # Copy OS Disk
             logger.info('Copying OS disk of source VM...')
             copy_disk_id = _call_az_command(copy_disk_command).strip('\n')
-            # For Linux the disk gets not attached at VM creation time. To prevent an incorrect boot state it is required to attach the disk after the VM got created.
-            if not is_linux:
-                # Add copied OS Disk to VM creat command so that the VM is created with the disk attached
-                create_repair_vm_command += ' --attach-data-disks {id}'.format(id=copy_disk_id)
-            # Validate create vm create command to validate parameters before runnning copy disk command
-            validate_create_vm_command = create_repair_vm_command + ' --validate'
-            logger.info('Validating VM template before continuing...')
-            _call_az_command(validate_create_vm_command, secure_params=[repair_password, repair_username])
-            # Create repair VM
-            logger.info('Creating repair VM...')
-            _call_az_command(create_repair_vm_command, secure_params=[repair_password, repair_username])
-            if is_linux:
-                # Attach copied managed disk to new vm
+            # Create VM according to the two conditions: is_linux, unlock_encrypted_vm
+            # Only in the case of a Linux VM without encryption the data-disk gets attached after VM creation.
+            # This is required to prevent an incorrect boot due to an UUID mismatch
+            if not is_linux:
+                # windows
+                _create_repair_vm(copy_disk_id, create_repair_vm_command, repair_password, repair_username)
+
+            if not is_linux and unlock_encrypted_vm:
+                # windows with encryption
+                _create_repair_vm(copy_disk_id, create_repair_vm_command, repair_password, repair_username)
+                _unlock_encrypted_vm_run(repair_vm_name, repair_group_name, is_linux)
+
+            if is_linux and unlock_encrypted_vm:
+                # linux with encryption
+                _create_repair_vm(copy_disk_id, create_repair_vm_command, repair_password, repair_username)
+                _unlock_encrypted_vm_run(repair_vm_name, repair_group_name, is_linux)
+
+            if is_linux and (not unlock_encrypted_vm):
+                # linux without encryption
+                _create_repair_vm(copy_disk_id, create_repair_vm_command, repair_password, repair_username, fix_uuid=True)
                 logger.info('Attaching copied disk to repair VM as data disk...')
                 attach_disk_command = "az vm disk attach -g {g} --name {disk_id} --vm-name {vm_name} ".format(g=repair_group_name, disk_id=copy_disk_id, vm_name=repair_vm_name)
                 _call_az_command(attach_disk_command)
-            # Handle encrypted VM cases
-            if unlock_encrypted_vm:
-                stdout, stderr = _unlock_singlepass_encrypted_disk(repair_vm_name, repair_group_name, is_linux)
-                logger.debug('Unlock script STDOUT:\n%s', stdout)
-                if stderr:
-                    logger.warning('Encryption unlock script error was generated:\n%s', stderr)
-

         # UNMANAGED DISK
         else:
             logger.info('Source VM uses unmanaged disks. Creating repair VM with unmanaged disks.\n')
diff --git a/src/vm-repair/azext_vm_repair/repair_utils.py b/src/vm-repair/azext_vm_repair/repair_utils.py
index 3205afb6e1a..efe37ddf49a 100644
--- a/src/vm-repair/azext_vm_repair/repair_utils.py
+++ b/src/vm-repair/azext_vm_repair/repair_utils.py
@@ -358,7 +358,7 @@ def _check_linux_hyperV_gen(source_vm):
         .format(i=disk_id)
     hyperVGen = loads(_call_az_command(show_disk_command))
     if hyperVGen != 'V2':
-        logger.info('Trying to check on the source VM if it has the parameter of gen2')
+        logger.info('Checking if source VM is gen2')
         # if image is created from Marketplace gen2 image , the disk will not have the mark for gen2
         fetch_hypervgen_command = 'az vm get-instance-view --ids {id} --query "[instanceView.hyperVGeneration]" -o json'.format(id=source_vm.id)
         hyperVGen_list = loads(_call_az_command(fetch_hypervgen_command))
@@ -682,3 +682,19 @@ def _get_function_param_dict(frame):
         if param in values:
             values[param] = '********'
     return values
+
+
+def _unlock_encrypted_vm_run(repair_vm_name, repair_group_name, is_linux):
+    stdout, stderr = _unlock_singlepass_encrypted_disk(repair_vm_name, repair_group_name, is_linux)
+    logger.debug('Unlock script STDOUT:\n%s', stdout)
+    if stderr:
+        logger.warning('Encryption unlock script error was generated:\n%s', stderr)
+
+
+def _create_repair_vm(copy_disk_id, create_repair_vm_command, repair_password, repair_username, fix_uuid=False):
+    if not fix_uuid:
+        create_repair_vm_command += ' --attach-data-disks {id}'.format(id=copy_disk_id)
+    logger.info('Validating VM template before continuing...')
+    _call_az_command(create_repair_vm_command + ' --validate', secure_params=[repair_password, repair_username])
+    logger.info('Creating repair VM...')
+    _call_az_command(create_repair_vm_command, secure_params=[repair_password, repair_username])
diff --git a/src/vm-repair/azext_vm_repair/tests/latest/test_repair_commands.py b/src/vm-repair/azext_vm_repair/tests/latest/test_repair_commands.py
index c59af8abbc2..994f6ad6314 100644
--- a/src/vm-repair/azext_vm_repair/tests/latest/test_repair_commands.py
+++ b/src/vm-repair/azext_vm_repair/tests/latest/test_repair_commands.py
@@ -4,6 +4,8 @@
 # --------------------------------------------------------------------------------------------
 # pylint: disable=line-too-long, unused-argument
 import time
+
+import pytest
 from azure.cli.testsdk import LiveScenarioTest, ResourceGroupPreparer

 STATUS_SUCCESS = 'SUCCESS'
@@ -24,7 +26,7 @@ def test_vmrepair_WinManagedCreateRestore(self, resource_group):
         assert len(vms) == 1

         # Test create
-        result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --associate-public-ip -o json').get_output_in_json()
+        result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 -o json --yes').get_output_in_json()
         assert result['status'] == STATUS_SUCCESS, result['error_message']

         # Check repair VM
@@ -58,7 +60,7 @@ def test_vmrepair_WinUnmanagedCreateRestore(self, resource_group):
         assert len(vms) == 1

         # Test create
-        result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 -o json').get_output_in_json()
+        result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --yes -o json').get_output_in_json()
         assert result['status'] == STATUS_SUCCESS, result['error_message']

         # Check repair VM
@@ -77,9 +79,9 @@ def test_vmrepair_WinUnmanagedCreateRestore(self, 
resource_group): assert source_vm['storageProfile']['osDisk']['vhd']['uri'] == result['copied_disk_uri'] +@pytest.mark.linux class LinuxManagedDiskCreateRestoreTest(LiveScenarioTest): - @ResourceGroupPreparer(location='westus2') + @ResourceGroupPreparer(location='eastus') def test_vmrepair_LinuxManagedCreateRestore(self, resource_group): self.kwargs.update({ 'vm': 'vm1' @@ -92,7 +95,7 @@ def test_vmrepair_LinuxManagedCreateRestore(self, resource_group): assert len(vms) == 1 # Test create - result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 -o json').get_output_in_json() + result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --yes -o json').get_output_in_json() assert result['status'] == STATUS_SUCCESS, result['error_message'] # Check repair VM @@ -111,9 +114,10 @@ def test_vmrepair_LinuxManagedCreateRestore(self, resource_group): assert source_vm['storageProfile']['osDisk']['name'] == result['copied_disk_name'] +@pytest.mark.linux class LinuxUnmanagedDiskCreateRestoreTest(LiveScenarioTest): - @ResourceGroupPreparer(location='westus2') + @ResourceGroupPreparer(location='eastus') def test_vmrepair_LinuxUnmanagedCreateRestore(self, resource_group): self.kwargs.update({ 'vm': 'vm1' @@ -126,7 +130,7 @@ def test_vmrepair_LinuxUnmanagedCreateRestore(self, resource_group): assert len(vms) == 1 # Test create - result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 -o json').get_output_in_json() + result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --yes -o json').get_output_in_json() assert result['status'] == STATUS_SUCCESS, result['error_message'] # Check repair VM @@ -160,7 +164,7 @@ def test_vmrepair_WinManagedCreateRestorePublicIp(self, resource_group): assert len(vms) == 1 # Test create - result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --associate-public-ip -o json').get_output_in_json() + result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --yes -o json').get_output_in_json() assert result['status'] == STATUS_SUCCESS, result['error_message'] # Check repair VM @@ -194,7 +198,7 @@ def test_vmrepair_WinUnmanagedCreateRestorePublicIp(self, resource_group): assert len(vms) == 1 # Test create - result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --associate-public-ip -o json').get_output_in_json() + result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --yes -o json').get_output_in_json() assert result['status'] == STATUS_SUCCESS, result['error_message'] # Check repair VM @@ -215,7 +219,7 @@ def test_vmrepair_WinUnmanagedCreateRestorePublicIp(self, resource_group): class LinuxManagedDiskCreateRestoreTestwithpublicip(LiveScenarioTest): - @ResourceGroupPreparer(location='westus2') + @ResourceGroupPreparer(location='eastus') def test_vmrepair_LinuxManagedCreateRestorePublicIp(self, resource_group): self.kwargs.update({ 'vm': 'vm1' @@ -228,7 +232,7 @@ def test_vmrepair_LinuxManagedCreateRestorePublicIp(self, resource_group): assert len(vms) == 1 # Test create - result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --associate-public-ip -o json').get_output_in_json() 
+ result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --yes -o json').get_output_in_json() assert result['status'] == STATUS_SUCCESS, result['error_message'] # Check repair VM @@ -262,7 +266,7 @@ def test_vmrepair_LinuxUnmanagedCreateRestorePublicIp(self, resource_group): assert len(vms) == 1 # Test create - result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --associate-public-ip -o json').get_output_in_json() + result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --yes -o json').get_output_in_json() assert result['status'] == STATUS_SUCCESS, result['error_message'] # Check repair VM @@ -281,6 +285,7 @@ def test_vmrepair_LinuxUnmanagedCreateRestorePublicIp(self, resource_group): assert source_vm['storageProfile']['osDisk']['vhd']['uri'] == result['copied_disk_uri'] +@pytest.mark.encryption class WindowsSinglepassKekEncryptedManagedDiskCreateRestoreTest(LiveScenarioTest): @ResourceGroupPreparer(location='westus2') @@ -315,7 +320,7 @@ def test_vmrepair_WinSinglepassKekEncryptedManagedDiskCreateRestore(self, resour self.cmd('vm encryption enable -g {rg} -n {vm} --disk-encryption-keyvault {kv} --key-encryption-key {key}') # Test create - result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --unlock-encrypted-vm -o json').get_output_in_json() + result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --unlock-encrypted-vm --yes -o json').get_output_in_json() assert result['status'] == STATUS_SUCCESS, result['error_message'] # Check repair VM @@ -370,7 +375,7 @@ def test_vmrepair_LinuxSinglepassKekEncryptedManagedDiskCreateRestore(self, reso time.sleep(300) # Test create - result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --unlock-encrypted-vm -o json').get_output_in_json() + result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --unlock-encrypted-vm --yes -o json').get_output_in_json() assert result['status'] == STATUS_SUCCESS, result['error_message'] # Check repair VM @@ -415,7 +420,7 @@ def test_vmrepair_WinSinglepassNoKekEncryptedManagedDiskCreateRestore(self, reso self.cmd('vm encryption enable -g {rg} -n {vm} --disk-encryption-keyvault {kv}') # Test create - result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --unlock-encrypted-vm -o json').get_output_in_json() + result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --unlock-encrypted-vm --yes -o json').get_output_in_json() assert result['status'] == STATUS_SUCCESS, result['error_message'] # Check repair VM @@ -462,7 +467,7 @@ def test_vmrepair_LinuxSinglepassNoKekEncryptedManagedDiskCreateRestoreTest(self time.sleep(300) # Test create - result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --unlock-encrypted-vm -o json').get_output_in_json() + result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --unlock-encrypted-vm --yes -o json').get_output_in_json() assert result['status'] == STATUS_SUCCESS, result['error_message'] # Check repair VM @@ -538,7 +543,7 @@ def 
test_vmrepair_WinManagedCreateRestoreGen2(self, resource_group): assert len(vms) == 1 # Test create - result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --associate-public-ip -o json').get_output_in_json() + result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --yes -o json').get_output_in_json() assert result['status'] == STATUS_SUCCESS, result['error_message'] # Check repair VM @@ -592,7 +597,7 @@ def test_vmrepair_LinuxSinglepassKekEncryptedManagedDiskCreateRestoreRHEL8(self, time.sleep(300) # Test create - result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --distro rhel8 --unlock-encrypted-vm -o json').get_output_in_json() + result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --distro rhel8 --unlock-encrypted-vm --yes -o json').get_output_in_json() assert result['status'] == STATUS_SUCCESS, result['error_message'] # Check repair VM @@ -638,7 +643,7 @@ def test_vmrepair_LinuxSinglepassNoKekEncryptedManagedDiskCreateRestoreTestSLES1 time.sleep(300) # Test create - result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --distro sles15 --unlock-encrypted-vm -o json').get_output_in_json() + result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --distro sles15 --unlock-encrypted-vm --yes -o json').get_output_in_json() assert result['status'] == STATUS_SUCCESS, result['error_message'] # Check repair VM @@ -671,7 +676,7 @@ def test_vmrepair_LinuxManagedCreateRestoreOracle8PublicIp(self, resource_group) assert len(vms) == 1 # Test create - result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --distro oracle8 --associate-public-ip -o json').get_output_in_json() + result = self.cmd('vm repair create -g {rg} -n {vm} --repair-username azureadmin --repair-password !Passw0rd2018 --distro oracle8 --yes -o json').get_output_in_json() assert result['status'] == STATUS_SUCCESS, result['error_message'] # Check repair VM diff --git a/src/vm-repair/setup.py b/src/vm-repair/setup.py index 84eee56eec0..f4f0da7ba68 100644 --- a/src/vm-repair/setup.py +++ b/src/vm-repair/setup.py @@ -8,7 +8,7 @@ from codecs import open from setuptools import setup, find_packages -VERSION = "0.4.7" +VERSION = "0.4.8" CLASSIFIERS = [ 'Development Status :: 4 - Beta', From 4367bb565abaa836f663252c79ab4f35e11746de Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Tue, 25 Oct 2022 02:15:35 +0000 Subject: [PATCH 24/85] [Release] Update index.json for extension [ vm-repair ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=10970&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/eb8393144808f6b901f3a86c7568e1a5946c5ce5 --- src/index.json | 45 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/src/index.json b/src/index.json index db52934ebb2..f4340e6672f 100644 --- a/src/index.json +++ b/src/index.json @@ -43185,6 +43185,51 @@ "version": "0.4.7" }, "sha256Digest": "bed73a688d073c97207daaa907816db5781b429b6f3441d1b618b463feff2fcb" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/vm_repair-0.4.8-py2.py3-none-any.whl", + 
"filename": "vm_repair-0.4.8-py2.py3-none-any.whl", + "metadata": { + "azext.isPreview": false, + "azext.minCliCoreVersion": "2.0.67", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "caiddev@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/vm-repair" + } + } + }, + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "vm-repair", + "summary": "Auto repair commands to fix VMs.", + "version": "0.4.8" + }, + "sha256Digest": "2ea0b50f1b484bc9c7ea72e9285163de489e726ef24cb2939620d3fa3c763c4a" } ], "vmware": [ From 80434e6eddb0e7abed11fdb1d4b9b87548bc954f Mon Sep 17 00:00:00 2001 From: zmssp Date: Tue, 25 Oct 2022 14:54:04 +0800 Subject: [PATCH 25/85] Add remote debugging support for spring (#5453) --- src/spring/HISTORY.md | 6 + src/spring/azext_spring/_help.py | 15 + src/spring/azext_spring/_params.py | 15 +- src/spring/azext_spring/_validators.py | 6 + src/spring/azext_spring/commands.py | 3 + src/spring/azext_spring/custom.py | 18 + .../recordings/test_remote_debugging.yaml | 1348 +++++++++++++++++ .../tests/latest/test_asa_scenario.py | 30 + src/spring/setup.py | 2 +- 9 files changed, 1441 insertions(+), 2 deletions(-) create mode 100644 src/spring/azext_spring/tests/latest/recordings/test_remote_debugging.yaml diff --git a/src/spring/HISTORY.md b/src/spring/HISTORY.md index 1fafa9ab910..111889261d6 100644 --- a/src/spring/HISTORY.md +++ b/src/spring/HISTORY.md @@ -1,5 +1,11 @@ Release History =============== +1.1.11 +--- +* Add command `az spring app deployment enable-remote-debugging`. +* Add command `az spring app deployment disable-remote-debugging`. +* Add command `az spring app deployment get-remote-debugging-config`. + 1.1.10 --- * Remove `Preview` tag for user-assigned identities of apps. diff --git a/src/spring/azext_spring/_help.py b/src/spring/azext_spring/_help.py index 7b40a18c4bd..fe0c8314bee 100644 --- a/src/spring/azext_spring/_help.py +++ b/src/spring/azext_spring/_help.py @@ -216,6 +216,21 @@ short-summary: Restart instances of the app, default to production deployment. """ +helps['spring app enable-remote-debugging'] = """ + type: command + short-summary: Enable remote debugging for a deployment. +""" + +helps['spring app disable-remote-debugging'] = """ + type: command + short-summary: Disable remote debugging for a deployment. +""" + +helps['spring app get-remote-debugging-config'] = """ + type: command + short-summary: Get the remote debugging configuration of a deployment. +""" + helps['spring app deploy'] = """ type: command short-summary: Deploy source code or pre-built binary to an app and update related configurations. 
diff --git a/src/spring/azext_spring/_params.py b/src/spring/azext_spring/_params.py index 5eac04615e4..adb97a22cf2 100644 --- a/src/spring/azext_spring/_params.py +++ b/src/spring/azext_spring/_params.py @@ -13,7 +13,8 @@ validate_vnet, validate_vnet_required_parameters, validate_node_resource_group, validate_tracing_parameters_asc_create, validate_tracing_parameters_asc_update, validate_app_insights_parameters, validate_instance_count, validate_java_agent_parameters, - validate_ingress_timeout, validate_jar, validate_ingress_send_timeout, validate_ingress_session_max_age) + validate_ingress_timeout, validate_remote_debugging_port, validate_jar, validate_ingress_send_timeout, + validate_ingress_session_max_age) from ._validators_enterprise import (only_support_enterprise, validate_builder_resource, validate_builder_create, validate_builder_update, validate_build_pool_size, validate_git_uri, validate_acs_patterns, validate_config_file_patterns, @@ -287,6 +288,18 @@ def load_arguments(self, _): c.argument('deployment', options_list=[ '--deployment', '-d'], help='Name of an existing deployment of the app. Default to the production deployment if not specified.', validator=fulfill_deployment_param) + for scope in ['spring app disable-remote-debugging', 'spring app get-remote-debugging-config']: + with self.argument_context(scope) as c: + c.argument('deployment', options_list=[ + '--deployment', '-d'], help='Name of an existing deployment of the app. Default to the production deployment if not specified.', validator=fulfill_deployment_param) + + with self.argument_context('spring app enable-remote-debugging') as c: + c.argument('deployment', options_list=[ + '--deployment', '-d'], help='Name of an existing deployment of the app. Default to the production deployment if not specified.', validator=fulfill_deployment_param) + c.argument('remote_debugging_port', options_list=['--port', '-p'], type=int, default=5005, + help='Remote debugging port, the value should be from 1024 to 65535, default value is 5005', + validator=validate_remote_debugging_port) + with self.argument_context('spring app unset-deployment') as c: c.argument('name', name_type, help='Name of app.', validator=active_deployment_exist) diff --git a/src/spring/azext_spring/_validators.py b/src/spring/azext_spring/_validators.py index e33491ed997..7de18dce789 100644 --- a/src/spring/azext_spring/_validators.py +++ b/src/spring/azext_spring/_validators.py @@ -218,6 +218,12 @@ def validate_ingress_timeout(namespace): raise InvalidArgumentValueError("Invalid value: Ingress read timeout must be in the range [1,1800].") +def validate_remote_debugging_port(namespace): + if namespace.remote_debugging_port is not None and (namespace.remote_debugging_port < 1024 or + namespace.remote_debugging_port > 65535): + raise InvalidArgumentValueError("Invalid value: remote debugging port must be in the range [1024,65535].") + + def validate_ingress_send_timeout(namespace): if namespace.ingress_send_timeout is not None and (namespace.ingress_read_timeout < 1 or namespace.ingress_read_timeout > 1800): diff --git a/src/spring/azext_spring/commands.py b/src/spring/azext_spring/commands.py index 18c8dff7c54..1b96b49de38 100644 --- a/src/spring/azext_spring/commands.py +++ b/src/spring/azext_spring/commands.py @@ -149,6 +149,9 @@ def load_command_table(self, _): g.custom_command('append-persistent-storage', 'app_append_persistent_storage') g.custom_command('append-loaded-public-certificate', 'app_append_loaded_public_certificate') g.custom_command('connect',
'app_connect') + g.custom_command('enable-remote-debugging', 'deployment_enable_remote_debugging', supports_no_wait=True) + g.custom_command('disable-remote-debugging', 'deployment_disable_remote_debugging', supports_no_wait=True) + g.custom_command('get-remote-debugging-config', 'deployment_get_remote_debugging') with self.command_group('spring app identity', custom_command_type=app_managed_identity_command, exception_handler=handle_asc_exception) as g: diff --git a/src/spring/azext_spring/custom.py b/src/spring/azext_spring/custom.py index ea5672558e8..28682f5cf3a 100644 --- a/src/spring/azext_spring/custom.py +++ b/src/spring/azext_spring/custom.py @@ -26,6 +26,7 @@ from .vendored_sdks.appplatform.v2022_01_01_preview import models as models_20220101preview from .vendored_sdks.appplatform.v2022_05_01_preview import models as models_20220501preview from .vendored_sdks.appplatform.v2020_07_01.models import _app_platform_management_client_enums as AppPlatformEnums +from .vendored_sdks.appplatform.v2022_09_01_preview import models as models_20220901preview from .vendored_sdks.appplatform.v2020_11_01_preview import ( AppPlatformManagementClient as AppPlatformManagementClient_20201101preview ) @@ -315,6 +316,23 @@ def app_stop(cmd, client, resource_group, service, name, deployment.name) +def deployment_enable_remote_debugging(cmd, client, resource_group, service, name, remote_debugging_port=None, deployment=None, no_wait=False): + logger.warning("Enable remote debugging for the app '{}', deployment '{}'".format(name, deployment.name)) + remote_debugging_payload = models_20220901preview.RemoteDebuggingPayload(port=remote_debugging_port) + return sdk_no_wait(no_wait, client.deployments.begin_enable_remote_debugging, + resource_group, service, name, deployment.name, remote_debugging_payload) + + +def deployment_disable_remote_debugging(cmd, client, resource_group, service, name, deployment=None, no_wait=False): + logger.warning("Disable remote debugging for the app '{}', deployment '{}'".format(name, deployment.name)) + return sdk_no_wait(no_wait, client.deployments.begin_disable_remote_debugging, + resource_group, service, name, deployment.name) + + +def deployment_get_remote_debugging(cmd, client, resource_group, service, name, deployment=None): + return client.deployments.get_remote_debugging_config(resource_group, service, name, deployment.name) + + def app_restart(cmd, client, resource_group, service, diff --git a/src/spring/azext_spring/tests/latest/recordings/test_remote_debugging.yaml b/src/spring/azext_spring/tests/latest/recordings/test_remote_debugging.yaml new file mode 100644 index 00000000000..2468b8feb35 --- /dev/null +++ b/src/spring/azext_spring/tests/latest/recordings/test_remote_debugging.yaml @@ -0,0 +1,1348 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app create + Connection: + - keep-alive + ParameterSetName: + - -s -g -n + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging?api-version=2022-09-01-preview + response: + body: + string: '{"error":{"code":"NotFound","message":"App was not 
found","target":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging","details":null}}' + headers: + cache-control: + - no-cache + content-length: + - '235' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:43:28 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-resource-requests: + - '11999' + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 404 + message: Not Found +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app create + Connection: + - keep-alive + ParameterSetName: + - -s -g -n + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest?api-version=2022-09-01-preview + response: + body: + string: '{"properties":{"provisioningState":"Succeeded","zoneRedundant":false,"version":3,"serviceId":"f7d4a4626c344bbd95266264a8882c19","networkProfile":{"outboundIPs":{"publicIPs":["20.244.73.9","20.244.73.29"]},"outboundType":"loadBalancer"},"powerState":"Running","fqdn":"cli-unittest.azuremicroservices.io"},"type":"Microsoft.AppPlatform/Spring","sku":{"name":"S0","tier":"Standard"},"location":"centralindia","tags":null,"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest","name":"cli-unittest","systemData":{"createdBy":"pensh@microsoft.com","createdByType":"User","createdAt":"2022-10-15T03:25:28.7260805Z","lastModifiedBy":"pensh@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-15T03:39:48.8667057Z"}}' + headers: + cache-control: + - no-cache + content-length: + - '800' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:43:29 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-resource-requests: + - '11999' + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +- request: + body: '{"properties": {"public": false, "httpsOnly": false, "temporaryDisk": {"sizeInGB": + 5, "mountPath": "/tmp"}, "enableEndToEndTLS": false}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app create + Connection: + - keep-alive + Content-Length: + - '136' + Content-Type: + - application/json + ParameterSetName: + - -s -g -n + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging?api-version=2022-09-01-preview + response: + body: + string: 
'{"properties":{"addonConfigs":{"applicationConfigurationService":{},"serviceRegistry":{}},"public":false,"provisioningState":"Creating","httpsOnly":false,"temporaryDisk":{"sizeInGB":5,"mountPath":"/tmp"},"enableEndToEndTLS":false,"ingressSettings":{"readTimeoutInSeconds":300,"sendTimeoutInSeconds":60,"sessionCookieMaxAge":0,"sessionAffinity":"None","backendProtocol":"Default"}},"type":"Microsoft.AppPlatform/Spring/apps","identity":null,"location":"centralindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging","name":"test-remote-debugging","systemData":{"createdBy":"pensh@microsoft.com","createdByType":"User","createdAt":"2022-10-15T03:43:30.4560313Z","lastModifiedBy":"pensh@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-15T03:43:30.4560313Z"}}' + headers: + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/test-remote-debugging/operationId/7343cdc5-7efd-4b5d-92d6-b1a7d7c0a547?api-version=2022-09-01-preview + cache-control: + - no-cache + content-length: + - '884' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:43:30 GMT + expires: + - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationResults/7343cdc5-7efd-4b5d-92d6-b1a7d7c0a547/Spring/test-remote-debugging?api-version=2022-09-01-preview + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-resource-requests: + - '1199' + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app create + Connection: + - keep-alive + ParameterSetName: + - -s -g -n + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/test-remote-debugging/operationId/7343cdc5-7efd-4b5d-92d6-b1a7d7c0a547?api-version=2022-09-01-preview + response: + body: + string: '{"id":"subscriptions/0753feba-86f1-4242-aff1-27938fb04531/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/test-remote-debugging/operationId/7343cdc5-7efd-4b5d-92d6-b1a7d7c0a547","name":"7343cdc5-7efd-4b5d-92d6-b1a7d7c0a547","status":"Running","startTime":"2022-10-15T03:43:30.9301919Z"}' + headers: + cache-control: + - no-cache + content-length: + - '328' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:43:30 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +- request: + 
body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app create + Connection: + - keep-alive + ParameterSetName: + - -s -g -n + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/test-remote-debugging/operationId/7343cdc5-7efd-4b5d-92d6-b1a7d7c0a547?api-version=2022-09-01-preview + response: + body: + string: '{"id":"subscriptions/0753feba-86f1-4242-aff1-27938fb04531/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/test-remote-debugging/operationId/7343cdc5-7efd-4b5d-92d6-b1a7d7c0a547","name":"7343cdc5-7efd-4b5d-92d6-b1a7d7c0a547","status":"Succeeded","startTime":"2022-10-15T03:43:30.9301919Z","endTime":"2022-10-15T03:43:38.783783Z"}' + headers: + cache-control: + - no-cache + content-length: + - '370' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:43:41 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app create + Connection: + - keep-alive + ParameterSetName: + - -s -g -n + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging?api-version=2022-09-01-preview + response: + body: + string: '{"properties":{"addonConfigs":{"applicationConfigurationService":{},"serviceRegistry":{}},"public":false,"provisioningState":"Succeeded","fqdn":"cli-unittest.azuremicroservices.io","httpsOnly":false,"temporaryDisk":{"sizeInGB":5,"mountPath":"/tmp"},"persistentDisk":{"sizeInGB":0,"mountPath":"/persistent"},"enableEndToEndTLS":false,"ingressSettings":{"readTimeoutInSeconds":300,"sendTimeoutInSeconds":60,"sessionCookieMaxAge":0,"sessionAffinity":"None","backendProtocol":"Default"}},"type":"Microsoft.AppPlatform/Spring/apps","identity":null,"location":"centralindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging","name":"test-remote-debugging","systemData":{"createdBy":"pensh@microsoft.com","createdByType":"User","createdAt":"2022-10-15T03:43:30.4560313Z","lastModifiedBy":"pensh@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-15T03:43:30.4560313Z"}}' + headers: + cache-control: + - no-cache + content-length: + - '987' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:43:42 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + 
x-ms-ratelimit-remaining-subscription-resource-requests: + - '11998' + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +- request: + body: '{"properties": {"source": {"type": "Jar", "relativePath": "", + "runtimeVersion": "Java_8"}, "deploymentSettings": {"resourceRequests": {"cpu": + "1", "memory": "1Gi"}}, "active": true}, "sku": {"name": "S0", "tier": "Standard", + "capacity": 1}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app create + Connection: + - keep-alive + Content-Length: + - '249' + Content-Type: + - application/json + ParameterSetName: + - -s -g -n + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging/deployments/mock-deployment?api-version=2022-09-01-preview + response: + body: + string: '{"properties":{"deploymentSettings":{"resourceRequests":{"cpu":"1","memory":"1Gi"},"environmentVariables":null,"terminationGracePeriodSeconds":90},"provisioningState":"Creating","status":"Running","active":true,"instances":null,"source":{"type":"Jar","relativePath":"","runtimeVersion":"Java_8"}},"type":"Microsoft.AppPlatform/Spring/apps/deployments","sku":{"name":"S0","tier":"Standard","capacity":1},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging/deployments/default","name":"default","systemData":{"createdBy":"pensh@microsoft.com","createdByType":"User","createdAt":"2022-10-15T03:43:46.3936757Z","lastModifiedBy":"pensh@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-15T03:43:46.3936757Z"}}' + headers: + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/default/operationId/0ce06d04-bce5-43a6-911a-472209f91483?api-version=2022-09-01-preview + cache-control: + - no-cache + content-length: + - '836' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:43:47 GMT + expires: + - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationResults/0ce06d04-bce5-43a6-911a-472209f91483/Spring/default?api-version=2022-09-01-preview + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-resource-requests: + - '1199' + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 201 + message: Created +- request: + body: '{"properties": {"public": false, "httpsOnly": false, "enableEndToEndTLS": + false}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app create + Connection: + - keep-alive + Content-Length: + - '81' + Content-Type: + - application/json + ParameterSetName: + - -s -g -n + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: PATCH + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging?api-version=2022-09-01-preview + response: + body: + string: '{"properties":{"addonConfigs":{"applicationConfigurationService":{},"serviceRegistry":{}},"public":false,"provisioningState":"Updating","fqdn":"cli-unittest.azuremicroservices.io","httpsOnly":false,"temporaryDisk":{"sizeInGB":5,"mountPath":"/tmp"},"persistentDisk":{"sizeInGB":0,"mountPath":"/persistent"},"enableEndToEndTLS":false,"ingressSettings":{"readTimeoutInSeconds":300,"sendTimeoutInSeconds":60,"sessionCookieMaxAge":0,"sessionAffinity":"None","backendProtocol":"Default"}},"type":"Microsoft.AppPlatform/Spring/apps","identity":null,"location":"centralindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging","name":"test-remote-debugging","systemData":{"createdBy":"pensh@microsoft.com","createdByType":"User","createdAt":"2022-10-15T03:43:30.4560313Z","lastModifiedBy":"pensh@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-15T03:43:47.5343128Z"}}' + headers: + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/test-remote-debugging/operationId/90f2fec1-d813-4768-a3b9-a0972d3b47e8?api-version=2022-09-01-preview + cache-control: + - no-cache + content-length: + - '986' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:43:47 GMT + expires: + - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationResults/90f2fec1-d813-4768-a3b9-a0972d3b47e8/Spring/test-remote-debugging?api-version=2022-09-01-preview + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-resource-requests: + - '1198' + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 202 + message: Accepted +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app create + Connection: + - keep-alive + ParameterSetName: + - -s -g -n + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/test-remote-debugging/operationId/90f2fec1-d813-4768-a3b9-a0972d3b47e8?api-version=2022-09-01-preview + response: + body: + string: '{"id":"subscriptions/0753feba-86f1-4242-aff1-27938fb04531/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/test-remote-debugging/operationId/90f2fec1-d813-4768-a3b9-a0972d3b47e8","name":"90f2fec1-d813-4768-a3b9-a0972d3b47e8","status":"Running","startTime":"2022-10-15T03:43:47.9214433Z"}' + headers: + cache-control: + - no-cache + content-length: + - '328' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:43:48 GMT + expires: + - '-1' + pragma: + - no-cache + 
request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app create + Connection: + - keep-alive + ParameterSetName: + - -s -g -n + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/default/operationId/0ce06d04-bce5-43a6-911a-472209f91483?api-version=2022-09-01-preview + response: + body: + string: '{"id":"subscriptions/0753feba-86f1-4242-aff1-27938fb04531/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/default/operationId/0ce06d04-bce5-43a6-911a-472209f91483","name":"0ce06d04-bce5-43a6-911a-472209f91483","status":"Running","startTime":"2022-10-15T03:43:47.1905897Z"}' + headers: + cache-control: + - no-cache + content-length: + - '314' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:43:58 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app create + Connection: + - keep-alive + ParameterSetName: + - -s -g -n + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/test-remote-debugging/operationId/90f2fec1-d813-4768-a3b9-a0972d3b47e8?api-version=2022-09-01-preview + response: + body: + string: '{"id":"subscriptions/0753feba-86f1-4242-aff1-27938fb04531/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/test-remote-debugging/operationId/90f2fec1-d813-4768-a3b9-a0972d3b47e8","name":"90f2fec1-d813-4768-a3b9-a0972d3b47e8","status":"Succeeded","startTime":"2022-10-15T03:43:47.9214433Z","endTime":"2022-10-15T03:43:54.9306385Z"}' + headers: + cache-control: + - no-cache + content-length: + - '371' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:43:58 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app create + Connection: + - keep-alive + ParameterSetName: + - -s 
-g -n + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging?api-version=2022-09-01-preview + response: + body: + string: '{"properties":{"addonConfigs":{"applicationConfigurationService":{},"serviceRegistry":{}},"public":false,"provisioningState":"Succeeded","fqdn":"cli-unittest.azuremicroservices.io","httpsOnly":false,"temporaryDisk":{"sizeInGB":5,"mountPath":"/tmp"},"persistentDisk":{"sizeInGB":0,"mountPath":"/persistent"},"enableEndToEndTLS":false,"ingressSettings":{"readTimeoutInSeconds":300,"sendTimeoutInSeconds":60,"sessionCookieMaxAge":0,"sessionAffinity":"None","backendProtocol":"Default"}},"type":"Microsoft.AppPlatform/Spring/apps","identity":null,"location":"centralindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging","name":"test-remote-debugging","systemData":{"createdBy":"pensh@microsoft.com","createdByType":"User","createdAt":"2022-10-15T03:43:30.4560313Z","lastModifiedBy":"pensh@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-15T03:43:47.5343128Z"}}' + headers: + cache-control: + - no-cache + content-length: + - '987' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:43:59 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-resource-requests: + - '11999' + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app create + Connection: + - keep-alive + ParameterSetName: + - -s -g -n + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/default/operationId/0ce06d04-bce5-43a6-911a-472209f91483?api-version=2022-09-01-preview + response: + body: + string: '{"id":"subscriptions/0753feba-86f1-4242-aff1-27938fb04531/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/default/operationId/0ce06d04-bce5-43a6-911a-472209f91483","name":"0ce06d04-bce5-43a6-911a-472209f91483","status":"Running","startTime":"2022-10-15T03:43:47.1905897Z"}' + headers: + cache-control: + - no-cache + content-length: + - '314' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:44:08 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, 
deflate + CommandName: + - spring app create + Connection: + - keep-alive + ParameterSetName: + - -s -g -n + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/default/operationId/0ce06d04-bce5-43a6-911a-472209f91483?api-version=2022-09-01-preview + response: + body: + string: '{"id":"subscriptions/0753feba-86f1-4242-aff1-27938fb04531/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/default/operationId/0ce06d04-bce5-43a6-911a-472209f91483","name":"0ce06d04-bce5-43a6-911a-472209f91483","status":"Running","startTime":"2022-10-15T03:43:47.1905897Z"}' + headers: + cache-control: + - no-cache + content-length: + - '314' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:44:18 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app create + Connection: + - keep-alive + ParameterSetName: + - -s -g -n + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/default/operationId/0ce06d04-bce5-43a6-911a-472209f91483?api-version=2022-09-01-preview + response: + body: + string: '{"id":"subscriptions/0753feba-86f1-4242-aff1-27938fb04531/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/default/operationId/0ce06d04-bce5-43a6-911a-472209f91483","name":"0ce06d04-bce5-43a6-911a-472209f91483","status":"Running","startTime":"2022-10-15T03:43:47.1905897Z"}' + headers: + cache-control: + - no-cache + content-length: + - '314' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:44:28 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app create + Connection: + - keep-alive + ParameterSetName: + - -s -g -n + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/default/operationId/0ce06d04-bce5-43a6-911a-472209f91483?api-version=2022-09-01-preview + response: + body: + string: 
'{"id":"subscriptions/0753feba-86f1-4242-aff1-27938fb04531/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/default/operationId/0ce06d04-bce5-43a6-911a-472209f91483","name":"0ce06d04-bce5-43a6-911a-472209f91483","status":"Running","startTime":"2022-10-15T03:43:47.1905897Z"}' + headers: + cache-control: + - no-cache + content-length: + - '314' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:44:38 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app create + Connection: + - keep-alive + ParameterSetName: + - -s -g -n + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/default/operationId/0ce06d04-bce5-43a6-911a-472209f91483?api-version=2022-09-01-preview + response: + body: + string: '{"id":"subscriptions/0753feba-86f1-4242-aff1-27938fb04531/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/default/operationId/0ce06d04-bce5-43a6-911a-472209f91483","name":"0ce06d04-bce5-43a6-911a-472209f91483","status":"Running","startTime":"2022-10-15T03:43:47.1905897Z"}' + headers: + cache-control: + - no-cache + content-length: + - '314' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:44:49 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app create + Connection: + - keep-alive + ParameterSetName: + - -s -g -n + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/default/operationId/0ce06d04-bce5-43a6-911a-472209f91483?api-version=2022-09-01-preview + response: + body: + string: '{"id":"subscriptions/0753feba-86f1-4242-aff1-27938fb04531/resourceGroups/cli/providers/Microsoft.AppPlatform/locations/centralindia/operationStatus/default/operationId/0ce06d04-bce5-43a6-911a-472209f91483","name":"0ce06d04-bce5-43a6-911a-472209f91483","status":"Succeeded","startTime":"2022-10-15T03:43:47.1905897Z","endTime":"2022-10-15T03:44:50.9697693Z"}' + headers: + cache-control: + - no-cache + content-length: + - '357' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:45:00 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - 
appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app create + Connection: + - keep-alive + ParameterSetName: + - -s -g -n + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging/deployments/mock-deployment?api-version=2022-09-01-preview + response: + body: + string: '{"properties":{"deploymentSettings":{"resourceRequests":{"cpu":"1","memory":"1Gi"},"environmentVariables":null,"terminationGracePeriodSeconds":90,"livenessProbe":{"disableProbe":false,"failureThreshold":24,"initialDelaySeconds":60,"periodSeconds":10,"successThreshold":1,"timeoutSeconds":1,"probeAction":{"type":"TCPSocketAction"}},"readinessProbe":{"disableProbe":false,"failureThreshold":3,"initialDelaySeconds":0,"periodSeconds":10,"successThreshold":1,"timeoutSeconds":1,"probeAction":{"type":"TCPSocketAction"}}},"provisioningState":"Succeeded","status":"Running","active":true,"instances":[{"name":"test-remote-debugging-default-21-68546d98fb-62gxg","status":"Running","discoveryStatus":"UNREGISTERED","startTime":"2022-10-15T03:43:54Z"}],"source":{"type":"Jar","relativePath":"","runtimeVersion":"Java_8"}},"type":"Microsoft.AppPlatform/Spring/apps/deployments","sku":{"name":"S0","tier":"Standard","capacity":1},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging/deployments/default","name":"default","systemData":{"createdBy":"pensh@microsoft.com","createdByType":"User","createdAt":"2022-10-15T03:43:46.3936757Z","lastModifiedBy":"pensh@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-15T03:43:46.3936757Z"}}' + headers: + cache-control: + - no-cache + content-length: + - '1353' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:45:03 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-resource-requests: + - '11999' + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app create + Connection: + - keep-alive + ParameterSetName: + - -s -g -n + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging?api-version=2022-09-01-preview + response: + body: + string: 
'{"properties":{"addonConfigs":{"applicationConfigurationService":{},"serviceRegistry":{}},"public":false,"provisioningState":"Succeeded","fqdn":"cli-unittest.azuremicroservices.io","httpsOnly":false,"temporaryDisk":{"sizeInGB":5,"mountPath":"/tmp"},"persistentDisk":{"sizeInGB":0,"mountPath":"/persistent"},"enableEndToEndTLS":false,"ingressSettings":{"readTimeoutInSeconds":300,"sendTimeoutInSeconds":60,"sessionCookieMaxAge":0,"sessionAffinity":"None","backendProtocol":"Default"}},"type":"Microsoft.AppPlatform/Spring/apps","identity":null,"location":"centralindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging","name":"test-remote-debugging","systemData":{"createdBy":"pensh@microsoft.com","createdByType":"User","createdAt":"2022-10-15T03:43:30.4560313Z","lastModifiedBy":"pensh@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-15T03:43:47.5343128Z"}}' + headers: + cache-control: + - no-cache + content-length: + - '987' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:45:07 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-resource-requests: + - '11998' + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app create + Connection: + - keep-alive + ParameterSetName: + - -s -g -n + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging/deployments?api-version=2022-09-01-preview + response: + body: + string: 
'{"value":[{"properties":{"deploymentSettings":{"resourceRequests":{"cpu":"1","memory":"1Gi"},"environmentVariables":null,"terminationGracePeriodSeconds":90,"livenessProbe":{"disableProbe":false,"failureThreshold":24,"initialDelaySeconds":60,"periodSeconds":10,"successThreshold":1,"timeoutSeconds":1,"probeAction":{"type":"TCPSocketAction"}},"readinessProbe":{"disableProbe":false,"failureThreshold":3,"initialDelaySeconds":0,"periodSeconds":10,"successThreshold":1,"timeoutSeconds":1,"probeAction":{"type":"TCPSocketAction"}}},"provisioningState":"Succeeded","status":"Running","active":true,"instances":[{"name":"test-remote-debugging-default-21-68546d98fb-62gxg","status":"Running","discoveryStatus":"UNREGISTERED","startTime":"2022-10-15T03:43:54Z"}],"source":{"type":"Jar","relativePath":"","runtimeVersion":"Java_8"}},"type":"Microsoft.AppPlatform/Spring/apps/deployments","sku":{"name":"S0","tier":"Standard","capacity":1},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging/deployments/default","name":"default","systemData":{"createdBy":"pensh@microsoft.com","createdByType":"User","createdAt":"2022-10-15T03:43:46.3936757Z","lastModifiedBy":"pensh@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-15T03:43:46.3936757Z"}}]}' + headers: + cache-control: + - no-cache + content-length: + - '1365' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:45:10 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-resource-requests: + - '11998' + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app enable-remote-debugging + Connection: + - keep-alive + ParameterSetName: + - -n -g -s -d + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging/deployments/mock-deployment?api-version=2022-05-01-preview + response: + body: + string: 
'{"properties":{"deploymentSettings":{"resourceRequests":{"cpu":"1","memory":"1Gi"},"environmentVariables":null,"terminationGracePeriodSeconds":90,"livenessProbe":{"disableProbe":false,"failureThreshold":24,"initialDelaySeconds":60,"periodSeconds":10,"successThreshold":1,"timeoutSeconds":1,"probeAction":{"type":"TCPSocketAction"}},"readinessProbe":{"disableProbe":false,"failureThreshold":3,"initialDelaySeconds":0,"periodSeconds":10,"successThreshold":1,"timeoutSeconds":1,"probeAction":{"type":"TCPSocketAction"}}},"provisioningState":"Succeeded","status":"Running","active":true,"instances":[{"name":"test-remote-debugging-default-21-68546d98fb-62gxg","status":"Running","discoveryStatus":"UNREGISTERED","startTime":"2022-10-15T03:43:54Z"}],"source":{"type":"Jar","relativePath":"","runtimeVersion":"Java_8"}},"type":"Microsoft.AppPlatform/Spring/apps/deployments","sku":{"name":"S0","tier":"Standard","capacity":1},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging/deployments/default","name":"default","systemData":{"createdBy":"pensh@microsoft.com","createdByType":"User","createdAt":"2022-10-15T03:43:46.3936757Z","lastModifiedBy":"pensh@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-15T03:43:46.3936757Z"}}' + headers: + cache-control: + - no-cache + content-length: + - '1353' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:45:15 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-resource-requests: + - '11999' + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +- request: + body: '{"port": 5005}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app enable-remote-debugging + Connection: + - keep-alive + Content-Length: + - '14' + Content-Type: + - application/json + ParameterSetName: + - -n -g -s -d + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging/deployments/mock-deployment/enableRemoteDebugging?api-version=2022-09-01-preview + response: + body: + string: '{"error":{"code":"InvalidArgument","message":"Only java applications + support remote debugging","target":null,"details":null}}' + headers: + cache-control: + - no-cache + content-length: + - '125' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:45:16 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 400 + message: Bad Request +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app disable-remote-debugging + Connection: + - 
keep-alive + ParameterSetName: + - -n -g -s -d + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging/deployments/mock-deployment?api-version=2022-05-01-preview + response: + body: + string: '{"properties":{"deploymentSettings":{"resourceRequests":{"cpu":"1","memory":"1Gi"},"environmentVariables":null,"terminationGracePeriodSeconds":90,"livenessProbe":{"disableProbe":false,"failureThreshold":24,"initialDelaySeconds":60,"periodSeconds":10,"successThreshold":1,"timeoutSeconds":1,"probeAction":{"type":"TCPSocketAction"}},"readinessProbe":{"disableProbe":false,"failureThreshold":3,"initialDelaySeconds":0,"periodSeconds":10,"successThreshold":1,"timeoutSeconds":1,"probeAction":{"type":"TCPSocketAction"}}},"provisioningState":"Succeeded","status":"Running","active":true,"instances":[{"name":"test-remote-debugging-default-21-68546d98fb-62gxg","status":"Running","discoveryStatus":"UNREGISTERED","startTime":"2022-10-15T03:43:54Z"}],"source":{"type":"Jar","relativePath":"","runtimeVersion":"Java_8"}},"type":"Microsoft.AppPlatform/Spring/apps/deployments","sku":{"name":"S0","tier":"Standard","capacity":1},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging/deployments/default","name":"default","systemData":{"createdBy":"pensh@microsoft.com","createdByType":"User","createdAt":"2022-10-15T03:43:46.3936757Z","lastModifiedBy":"pensh@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-15T03:43:46.3936757Z"}}' + headers: + cache-control: + - no-cache + content-length: + - '1353' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:45:20 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-resource-requests: + - '11999' + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app disable-remote-debugging + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -n -g -s -d + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging/deployments/mock-deployment/disableRemoteDebugging?api-version=2022-09-01-preview + response: + body: + string: '{"port":5005,"enabled":false}' + headers: + cache-control: + - no-cache + content-length: + - '29' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:45:20 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + 
x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1197' + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app get-remote-debugging-config + Connection: + - keep-alive + ParameterSetName: + - -n -g -s -d + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging/deployments/mock-deployment?api-version=2022-05-01-preview + response: + body: + string: '{"properties":{"deploymentSettings":{"resourceRequests":{"cpu":"1","memory":"1Gi"},"environmentVariables":null,"terminationGracePeriodSeconds":90,"livenessProbe":{"disableProbe":false,"failureThreshold":24,"initialDelaySeconds":60,"periodSeconds":10,"successThreshold":1,"timeoutSeconds":1,"probeAction":{"type":"TCPSocketAction"}},"readinessProbe":{"disableProbe":false,"failureThreshold":3,"initialDelaySeconds":0,"periodSeconds":10,"successThreshold":1,"timeoutSeconds":1,"probeAction":{"type":"TCPSocketAction"}}},"provisioningState":"Succeeded","status":"Running","active":true,"instances":[{"name":"test-remote-debugging-default-21-68546d98fb-62gxg","status":"Running","discoveryStatus":"UNREGISTERED","startTime":"2022-10-15T03:43:54Z"}],"source":{"type":"Jar","relativePath":"","runtimeVersion":"Java_8"}},"type":"Microsoft.AppPlatform/Spring/apps/deployments","sku":{"name":"S0","tier":"Standard","capacity":1},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging/deployments/default","name":"default","systemData":{"createdBy":"pensh@microsoft.com","createdByType":"User","createdAt":"2022-10-15T03:43:46.3936757Z","lastModifiedBy":"pensh@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-15T03:43:46.3936757Z"}}' + headers: + cache-control: + - no-cache + content-length: + - '1353' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:45:24 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-resource-requests: + - '11999' + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - spring app get-remote-debugging-config + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -n -g -s -d + User-Agent: + - AZURECLI/2.40.0 (PIP) azsdk-python-mgmt-appplatform/6.1.0 Python/3.10.8 (Windows-10-10.0.22621-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli/providers/Microsoft.AppPlatform/Spring/cli-unittest/apps/test-remote-debugging/deployments/mock-deployment/getRemoteDebuggingConfig?api-version=2022-09-01-preview + response: + body: + string: '{"port":5005,"enabled":false}' + headers: + cache-control: + - no-cache + 
content-length: + - '29' + content-type: + - application/json; charset=utf-8 + date: + - Sat, 15 Oct 2022 03:45:29 GMT + expires: + - '-1' + pragma: + - no-cache + request-context: + - appId=cid-v1:797d7e4e-8180-497e-a254-780fbd39ba4d + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1198' + x-rp-server-mvid: + - e7de6c4f-f52b-4353-818b-0db9b32d2ec6 + status: + code: 200 + message: OK +version: 1 diff --git a/src/spring/azext_spring/tests/latest/test_asa_scenario.py b/src/spring/azext_spring/tests/latest/test_asa_scenario.py index bd6ab2c6834..8f169cfff38 100644 --- a/src/spring/azext_spring/tests/latest/test_asa_scenario.py +++ b/src/spring/azext_spring/tests/latest/test_asa_scenario.py @@ -231,6 +231,36 @@ def test_app_deploy_container(self): self.check('properties.source.customContainer.languageFramework', 'springboot'), ]) + +class RemoteDebuggingTest(ScenarioTest): + def test_remote_debugging(self): + py_path = os.path.abspath(os.path.dirname(__file__)) + file_path = os.path.join(py_path, 'files/test.jar').replace("\\", "/") + self.kwargs.update({ + 'app': 'test-remote-debugging', + 'serviceName': 'cli-unittest', + 'resourceGroup': 'cli', + 'location': 'centralindia', + 'deployment': 'default', + 'file': file_path + }) + + self.cmd('spring app create -s {serviceName} -g {resourceGroup} -n {app}') + + # remote debugging can only be supported for jar, here will throw exception for default banner + self.cmd( + 'spring app enable-remote-debugging -n {app} -g {resourceGroup} -s {serviceName} -d {deployment}', expect_failure=True) + + self.cmd( + 'spring app disable-remote-debugging -n {app} -g {resourceGroup} -s {serviceName} -d {deployment}') + + self.cmd( + 'spring app get-remote-debugging-config -n {app} -g {resourceGroup} -s {serviceName} -d {deployment}', + checks=[ + self.check('enabled', False) + ]) + + class AppConnectTest(ScenarioTest): def test_app_connect(self): diff --git a/src/spring/setup.py b/src/spring/setup.py index d0b7418d84e..aa89b509d53 100644 --- a/src/spring/setup.py +++ b/src/spring/setup.py @@ -16,7 +16,7 @@ # TODO: Confirm this is the right version number you want and it matches your # HISTORY.rst entry. 
-VERSION = '1.1.10' +VERSION = '1.1.11' # The full list of classifiers is available at # https://pypi.python.org/pypi?%3Aaction=list_classifiers From b5e0aff2d486d24a6ebe8d4a769ec423a723a140 Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Tue, 25 Oct 2022 07:01:09 +0000 Subject: [PATCH 26/85] [Release] Update index.json for extension [ spring ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=11039&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/80434e6eddb0e7abed11fdb1d4b9b87548bc954f --- src/index.json | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/src/index.json b/src/index.json index f4340e6672f..5cbfa956c1f 100644 --- a/src/index.json +++ b/src/index.json @@ -35783,6 +35783,49 @@ "version": "1.1.10" }, "sha256Digest": "864200fb0cc9f988e40db675c6b6b016c2fcff91ff25551c03a576ae0dd4096b" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/spring-1.1.11-py3-none-any.whl", + "filename": "spring-1.1.11-py3-none-any.whl", + "metadata": { + "azext.isPreview": false, + "azext.minCliCoreVersion": "2.38.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/spring" + } + } + }, + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "spring", + "summary": "Microsoft Azure Command-Line Tools spring Extension", + "version": "1.1.11" + }, + "sha256Digest": "be1471eb6aa6a462d13f352f643f4267fa8411c73f17d2b6d0f842229860dde8" } ], "spring-cloud": [ From d855cc232c7733159579389c98cc5d4286cb069f Mon Sep 17 00:00:00 2001 From: sushil490023 Date: Tue, 25 Oct 2022 12:54:51 +0530 Subject: [PATCH 27/85] Add Python3 CLI Commands (#5415) * Add Python3 CLI Commands * Update Linter exclusion rule for automation account parameter * Added Test cases for PY3 * Updated Test cases recording with latest test cases * Updated Test cases recording with latest test cases * Updated Test cases recording with latest test cases * Updated Test cases recording with latest test cases * Updated Command with examples * Updated Command with examples * Updated Command with examples Co-authored-by: Sushil Upadhyay --- src/automation/HISTORY.rst | 4 + .../aaz/latest/automation/__cmd_group.py | 46 +- .../aaz/latest/automation/__init__.py | 22 +- .../aaz/latest/automation/hrwg/__cmd_group.py | 46 +- .../aaz/latest/automation/hrwg/__init__.py | 32 +- .../aaz/latest/automation/hrwg/_create.py | 505 +-- .../aaz/latest/automation/hrwg/_delete.py | 267 +- .../aaz/latest/automation/hrwg/_list.py | 439 +-- .../aaz/latest/automation/hrwg/_show.py | 429 +-- .../aaz/latest/automation/hrwg/_update.py | 791 ++--- .../latest/automation/hrwg/hrw/__cmd_group.py | 46 +- .../latest/automation/hrwg/hrw/__init__.py | 33 +- 
.../aaz/latest/automation/hrwg/hrw/_create.py | 523 +-- .../aaz/latest/automation/hrwg/hrw/_delete.py | 297 +- .../aaz/latest/automation/hrwg/hrw/_list.py | 475 +-- .../aaz/latest/automation/hrwg/hrw/_move.py | 324 +- .../aaz/latest/automation/hrwg/hrw/_show.py | 467 +-- .../automation/python3_package/__cmd_group.py | 23 + .../automation/python3_package/__init__.py | 16 + .../automation/python3_package/_create.py | 318 ++ .../automation/python3_package/_delete.py | 145 + .../automation/python3_package/_list.py | 234 ++ .../automation/python3_package/_show.py | 236 ++ .../{hrwg/hrw => python3_package}/_update.py | 867 ++--- .../azext_automation/azext_metadata.json | 6 +- .../latest/recordings/test_automation.yaml | 2989 ++++++++++------- .../recordings/test_automation_schedule.yaml | 674 ++-- ...omation_software_update_configuration.yaml | 2255 +++++++------ .../latest/test_automation_scenario_manual.py | 49 +- src/automation/linter_exclusions.yml | 25 + src/automation/setup.py | 2 +- 31 files changed, 7225 insertions(+), 5360 deletions(-) create mode 100644 src/automation/azext_automation/aaz/latest/automation/python3_package/__cmd_group.py create mode 100644 src/automation/azext_automation/aaz/latest/automation/python3_package/__init__.py create mode 100644 src/automation/azext_automation/aaz/latest/automation/python3_package/_create.py create mode 100644 src/automation/azext_automation/aaz/latest/automation/python3_package/_delete.py create mode 100644 src/automation/azext_automation/aaz/latest/automation/python3_package/_list.py create mode 100644 src/automation/azext_automation/aaz/latest/automation/python3_package/_show.py rename src/automation/azext_automation/aaz/latest/automation/{hrwg/hrw => python3_package}/_update.py (51%) diff --git a/src/automation/HISTORY.rst b/src/automation/HISTORY.rst index fd67c685688..54e332efde1 100644 --- a/src/automation/HISTORY.rst +++ b/src/automation/HISTORY.rst @@ -3,6 +3,10 @@ Release History =============== +0.2.1 +++++++ +* `az automation python3-package`: Add new command group to support managing python3 package operations. + 0.2.0 ++++++ * `az automation schedule`: Add new command group to support managing schedule diff --git a/src/automation/azext_automation/aaz/latest/automation/__cmd_group.py b/src/automation/azext_automation/aaz/latest/automation/__cmd_group.py index c2ac75642d4..336762af7d2 100644 --- a/src/automation/azext_automation/aaz/latest/automation/__cmd_group.py +++ b/src/automation/azext_automation/aaz/latest/automation/__cmd_group.py @@ -1,23 +1,23 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -# Code generated by aaz-dev-tools -# -------------------------------------------------------------------------------------------- - -# pylint: skip-file -# flake8: noqa - -from azure.cli.core.aaz import * - - -@register_command_group( - "automation", -) -class __CMDGroup(AAZCommandGroup): - """Automation Account. - """ - pass - - -__all__ = ["__CMDGroup"] +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command_group( + "automation", +) +class __CMDGroup(AAZCommandGroup): + """Manage Automation Account + """ + pass + + +__all__ = ["__CMDGroup"] diff --git a/src/automation/azext_automation/aaz/latest/automation/__init__.py b/src/automation/azext_automation/aaz/latest/automation/__init__.py index 5a9d61963d6..709a5170d90 100644 --- a/src/automation/azext_automation/aaz/latest/automation/__init__.py +++ b/src/automation/azext_automation/aaz/latest/automation/__init__.py @@ -1,11 +1,11 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -# Code generated by aaz-dev-tools -# -------------------------------------------------------------------------------------------- - -# pylint: skip-file -# flake8: noqa - -from .__cmd_group import * +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from .__cmd_group import * diff --git a/src/automation/azext_automation/aaz/latest/automation/hrwg/__cmd_group.py b/src/automation/azext_automation/aaz/latest/automation/hrwg/__cmd_group.py index b0998384a91..57348b15fe5 100644 --- a/src/automation/azext_automation/aaz/latest/automation/hrwg/__cmd_group.py +++ b/src/automation/azext_automation/aaz/latest/automation/hrwg/__cmd_group.py @@ -1,23 +1,23 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -# Code generated by aaz-dev-tools -# -------------------------------------------------------------------------------------------- - -# pylint: skip-file -# flake8: noqa - -from azure.cli.core.aaz import * - - -@register_command_group( - "automation hrwg", -) -class __CMDGroup(AAZCommandGroup): - """Automation Hybrid Runbook Worker Group - """ - pass - - -__all__ = ["__CMDGroup"] +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command_group( + "automation hrwg", +) +class __CMDGroup(AAZCommandGroup): + """Automation Hybrid Runbook Worker Group + """ + pass + + +__all__ = ["__CMDGroup"] diff --git a/src/automation/azext_automation/aaz/latest/automation/hrwg/__init__.py b/src/automation/azext_automation/aaz/latest/automation/hrwg/__init__.py index c401f439385..1f78565855b 100644 --- a/src/automation/azext_automation/aaz/latest/automation/hrwg/__init__.py +++ b/src/automation/azext_automation/aaz/latest/automation/hrwg/__init__.py @@ -1,16 +1,16 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -# Code generated by aaz-dev-tools -# -------------------------------------------------------------------------------------------- - -# pylint: skip-file -# flake8: noqa - -from .__cmd_group import * -from ._create import * -from ._delete import * -from ._list import * -from ._show import * -from ._update import * +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from .__cmd_group import * +from ._create import * +from ._delete import * +from ._list import * +from ._show import * +from ._update import * diff --git a/src/automation/azext_automation/aaz/latest/automation/hrwg/_create.py b/src/automation/azext_automation/aaz/latest/automation/hrwg/_create.py index f50200f8d8c..d1ae9cf84dd 100644 --- a/src/automation/azext_automation/aaz/latest/automation/hrwg/_create.py +++ b/src/automation/azext_automation/aaz/latest/automation/hrwg/_create.py @@ -1,246 +1,259 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -# Code generated by aaz-dev-tools -# -------------------------------------------------------------------------------------------- - -# pylint: skip-file -# flake8: noqa - -from azure.cli.core.aaz import * - - -@register_command( - "automation hrwg create", -) -class Create(AAZCommand): - """Create a hybrid runbook worker group. 
- """ - - _aaz_info = { - "version": "2022-02-22", - "resources": [ - ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/hybridrunbookworkergroups/{}", "2022-02-22"], - ] - } - - def _handler(self, command_args): - super()._handler(command_args) - self._execute_operations() - return self._output() - - _args_schema = None - - @classmethod - def _build_arguments_schema(cls, *args, **kwargs): - if cls._args_schema is not None: - return cls._args_schema - cls._args_schema = super()._build_arguments_schema(*args, **kwargs) - - # define Arg Group "" - - _args_schema = cls._args_schema - _args_schema.automation_account_name = AAZStrArg( - options=["--automation-account-name"], - help="The name of the automation account.", - required=True, - id_part="name", - ) - _args_schema.hybrid_runbook_worker_group_name = AAZStrArg( - options=["-n", "--name", "--hybrid-runbook-worker-group-name"], - help="The hybrid runbook worker group name", - required=True, - id_part="child_name_1", - ) - _args_schema.resource_group = AAZResourceGroupNameArg( - required=True, - ) - - # define Arg Group "Properties" - - _args_schema = cls._args_schema - _args_schema.credential = AAZObjectArg( - options=["--credential"], - arg_group="Properties", - help="Set the credential of a worker group.", - ) - - credential = cls._args_schema.credential - credential.name = AAZStrArg( - options=["name"], - help="Get or set the name of the credential.", - ) - return cls._args_schema - - def _execute_operations(self): - self.HybridRunbookWorkerGroupCreate(ctx=self.ctx)() - - def _output(self, *args, **kwargs): - result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) - return result - - class HybridRunbookWorkerGroupCreate(AAZHttpOperation): - CLIENT_TYPE = "MgmtClient" - - def __call__(self, *args, **kwargs): - request = self.make_request() - session = self.client.send_request(request=request, stream=False, **kwargs) - if session.http_response.status_code in [200, 201]: - return self.on_201(session) - - return self.on_error(session.http_response) - - @property - def url(self): - return self.client.format_url( - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}", - **self.url_parameters - ) - - @property - def method(self): - return "PUT" - - @property - def error_format(self): - return "ODataV4Format" - - @property - def url_parameters(self): - parameters = { - **self.serialize_url_param( - "automationAccountName", self.ctx.args.automation_account_name, - required=True, - ), - **self.serialize_url_param( - "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, - required=True, - ), - **self.serialize_url_param( - "resourceGroupName", self.ctx.args.resource_group, - required=True, - ), - **self.serialize_url_param( - "subscriptionId", self.ctx.subscription_id, - required=True, - ), - } - return parameters - - @property - def query_parameters(self): - parameters = { - **self.serialize_query_param( - "api-version", "2022-02-22", - required=True, - ), - } - return parameters - - @property - def header_parameters(self): - parameters = { - **self.serialize_header_param( - "Content-Type", "application/json", - ), - **self.serialize_header_param( - "Accept", "application/json", - ), - } - return parameters - - @property - def content(self): - _content_value, _builder = self.new_content_builder( - 
self.ctx.args, - typ=AAZObjectType, - typ_kwargs={"flags": {"required": True, "client_flatten": True}} - ) - _builder.set_prop("name", AAZStrType, ".hybrid_runbook_worker_group_name") - _builder.set_prop("properties", AAZObjectType, typ_kwargs={"flags": {"client_flatten": True}}) - - properties = _builder.get(".properties") - if properties is not None: - properties.set_prop("credential", AAZObjectType, ".credential") - - credential = _builder.get(".properties.credential") - if credential is not None: - credential.set_prop("name", AAZStrType, ".name") - - return self.serialize_content(_content_value) - - def on_201(self, session): - data = self.deserialize_http_content(session) - self.ctx.set_var( - "instance", - data, - schema_builder=self._build_schema_on_201 - ) - - _schema_on_201 = None - - @classmethod - def _build_schema_on_201(cls): - if cls._schema_on_201 is not None: - return cls._schema_on_201 - - cls._schema_on_201 = AAZObjectType() - - _schema_on_201 = cls._schema_on_201 - _schema_on_201.id = AAZStrType( - flags={"read_only": True}, - ) - _schema_on_201.name = AAZStrType( - flags={"read_only": True}, - ) - _schema_on_201.properties = AAZObjectType( - flags={"client_flatten": True}, - ) - _schema_on_201.system_data = AAZObjectType( - serialized_name="systemData", - flags={"read_only": True}, - ) - _schema_on_201.type = AAZStrType( - flags={"read_only": True}, - ) - - properties = cls._schema_on_201.properties - properties.credential = AAZObjectType() - properties.group_type = AAZStrType( - serialized_name="groupType", - ) - - credential = cls._schema_on_201.properties.credential - credential.name = AAZStrType() - - system_data = cls._schema_on_201.system_data - system_data.created_at = AAZStrType( - serialized_name="createdAt", - flags={"read_only": True}, - ) - system_data.created_by = AAZStrType( - serialized_name="createdBy", - flags={"read_only": True}, - ) - system_data.created_by_type = AAZStrType( - serialized_name="createdByType", - flags={"read_only": True}, - ) - system_data.last_modified_at = AAZStrType( - serialized_name="lastModifiedAt", - flags={"read_only": True}, - ) - system_data.last_modified_by = AAZStrType( - serialized_name="lastModifiedBy", - flags={"read_only": True}, - ) - system_data.last_modified_by_type = AAZStrType( - serialized_name="lastModifiedByType", - flags={"read_only": True}, - ) - - return cls._schema_on_201 - - -__all__ = ["Create"] +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "automation hrwg create", +) +class Create(AAZCommand): + """Create a hybrid runbook worker group + + :example: Create a hybrid runbook worker group + az automation hrwg create --automation-account-name accountName --resource-group groupName --name hybridrunbookworkergroupName + """ + + _aaz_info = { + "version": "2022-08-08", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/hybridrunbookworkergroups/{}", "2022-08-08"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return self._output() + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.automation_account_name = AAZStrArg( + options=["--automation-account-name"], + help="The name of the automation account.", + required=True, + id_part="name", + ) + _args_schema.hybrid_runbook_worker_group_name = AAZStrArg( + options=["-n", "--name", "--hybrid-runbook-worker-group-name"], + help="The hybrid runbook worker group name", + required=True, + id_part="child_name_1", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + + # define Arg Group "Properties" + + _args_schema = cls._args_schema + _args_schema.credential = AAZObjectArg( + options=["--credential"], + arg_group="Properties", + help="Sets the credential of a worker group.", + ) + + credential = cls._args_schema.credential + credential.name = AAZStrArg( + options=["name"], + help="Gets or sets the name of the credential.", + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.HybridRunbookWorkerGroupCreate(ctx=self.ctx)() + self.post_operations() + + # @register_callback + def pre_operations(self): + pass + + # @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) + return result + + class HybridRunbookWorkerGroupCreate(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200, 201]: + return self.on_200_201(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}", + **self.url_parameters + ) + + @property + def method(self): + return "PUT" + + @property + def error_format(self): + return "ODataV4Format" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "automationAccountName", self.ctx.args.automation_account_name, + required=True, + ), + **self.serialize_url_param( + "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, + required=True, + ), + **self.serialize_url_param( + 
"resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2022-08-08", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Content-Type", "application/json", + ), + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + @property + def content(self): + _content_value, _builder = self.new_content_builder( + self.ctx.args, + typ=AAZObjectType, + typ_kwargs={"flags": {"required": True, "client_flatten": True}} + ) + _builder.set_prop("name", AAZStrType, ".hybrid_runbook_worker_group_name") + _builder.set_prop("properties", AAZObjectType, typ_kwargs={"flags": {"client_flatten": True}}) + + properties = _builder.get(".properties") + if properties is not None: + properties.set_prop("credential", AAZObjectType, ".credential") + + credential = _builder.get(".properties.credential") + if credential is not None: + credential.set_prop("name", AAZStrType, ".name") + + return self.serialize_content(_content_value) + + def on_200_201(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200_201 + ) + + _schema_on_200_201 = None + + @classmethod + def _build_schema_on_200_201(cls): + if cls._schema_on_200_201 is not None: + return cls._schema_on_200_201 + + cls._schema_on_200_201 = AAZObjectType() + + _schema_on_200_201 = cls._schema_on_200_201 + _schema_on_200_201.id = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200_201.name = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200_201.properties = AAZObjectType( + flags={"client_flatten": True}, + ) + _schema_on_200_201.system_data = AAZObjectType( + serialized_name="systemData", + flags={"read_only": True}, + ) + _schema_on_200_201.type = AAZStrType( + flags={"read_only": True}, + ) + + properties = cls._schema_on_200_201.properties + properties.credential = AAZObjectType() + properties.group_type = AAZStrType( + serialized_name="groupType", + ) + + credential = cls._schema_on_200_201.properties.credential + credential.name = AAZStrType() + + system_data = cls._schema_on_200_201.system_data + system_data.created_at = AAZStrType( + serialized_name="createdAt", + flags={"read_only": True}, + ) + system_data.created_by = AAZStrType( + serialized_name="createdBy", + flags={"read_only": True}, + ) + system_data.created_by_type = AAZStrType( + serialized_name="createdByType", + flags={"read_only": True}, + ) + system_data.last_modified_at = AAZStrType( + serialized_name="lastModifiedAt", + flags={"read_only": True}, + ) + system_data.last_modified_by = AAZStrType( + serialized_name="lastModifiedBy", + flags={"read_only": True}, + ) + system_data.last_modified_by_type = AAZStrType( + serialized_name="lastModifiedByType", + flags={"read_only": True}, + ) + + return cls._schema_on_200_201 + + +__all__ = ["Create"] diff --git a/src/automation/azext_automation/aaz/latest/automation/hrwg/_delete.py b/src/automation/azext_automation/aaz/latest/automation/hrwg/_delete.py index edcc6e94be6..7f2ca5c3a7a 100644 --- a/src/automation/azext_automation/aaz/latest/automation/hrwg/_delete.py +++ b/src/automation/azext_automation/aaz/latest/automation/hrwg/_delete.py @@ -1,127 +1,140 @@ -# 
-------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -# Code generated by aaz-dev-tools -# -------------------------------------------------------------------------------------------- - -# pylint: skip-file -# flake8: noqa - -from azure.cli.core.aaz import * - - -@register_command( - "automation hrwg delete", - confirmation="Are you sure you want to perform this operation?", -) -class Delete(AAZCommand): - """Delete a hybrid runbook worker group. - """ - - _aaz_info = { - "version": "2022-02-22", - "resources": [ - ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/hybridrunbookworkergroups/{}", "2022-02-22"], - ] - } - - def _handler(self, command_args): - super()._handler(command_args) - self._execute_operations() - return None - - _args_schema = None - - @classmethod - def _build_arguments_schema(cls, *args, **kwargs): - if cls._args_schema is not None: - return cls._args_schema - cls._args_schema = super()._build_arguments_schema(*args, **kwargs) - - # define Arg Group "" - - _args_schema = cls._args_schema - _args_schema.automation_account_name = AAZStrArg( - options=["--automation-account-name"], - help="The name of the automation account.", - required=True, - id_part="name", - ) - _args_schema.hybrid_runbook_worker_group_name = AAZStrArg( - options=["-n", "--name", "--hybrid-runbook-worker-group-name"], - help="The hybrid runbook worker group name", - required=True, - id_part="child_name_1", - ) - _args_schema.resource_group = AAZResourceGroupNameArg( - required=True, - ) - return cls._args_schema - - def _execute_operations(self): - self.HybridRunbookWorkerGroupDelete(ctx=self.ctx)() - - class HybridRunbookWorkerGroupDelete(AAZHttpOperation): - CLIENT_TYPE = "MgmtClient" - - def __call__(self, *args, **kwargs): - request = self.make_request() - session = self.client.send_request(request=request, stream=False, **kwargs) - if session.http_response.status_code in [200]: - return self.on_200(session) - - return self.on_error(session.http_response) - - @property - def url(self): - return self.client.format_url( - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}", - **self.url_parameters - ) - - @property - def method(self): - return "DELETE" - - @property - def error_format(self): - return "ODataV4Format" - - @property - def url_parameters(self): - parameters = { - **self.serialize_url_param( - "automationAccountName", self.ctx.args.automation_account_name, - required=True, - ), - **self.serialize_url_param( - "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, - required=True, - ), - **self.serialize_url_param( - "resourceGroupName", self.ctx.args.resource_group, - required=True, - ), - **self.serialize_url_param( - "subscriptionId", self.ctx.subscription_id, - required=True, - ), - } - return parameters - - @property - def query_parameters(self): - parameters = { - **self.serialize_query_param( - "api-version", "2022-02-22", - required=True, - ), - } - return parameters - - def on_200(self, session): - pass - - -__all__ = ["Delete"] +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft 
Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "automation hrwg delete", + confirmation="Are you sure you want to perform this operation?", +) +class Delete(AAZCommand): + """Delete a hybrid runbook worker group. + + :example: Delete hybrid worker group + az automation hrwg delete --automation-account-name accountName --resource-group groupName --name hybridrunbookworkergroupName + """ + + _aaz_info = { + "version": "2022-08-08", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/hybridrunbookworkergroups/{}", "2022-08-08"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return None + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.automation_account_name = AAZStrArg( + options=["--automation-account-name"], + help="The name of the automation account.", + required=True, + id_part="name", + ) + _args_schema.hybrid_runbook_worker_group_name = AAZStrArg( + options=["-n", "--name", "--hybrid-runbook-worker-group-name"], + help="The hybrid runbook worker group name", + required=True, + id_part="child_name_1", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.HybridRunbookWorkerGroupDelete(ctx=self.ctx)() + self.post_operations() + + # @register_callback + def pre_operations(self): + pass + + # @register_callback + def post_operations(self): + pass + + class HybridRunbookWorkerGroupDelete(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}", + **self.url_parameters + ) + + @property + def method(self): + return "DELETE" + + @property + def error_format(self): + return "ODataV4Format" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "automationAccountName", self.ctx.args.automation_account_name, + required=True, + ), + **self.serialize_url_param( + "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2022-08-08", + required=True, + 
), + } + return parameters + + def on_200(self, session): + pass + + +__all__ = ["Delete"] diff --git a/src/automation/azext_automation/aaz/latest/automation/hrwg/_list.py b/src/automation/azext_automation/aaz/latest/automation/hrwg/_list.py index d056fe847e9..720360d68bf 100644 --- a/src/automation/azext_automation/aaz/latest/automation/hrwg/_list.py +++ b/src/automation/azext_automation/aaz/latest/automation/hrwg/_list.py @@ -1,213 +1,226 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -# Code generated by aaz-dev-tools -# -------------------------------------------------------------------------------------------- - -# pylint: skip-file -# flake8: noqa - -from azure.cli.core.aaz import * - - -@register_command( - "automation hrwg list", -) -class List(AAZCommand): - """Retrieve a list of hybrid runbook worker groups. - """ - - _aaz_info = { - "version": "2022-02-22", - "resources": [ - ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/hybridrunbookworkergroups", "2022-02-22"], - ] - } - - def _handler(self, command_args): - super()._handler(command_args) - return self.build_paging(self._execute_operations, self._output) - - _args_schema = None - - @classmethod - def _build_arguments_schema(cls, *args, **kwargs): - if cls._args_schema is not None: - return cls._args_schema - cls._args_schema = super()._build_arguments_schema(*args, **kwargs) - - # define Arg Group "" - - _args_schema = cls._args_schema - _args_schema.automation_account_name = AAZStrArg( - options=["--automation-account-name"], - help="The name of the automation account.", - required=True, - ) - _args_schema.resource_group = AAZResourceGroupNameArg( - required=True, - ) - _args_schema.filter = AAZStrArg( - options=["--filter"], - help="The filter to apply on the operation.", - ) - return cls._args_schema - - def _execute_operations(self): - self.HybridRunbookWorkerGroupListByAutomationAccount(ctx=self.ctx)() - - def _output(self, *args, **kwargs): - result = self.deserialize_output(self.ctx.vars.instance.value, client_flatten=True) - next_link = self.deserialize_output(self.ctx.vars.instance.next_link) - return result, next_link - - class HybridRunbookWorkerGroupListByAutomationAccount(AAZHttpOperation): - CLIENT_TYPE = "MgmtClient" - - def __call__(self, *args, **kwargs): - request = self.make_request() - session = self.client.send_request(request=request, stream=False, **kwargs) - if session.http_response.status_code in [200]: - return self.on_200(session) - - return self.on_error(session.http_response) - - @property - def url(self): - return self.client.format_url( - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups", - **self.url_parameters - ) - - @property - def method(self): - return "GET" - - @property - def error_format(self): - return "ODataV4Format" - - @property - def url_parameters(self): - parameters = { - **self.serialize_url_param( - "automationAccountName", self.ctx.args.automation_account_name, - required=True, - ), - **self.serialize_url_param( - "resourceGroupName", self.ctx.args.resource_group, - required=True, - ), - **self.serialize_url_param( - "subscriptionId", self.ctx.subscription_id, - required=True, - ), - } - return 
parameters - - @property - def query_parameters(self): - parameters = { - **self.serialize_query_param( - "$filter", self.ctx.args.filter, - ), - **self.serialize_query_param( - "api-version", "2022-02-22", - required=True, - ), - } - return parameters - - @property - def header_parameters(self): - parameters = { - **self.serialize_header_param( - "Accept", "application/json", - ), - } - return parameters - - def on_200(self, session): - data = self.deserialize_http_content(session) - self.ctx.set_var( - "instance", - data, - schema_builder=self._build_schema_on_200 - ) - - _schema_on_200 = None - - @classmethod - def _build_schema_on_200(cls): - if cls._schema_on_200 is not None: - return cls._schema_on_200 - - cls._schema_on_200 = AAZObjectType() - - _schema_on_200 = cls._schema_on_200 - _schema_on_200.next_link = AAZStrType( - serialized_name="nextLink", - ) - _schema_on_200.value = AAZListType() - - value = cls._schema_on_200.value - value.Element = AAZObjectType() - - _element = cls._schema_on_200.value.Element - _element.id = AAZStrType( - flags={"read_only": True}, - ) - _element.name = AAZStrType( - flags={"read_only": True}, - ) - _element.properties = AAZObjectType( - flags={"client_flatten": True}, - ) - _element.system_data = AAZObjectType( - serialized_name="systemData", - flags={"read_only": True}, - ) - _element.type = AAZStrType( - flags={"read_only": True}, - ) - - properties = cls._schema_on_200.value.Element.properties - properties.credential = AAZObjectType() - properties.group_type = AAZStrType( - serialized_name="groupType", - ) - - credential = cls._schema_on_200.value.Element.properties.credential - credential.name = AAZStrType() - - system_data = cls._schema_on_200.value.Element.system_data - system_data.created_at = AAZStrType( - serialized_name="createdAt", - flags={"read_only": True}, - ) - system_data.created_by = AAZStrType( - serialized_name="createdBy", - flags={"read_only": True}, - ) - system_data.created_by_type = AAZStrType( - serialized_name="createdByType", - flags={"read_only": True}, - ) - system_data.last_modified_at = AAZStrType( - serialized_name="lastModifiedAt", - flags={"read_only": True}, - ) - system_data.last_modified_by = AAZStrType( - serialized_name="lastModifiedBy", - flags={"read_only": True}, - ) - system_data.last_modified_by_type = AAZStrType( - serialized_name="lastModifiedByType", - flags={"read_only": True}, - ) - - return cls._schema_on_200 - - -__all__ = ["List"] +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "automation hrwg list", +) +class List(AAZCommand): + """List all hybrid runbook worker groups + + :example: List all hybrid runbook worker groups + az automation hrwg list --automation-account-name accountName --resource-group groupName + """ + + _aaz_info = { + "version": "2022-08-08", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/hybridrunbookworkergroups", "2022-08-08"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + return self.build_paging(self._execute_operations, self._output) + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.automation_account_name = AAZStrArg( + options=["--automation-account-name"], + help="The name of the automation account.", + required=True, + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + _args_schema.filter = AAZStrArg( + options=["--filter"], + help="The filter to apply on the operation.", + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.HybridRunbookWorkerGroupListByAutomationAccount(ctx=self.ctx)() + self.post_operations() + + # @register_callback + def pre_operations(self): + pass + + # @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance.value, client_flatten=True) + next_link = self.deserialize_output(self.ctx.vars.instance.next_link) + return result, next_link + + class HybridRunbookWorkerGroupListByAutomationAccount(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups", + **self.url_parameters + ) + + @property + def method(self): + return "GET" + + @property + def error_format(self): + return "ODataV4Format" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "automationAccountName", self.ctx.args.automation_account_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "$filter", self.ctx.args.filter, + ), + **self.serialize_query_param( + "api-version", "2022-08-08", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return 
parameters + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + _schema_on_200.next_link = AAZStrType( + serialized_name="nextLink", + ) + _schema_on_200.value = AAZListType() + + value = cls._schema_on_200.value + value.Element = AAZObjectType() + + _element = cls._schema_on_200.value.Element + _element.id = AAZStrType( + flags={"read_only": True}, + ) + _element.name = AAZStrType( + flags={"read_only": True}, + ) + _element.properties = AAZObjectType( + flags={"client_flatten": True}, + ) + _element.system_data = AAZObjectType( + serialized_name="systemData", + flags={"read_only": True}, + ) + _element.type = AAZStrType( + flags={"read_only": True}, + ) + + properties = cls._schema_on_200.value.Element.properties + properties.credential = AAZObjectType() + properties.group_type = AAZStrType( + serialized_name="groupType", + ) + + credential = cls._schema_on_200.value.Element.properties.credential + credential.name = AAZStrType() + + system_data = cls._schema_on_200.value.Element.system_data + system_data.created_at = AAZStrType( + serialized_name="createdAt", + flags={"read_only": True}, + ) + system_data.created_by = AAZStrType( + serialized_name="createdBy", + flags={"read_only": True}, + ) + system_data.created_by_type = AAZStrType( + serialized_name="createdByType", + flags={"read_only": True}, + ) + system_data.last_modified_at = AAZStrType( + serialized_name="lastModifiedAt", + flags={"read_only": True}, + ) + system_data.last_modified_by = AAZStrType( + serialized_name="lastModifiedBy", + flags={"read_only": True}, + ) + system_data.last_modified_by_type = AAZStrType( + serialized_name="lastModifiedByType", + flags={"read_only": True}, + ) + + return cls._schema_on_200 + + +__all__ = ["List"] diff --git a/src/automation/azext_automation/aaz/latest/automation/hrwg/_show.py b/src/automation/azext_automation/aaz/latest/automation/hrwg/_show.py index cc3d13250e4..54e1f3e29cb 100644 --- a/src/automation/azext_automation/aaz/latest/automation/hrwg/_show.py +++ b/src/automation/azext_automation/aaz/latest/automation/hrwg/_show.py @@ -1,208 +1,221 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -# Code generated by aaz-dev-tools -# -------------------------------------------------------------------------------------------- - -# pylint: skip-file -# flake8: noqa - -from azure.cli.core.aaz import * - - -@register_command( - "automation hrwg show", -) -class Show(AAZCommand): - """Retrieve a hybrid runbook worker group. 
- """ - - _aaz_info = { - "version": "2022-02-22", - "resources": [ - ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/hybridrunbookworkergroups/{}", "2022-02-22"], - ] - } - - def _handler(self, command_args): - super()._handler(command_args) - self._execute_operations() - return self._output() - - _args_schema = None - - @classmethod - def _build_arguments_schema(cls, *args, **kwargs): - if cls._args_schema is not None: - return cls._args_schema - cls._args_schema = super()._build_arguments_schema(*args, **kwargs) - - # define Arg Group "" - - _args_schema = cls._args_schema - _args_schema.automation_account_name = AAZStrArg( - options=["--automation-account-name"], - help="The name of the automation account.", - required=True, - id_part="name", - ) - _args_schema.hybrid_runbook_worker_group_name = AAZStrArg( - options=["-n", "--name", "--hybrid-runbook-worker-group-name"], - help="The hybrid runbook worker group name", - required=True, - id_part="child_name_1", - ) - _args_schema.resource_group = AAZResourceGroupNameArg( - required=True, - ) - return cls._args_schema - - def _execute_operations(self): - self.HybridRunbookWorkerGroupGet(ctx=self.ctx)() - - def _output(self, *args, **kwargs): - result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) - return result - - class HybridRunbookWorkerGroupGet(AAZHttpOperation): - CLIENT_TYPE = "MgmtClient" - - def __call__(self, *args, **kwargs): - request = self.make_request() - session = self.client.send_request(request=request, stream=False, **kwargs) - if session.http_response.status_code in [200]: - return self.on_200(session) - - return self.on_error(session.http_response) - - @property - def url(self): - return self.client.format_url( - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}", - **self.url_parameters - ) - - @property - def method(self): - return "GET" - - @property - def error_format(self): - return "ODataV4Format" - - @property - def url_parameters(self): - parameters = { - **self.serialize_url_param( - "automationAccountName", self.ctx.args.automation_account_name, - required=True, - ), - **self.serialize_url_param( - "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, - required=True, - ), - **self.serialize_url_param( - "resourceGroupName", self.ctx.args.resource_group, - required=True, - ), - **self.serialize_url_param( - "subscriptionId", self.ctx.subscription_id, - required=True, - ), - } - return parameters - - @property - def query_parameters(self): - parameters = { - **self.serialize_query_param( - "api-version", "2022-02-22", - required=True, - ), - } - return parameters - - @property - def header_parameters(self): - parameters = { - **self.serialize_header_param( - "Accept", "application/json", - ), - } - return parameters - - def on_200(self, session): - data = self.deserialize_http_content(session) - self.ctx.set_var( - "instance", - data, - schema_builder=self._build_schema_on_200 - ) - - _schema_on_200 = None - - @classmethod - def _build_schema_on_200(cls): - if cls._schema_on_200 is not None: - return cls._schema_on_200 - - cls._schema_on_200 = AAZObjectType() - - _schema_on_200 = cls._schema_on_200 - _schema_on_200.id = AAZStrType( - flags={"read_only": True}, - ) - _schema_on_200.name = AAZStrType( - flags={"read_only": True}, - ) - 
_schema_on_200.properties = AAZObjectType( - flags={"client_flatten": True}, - ) - _schema_on_200.system_data = AAZObjectType( - serialized_name="systemData", - flags={"read_only": True}, - ) - _schema_on_200.type = AAZStrType( - flags={"read_only": True}, - ) - - properties = cls._schema_on_200.properties - properties.credential = AAZObjectType() - properties.group_type = AAZStrType( - serialized_name="groupType", - ) - - credential = cls._schema_on_200.properties.credential - credential.name = AAZStrType() - - system_data = cls._schema_on_200.system_data - system_data.created_at = AAZStrType( - serialized_name="createdAt", - flags={"read_only": True}, - ) - system_data.created_by = AAZStrType( - serialized_name="createdBy", - flags={"read_only": True}, - ) - system_data.created_by_type = AAZStrType( - serialized_name="createdByType", - flags={"read_only": True}, - ) - system_data.last_modified_at = AAZStrType( - serialized_name="lastModifiedAt", - flags={"read_only": True}, - ) - system_data.last_modified_by = AAZStrType( - serialized_name="lastModifiedBy", - flags={"read_only": True}, - ) - system_data.last_modified_by_type = AAZStrType( - serialized_name="lastModifiedByType", - flags={"read_only": True}, - ) - - return cls._schema_on_200 - - -__all__ = ["Show"] +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "automation hrwg show", +) +class Show(AAZCommand): + """Get hybrid worker group + + :example: Get hybrid worker group + az automation hrwg show --automation-account-name accountName --resource-group groupName --name hybridrunbookworkergroupName + """ + + _aaz_info = { + "version": "2022-08-08", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/hybridrunbookworkergroups/{}", "2022-08-08"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return self._output() + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.automation_account_name = AAZStrArg( + options=["--automation-account-name"], + help="The name of the automation account.", + required=True, + id_part="name", + ) + _args_schema.hybrid_runbook_worker_group_name = AAZStrArg( + options=["-n", "--name", "--hybrid-runbook-worker-group-name"], + help="The hybrid runbook worker group name", + required=True, + id_part="child_name_1", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.HybridRunbookWorkerGroupGet(ctx=self.ctx)() + self.post_operations() + + # @register_callback + def pre_operations(self): + pass + + # @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) + return result 
+ + class HybridRunbookWorkerGroupGet(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}", + **self.url_parameters + ) + + @property + def method(self): + return "GET" + + @property + def error_format(self): + return "ODataV4Format" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "automationAccountName", self.ctx.args.automation_account_name, + required=True, + ), + **self.serialize_url_param( + "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2022-08-08", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + _schema_on_200.id = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.name = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.properties = AAZObjectType( + flags={"client_flatten": True}, + ) + _schema_on_200.system_data = AAZObjectType( + serialized_name="systemData", + flags={"read_only": True}, + ) + _schema_on_200.type = AAZStrType( + flags={"read_only": True}, + ) + + properties = cls._schema_on_200.properties + properties.credential = AAZObjectType() + properties.group_type = AAZStrType( + serialized_name="groupType", + ) + + credential = cls._schema_on_200.properties.credential + credential.name = AAZStrType() + + system_data = cls._schema_on_200.system_data + system_data.created_at = AAZStrType( + serialized_name="createdAt", + flags={"read_only": True}, + ) + system_data.created_by = AAZStrType( + serialized_name="createdBy", + flags={"read_only": True}, + ) + system_data.created_by_type = AAZStrType( + serialized_name="createdByType", + flags={"read_only": True}, + ) + system_data.last_modified_at = AAZStrType( + serialized_name="lastModifiedAt", + flags={"read_only": True}, + ) + system_data.last_modified_by = AAZStrType( + serialized_name="lastModifiedBy", + flags={"read_only": True}, + ) + system_data.last_modified_by_type = AAZStrType( + serialized_name="lastModifiedByType", + flags={"read_only": True}, + ) + + return cls._schema_on_200 + + +__all__ = ["Show"] diff --git a/src/automation/azext_automation/aaz/latest/automation/hrwg/_update.py 
b/src/automation/azext_automation/aaz/latest/automation/hrwg/_update.py index 27dad24b0a6..8246ddfe9e5 100644 --- a/src/automation/azext_automation/aaz/latest/automation/hrwg/_update.py +++ b/src/automation/azext_automation/aaz/latest/automation/hrwg/_update.py @@ -1,384 +1,407 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -# Code generated by aaz-dev-tools -# -------------------------------------------------------------------------------------------- - -# pylint: skip-file -# flake8: noqa - -from azure.cli.core.aaz import * - - -@register_command( - "automation hrwg update", -) -class Update(AAZCommand): - """Create a hybrid runbook worker group. - """ - - _aaz_info = { - "version": "2022-02-22", - "resources": [ - ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/hybridrunbookworkergroups/{}", "2022-02-22"], - ] - } - - AZ_SUPPORT_GENERIC_UPDATE = True - - def _handler(self, command_args): - super()._handler(command_args) - self._execute_operations() - return self._output() - - _args_schema = None - - @classmethod - def _build_arguments_schema(cls, *args, **kwargs): - if cls._args_schema is not None: - return cls._args_schema - cls._args_schema = super()._build_arguments_schema(*args, **kwargs) - - # define Arg Group "" - - _args_schema = cls._args_schema - _args_schema.automation_account_name = AAZStrArg( - options=["--automation-account-name"], - help="The name of the automation account.", - required=True, - id_part="name", - ) - _args_schema.hybrid_runbook_worker_group_name = AAZStrArg( - options=["-n", "--name", "--hybrid-runbook-worker-group-name"], - help="The hybrid runbook worker group name", - required=True, - id_part="child_name_1", - ) - _args_schema.resource_group = AAZResourceGroupNameArg( - required=True, - ) - - # define Arg Group "Properties" - - _args_schema = cls._args_schema - _args_schema.credential = AAZObjectArg( - options=["--credential"], - arg_group="Properties", - help="Sets the credential of a worker group.", - nullable=True, - ) - - credential = cls._args_schema.credential - credential.name = AAZStrArg( - options=["name"], - help="Gets or sets the name of the credential.", - nullable=True, - ) - return cls._args_schema - - def _execute_operations(self): - self.HybridRunbookWorkerGroupGet(ctx=self.ctx)() - self.InstanceUpdateByJson(ctx=self.ctx)() - self.InstanceUpdateByGeneric(ctx=self.ctx)() - self.HybridRunbookWorkerGroupCreate(ctx=self.ctx)() - - def _output(self, *args, **kwargs): - result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) - return result - - class HybridRunbookWorkerGroupGet(AAZHttpOperation): - CLIENT_TYPE = "MgmtClient" - - def __call__(self, *args, **kwargs): - request = self.make_request() - session = self.client.send_request(request=request, stream=False, **kwargs) - if session.http_response.status_code in [200]: - return self.on_200(session) - - return self.on_error(session.http_response) - - @property - def url(self): - return self.client.format_url( - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}", - **self.url_parameters - ) - - @property - def method(self): - return "GET" - - @property - def 
error_format(self): - return "ODataV4Format" - - @property - def url_parameters(self): - parameters = { - **self.serialize_url_param( - "automationAccountName", self.ctx.args.automation_account_name, - required=True, - ), - **self.serialize_url_param( - "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, - required=True, - ), - **self.serialize_url_param( - "resourceGroupName", self.ctx.args.resource_group, - required=True, - ), - **self.serialize_url_param( - "subscriptionId", self.ctx.subscription_id, - required=True, - ), - } - return parameters - - @property - def query_parameters(self): - parameters = { - **self.serialize_query_param( - "api-version", "2022-02-22", - required=True, - ), - } - return parameters - - @property - def header_parameters(self): - parameters = { - **self.serialize_header_param( - "Accept", "application/json", - ), - } - return parameters - - def on_200(self, session): - data = self.deserialize_http_content(session) - self.ctx.set_var( - "instance", - data, - schema_builder=self._build_schema_on_200 - ) - - _schema_on_200 = None - - @classmethod - def _build_schema_on_200(cls): - if cls._schema_on_200 is not None: - return cls._schema_on_200 - - cls._schema_on_200 = AAZObjectType() - _build_schema_hybrid_runbook_worker_group_read(cls._schema_on_200) - - return cls._schema_on_200 - - class HybridRunbookWorkerGroupCreate(AAZHttpOperation): - CLIENT_TYPE = "MgmtClient" - - def __call__(self, *args, **kwargs): - request = self.make_request() - session = self.client.send_request(request=request, stream=False, **kwargs) - if session.http_response.status_code in [200]: - return self.on_200(session) - - return self.on_error(session.http_response) - - @property - def url(self): - return self.client.format_url( - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}", - **self.url_parameters - ) - - @property - def method(self): - return "PUT" - - @property - def error_format(self): - return "ODataV4Format" - - @property - def url_parameters(self): - parameters = { - **self.serialize_url_param( - "automationAccountName", self.ctx.args.automation_account_name, - required=True, - ), - **self.serialize_url_param( - "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, - required=True, - ), - **self.serialize_url_param( - "resourceGroupName", self.ctx.args.resource_group, - required=True, - ), - **self.serialize_url_param( - "subscriptionId", self.ctx.subscription_id, - required=True, - ), - } - return parameters - - @property - def query_parameters(self): - parameters = { - **self.serialize_query_param( - "api-version", "2022-02-22", - required=True, - ), - } - return parameters - - @property - def header_parameters(self): - parameters = { - **self.serialize_header_param( - "Content-Type", "application/json", - ), - **self.serialize_header_param( - "Accept", "application/json", - ), - } - return parameters - - @property - def content(self): - _content_value, _builder = self.new_content_builder( - self.ctx.args, - value=self.ctx.vars.instance, - ) - - return self.serialize_content(_content_value) - - def on_200(self, session): - data = self.deserialize_http_content(session) - self.ctx.set_var( - "instance", - data, - schema_builder=self._build_schema_on_200 - ) - - _schema_on_200 = None - - @classmethod - def _build_schema_on_200(cls): - if cls._schema_on_200 is not None: - 
return cls._schema_on_200 - - cls._schema_on_200 = AAZObjectType() - _build_schema_hybrid_runbook_worker_group_read(cls._schema_on_200) - - return cls._schema_on_200 - - class InstanceUpdateByJson(AAZJsonInstanceUpdateOperation): - - def __call__(self, *args, **kwargs): - self._update_instance(self.ctx.vars.instance) - - def _update_instance(self, instance): - _instance_value, _builder = self.new_content_builder( - self.ctx.args, - value=instance, - typ=AAZObjectType - ) - _builder.set_prop("name", AAZStrType, ".hybrid_runbook_worker_group_name") - _builder.set_prop("properties", AAZObjectType, typ_kwargs={"flags": {"client_flatten": True}}) - - properties = _builder.get(".properties") - if properties is not None: - properties.set_prop("credential", AAZObjectType, ".credential") - - credential = _builder.get(".properties.credential") - if credential is not None: - credential.set_prop("name", AAZStrType, ".name") - - return _instance_value - - class InstanceUpdateByGeneric(AAZGenericInstanceUpdateOperation): - - def __call__(self, *args, **kwargs): - self._update_instance_by_generic( - self.ctx.vars.instance, - self.ctx.generic_update_args - ) - - -_schema_hybrid_runbook_worker_group_read = None - - -def _build_schema_hybrid_runbook_worker_group_read(_schema): - global _schema_hybrid_runbook_worker_group_read - if _schema_hybrid_runbook_worker_group_read is not None: - _schema.id = _schema_hybrid_runbook_worker_group_read.id - _schema.name = _schema_hybrid_runbook_worker_group_read.name - _schema.properties = _schema_hybrid_runbook_worker_group_read.properties - _schema.system_data = _schema_hybrid_runbook_worker_group_read.system_data - _schema.type = _schema_hybrid_runbook_worker_group_read.type - return - - _schema_hybrid_runbook_worker_group_read = AAZObjectType() - - hybrid_runbook_worker_group_read = _schema_hybrid_runbook_worker_group_read - hybrid_runbook_worker_group_read.id = AAZStrType( - flags={"read_only": True}, - ) - hybrid_runbook_worker_group_read.name = AAZStrType( - flags={"read_only": True}, - ) - hybrid_runbook_worker_group_read.properties = AAZObjectType( - flags={"client_flatten": True}, - ) - hybrid_runbook_worker_group_read.system_data = AAZObjectType( - serialized_name="systemData", - flags={"read_only": True}, - ) - hybrid_runbook_worker_group_read.type = AAZStrType( - flags={"read_only": True}, - ) - - properties = _schema_hybrid_runbook_worker_group_read.properties - properties.credential = AAZObjectType() - properties.group_type = AAZStrType( - serialized_name="groupType", - ) - - credential = _schema_hybrid_runbook_worker_group_read.properties.credential - credential.name = AAZStrType() - - system_data = _schema_hybrid_runbook_worker_group_read.system_data - system_data.created_at = AAZStrType( - serialized_name="createdAt", - flags={"read_only": True}, - ) - system_data.created_by = AAZStrType( - serialized_name="createdBy", - flags={"read_only": True}, - ) - system_data.created_by_type = AAZStrType( - serialized_name="createdByType", - flags={"read_only": True}, - ) - system_data.last_modified_at = AAZStrType( - serialized_name="lastModifiedAt", - flags={"read_only": True}, - ) - system_data.last_modified_by = AAZStrType( - serialized_name="lastModifiedBy", - flags={"read_only": True}, - ) - system_data.last_modified_by_type = AAZStrType( - serialized_name="lastModifiedByType", - flags={"read_only": True}, - ) - - _schema.id = _schema_hybrid_runbook_worker_group_read.id - _schema.name = _schema_hybrid_runbook_worker_group_read.name - _schema.properties = 
_schema_hybrid_runbook_worker_group_read.properties - _schema.system_data = _schema_hybrid_runbook_worker_group_read.system_data - _schema.type = _schema_hybrid_runbook_worker_group_read.type - - -__all__ = ["Update"] +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "automation hrwg update", +) +class Update(AAZCommand): + """Update a hybrid runbook worker group. + + :example: Update hybrid worker group + az automation hrwg update --automation-account-name accountName --resource-group groupName --name hybridrunbookworkergroupName --credential "{name: credentialname}" + """ + + _aaz_info = { + "version": "2022-08-08", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/hybridrunbookworkergroups/{}", "2022-08-08"], + ] + } + + AZ_SUPPORT_GENERIC_UPDATE = True + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return self._output() + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.automation_account_name = AAZStrArg( + options=["--automation-account-name"], + help="The name of the automation account.", + required=True, + id_part="name", + ) + _args_schema.hybrid_runbook_worker_group_name = AAZStrArg( + options=["-n", "--name", "--hybrid-runbook-worker-group-name"], + help="The hybrid runbook worker group name", + required=True, + id_part="child_name_1", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + + # define Arg Group "Properties" + + _args_schema = cls._args_schema + _args_schema.credential = AAZObjectArg( + options=["--credential"], + arg_group="Properties", + help="Sets the credential of a worker group.", + nullable=True, + ) + + credential = cls._args_schema.credential + credential.name = AAZStrArg( + options=["name"], + help="Gets or sets the name of the credential.", + nullable=True, + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.HybridRunbookWorkerGroupGet(ctx=self.ctx)() + self.pre_instance_update(self.ctx.vars.instance) + self.InstanceUpdateByJson(ctx=self.ctx)() + self.InstanceUpdateByGeneric(ctx=self.ctx)() + self.post_instance_update(self.ctx.vars.instance) + self.HybridRunbookWorkerGroupCreate(ctx=self.ctx)() + self.post_operations() + + # @register_callback + def pre_operations(self): + pass + + # @register_callback + def post_operations(self): + pass + + # @register_callback + def pre_instance_update(self, instance): + pass + + # @register_callback + def post_instance_update(self, instance): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) + return result + + class HybridRunbookWorkerGroupGet(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + 
session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}", + **self.url_parameters + ) + + @property + def method(self): + return "GET" + + @property + def error_format(self): + return "ODataV4Format" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "automationAccountName", self.ctx.args.automation_account_name, + required=True, + ), + **self.serialize_url_param( + "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2022-08-08", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + _build_schema_hybrid_runbook_worker_group_read(cls._schema_on_200) + + return cls._schema_on_200 + + class HybridRunbookWorkerGroupCreate(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200, 201]: + return self.on_200_201(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}", + **self.url_parameters + ) + + @property + def method(self): + return "PUT" + + @property + def error_format(self): + return "ODataV4Format" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "automationAccountName", self.ctx.args.automation_account_name, + required=True, + ), + **self.serialize_url_param( + "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2022-08-08", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Content-Type", "application/json", + 
), + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + @property + def content(self): + _content_value, _builder = self.new_content_builder( + self.ctx.args, + value=self.ctx.vars.instance, + ) + + return self.serialize_content(_content_value) + + def on_200_201(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200_201 + ) + + _schema_on_200_201 = None + + @classmethod + def _build_schema_on_200_201(cls): + if cls._schema_on_200_201 is not None: + return cls._schema_on_200_201 + + cls._schema_on_200_201 = AAZObjectType() + _build_schema_hybrid_runbook_worker_group_read(cls._schema_on_200_201) + + return cls._schema_on_200_201 + + class InstanceUpdateByJson(AAZJsonInstanceUpdateOperation): + + def __call__(self, *args, **kwargs): + self._update_instance(self.ctx.vars.instance) + + def _update_instance(self, instance): + _instance_value, _builder = self.new_content_builder( + self.ctx.args, + value=instance, + typ=AAZObjectType + ) + _builder.set_prop("name", AAZStrType, ".hybrid_runbook_worker_group_name") + _builder.set_prop("properties", AAZObjectType, typ_kwargs={"flags": {"client_flatten": True}}) + + properties = _builder.get(".properties") + if properties is not None: + properties.set_prop("credential", AAZObjectType, ".credential") + + credential = _builder.get(".properties.credential") + if credential is not None: + credential.set_prop("name", AAZStrType, ".name") + + return _instance_value + + class InstanceUpdateByGeneric(AAZGenericInstanceUpdateOperation): + + def __call__(self, *args, **kwargs): + self._update_instance_by_generic( + self.ctx.vars.instance, + self.ctx.generic_update_args + ) + + +_schema_hybrid_runbook_worker_group_read = None + + +def _build_schema_hybrid_runbook_worker_group_read(_schema): + global _schema_hybrid_runbook_worker_group_read + if _schema_hybrid_runbook_worker_group_read is not None: + _schema.id = _schema_hybrid_runbook_worker_group_read.id + _schema.name = _schema_hybrid_runbook_worker_group_read.name + _schema.properties = _schema_hybrid_runbook_worker_group_read.properties + _schema.system_data = _schema_hybrid_runbook_worker_group_read.system_data + _schema.type = _schema_hybrid_runbook_worker_group_read.type + return + + _schema_hybrid_runbook_worker_group_read = AAZObjectType() + + hybrid_runbook_worker_group_read = _schema_hybrid_runbook_worker_group_read + hybrid_runbook_worker_group_read.id = AAZStrType( + flags={"read_only": True}, + ) + hybrid_runbook_worker_group_read.name = AAZStrType( + flags={"read_only": True}, + ) + hybrid_runbook_worker_group_read.properties = AAZObjectType( + flags={"client_flatten": True}, + ) + hybrid_runbook_worker_group_read.system_data = AAZObjectType( + serialized_name="systemData", + flags={"read_only": True}, + ) + hybrid_runbook_worker_group_read.type = AAZStrType( + flags={"read_only": True}, + ) + + properties = _schema_hybrid_runbook_worker_group_read.properties + properties.credential = AAZObjectType() + properties.group_type = AAZStrType( + serialized_name="groupType", + ) + + credential = _schema_hybrid_runbook_worker_group_read.properties.credential + credential.name = AAZStrType() + + system_data = _schema_hybrid_runbook_worker_group_read.system_data + system_data.created_at = AAZStrType( + serialized_name="createdAt", + flags={"read_only": True}, + ) + system_data.created_by = AAZStrType( + serialized_name="createdBy", + flags={"read_only": True}, + ) + 
system_data.created_by_type = AAZStrType( + serialized_name="createdByType", + flags={"read_only": True}, + ) + system_data.last_modified_at = AAZStrType( + serialized_name="lastModifiedAt", + flags={"read_only": True}, + ) + system_data.last_modified_by = AAZStrType( + serialized_name="lastModifiedBy", + flags={"read_only": True}, + ) + system_data.last_modified_by_type = AAZStrType( + serialized_name="lastModifiedByType", + flags={"read_only": True}, + ) + + _schema.id = _schema_hybrid_runbook_worker_group_read.id + _schema.name = _schema_hybrid_runbook_worker_group_read.name + _schema.properties = _schema_hybrid_runbook_worker_group_read.properties + _schema.system_data = _schema_hybrid_runbook_worker_group_read.system_data + _schema.type = _schema_hybrid_runbook_worker_group_read.type + + +__all__ = ["Update"] diff --git a/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/__cmd_group.py b/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/__cmd_group.py index 49c641aa764..e0316265730 100644 --- a/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/__cmd_group.py +++ b/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/__cmd_group.py @@ -1,23 +1,23 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -# Code generated by aaz-dev-tools -# -------------------------------------------------------------------------------------------- - -# pylint: skip-file -# flake8: noqa - -from azure.cli.core.aaz import * - - -@register_command_group( - "automation hrwg hrw", -) -class __CMDGroup(AAZCommandGroup): - """Automation Hybrid Runbook Worker - """ - pass - - -__all__ = ["__CMDGroup"] +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command_group( + "automation hrwg hrw", +) +class __CMDGroup(AAZCommandGroup): + """hrw + """ + pass + + +__all__ = ["__CMDGroup"] diff --git a/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/__init__.py b/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/__init__.py index a0ec108bbe4..ad9df666711 100644 --- a/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/__init__.py +++ b/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/__init__.py @@ -1,17 +1,16 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. 
-# -# Code generated by aaz-dev-tools -# -------------------------------------------------------------------------------------------- - -# pylint: skip-file -# flake8: noqa - -from .__cmd_group import * -from ._create import * -from ._delete import * -from ._list import * -from ._move import * -from ._show import * -from ._update import * +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from .__cmd_group import * +from ._create import * +from ._delete import * +from ._list import * +from ._move import * +from ._show import * diff --git a/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_create.py b/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_create.py index a85390a136f..c5579094087 100644 --- a/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_create.py +++ b/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_create.py @@ -1,255 +1,268 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -# Code generated by aaz-dev-tools -# -------------------------------------------------------------------------------------------- - -# pylint: skip-file -# flake8: noqa - -from azure.cli.core.aaz import * - - -@register_command( - "automation hrwg hrw create", -) -class Create(AAZCommand): - """Create a hybrid runbook worker. 
- """ - - _aaz_info = { - "version": "2021-06-22", - "resources": [ - ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/hybridrunbookworkergroups/{}/hybridrunbookworkers/{}", "2021-06-22"], - ] - } - - def _handler(self, command_args): - super()._handler(command_args) - self._execute_operations() - return self._output() - - _args_schema = None - - @classmethod - def _build_arguments_schema(cls, *args, **kwargs): - if cls._args_schema is not None: - return cls._args_schema - cls._args_schema = super()._build_arguments_schema(*args, **kwargs) - - # define Arg Group "" - - _args_schema = cls._args_schema - _args_schema.automation_account_name = AAZStrArg( - options=["--automation-account-name"], - help="The name of the automation account.", - required=True, - id_part="name", - ) - _args_schema.hybrid_runbook_worker_group_name = AAZStrArg( - options=["--hybrid-runbook-worker-group-name"], - help="The hybrid runbook worker group name", - required=True, - id_part="child_name_1", - ) - _args_schema.hybrid_runbook_worker_id = AAZStrArg( - options=["-n", "--name", "--hybrid-runbook-worker-id"], - help="The hybrid runbook worker id", - required=True, - id_part="child_name_2", - ) - _args_schema.resource_group = AAZResourceGroupNameArg( - required=True, - ) - - # define Arg Group "Properties" - - _args_schema = cls._args_schema - _args_schema.vm_resource_id = AAZStrArg( - options=["--vm-resource-id"], - arg_group="Properties", - help="Azure Resource Manager Id for a virtual machine.", - ) - return cls._args_schema - - def _execute_operations(self): - self.HybridRunbookWorkersCreate(ctx=self.ctx)() - - def _output(self, *args, **kwargs): - result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) - return result - - class HybridRunbookWorkersCreate(AAZHttpOperation): - CLIENT_TYPE = "MgmtClient" - - def __call__(self, *args, **kwargs): - request = self.make_request() - session = self.client.send_request(request=request, stream=False, **kwargs) - if session.http_response.status_code in [200, 201]: - return self.on_200(session) - - return self.on_error(session.http_response) - - @property - def url(self): - return self.client.format_url( - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}/hybridRunbookWorkers/{hybridRunbookWorkerId}", - **self.url_parameters - ) - - @property - def method(self): - return "PUT" - - @property - def error_format(self): - return "ODataV4Format" - - @property - def url_parameters(self): - parameters = { - **self.serialize_url_param( - "automationAccountName", self.ctx.args.automation_account_name, - required=True, - ), - **self.serialize_url_param( - "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, - required=True, - ), - **self.serialize_url_param( - "hybridRunbookWorkerId", self.ctx.args.hybrid_runbook_worker_id, - required=True, - ), - **self.serialize_url_param( - "resourceGroupName", self.ctx.args.resource_group, - required=True, - ), - **self.serialize_url_param( - "subscriptionId", self.ctx.subscription_id, - required=True, - ), - } - return parameters - - @property - def query_parameters(self): - parameters = { - **self.serialize_query_param( - "api-version", "2021-06-22", - required=True, - ), - } - return parameters - - @property - def header_parameters(self): - parameters = { - **self.serialize_header_param( - 
"Content-Type", "application/json", - ), - **self.serialize_header_param( - "Accept", "application/json", - ), - } - return parameters - - @property - def content(self): - _content_value, _builder = self.new_content_builder( - self.ctx.args, - typ=AAZObjectType, - typ_kwargs={"flags": {"required": True, "client_flatten": True}} - ) - _builder.set_prop("name", AAZStrType, ".hybrid_runbook_worker_id") - _builder.set_prop("properties", AAZObjectType, ".", typ_kwargs={"flags": {"required": True, "client_flatten": True}}) - - properties = _builder.get(".properties") - if properties is not None: - properties.set_prop("vmResourceId", AAZStrType, ".vm_resource_id") - - return self.serialize_content(_content_value) - - def on_200(self, session): - data = self.deserialize_http_content(session) - self.ctx.set_var( - "instance", - data, - schema_builder=self._build_schema_on_200 - ) - - _schema_on_200 = None - - @classmethod - def _build_schema_on_200(cls): - if cls._schema_on_200 is not None: - return cls._schema_on_200 - - cls._schema_on_200 = AAZObjectType() - - _schema_on_200 = cls._schema_on_200 - _schema_on_200.id = AAZStrType( - flags={"read_only": True}, - ) - _schema_on_200.name = AAZStrType( - flags={"read_only": True}, - ) - _schema_on_200.properties = AAZObjectType( - flags={"client_flatten": True}, - ) - _schema_on_200.system_data = AAZObjectType( - serialized_name="systemData", - flags={"read_only": True}, - ) - _schema_on_200.type = AAZStrType( - flags={"read_only": True}, - ) - - properties = cls._schema_on_200.properties - properties.ip = AAZStrType() - properties.last_seen_date_time = AAZStrType( - serialized_name="lastSeenDateTime", - ) - properties.registered_date_time = AAZStrType( - serialized_name="registeredDateTime", - ) - properties.vm_resource_id = AAZStrType( - serialized_name="vmResourceId", - ) - properties.worker_name = AAZStrType( - serialized_name="workerName", - ) - properties.worker_type = AAZStrType( - serialized_name="workerType", - ) - - system_data = cls._schema_on_200.system_data - system_data.created_at = AAZStrType( - serialized_name="createdAt", - flags={"read_only": True}, - ) - system_data.created_by = AAZStrType( - serialized_name="createdBy", - flags={"read_only": True}, - ) - system_data.created_by_type = AAZStrType( - serialized_name="createdByType", - flags={"read_only": True}, - ) - system_data.last_modified_at = AAZStrType( - serialized_name="lastModifiedAt", - flags={"read_only": True}, - ) - system_data.last_modified_by = AAZStrType( - serialized_name="lastModifiedBy", - flags={"read_only": True}, - ) - system_data.last_modified_by_type = AAZStrType( - serialized_name="lastModifiedByType", - flags={"read_only": True}, - ) - - return cls._schema_on_200 - - -__all__ = ["Create"] +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "automation hrwg hrw create", +) +class Create(AAZCommand): + """Create a hybrid runbook worker. 
+ + :example: Create a hybrid runbook worker + az automation hrwg hrw create --automation-account-name accountName --resource-group groupName --hybrid-runbook-worker-group-name hybridRunbookWorkerGroupName --hybrid-runbook-worker-id hybridRunbookWorkerId --vm-resource-id vmResourceId + """ + + _aaz_info = { + "version": "2022-08-08", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/hybridrunbookworkergroups/{}/hybridrunbookworkers/{}", "2022-08-08"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return self._output() + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.automation_account_name = AAZStrArg( + options=["--automation-account-name"], + help="The name of the automation account.", + required=True, + id_part="name", + ) + _args_schema.hybrid_runbook_worker_group_name = AAZStrArg( + options=["--hybrid-runbook-worker-group-name"], + help="The hybrid runbook worker group name", + required=True, + id_part="child_name_1", + ) + _args_schema.hybrid_runbook_worker_id = AAZStrArg( + options=["-n", "--name", "--hybrid-runbook-worker-id"], + help="The hybrid runbook worker id", + required=True, + id_part="child_name_2", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + + # define Arg Group "Properties" + + _args_schema = cls._args_schema + _args_schema.vm_resource_id = AAZStrArg( + options=["--vm-resource-id"], + arg_group="Properties", + help="Azure Resource Manager Id for a virtual machine.", + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.HybridRunbookWorkersCreate(ctx=self.ctx)() + self.post_operations() + + # @register_callback + def pre_operations(self): + pass + + # @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) + return result + + class HybridRunbookWorkersCreate(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200, 201]: + return self.on_200_201(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}/hybridRunbookWorkers/{hybridRunbookWorkerId}", + **self.url_parameters + ) + + @property + def method(self): + return "PUT" + + @property + def error_format(self): + return "ODataV4Format" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "automationAccountName", self.ctx.args.automation_account_name, + required=True, + ), + **self.serialize_url_param( + "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, + required=True, + ), + **self.serialize_url_param( + "hybridRunbookWorkerId", self.ctx.args.hybrid_runbook_worker_id, + required=True, + ), + **self.serialize_url_param( + 
"resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2022-08-08", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Content-Type", "application/json", + ), + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + @property + def content(self): + _content_value, _builder = self.new_content_builder( + self.ctx.args, + typ=AAZObjectType, + typ_kwargs={"flags": {"required": True, "client_flatten": True}} + ) + _builder.set_prop("name", AAZStrType, ".hybrid_runbook_worker_id") + _builder.set_prop("properties", AAZObjectType, ".", typ_kwargs={"flags": {"required": True, "client_flatten": True}}) + + properties = _builder.get(".properties") + if properties is not None: + properties.set_prop("vmResourceId", AAZStrType, ".vm_resource_id") + + return self.serialize_content(_content_value) + + def on_200_201(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200_201 + ) + + _schema_on_200_201 = None + + @classmethod + def _build_schema_on_200_201(cls): + if cls._schema_on_200_201 is not None: + return cls._schema_on_200_201 + + cls._schema_on_200_201 = AAZObjectType() + + _schema_on_200_201 = cls._schema_on_200_201 + _schema_on_200_201.id = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200_201.name = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200_201.properties = AAZObjectType( + flags={"client_flatten": True}, + ) + _schema_on_200_201.system_data = AAZObjectType( + serialized_name="systemData", + flags={"read_only": True}, + ) + _schema_on_200_201.type = AAZStrType( + flags={"read_only": True}, + ) + + properties = cls._schema_on_200_201.properties + properties.ip = AAZStrType() + properties.last_seen_date_time = AAZStrType( + serialized_name="lastSeenDateTime", + ) + properties.registered_date_time = AAZStrType( + serialized_name="registeredDateTime", + ) + properties.vm_resource_id = AAZStrType( + serialized_name="vmResourceId", + ) + properties.worker_name = AAZStrType( + serialized_name="workerName", + ) + properties.worker_type = AAZStrType( + serialized_name="workerType", + ) + + system_data = cls._schema_on_200_201.system_data + system_data.created_at = AAZStrType( + serialized_name="createdAt", + flags={"read_only": True}, + ) + system_data.created_by = AAZStrType( + serialized_name="createdBy", + flags={"read_only": True}, + ) + system_data.created_by_type = AAZStrType( + serialized_name="createdByType", + flags={"read_only": True}, + ) + system_data.last_modified_at = AAZStrType( + serialized_name="lastModifiedAt", + flags={"read_only": True}, + ) + system_data.last_modified_by = AAZStrType( + serialized_name="lastModifiedBy", + flags={"read_only": True}, + ) + system_data.last_modified_by_type = AAZStrType( + serialized_name="lastModifiedByType", + flags={"read_only": True}, + ) + + return cls._schema_on_200_201 + + +__all__ = ["Create"] diff --git a/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_delete.py b/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_delete.py index 826c80aeccb..a1fec9c7079 100644 --- 
a/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_delete.py +++ b/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_delete.py @@ -1,142 +1,155 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -# Code generated by aaz-dev-tools -# -------------------------------------------------------------------------------------------- - -# pylint: skip-file -# flake8: noqa - -from azure.cli.core.aaz import * - - -@register_command( - "automation hrwg hrw delete", - confirmation="Are you sure you want to perform this operation?", -) -class Delete(AAZCommand): - """Delete a hybrid runbook worker. - """ - - _aaz_info = { - "version": "2021-06-22", - "resources": [ - ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/hybridrunbookworkergroups/{}/hybridrunbookworkers/{}", "2021-06-22"], - ] - } - - def _handler(self, command_args): - super()._handler(command_args) - self._execute_operations() - return None - - _args_schema = None - - @classmethod - def _build_arguments_schema(cls, *args, **kwargs): - if cls._args_schema is not None: - return cls._args_schema - cls._args_schema = super()._build_arguments_schema(*args, **kwargs) - - # define Arg Group "" - - _args_schema = cls._args_schema - _args_schema.automation_account_name = AAZStrArg( - options=["--automation-account-name"], - help="The name of the automation account.", - required=True, - id_part="name", - ) - _args_schema.hybrid_runbook_worker_group_name = AAZStrArg( - options=["--hybrid-runbook-worker-group-name"], - help="The hybrid runbook worker group name", - required=True, - id_part="child_name_1", - ) - _args_schema.hybrid_runbook_worker_id = AAZStrArg( - options=["-n", "--name", "--hybrid-runbook-worker-id"], - help="The hybrid runbook worker id", - required=True, - id_part="child_name_2", - ) - _args_schema.resource_group = AAZResourceGroupNameArg( - required=True, - ) - return cls._args_schema - - def _execute_operations(self): - self.HybridRunbookWorkersDelete(ctx=self.ctx)() - - class HybridRunbookWorkersDelete(AAZHttpOperation): - CLIENT_TYPE = "MgmtClient" - - def __call__(self, *args, **kwargs): - request = self.make_request() - session = self.client.send_request(request=request, stream=False, **kwargs) - if session.http_response.status_code in [200]: - return self.on_200(session) - if session.http_response.status_code in [204]: - return self.on_204(session) - - return self.on_error(session.http_response) - - @property - def url(self): - return self.client.format_url( - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}/hybridRunbookWorkers/{hybridRunbookWorkerId}", - **self.url_parameters - ) - - @property - def method(self): - return "DELETE" - - @property - def error_format(self): - return "ODataV4Format" - - @property - def url_parameters(self): - parameters = { - **self.serialize_url_param( - "automationAccountName", self.ctx.args.automation_account_name, - required=True, - ), - **self.serialize_url_param( - "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, - required=True, - ), - **self.serialize_url_param( - "hybridRunbookWorkerId", 
self.ctx.args.hybrid_runbook_worker_id, - required=True, - ), - **self.serialize_url_param( - "resourceGroupName", self.ctx.args.resource_group, - required=True, - ), - **self.serialize_url_param( - "subscriptionId", self.ctx.subscription_id, - required=True, - ), - } - return parameters - - @property - def query_parameters(self): - parameters = { - **self.serialize_query_param( - "api-version", "2021-06-22", - required=True, - ), - } - return parameters - - def on_200(self, session): - pass - - def on_204(self, session): - pass - - -__all__ = ["Delete"] +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "automation hrwg hrw delete", + confirmation="Are you sure you want to perform this operation?", +) +class Delete(AAZCommand): + """Delete a hybrid runbook worker. + + :example: Delete a hybrid worker + az automation hrwg hrw delete --automation-account-name accountName --resource-group groupName --hybrid-runbook-worker-group-name hybridRunbookWorkerGroupName --hybrid-runbook-worker-id hybridRunbookWorkerId + """ + + _aaz_info = { + "version": "2022-08-08", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/hybridrunbookworkergroups/{}/hybridrunbookworkers/{}", "2022-08-08"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return None + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.automation_account_name = AAZStrArg( + options=["--automation-account-name"], + help="The name of the automation account.", + required=True, + id_part="name", + ) + _args_schema.hybrid_runbook_worker_group_name = AAZStrArg( + options=["--hybrid-runbook-worker-group-name"], + help="The hybrid runbook worker group name", + required=True, + id_part="child_name_1", + ) + _args_schema.hybrid_runbook_worker_id = AAZStrArg( + options=["-n", "--name", "--hybrid-runbook-worker-id"], + help="The hybrid runbook worker id", + required=True, + id_part="child_name_2", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.HybridRunbookWorkersDelete(ctx=self.ctx)() + self.post_operations() + + # @register_callback + def pre_operations(self): + pass + + # @register_callback + def post_operations(self): + pass + + class HybridRunbookWorkersDelete(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + if session.http_response.status_code in [204]: + return self.on_204(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return 
self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}/hybridRunbookWorkers/{hybridRunbookWorkerId}", + **self.url_parameters + ) + + @property + def method(self): + return "DELETE" + + @property + def error_format(self): + return "ODataV4Format" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "automationAccountName", self.ctx.args.automation_account_name, + required=True, + ), + **self.serialize_url_param( + "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, + required=True, + ), + **self.serialize_url_param( + "hybridRunbookWorkerId", self.ctx.args.hybrid_runbook_worker_id, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2022-08-08", + required=True, + ), + } + return parameters + + def on_200(self, session): + pass + + def on_204(self, session): + pass + + +__all__ = ["Delete"] diff --git a/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_list.py b/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_list.py index 799abdb8581..d23cbbe405a 100644 --- a/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_list.py +++ b/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_list.py @@ -1,231 +1,244 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -# Code generated by aaz-dev-tools -# -------------------------------------------------------------------------------------------- - -# pylint: skip-file -# flake8: noqa - -from azure.cli.core.aaz import * - - -@register_command( - "automation hrwg hrw list", -) -class List(AAZCommand): - """Retrieve a list of hybrid runbook workers. 
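The create command above PUTs a small request body: the worker name plus a flattened properties object carrying the optional VM resource id. A rough sketch of the payload its content builder assembles, assuming only --vm-resource-id is supplied (all identifiers below are placeholders):

    import json

    def build_hrw_create_body(hybrid_runbook_worker_id, vm_resource_id=None):
        # Mirrors the builder: "name" plus a flattened "properties" object.
        body = {"name": hybrid_runbook_worker_id, "properties": {}}
        if vm_resource_id is not None:
            body["properties"]["vmResourceId"] = vm_resource_id
        return body

    print(json.dumps(
        build_hrw_create_body("hybridRunbookWorkerId", "vmResourceId"),
        indent=2))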
- """ - - _aaz_info = { - "version": "2021-06-22", - "resources": [ - ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/hybridrunbookworkergroups/{}/hybridrunbookworkers", "2021-06-22"], - ] - } - - def _handler(self, command_args): - super()._handler(command_args) - return self.build_paging(self._execute_operations, self._output) - - _args_schema = None - - @classmethod - def _build_arguments_schema(cls, *args, **kwargs): - if cls._args_schema is not None: - return cls._args_schema - cls._args_schema = super()._build_arguments_schema(*args, **kwargs) - - # define Arg Group "" - - _args_schema = cls._args_schema - _args_schema.automation_account_name = AAZStrArg( - options=["--automation-account-name"], - help="The name of the automation account.", - required=True, - ) - _args_schema.hybrid_runbook_worker_group_name = AAZStrArg( - options=["--hybrid-runbook-worker-group-name"], - help="The hybrid runbook worker group name", - required=True, - ) - _args_schema.resource_group = AAZResourceGroupNameArg( - required=True, - ) - _args_schema.filter = AAZStrArg( - options=["--filter"], - help="The filter to apply on the operation.", - ) - return cls._args_schema - - def _execute_operations(self): - self.HybridRunbookWorkersListByHybridRunbookWorkerGroup(ctx=self.ctx)() - - def _output(self, *args, **kwargs): - result = self.deserialize_output(self.ctx.vars.instance.value, client_flatten=True) - next_link = self.deserialize_output(self.ctx.vars.instance.next_link) - return result, next_link - - class HybridRunbookWorkersListByHybridRunbookWorkerGroup(AAZHttpOperation): - CLIENT_TYPE = "MgmtClient" - - def __call__(self, *args, **kwargs): - request = self.make_request() - session = self.client.send_request(request=request, stream=False, **kwargs) - if session.http_response.status_code in [200]: - return self.on_200(session) - - return self.on_error(session.http_response) - - @property - def url(self): - return self.client.format_url( - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}/hybridRunbookWorkers", - **self.url_parameters - ) - - @property - def method(self): - return "GET" - - @property - def error_format(self): - return "ODataV4Format" - - @property - def url_parameters(self): - parameters = { - **self.serialize_url_param( - "automationAccountName", self.ctx.args.automation_account_name, - required=True, - ), - **self.serialize_url_param( - "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, - required=True, - ), - **self.serialize_url_param( - "resourceGroupName", self.ctx.args.resource_group, - required=True, - ), - **self.serialize_url_param( - "subscriptionId", self.ctx.subscription_id, - required=True, - ), - } - return parameters - - @property - def query_parameters(self): - parameters = { - **self.serialize_query_param( - "$filter", self.ctx.args.filter, - ), - **self.serialize_query_param( - "api-version", "2021-06-22", - required=True, - ), - } - return parameters - - @property - def header_parameters(self): - parameters = { - **self.serialize_header_param( - "Accept", "application/json", - ), - } - return parameters - - def on_200(self, session): - data = self.deserialize_http_content(session) - self.ctx.set_var( - "instance", - data, - schema_builder=self._build_schema_on_200 - ) - - _schema_on_200 = None - - @classmethod - def 
_build_schema_on_200(cls): - if cls._schema_on_200 is not None: - return cls._schema_on_200 - - cls._schema_on_200 = AAZObjectType() - - _schema_on_200 = cls._schema_on_200 - _schema_on_200.next_link = AAZStrType( - serialized_name="nextLink", - ) - _schema_on_200.value = AAZListType() - - value = cls._schema_on_200.value - value.Element = AAZObjectType() - - _element = cls._schema_on_200.value.Element - _element.id = AAZStrType( - flags={"read_only": True}, - ) - _element.name = AAZStrType( - flags={"read_only": True}, - ) - _element.properties = AAZObjectType( - flags={"client_flatten": True}, - ) - _element.system_data = AAZObjectType( - serialized_name="systemData", - flags={"read_only": True}, - ) - _element.type = AAZStrType( - flags={"read_only": True}, - ) - - properties = cls._schema_on_200.value.Element.properties - properties.ip = AAZStrType() - properties.last_seen_date_time = AAZStrType( - serialized_name="lastSeenDateTime", - ) - properties.registered_date_time = AAZStrType( - serialized_name="registeredDateTime", - ) - properties.vm_resource_id = AAZStrType( - serialized_name="vmResourceId", - ) - properties.worker_name = AAZStrType( - serialized_name="workerName", - ) - properties.worker_type = AAZStrType( - serialized_name="workerType", - ) - - system_data = cls._schema_on_200.value.Element.system_data - system_data.created_at = AAZStrType( - serialized_name="createdAt", - flags={"read_only": True}, - ) - system_data.created_by = AAZStrType( - serialized_name="createdBy", - flags={"read_only": True}, - ) - system_data.created_by_type = AAZStrType( - serialized_name="createdByType", - flags={"read_only": True}, - ) - system_data.last_modified_at = AAZStrType( - serialized_name="lastModifiedAt", - flags={"read_only": True}, - ) - system_data.last_modified_by = AAZStrType( - serialized_name="lastModifiedBy", - flags={"read_only": True}, - ) - system_data.last_modified_by_type = AAZStrType( - serialized_name="lastModifiedByType", - flags={"read_only": True}, - ) - - return cls._schema_on_200 - - -__all__ = ["List"] +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "automation hrwg hrw list", +) +class List(AAZCommand): + """List a list of hybrid runbook workers. 
+ + :example: List all hybrid runbook workers in a worker group + az automation hrwg hrw list --automation-account-name accountName --resource-group groupName --hybrid-runbook-worker-group-name hybridRunbookWorkerGroupName + """ + + _aaz_info = { + "version": "2022-08-08", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/hybridrunbookworkergroups/{}/hybridrunbookworkers", "2022-08-08"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + return self.build_paging(self._execute_operations, self._output) + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.automation_account_name = AAZStrArg( + options=["--automation-account-name"], + help="The name of the automation account.", + required=True, + ) + _args_schema.hybrid_runbook_worker_group_name = AAZStrArg( + options=["--hybrid-runbook-worker-group-name"], + help="The hybrid runbook worker group name", + required=True, + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + _args_schema.filter = AAZStrArg( + options=["--filter"], + help="The filter to apply on the operation.", + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.HybridRunbookWorkersListByHybridRunbookWorkerGroup(ctx=self.ctx)() + self.post_operations() + + # @register_callback + def pre_operations(self): + pass + + # @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance.value, client_flatten=True) + next_link = self.deserialize_output(self.ctx.vars.instance.next_link) + return result, next_link + + class HybridRunbookWorkersListByHybridRunbookWorkerGroup(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}/hybridRunbookWorkers", + **self.url_parameters + ) + + @property + def method(self): + return "GET" + + @property + def error_format(self): + return "ODataV4Format" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "automationAccountName", self.ctx.args.automation_account_name, + required=True, + ), + **self.serialize_url_param( + "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "$filter", self.ctx.args.filter, + ), + **self.serialize_query_param( + "api-version", "2022-08-08", + required=True, + ), + } + return parameters + 
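The list operation issues its GET against the worker group's hybridRunbookWorkers collection, with the optional $filter and the api-version carried in the query string. A small illustration of the resulting request path (the subscription and resource names are placeholders):

    from urllib.parse import urlencode

    path = (
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}"
        "/providers/Microsoft.Automation/automationAccounts/{automationAccountName}"
        "/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}/hybridRunbookWorkers"
    ).format(
        subscriptionId="00000000-0000-0000-0000-000000000000",  # placeholder
        resourceGroupName="groupName",
        automationAccountName="accountName",
        hybridRunbookWorkerGroupName="hybridRunbookWorkerGroupName",
    )
    print(path + "?" + urlencode({"api-version": "2022-08-08"}))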
+ @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + _schema_on_200.next_link = AAZStrType( + serialized_name="nextLink", + ) + _schema_on_200.value = AAZListType() + + value = cls._schema_on_200.value + value.Element = AAZObjectType() + + _element = cls._schema_on_200.value.Element + _element.id = AAZStrType( + flags={"read_only": True}, + ) + _element.name = AAZStrType( + flags={"read_only": True}, + ) + _element.properties = AAZObjectType( + flags={"client_flatten": True}, + ) + _element.system_data = AAZObjectType( + serialized_name="systemData", + flags={"read_only": True}, + ) + _element.type = AAZStrType( + flags={"read_only": True}, + ) + + properties = cls._schema_on_200.value.Element.properties + properties.ip = AAZStrType() + properties.last_seen_date_time = AAZStrType( + serialized_name="lastSeenDateTime", + ) + properties.registered_date_time = AAZStrType( + serialized_name="registeredDateTime", + ) + properties.vm_resource_id = AAZStrType( + serialized_name="vmResourceId", + ) + properties.worker_name = AAZStrType( + serialized_name="workerName", + ) + properties.worker_type = AAZStrType( + serialized_name="workerType", + ) + + system_data = cls._schema_on_200.value.Element.system_data + system_data.created_at = AAZStrType( + serialized_name="createdAt", + flags={"read_only": True}, + ) + system_data.created_by = AAZStrType( + serialized_name="createdBy", + flags={"read_only": True}, + ) + system_data.created_by_type = AAZStrType( + serialized_name="createdByType", + flags={"read_only": True}, + ) + system_data.last_modified_at = AAZStrType( + serialized_name="lastModifiedAt", + flags={"read_only": True}, + ) + system_data.last_modified_by = AAZStrType( + serialized_name="lastModifiedBy", + flags={"read_only": True}, + ) + system_data.last_modified_by_type = AAZStrType( + serialized_name="lastModifiedByType", + flags={"read_only": True}, + ) + + return cls._schema_on_200 + + +__all__ = ["List"] diff --git a/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_move.py b/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_move.py index 1bdf533fc0f..0ec1ee79b3c 100644 --- a/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_move.py +++ b/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_move.py @@ -1,158 +1,166 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -# Code generated by aaz-dev-tools -# -------------------------------------------------------------------------------------------- - -# pylint: skip-file -# flake8: noqa - -from azure.cli.core.aaz import * - - -@register_command( - "automation hrwg hrw move", -) -class Move(AAZCommand): - """Move a hybrid worker to a different group. 
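Because the handler wraps _execute_operations in build_paging, the list command keeps following the response's nextLink until it is absent and concatenates each page's value array. A rough client-side equivalent, with hard-coded pages standing in for the HTTP responses:

    pages = {
        None: {"value": [{"name": "worker1"}], "nextLink": "page-2"},
        "page-2": {"value": [{"name": "worker2"}], "nextLink": None},
    }

    def fetch(token):
        # Stand-in for the GET; the real command requests the nextLink URL.
        return pages[token]

    workers, token = [], None
    while True:
        page = fetch(token)
        workers.extend(page["value"])
        token = page.get("nextLink")
        if not token:
            break
    print([w["name"] for w in workers])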
- """ - - _aaz_info = { - "version": "2021-06-22", - "resources": [ - ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/hybridrunbookworkergroups/{}/hybridrunbookworkers/{}/move", "2021-06-22"], - ] - } - - def _handler(self, command_args): - super()._handler(command_args) - self._execute_operations() - return None - - _args_schema = None - - @classmethod - def _build_arguments_schema(cls, *args, **kwargs): - if cls._args_schema is not None: - return cls._args_schema - cls._args_schema = super()._build_arguments_schema(*args, **kwargs) - - # define Arg Group "" - - _args_schema = cls._args_schema - _args_schema.automation_account_name = AAZStrArg( - options=["--automation-account-name"], - help="The name of the automation account.", - required=True, - ) - _args_schema.hybrid_runbook_worker_group_name = AAZStrArg( - options=["--hybrid-runbook-worker-group-name"], - help="The hybrid runbook worker group name", - required=True, - ) - _args_schema.target_hybrid_runbook_worker_group_name = AAZStrArg( - options=["--target-hybrid-runbook-worker-group-name"], - help="The target hybrid worker group name", - required=True, - ) - _args_schema.hybrid_runbook_worker_id = AAZStrArg( - options=["-n", "--name","--hybrid-runbook-worker-id"], - help="The hybrid runbook worker id", - required=True, - ) - _args_schema.resource_group = AAZResourceGroupNameArg( - required=True, - ) - return cls._args_schema - - def _execute_operations(self): - self.HybridRunbookWorkersMove(ctx=self.ctx)() - - class HybridRunbookWorkersMove(AAZHttpOperation): - CLIENT_TYPE = "MgmtClient" - - def __call__(self, *args, **kwargs): - request = self.make_request() - session = self.client.send_request(request=request, stream=False, **kwargs) - if session.http_response.status_code in [200]: - return self.on_200(session) - - return self.on_error(session.http_response) - - @property - def url(self): - return self.client.format_url( - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}/hybridRunbookWorkers/{hybridRunbookWorkerId}/move", - **self.url_parameters - ) - - @property - def method(self): - return "POST" - - @property - def error_format(self): - return "ODataV4Format" - - @property - def url_parameters(self): - parameters = { - **self.serialize_url_param( - "automationAccountName", self.ctx.args.automation_account_name, - required=True, - ), - **self.serialize_url_param( - "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, - required=True, - ), - **self.serialize_url_param( - "hybridRunbookWorkerId", self.ctx.args.hybrid_runbook_worker_id, - required=True, - ), - **self.serialize_url_param( - "resourceGroupName", self.ctx.args.resource_group, - required=True, - ), - **self.serialize_url_param( - "subscriptionId", self.ctx.subscription_id, - required=True, - ), - } - return parameters - - @property - def query_parameters(self): - parameters = { - **self.serialize_query_param( - "api-version", "2021-06-22", - required=True, - ), - } - return parameters - - @property - def header_parameters(self): - parameters = { - **self.serialize_header_param( - "Content-Type", "application/json", - ), - } - return parameters - - @property - def content(self): - _content_value, _builder = self.new_content_builder( - self.ctx.args, - typ=AAZObjectType, - typ_kwargs={"flags": {"required": True, "client_flatten": True}} - 
) - _builder.set_prop("hybridRunbookWorkerGroupName", AAZStrType, ".target_hybrid_runbook_worker_group_name") - - return self.serialize_content(_content_value) - - def on_200(self, session): - pass - - -__all__ = ["Move"] +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "automation hrwg hrw move", +) +class Move(AAZCommand): + """Move a hybrid worker to a different group. + + :example: Move a hybrid runbook worker to a different hybrid runbook worker group + az automation hrwg hrw move --automation-account-name accountName --resource-group groupName --hybrid-runbook-worker-group-name hybridRunbookWorkerGroupName --target-hybrid-runbook-worker-group-name targetHybridWorkerGroupName --hybrid-runbook-worker-id hybridRunbookWorkerId + """ + + _aaz_info = { + "version": "2022-08-08", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/hybridrunbookworkergroups/{}/hybridrunbookworkers/{}/move", "2022-08-08"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return None + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.automation_account_name = AAZStrArg( + options=["--automation-account-name"], + help="The name of the automation account.", + required=True, + ) + _args_schema.hybrid_runbook_worker_group_name = AAZStrArg( + options=["--hybrid-runbook-worker-group-name"], + help="The hybrid runbook worker group name", + required=True, + ) + _args_schema.hybrid_runbook_worker_id = AAZStrArg( + options=["--hybrid-runbook-worker-id"], + help="The hybrid runbook worker id", + required=True, + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.HybridRunbookWorkersMove(ctx=self.ctx)() + self.post_operations() + + # @register_callback + def pre_operations(self): + pass + + # @register_callback + def post_operations(self): + pass + + class HybridRunbookWorkersMove(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}/hybridRunbookWorkers/{hybridRunbookWorkerId}/move", + **self.url_parameters + ) + + @property + def method(self): + return "POST" + + @property + def error_format(self): + return "ODataV4Format" + + @property + def url_parameters(self): + 
parameters = { + **self.serialize_url_param( + "automationAccountName", self.ctx.args.automation_account_name, + required=True, + ), + **self.serialize_url_param( + "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, + required=True, + ), + **self.serialize_url_param( + "hybridRunbookWorkerId", self.ctx.args.hybrid_runbook_worker_id, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2022-08-08", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Content-Type", "application/json", + ), + } + return parameters + + @property + def content(self): + _content_value, _builder = self.new_content_builder( + self.ctx.args, + typ=AAZObjectType, + typ_kwargs={"flags": {"required": True, "client_flatten": True}} + ) + _builder.set_prop("hybridRunbookWorkerGroupName", AAZStrType, ".hybrid_runbook_worker_group_name") + + return self.serialize_content(_content_value) + + def on_200(self, session): + pass + + +__all__ = ["Move"] diff --git a/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_show.py b/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_show.py index af76b559722..c59aba8f436 100644 --- a/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_show.py +++ b/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_show.py @@ -1,227 +1,240 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -# Code generated by aaz-dev-tools -# -------------------------------------------------------------------------------------------- - -# pylint: skip-file -# flake8: noqa - -from azure.cli.core.aaz import * - - -@register_command( - "automation hrwg hrw show", -) -class Show(AAZCommand): - """Retrieve a hybrid runbook worker. 
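The move operation posts a one-field body naming the hybrid runbook worker group the worker should belong to after the move; roughly (the group name is a placeholder):

    import json

    move_body = {"hybridRunbookWorkerGroupName": "targetHybridWorkerGroupName"}
    print(json.dumps(move_body))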
- """ - - _aaz_info = { - "version": "2021-06-22", - "resources": [ - ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/hybridrunbookworkergroups/{}/hybridrunbookworkers/{}", "2021-06-22"], - ] - } - - def _handler(self, command_args): - super()._handler(command_args) - self._execute_operations() - return self._output() - - _args_schema = None - - @classmethod - def _build_arguments_schema(cls, *args, **kwargs): - if cls._args_schema is not None: - return cls._args_schema - cls._args_schema = super()._build_arguments_schema(*args, **kwargs) - - # define Arg Group "" - - _args_schema = cls._args_schema - _args_schema.automation_account_name = AAZStrArg( - options=["--automation-account-name"], - help="The name of the automation account.", - required=True, - id_part="name", - ) - _args_schema.hybrid_runbook_worker_group_name = AAZStrArg( - options=["--hybrid-runbook-worker-group-name"], - help="The hybrid runbook worker group name", - required=True, - id_part="child_name_1", - ) - _args_schema.hybrid_runbook_worker_id = AAZStrArg( - options=["-n", "--name", "--hybrid-runbook-worker-id"], - help="The hybrid runbook worker id", - required=True, - id_part="child_name_2", - ) - _args_schema.resource_group = AAZResourceGroupNameArg( - required=True, - ) - return cls._args_schema - - def _execute_operations(self): - self.HybridRunbookWorkersGet(ctx=self.ctx)() - - def _output(self, *args, **kwargs): - result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) - return result - - class HybridRunbookWorkersGet(AAZHttpOperation): - CLIENT_TYPE = "MgmtClient" - - def __call__(self, *args, **kwargs): - request = self.make_request() - session = self.client.send_request(request=request, stream=False, **kwargs) - if session.http_response.status_code in [200]: - return self.on_200(session) - - return self.on_error(session.http_response) - - @property - def url(self): - return self.client.format_url( - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}/hybridRunbookWorkers/{hybridRunbookWorkerId}", - **self.url_parameters - ) - - @property - def method(self): - return "GET" - - @property - def error_format(self): - return "ODataV4Format" - - @property - def url_parameters(self): - parameters = { - **self.serialize_url_param( - "automationAccountName", self.ctx.args.automation_account_name, - required=True, - ), - **self.serialize_url_param( - "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, - required=True, - ), - **self.serialize_url_param( - "hybridRunbookWorkerId", self.ctx.args.hybrid_runbook_worker_id, - required=True, - ), - **self.serialize_url_param( - "resourceGroupName", self.ctx.args.resource_group, - required=True, - ), - **self.serialize_url_param( - "subscriptionId", self.ctx.subscription_id, - required=True, - ), - } - return parameters - - @property - def query_parameters(self): - parameters = { - **self.serialize_query_param( - "api-version", "2021-06-22", - required=True, - ), - } - return parameters - - @property - def header_parameters(self): - parameters = { - **self.serialize_header_param( - "Accept", "application/json", - ), - } - return parameters - - def on_200(self, session): - data = self.deserialize_http_content(session) - self.ctx.set_var( - "instance", - data, - schema_builder=self._build_schema_on_200 - ) - - _schema_on_200 = 
None - - @classmethod - def _build_schema_on_200(cls): - if cls._schema_on_200 is not None: - return cls._schema_on_200 - - cls._schema_on_200 = AAZObjectType() - - _schema_on_200 = cls._schema_on_200 - _schema_on_200.id = AAZStrType( - flags={"read_only": True}, - ) - _schema_on_200.name = AAZStrType( - flags={"read_only": True}, - ) - _schema_on_200.properties = AAZObjectType( - flags={"client_flatten": True}, - ) - _schema_on_200.system_data = AAZObjectType( - serialized_name="systemData", - flags={"read_only": True}, - ) - _schema_on_200.type = AAZStrType( - flags={"read_only": True}, - ) - - properties = cls._schema_on_200.properties - properties.ip = AAZStrType() - properties.last_seen_date_time = AAZStrType( - serialized_name="lastSeenDateTime", - ) - properties.registered_date_time = AAZStrType( - serialized_name="registeredDateTime", - ) - properties.vm_resource_id = AAZStrType( - serialized_name="vmResourceId", - ) - properties.worker_name = AAZStrType( - serialized_name="workerName", - ) - properties.worker_type = AAZStrType( - serialized_name="workerType", - ) - - system_data = cls._schema_on_200.system_data - system_data.created_at = AAZStrType( - serialized_name="createdAt", - flags={"read_only": True}, - ) - system_data.created_by = AAZStrType( - serialized_name="createdBy", - flags={"read_only": True}, - ) - system_data.created_by_type = AAZStrType( - serialized_name="createdByType", - flags={"read_only": True}, - ) - system_data.last_modified_at = AAZStrType( - serialized_name="lastModifiedAt", - flags={"read_only": True}, - ) - system_data.last_modified_by = AAZStrType( - serialized_name="lastModifiedBy", - flags={"read_only": True}, - ) - system_data.last_modified_by_type = AAZStrType( - serialized_name="lastModifiedByType", - flags={"read_only": True}, - ) - - return cls._schema_on_200 - - -__all__ = ["Show"] +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "automation hrwg hrw show", +) +class Show(AAZCommand): + """Get a hybrid runbook worker. 
+ + :example: Get hybrid runbook worker + az automation hrwg hrw show --automation-account-name accountName --resource-group groupName --hybrid-runbook-worker-group-name hybridRunbookWorkerGroupName --hybrid-runbook-worker-id hybridRunbookWorkerId + """ + + _aaz_info = { + "version": "2022-08-08", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/hybridrunbookworkergroups/{}/hybridrunbookworkers/{}", "2022-08-08"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return self._output() + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.automation_account_name = AAZStrArg( + options=["--automation-account-name"], + help="The name of the automation account.", + required=True, + id_part="name", + ) + _args_schema.hybrid_runbook_worker_group_name = AAZStrArg( + options=["--hybrid-runbook-worker-group-name"], + help="The hybrid runbook worker group name", + required=True, + id_part="child_name_1", + ) + _args_schema.hybrid_runbook_worker_id = AAZStrArg( + options=["-n", "--name", "--hybrid-runbook-worker-id"], + help="The hybrid runbook worker id", + required=True, + id_part="child_name_2", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.HybridRunbookWorkersGet(ctx=self.ctx)() + self.post_operations() + + # @register_callback + def pre_operations(self): + pass + + # @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) + return result + + class HybridRunbookWorkersGet(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}/hybridRunbookWorkers/{hybridRunbookWorkerId}", + **self.url_parameters + ) + + @property + def method(self): + return "GET" + + @property + def error_format(self): + return "ODataV4Format" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "automationAccountName", self.ctx.args.automation_account_name, + required=True, + ), + **self.serialize_url_param( + "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, + required=True, + ), + **self.serialize_url_param( + "hybridRunbookWorkerId", self.ctx.args.hybrid_runbook_worker_id, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + 
"api-version", "2022-08-08", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + _schema_on_200.id = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.name = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.properties = AAZObjectType( + flags={"client_flatten": True}, + ) + _schema_on_200.system_data = AAZObjectType( + serialized_name="systemData", + flags={"read_only": True}, + ) + _schema_on_200.type = AAZStrType( + flags={"read_only": True}, + ) + + properties = cls._schema_on_200.properties + properties.ip = AAZStrType() + properties.last_seen_date_time = AAZStrType( + serialized_name="lastSeenDateTime", + ) + properties.registered_date_time = AAZStrType( + serialized_name="registeredDateTime", + ) + properties.vm_resource_id = AAZStrType( + serialized_name="vmResourceId", + ) + properties.worker_name = AAZStrType( + serialized_name="workerName", + ) + properties.worker_type = AAZStrType( + serialized_name="workerType", + ) + + system_data = cls._schema_on_200.system_data + system_data.created_at = AAZStrType( + serialized_name="createdAt", + flags={"read_only": True}, + ) + system_data.created_by = AAZStrType( + serialized_name="createdBy", + flags={"read_only": True}, + ) + system_data.created_by_type = AAZStrType( + serialized_name="createdByType", + flags={"read_only": True}, + ) + system_data.last_modified_at = AAZStrType( + serialized_name="lastModifiedAt", + flags={"read_only": True}, + ) + system_data.last_modified_by = AAZStrType( + serialized_name="lastModifiedBy", + flags={"read_only": True}, + ) + system_data.last_modified_by_type = AAZStrType( + serialized_name="lastModifiedByType", + flags={"read_only": True}, + ) + + return cls._schema_on_200 + + +__all__ = ["Show"] diff --git a/src/automation/azext_automation/aaz/latest/automation/python3_package/__cmd_group.py b/src/automation/azext_automation/aaz/latest/automation/python3_package/__cmd_group.py new file mode 100644 index 00000000000..5158bd13e76 --- /dev/null +++ b/src/automation/azext_automation/aaz/latest/automation/python3_package/__cmd_group.py @@ -0,0 +1,23 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command_group( + "automation python3-package", +) +class __CMDGroup(AAZCommandGroup): + """python3-package + """ + pass + + +__all__ = ["__CMDGroup"] diff --git a/src/automation/azext_automation/aaz/latest/automation/python3_package/__init__.py b/src/automation/azext_automation/aaz/latest/automation/python3_package/__init__.py new file mode 100644 index 00000000000..1f78565855b --- /dev/null +++ b/src/automation/azext_automation/aaz/latest/automation/python3_package/__init__.py @@ -0,0 +1,16 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from .__cmd_group import * +from ._create import * +from ._delete import * +from ._list import * +from ._show import * +from ._update import * diff --git a/src/automation/azext_automation/aaz/latest/automation/python3_package/_create.py b/src/automation/azext_automation/aaz/latest/automation/python3_package/_create.py new file mode 100644 index 00000000000..4961be427da --- /dev/null +++ b/src/automation/azext_automation/aaz/latest/automation/python3_package/_create.py @@ -0,0 +1,318 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "automation python3-package create", +) +class Create(AAZCommand): + """Create or Update the python 3 package identified by package name. 
+ + :example: Add Python3 Package to automation account + az automation python3-package create --automation-account-name "MyAutomationAccount" --resource-group "MyResourceGroup" --name "PackageName" --content-link "uri=https://PackageUri.com" + """ + + _aaz_info = { + "version": "2022-08-08", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/python3packages/{}", "2022-08-08"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return self._output() + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.automation_account_name = AAZStrArg( + options=["--automation-account-name"], + help="The name of the automation account.", + required=True, + id_part="name", + ) + _args_schema.package_name = AAZStrArg( + options=["-n", "--name", "--package-name"], + help="The name of python package.", + required=True, + id_part="child_name_1", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + + # define Arg Group "Parameters" + + _args_schema = cls._args_schema + _args_schema.tags = AAZDictArg( + options=["--tags"], + arg_group="Parameters", + help="Gets or sets the tags attached to the resource.", + ) + + tags = cls._args_schema.tags + tags.Element = AAZStrArg() + + # define Arg Group "Properties" + + _args_schema = cls._args_schema + _args_schema.content_link = AAZObjectArg( + options=["--content-link"], + arg_group="Properties", + help="Gets or sets the module content link.", + required=True, + ) + + content_link = cls._args_schema.content_link + content_link.content_hash = AAZObjectArg( + options=["content-hash"], + help="Gets or sets the hash.", + ) + content_link.uri = AAZStrArg( + options=["uri"], + help="Gets or sets the uri of the runbook content.", + ) + content_link.version = AAZStrArg( + options=["version"], + help="Gets or sets the version of the content.", + ) + + content_hash = cls._args_schema.content_link.content_hash + content_hash.algorithm = AAZStrArg( + options=["algorithm"], + help="Gets or sets the content hash algorithm used to hash the content.", + required=True, + ) + content_hash.value = AAZStrArg( + options=["value"], + help="Gets or sets expected hash value of the content.", + required=True, + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.Python3PackageCreateOrUpdate(ctx=self.ctx)() + self.post_operations() + + # @register_callback + def pre_operations(self): + pass + + # @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) + return result + + class Python3PackageCreateOrUpdate(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200, 201]: + return self.on_200_201(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/python3Packages/{packageName}", + **self.url_parameters + ) + + @property + def method(self): + return "PUT" + + @property + def error_format(self): + return "ODataV4Format" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "automationAccountName", self.ctx.args.automation_account_name, + required=True, + ), + **self.serialize_url_param( + "packageName", self.ctx.args.package_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2022-08-08", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Content-Type", "application/json", + ), + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + @property + def content(self): + _content_value, _builder = self.new_content_builder( + self.ctx.args, + typ=AAZObjectType, + typ_kwargs={"flags": {"required": True, "client_flatten": True}} + ) + _builder.set_prop("properties", AAZObjectType, ".", typ_kwargs={"flags": {"required": True, "client_flatten": True}}) + _builder.set_prop("tags", AAZDictType, ".tags") + + properties = _builder.get(".properties") + if properties is not None: + properties.set_prop("contentLink", AAZObjectType, ".content_link", typ_kwargs={"flags": {"required": True}}) + + content_link = _builder.get(".properties.contentLink") + if content_link is not None: + content_link.set_prop("contentHash", AAZObjectType, ".content_hash") + content_link.set_prop("uri", AAZStrType, ".uri") + content_link.set_prop("version", AAZStrType, ".version") + + content_hash = _builder.get(".properties.contentLink.contentHash") + if content_hash is not None: + content_hash.set_prop("algorithm", AAZStrType, ".algorithm", typ_kwargs={"flags": {"required": True}}) + content_hash.set_prop("value", AAZStrType, ".value", typ_kwargs={"flags": {"required": True}}) + + tags = _builder.get(".tags") + if tags is not None: + tags.set_elements(AAZStrType, ".") + + return self.serialize_content(_content_value) + + def on_200_201(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200_201 + ) + + _schema_on_200_201 = None + + @classmethod + def _build_schema_on_200_201(cls): + if cls._schema_on_200_201 is not None: + return cls._schema_on_200_201 + + cls._schema_on_200_201 = AAZObjectType() + + _schema_on_200_201 = cls._schema_on_200_201 + _schema_on_200_201.etag = AAZStrType() + _schema_on_200_201.id = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200_201.location = AAZStrType() + _schema_on_200_201.name = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200_201.properties = AAZObjectType( + flags={"client_flatten": True}, + ) + _schema_on_200_201.tags = AAZDictType() + _schema_on_200_201.type = AAZStrType( + flags={"read_only": True}, + ) + + properties = cls._schema_on_200_201.properties + properties.activity_count = AAZIntType( + serialized_name="activityCount", + ) + properties.content_link = AAZObjectType( + 
serialized_name="contentLink", + ) + properties.creation_time = AAZStrType( + serialized_name="creationTime", + ) + properties.description = AAZStrType() + properties.error = AAZObjectType() + properties.is_composite = AAZBoolType( + serialized_name="isComposite", + ) + properties.is_global = AAZBoolType( + serialized_name="isGlobal", + ) + properties.last_modified_time = AAZStrType( + serialized_name="lastModifiedTime", + ) + properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + properties.size_in_bytes = AAZIntType( + serialized_name="sizeInBytes", + ) + properties.version = AAZStrType() + + content_link = cls._schema_on_200_201.properties.content_link + content_link.content_hash = AAZObjectType( + serialized_name="contentHash", + ) + content_link.uri = AAZStrType() + content_link.version = AAZStrType() + + content_hash = cls._schema_on_200_201.properties.content_link.content_hash + content_hash.algorithm = AAZStrType( + flags={"required": True}, + ) + content_hash.value = AAZStrType( + flags={"required": True}, + ) + + error = cls._schema_on_200_201.properties.error + error.code = AAZStrType() + error.message = AAZStrType() + + tags = cls._schema_on_200_201.tags + tags.Element = AAZStrType() + + return cls._schema_on_200_201 + + +__all__ = ["Create"] diff --git a/src/automation/azext_automation/aaz/latest/automation/python3_package/_delete.py b/src/automation/azext_automation/aaz/latest/automation/python3_package/_delete.py new file mode 100644 index 00000000000..22429c1a4e7 --- /dev/null +++ b/src/automation/azext_automation/aaz/latest/automation/python3_package/_delete.py @@ -0,0 +1,145 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "automation python3-package delete", + confirmation="Are you sure you want to perform this operation?", +) +class Delete(AAZCommand): + """Delete the python 3 package by name. 
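Since --content-link is an object argument, shorthand such as uri=https://PackageUri.com expands into a nested contentLink object in the PUT body, with version and contentHash as optional siblings. An approximation of the body the python3-package create command sends (URL and hash values are placeholders):

    import json

    def build_package_body(uri, version=None, algorithm=None, hash_value=None, tags=None):
        content_link = {"uri": uri}
        if version is not None:
            content_link["version"] = version
        if algorithm is not None and hash_value is not None:
            content_link["contentHash"] = {"algorithm": algorithm, "value": hash_value}
        body = {"properties": {"contentLink": content_link}}
        if tags:
            body["tags"] = dict(tags)
        return body

    print(json.dumps(build_package_body("https://PackageUri.com"), indent=2))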
+ + :example: Delete Python3 Package by Name + az automation python3-package delete --automation-account-name "MyAutomationAccount" --resource-group "MyResourceGroup" --name "PackageName" + """ + + _aaz_info = { + "version": "2022-08-08", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/python3packages/{}", "2022-08-08"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return None + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.automation_account_name = AAZStrArg( + options=["--automation-account-name"], + help="The name of the automation account.", + required=True, + id_part="name", + ) + _args_schema.package_name = AAZStrArg( + options=["-n", "--name", "--package-name"], + help="The python package name.", + required=True, + id_part="child_name_1", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.Python3PackageDelete(ctx=self.ctx)() + self.post_operations() + + # @register_callback + def pre_operations(self): + pass + + # @register_callback + def post_operations(self): + pass + + class Python3PackageDelete(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + if session.http_response.status_code in [204]: + return self.on_204(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/python3Packages/{packageName}", + **self.url_parameters + ) + + @property + def method(self): + return "DELETE" + + @property + def error_format(self): + return "ODataV4Format" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "automationAccountName", self.ctx.args.automation_account_name, + required=True, + ), + **self.serialize_url_param( + "packageName", self.ctx.args.package_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2022-08-08", + required=True, + ), + } + return parameters + + def on_200(self, session): + pass + + def on_204(self, session): + pass + + +__all__ = ["Delete"] diff --git a/src/automation/azext_automation/aaz/latest/automation/python3_package/_list.py b/src/automation/azext_automation/aaz/latest/automation/python3_package/_list.py new file mode 100644 index 00000000000..5a735a0c4de --- /dev/null +++ b/src/automation/azext_automation/aaz/latest/automation/python3_package/_list.py @@ -0,0 +1,234 @@ +# -------------------------------------------------------------------------------------------- +# Copyright 
(c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "automation python3-package list", +) +class List(AAZCommand): + """Retrieve a list of python 3 packages. + + :example: List all Custom Python3Package in AutomationAccount + az automation python3-package list --automation-account-name "MyAutomationAccount" --resource-group "MyResourceGroup" + """ + + _aaz_info = { + "version": "2022-08-08", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/python3packages", "2022-08-08"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + return self.build_paging(self._execute_operations, self._output) + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.automation_account_name = AAZStrArg( + options=["--automation-account-name"], + help="The name of the automation account.", + required=True, + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.Python3PackageListByAutomationAccount(ctx=self.ctx)() + self.post_operations() + + # @register_callback + def pre_operations(self): + pass + + # @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance.value, client_flatten=True) + next_link = self.deserialize_output(self.ctx.vars.instance.next_link) + return result, next_link + + class Python3PackageListByAutomationAccount(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/python3Packages", + **self.url_parameters + ) + + @property + def method(self): + return "GET" + + @property + def error_format(self): + return "ODataV4Format" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "automationAccountName", self.ctx.args.automation_account_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2022-08-08", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + def 
on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + _schema_on_200.next_link = AAZStrType( + serialized_name="nextLink", + ) + _schema_on_200.value = AAZListType() + + value = cls._schema_on_200.value + value.Element = AAZObjectType() + + _element = cls._schema_on_200.value.Element + _element.etag = AAZStrType() + _element.id = AAZStrType( + flags={"read_only": True}, + ) + _element.location = AAZStrType() + _element.name = AAZStrType( + flags={"read_only": True}, + ) + _element.properties = AAZObjectType( + flags={"client_flatten": True}, + ) + _element.tags = AAZDictType() + _element.type = AAZStrType( + flags={"read_only": True}, + ) + + properties = cls._schema_on_200.value.Element.properties + properties.activity_count = AAZIntType( + serialized_name="activityCount", + ) + properties.content_link = AAZObjectType( + serialized_name="contentLink", + ) + properties.creation_time = AAZStrType( + serialized_name="creationTime", + ) + properties.description = AAZStrType() + properties.error = AAZObjectType() + properties.is_composite = AAZBoolType( + serialized_name="isComposite", + ) + properties.is_global = AAZBoolType( + serialized_name="isGlobal", + ) + properties.last_modified_time = AAZStrType( + serialized_name="lastModifiedTime", + ) + properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + properties.size_in_bytes = AAZIntType( + serialized_name="sizeInBytes", + ) + properties.version = AAZStrType() + + content_link = cls._schema_on_200.value.Element.properties.content_link + content_link.content_hash = AAZObjectType( + serialized_name="contentHash", + ) + content_link.uri = AAZStrType() + content_link.version = AAZStrType() + + content_hash = cls._schema_on_200.value.Element.properties.content_link.content_hash + content_hash.algorithm = AAZStrType( + flags={"required": True}, + ) + content_hash.value = AAZStrType( + flags={"required": True}, + ) + + error = cls._schema_on_200.value.Element.properties.error + error.code = AAZStrType() + error.message = AAZStrType() + + tags = cls._schema_on_200.value.Element.tags + tags.Element = AAZStrType() + + return cls._schema_on_200 + + +__all__ = ["List"] diff --git a/src/automation/azext_automation/aaz/latest/automation/python3_package/_show.py b/src/automation/azext_automation/aaz/latest/automation/python3_package/_show.py new file mode 100644 index 00000000000..b27df9d4ee4 --- /dev/null +++ b/src/automation/azext_automation/aaz/latest/automation/python3_package/_show.py @@ -0,0 +1,236 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "automation python3-package show", +) +class Show(AAZCommand): + """Retrieve the python 3 package identified by package name. 
+ + :example: Get Python3Package by Name + az automation python3-package show --automation-account-name "MyAutomationAccount" --resource-group "MyResourceGroup" --name "PackageName" + """ + + _aaz_info = { + "version": "2022-08-08", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/python3packages/{}", "2022-08-08"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return self._output() + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.automation_account_name = AAZStrArg( + options=["--automation-account-name"], + help="The name of the automation account.", + required=True, + id_part="name", + ) + _args_schema.package_name = AAZStrArg( + options=["-n", "--name", "--package-name"], + help="The python package name.", + required=True, + id_part="child_name_1", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.Python3PackageGet(ctx=self.ctx)() + self.post_operations() + + # @register_callback + def pre_operations(self): + pass + + # @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) + return result + + class Python3PackageGet(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/python3Packages/{packageName}", + **self.url_parameters + ) + + @property + def method(self): + return "GET" + + @property + def error_format(self): + return "ODataV4Format" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "automationAccountName", self.ctx.args.automation_account_name, + required=True, + ), + **self.serialize_url_param( + "packageName", self.ctx.args.package_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2022-08-08", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = 
AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + _schema_on_200.etag = AAZStrType() + _schema_on_200.id = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.location = AAZStrType() + _schema_on_200.name = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.properties = AAZObjectType( + flags={"client_flatten": True}, + ) + _schema_on_200.tags = AAZDictType() + _schema_on_200.type = AAZStrType( + flags={"read_only": True}, + ) + + properties = cls._schema_on_200.properties + properties.activity_count = AAZIntType( + serialized_name="activityCount", + ) + properties.content_link = AAZObjectType( + serialized_name="contentLink", + ) + properties.creation_time = AAZStrType( + serialized_name="creationTime", + ) + properties.description = AAZStrType() + properties.error = AAZObjectType() + properties.is_composite = AAZBoolType( + serialized_name="isComposite", + ) + properties.is_global = AAZBoolType( + serialized_name="isGlobal", + ) + properties.last_modified_time = AAZStrType( + serialized_name="lastModifiedTime", + ) + properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + properties.size_in_bytes = AAZIntType( + serialized_name="sizeInBytes", + ) + properties.version = AAZStrType() + + content_link = cls._schema_on_200.properties.content_link + content_link.content_hash = AAZObjectType( + serialized_name="contentHash", + ) + content_link.uri = AAZStrType() + content_link.version = AAZStrType() + + content_hash = cls._schema_on_200.properties.content_link.content_hash + content_hash.algorithm = AAZStrType( + flags={"required": True}, + ) + content_hash.value = AAZStrType( + flags={"required": True}, + ) + + error = cls._schema_on_200.properties.error + error.code = AAZStrType() + error.message = AAZStrType() + + tags = cls._schema_on_200.tags + tags.Element = AAZStrType() + + return cls._schema_on_200 + + +__all__ = ["Show"] diff --git a/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_update.py b/src/automation/azext_automation/aaz/latest/automation/python3_package/_update.py similarity index 51% rename from src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_update.py rename to src/automation/azext_automation/aaz/latest/automation/python3_package/_update.py index 50fa1af063f..00f07edef7b 100644 --- a/src/automation/azext_automation/aaz/latest/automation/hrwg/hrw/_update.py +++ b/src/automation/azext_automation/aaz/latest/automation/python3_package/_update.py @@ -1,396 +1,471 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -# Code generated by aaz-dev-tools -# -------------------------------------------------------------------------------------------- - -# pylint: skip-file -# flake8: noqa - -from azure.cli.core.aaz import * - - -@register_command( - "automation hrwg hrw update", -) -class Update(AAZCommand): - """Create a hybrid runbook worker. 
- """ - - _aaz_info = { - "version": "2021-06-22", - "resources": [ - ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/hybridrunbookworkergroups/{}/hybridrunbookworkers/{}", "2021-06-22"], - ] - } - - AZ_SUPPORT_GENERIC_UPDATE = True - - def _handler(self, command_args): - super()._handler(command_args) - self._execute_operations() - return self._output() - - _args_schema = None - - @classmethod - def _build_arguments_schema(cls, *args, **kwargs): - if cls._args_schema is not None: - return cls._args_schema - cls._args_schema = super()._build_arguments_schema(*args, **kwargs) - - # define Arg Group "" - - _args_schema = cls._args_schema - _args_schema.automation_account_name = AAZStrArg( - options=["--automation-account-name"], - help="The name of the automation account.", - required=True, - id_part="name", - ) - _args_schema.hybrid_runbook_worker_group_name = AAZStrArg( - options=["--hybrid-runbook-worker-group-name"], - help="The hybrid runbook worker group name", - required=True, - id_part="child_name_1", - ) - _args_schema.hybrid_runbook_worker_id = AAZStrArg( - options=["-n", "--name", "--hybrid-runbook-worker-id"], - help="The hybrid runbook worker id", - required=True, - id_part="child_name_2", - ) - _args_schema.resource_group = AAZResourceGroupNameArg( - required=True, - ) - - # define Arg Group "Properties" - - _args_schema = cls._args_schema - _args_schema.vm_resource_id = AAZStrArg( - options=["--vm-resource-id"], - arg_group="Properties", - help="Azure Resource Manager Id for a virtual machine.", - nullable=True, - ) - return cls._args_schema - - def _execute_operations(self): - self.HybridRunbookWorkersGet(ctx=self.ctx)() - self.InstanceUpdateByJson(ctx=self.ctx)() - self.InstanceUpdateByGeneric(ctx=self.ctx)() - self.HybridRunbookWorkersCreate(ctx=self.ctx)() - - def _output(self, *args, **kwargs): - result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) - return result - - class HybridRunbookWorkersGet(AAZHttpOperation): - CLIENT_TYPE = "MgmtClient" - - def __call__(self, *args, **kwargs): - request = self.make_request() - session = self.client.send_request(request=request, stream=False, **kwargs) - if session.http_response.status_code in [200]: - return self.on_200(session) - - return self.on_error(session.http_response) - - @property - def url(self): - return self.client.format_url( - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}/hybridRunbookWorkers/{hybridRunbookWorkerId}", - **self.url_parameters - ) - - @property - def method(self): - return "GET" - - @property - def error_format(self): - return "ODataV4Format" - - @property - def url_parameters(self): - parameters = { - **self.serialize_url_param( - "automationAccountName", self.ctx.args.automation_account_name, - required=True, - ), - **self.serialize_url_param( - "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, - required=True, - ), - **self.serialize_url_param( - "hybridRunbookWorkerId", self.ctx.args.hybrid_runbook_worker_id, - required=True, - ), - **self.serialize_url_param( - "resourceGroupName", self.ctx.args.resource_group, - required=True, - ), - **self.serialize_url_param( - "subscriptionId", self.ctx.subscription_id, - required=True, - ), - } - return parameters - - @property - def query_parameters(self): - parameters = { - 
**self.serialize_query_param( - "api-version", "2021-06-22", - required=True, - ), - } - return parameters - - @property - def header_parameters(self): - parameters = { - **self.serialize_header_param( - "Accept", "application/json", - ), - } - return parameters - - def on_200(self, session): - data = self.deserialize_http_content(session) - self.ctx.set_var( - "instance", - data, - schema_builder=self._build_schema_on_200 - ) - - _schema_on_200 = None - - @classmethod - def _build_schema_on_200(cls): - if cls._schema_on_200 is not None: - return cls._schema_on_200 - - cls._schema_on_200 = AAZObjectType() - _build_schema_hybrid_runbook_worker_read(cls._schema_on_200) - - return cls._schema_on_200 - - class HybridRunbookWorkersCreate(AAZHttpOperation): - CLIENT_TYPE = "MgmtClient" - - def __call__(self, *args, **kwargs): - request = self.make_request() - session = self.client.send_request(request=request, stream=False, **kwargs) - if session.http_response.status_code in [200]: - return self.on_200(session) - - return self.on_error(session.http_response) - - @property - def url(self): - return self.client.format_url( - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/hybridRunbookWorkerGroups/{hybridRunbookWorkerGroupName}/hybridRunbookWorkers/{hybridRunbookWorkerId}", - **self.url_parameters - ) - - @property - def method(self): - return "PUT" - - @property - def error_format(self): - return "ODataV4Format" - - @property - def url_parameters(self): - parameters = { - **self.serialize_url_param( - "automationAccountName", self.ctx.args.automation_account_name, - required=True, - ), - **self.serialize_url_param( - "hybridRunbookWorkerGroupName", self.ctx.args.hybrid_runbook_worker_group_name, - required=True, - ), - **self.serialize_url_param( - "hybridRunbookWorkerId", self.ctx.args.hybrid_runbook_worker_id, - required=True, - ), - **self.serialize_url_param( - "resourceGroupName", self.ctx.args.resource_group, - required=True, - ), - **self.serialize_url_param( - "subscriptionId", self.ctx.subscription_id, - required=True, - ), - } - return parameters - - @property - def query_parameters(self): - parameters = { - **self.serialize_query_param( - "api-version", "2021-06-22", - required=True, - ), - } - return parameters - - @property - def header_parameters(self): - parameters = { - **self.serialize_header_param( - "Content-Type", "application/json", - ), - **self.serialize_header_param( - "Accept", "application/json", - ), - } - return parameters - - @property - def content(self): - _content_value, _builder = self.new_content_builder( - self.ctx.args, - value=self.ctx.vars.instance, - ) - - return self.serialize_content(_content_value) - - def on_200(self, session): - data = self.deserialize_http_content(session) - self.ctx.set_var( - "instance", - data, - schema_builder=self._build_schema_on_200 - ) - - _schema_on_200 = None - - @classmethod - def _build_schema_on_200(cls): - if cls._schema_on_200 is not None: - return cls._schema_on_200 - - cls._schema_on_200 = AAZObjectType() - _build_schema_hybrid_runbook_worker_read(cls._schema_on_200) - - return cls._schema_on_200 - - class InstanceUpdateByJson(AAZJsonInstanceUpdateOperation): - - def __call__(self, *args, **kwargs): - self._update_instance(self.ctx.vars.instance) - - def _update_instance(self, instance): - _instance_value, _builder = self.new_content_builder( - self.ctx.args, - value=instance, - typ=AAZObjectType - ) - 
_builder.set_prop("name", AAZStrType, ".hybrid_runbook_worker_id") - _builder.set_prop("properties", AAZObjectType, ".", typ_kwargs={"flags": {"required": True, "client_flatten": True}}) - - properties = _builder.get(".properties") - if properties is not None: - properties.set_prop("vmResourceId", AAZStrType, ".vm_resource_id") - - return _instance_value - - class InstanceUpdateByGeneric(AAZGenericInstanceUpdateOperation): - - def __call__(self, *args, **kwargs): - self._update_instance_by_generic( - self.ctx.vars.instance, - self.ctx.generic_update_args - ) - - -_schema_hybrid_runbook_worker_read = None - - -def _build_schema_hybrid_runbook_worker_read(_schema): - global _schema_hybrid_runbook_worker_read - if _schema_hybrid_runbook_worker_read is not None: - _schema.id = _schema_hybrid_runbook_worker_read.id - _schema.name = _schema_hybrid_runbook_worker_read.name - _schema.properties = _schema_hybrid_runbook_worker_read.properties - _schema.system_data = _schema_hybrid_runbook_worker_read.system_data - _schema.type = _schema_hybrid_runbook_worker_read.type - return - - _schema_hybrid_runbook_worker_read = AAZObjectType() - - hybrid_runbook_worker_read = _schema_hybrid_runbook_worker_read - hybrid_runbook_worker_read.id = AAZStrType( - flags={"read_only": True}, - ) - hybrid_runbook_worker_read.name = AAZStrType( - flags={"read_only": True}, - ) - hybrid_runbook_worker_read.properties = AAZObjectType( - flags={"client_flatten": True}, - ) - hybrid_runbook_worker_read.system_data = AAZObjectType( - serialized_name="systemData", - flags={"read_only": True}, - ) - hybrid_runbook_worker_read.type = AAZStrType( - flags={"read_only": True}, - ) - - properties = _schema_hybrid_runbook_worker_read.properties - properties.ip = AAZStrType() - properties.last_seen_date_time = AAZStrType( - serialized_name="lastSeenDateTime", - ) - properties.registered_date_time = AAZStrType( - serialized_name="registeredDateTime", - ) - properties.vm_resource_id = AAZStrType( - serialized_name="vmResourceId", - ) - properties.worker_name = AAZStrType( - serialized_name="workerName", - ) - properties.worker_type = AAZStrType( - serialized_name="workerType", - ) - - system_data = _schema_hybrid_runbook_worker_read.system_data - system_data.created_at = AAZStrType( - serialized_name="createdAt", - flags={"read_only": True}, - ) - system_data.created_by = AAZStrType( - serialized_name="createdBy", - flags={"read_only": True}, - ) - system_data.created_by_type = AAZStrType( - serialized_name="createdByType", - flags={"read_only": True}, - ) - system_data.last_modified_at = AAZStrType( - serialized_name="lastModifiedAt", - flags={"read_only": True}, - ) - system_data.last_modified_by = AAZStrType( - serialized_name="lastModifiedBy", - flags={"read_only": True}, - ) - system_data.last_modified_by_type = AAZStrType( - serialized_name="lastModifiedByType", - flags={"read_only": True}, - ) - - _schema.id = _schema_hybrid_runbook_worker_read.id - _schema.name = _schema_hybrid_runbook_worker_read.name - _schema.properties = _schema_hybrid_runbook_worker_read.properties - _schema.system_data = _schema_hybrid_runbook_worker_read.system_data - _schema.type = _schema_hybrid_runbook_worker_read.type - - -__all__ = ["Update"] +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "automation python3-package update", +) +class Update(AAZCommand): + """Create or Update the python 3 package identified by package name. + + :example: Update Python3Package by Name + az automation python3-package update --automation-account-name "MyAutomationAccount" --resource-group "MyResourceGroup" --name "PackageName" --content-link "uri=https://PackageUri.com" + """ + + _aaz_info = { + "version": "2022-08-08", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.automation/automationaccounts/{}/python3packages/{}", "2022-08-08"], + ] + } + + AZ_SUPPORT_GENERIC_UPDATE = True + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return self._output() + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.automation_account_name = AAZStrArg( + options=["--automation-account-name"], + help="The name of the automation account.", + required=True, + id_part="name", + ) + _args_schema.package_name = AAZStrArg( + options=["-n", "--name", "--package-name"], + help="The python package name.", + required=True, + id_part="child_name_1", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + + # define Arg Group "Parameters" + + _args_schema = cls._args_schema + _args_schema.tags = AAZDictArg( + options=["--tags"], + arg_group="Parameters", + help="Gets or sets the tags attached to the resource.", + nullable=True, + ) + + tags = cls._args_schema.tags + tags.Element = AAZStrArg( + nullable=True, + ) + + # define Arg Group "Properties" + + _args_schema = cls._args_schema + _args_schema.content_link = AAZObjectArg( + options=["--content-link"], + arg_group="Properties", + help="Gets or sets the module content link.", + ) + + content_link = cls._args_schema.content_link + content_link.content_hash = AAZObjectArg( + options=["content-hash"], + help="Gets or sets the hash.", + nullable=True, + ) + content_link.uri = AAZStrArg( + options=["uri"], + help="Gets or sets the uri of the runbook content.", + nullable=True, + ) + content_link.version = AAZStrArg( + options=["version"], + help="Gets or sets the version of the content.", + nullable=True, + ) + + content_hash = cls._args_schema.content_link.content_hash + content_hash.algorithm = AAZStrArg( + options=["algorithm"], + help="Gets or sets the content hash algorithm used to hash the content.", + ) + content_hash.value = AAZStrArg( + options=["value"], + help="Gets or sets expected hash value of the content.", + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.Python3PackageGet(ctx=self.ctx)() + self.pre_instance_update(self.ctx.vars.instance) + self.InstanceUpdateByJson(ctx=self.ctx)() + self.InstanceUpdateByGeneric(ctx=self.ctx)() + self.post_instance_update(self.ctx.vars.instance) + self.Python3PackageCreateOrUpdate(ctx=self.ctx)() + self.post_operations() + + # @register_callback + def pre_operations(self): + pass + + # @register_callback + def post_operations(self): + pass + + # @register_callback + def 
pre_instance_update(self, instance): + pass + + # @register_callback + def post_instance_update(self, instance): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) + return result + + class Python3PackageGet(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/python3Packages/{packageName}", + **self.url_parameters + ) + + @property + def method(self): + return "GET" + + @property + def error_format(self): + return "ODataV4Format" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "automationAccountName", self.ctx.args.automation_account_name, + required=True, + ), + **self.serialize_url_param( + "packageName", self.ctx.args.package_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2022-08-08", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + _build_schema_module_read(cls._schema_on_200) + + return cls._schema_on_200 + + class Python3PackageCreateOrUpdate(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200, 201]: + return self.on_200_201(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/python3Packages/{packageName}", + **self.url_parameters + ) + + @property + def method(self): + return "PUT" + + @property + def error_format(self): + return "ODataV4Format" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "automationAccountName", self.ctx.args.automation_account_name, + required=True, + ), + **self.serialize_url_param( + "packageName", self.ctx.args.package_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + 
parameters = { + **self.serialize_query_param( + "api-version", "2022-08-08", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Content-Type", "application/json", + ), + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + @property + def content(self): + _content_value, _builder = self.new_content_builder( + self.ctx.args, + value=self.ctx.vars.instance, + ) + + return self.serialize_content(_content_value) + + def on_200_201(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200_201 + ) + + _schema_on_200_201 = None + + @classmethod + def _build_schema_on_200_201(cls): + if cls._schema_on_200_201 is not None: + return cls._schema_on_200_201 + + cls._schema_on_200_201 = AAZObjectType() + _build_schema_module_read(cls._schema_on_200_201) + + return cls._schema_on_200_201 + + class InstanceUpdateByJson(AAZJsonInstanceUpdateOperation): + + def __call__(self, *args, **kwargs): + self._update_instance(self.ctx.vars.instance) + + def _update_instance(self, instance): + _instance_value, _builder = self.new_content_builder( + self.ctx.args, + value=instance, + typ=AAZObjectType + ) + _builder.set_prop("properties", AAZObjectType, ".", typ_kwargs={"flags": {"required": True, "client_flatten": True}}) + _builder.set_prop("tags", AAZDictType, ".tags") + + properties = _builder.get(".properties") + if properties is not None: + properties.set_prop("contentLink", AAZObjectType, ".content_link", typ_kwargs={"flags": {"required": True}}) + + content_link = _builder.get(".properties.contentLink") + if content_link is not None: + content_link.set_prop("contentHash", AAZObjectType, ".content_hash") + content_link.set_prop("uri", AAZStrType, ".uri") + content_link.set_prop("version", AAZStrType, ".version") + + content_hash = _builder.get(".properties.contentLink.contentHash") + if content_hash is not None: + content_hash.set_prop("algorithm", AAZStrType, ".algorithm", typ_kwargs={"flags": {"required": True}}) + content_hash.set_prop("value", AAZStrType, ".value", typ_kwargs={"flags": {"required": True}}) + + tags = _builder.get(".tags") + if tags is not None: + tags.set_elements(AAZStrType, ".") + + return _instance_value + + class InstanceUpdateByGeneric(AAZGenericInstanceUpdateOperation): + + def __call__(self, *args, **kwargs): + self._update_instance_by_generic( + self.ctx.vars.instance, + self.ctx.generic_update_args + ) + + +_schema_module_read = None + + +def _build_schema_module_read(_schema): + global _schema_module_read + if _schema_module_read is not None: + _schema.etag = _schema_module_read.etag + _schema.id = _schema_module_read.id + _schema.location = _schema_module_read.location + _schema.name = _schema_module_read.name + _schema.properties = _schema_module_read.properties + _schema.tags = _schema_module_read.tags + _schema.type = _schema_module_read.type + return + + _schema_module_read = AAZObjectType() + + module_read = _schema_module_read + module_read.etag = AAZStrType() + module_read.id = AAZStrType( + flags={"read_only": True}, + ) + module_read.location = AAZStrType() + module_read.name = AAZStrType( + flags={"read_only": True}, + ) + module_read.properties = AAZObjectType( + flags={"client_flatten": True}, + ) + module_read.tags = AAZDictType() + module_read.type = AAZStrType( + flags={"read_only": True}, + ) + + properties = _schema_module_read.properties 
+ properties.activity_count = AAZIntType( + serialized_name="activityCount", + ) + properties.content_link = AAZObjectType( + serialized_name="contentLink", + ) + properties.creation_time = AAZStrType( + serialized_name="creationTime", + ) + properties.description = AAZStrType() + properties.error = AAZObjectType() + properties.is_composite = AAZBoolType( + serialized_name="isComposite", + ) + properties.is_global = AAZBoolType( + serialized_name="isGlobal", + ) + properties.last_modified_time = AAZStrType( + serialized_name="lastModifiedTime", + ) + properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + properties.size_in_bytes = AAZIntType( + serialized_name="sizeInBytes", + ) + properties.version = AAZStrType() + + content_link = _schema_module_read.properties.content_link + content_link.content_hash = AAZObjectType( + serialized_name="contentHash", + ) + content_link.uri = AAZStrType() + content_link.version = AAZStrType() + + content_hash = _schema_module_read.properties.content_link.content_hash + content_hash.algorithm = AAZStrType( + flags={"required": True}, + ) + content_hash.value = AAZStrType( + flags={"required": True}, + ) + + error = _schema_module_read.properties.error + error.code = AAZStrType() + error.message = AAZStrType() + + tags = _schema_module_read.tags + tags.Element = AAZStrType() + + _schema.etag = _schema_module_read.etag + _schema.id = _schema_module_read.id + _schema.location = _schema_module_read.location + _schema.name = _schema_module_read.name + _schema.properties = _schema_module_read.properties + _schema.tags = _schema_module_read.tags + _schema.type = _schema_module_read.type + + +__all__ = ["Update"] diff --git a/src/automation/azext_automation/azext_metadata.json b/src/automation/azext_automation/azext_metadata.json index a0d4f25e0f3..a3beef2a804 100644 --- a/src/automation/azext_automation/azext_metadata.json +++ b/src/automation/azext_automation/azext_metadata.json @@ -1,4 +1,4 @@ -{ - "azext.isExperimental": true, - "azext.minCliCoreVersion": "2.39.0" +{ + "azext.isExperimental": true, + "azext.minCliCoreVersion": "2.40.0" } \ No newline at end of file diff --git a/src/automation/azext_automation/tests/latest/recordings/test_automation.yaml b/src/automation/azext_automation/tests/latest/recordings/test_automation.yaml index 4600c0ab6f4..2dc956b1abb 100644 --- a/src/automation/azext_automation/tests/latest/recordings/test_automation.yaml +++ b/src/automation/azext_automation/tests/latest/recordings/test_automation.yaml @@ -1,1188 +1,1801 @@ -interactions: -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - unknown - Connection: - - keep-alive - User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/locations?api-version=2019-11-01 - response: - body: - string: "{\"value\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\",\"name\":\"eastus\",\"displayName\":\"East - US\",\"regionalDisplayName\":\"(US) East 
US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-79.8164\",\"latitude\":\"37.3719\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\":\"westus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2\",\"name\":\"eastus2\",\"displayName\":\"East - US 2\",\"regionalDisplayName\":\"(US) East US 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-78.3889\",\"latitude\":\"36.6681\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\":\"centralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\",\"name\":\"southcentralus\",\"displayName\":\"South - Central US\",\"regionalDisplayName\":\"(US) South Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-98.5\",\"latitude\":\"29.4167\",\"physicalLocation\":\"Texas\",\"pairedRegion\":[{\"name\":\"northcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2\",\"name\":\"westus2\",\"displayName\":\"West - US 2\",\"regionalDisplayName\":\"(US) West US 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-119.852\",\"latitude\":\"47.233\",\"physicalLocation\":\"Washington\",\"pairedRegion\":[{\"name\":\"westcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus3\",\"name\":\"westus3\",\"displayName\":\"West - US 3\",\"regionalDisplayName\":\"(US) West US 3\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-112.074036\",\"latitude\":\"33.448376\",\"physicalLocation\":\"Phoenix\",\"pairedRegion\":[{\"name\":\"eastus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast\",\"name\":\"australiaeast\",\"displayName\":\"Australia - East\",\"regionalDisplayName\":\"(Asia Pacific) Australia East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia - Pacific\",\"longitude\":\"151.2094\",\"latitude\":\"-33.86\",\"physicalLocation\":\"New - South Wales\",\"pairedRegion\":[{\"name\":\"australiasoutheast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia\",\"name\":\"southeastasia\",\"displayName\":\"Southeast - Asia\",\"regionalDisplayName\":\"(Asia Pacific) Southeast Asia\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia - 
Pacific\",\"longitude\":\"103.833\",\"latitude\":\"1.283\",\"physicalLocation\":\"Singapore\",\"pairedRegion\":[{\"name\":\"eastasia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope\",\"name\":\"northeurope\",\"displayName\":\"North - Europe\",\"regionalDisplayName\":\"(Europe) North Europe\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"-6.2597\",\"latitude\":\"53.3478\",\"physicalLocation\":\"Ireland\",\"pairedRegion\":[{\"name\":\"westeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedencentral\",\"name\":\"swedencentral\",\"displayName\":\"Sweden - Central\",\"regionalDisplayName\":\"(Europe) Sweden Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"17.14127\",\"latitude\":\"60.67488\",\"physicalLocation\":\"G\xE4vle\",\"pairedRegion\":[{\"name\":\"swedensouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedensouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth\",\"name\":\"uksouth\",\"displayName\":\"UK - South\",\"regionalDisplayName\":\"(Europe) UK South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"-0.799\",\"latitude\":\"50.941\",\"physicalLocation\":\"London\",\"pairedRegion\":[{\"name\":\"ukwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\",\"name\":\"westeurope\",\"displayName\":\"West - Europe\",\"regionalDisplayName\":\"(Europe) West Europe\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"4.9\",\"latitude\":\"52.3667\",\"physicalLocation\":\"Netherlands\",\"pairedRegion\":[{\"name\":\"northeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus\",\"name\":\"centralus\",\"displayName\":\"Central - US\",\"regionalDisplayName\":\"(US) Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-93.6208\",\"latitude\":\"41.5908\",\"physicalLocation\":\"Iowa\",\"pairedRegion\":[{\"name\":\"eastus2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth\",\"name\":\"southafricanorth\",\"displayName\":\"South - Africa North\",\"regionalDisplayName\":\"(Africa) South Africa North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Africa\",\"longitude\":\"28.218370\",\"latitude\":\"-25.731340\",\"physicalLocation\":\"Johannesburg\",\"pairedRegion\":[{\"name\":\"southafricawest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia\",\"name\":\"centralindia\",\"displayName\":\"Central - India\",\"regionalDisplayName\":\"(Asia Pacific) Central 
India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia - Pacific\",\"longitude\":\"73.9197\",\"latitude\":\"18.5822\",\"physicalLocation\":\"Pune\",\"pairedRegion\":[{\"name\":\"southindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia\",\"name\":\"eastasia\",\"displayName\":\"East - Asia\",\"regionalDisplayName\":\"(Asia Pacific) East Asia\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia - Pacific\",\"longitude\":\"114.188\",\"latitude\":\"22.267\",\"physicalLocation\":\"Hong - Kong\",\"pairedRegion\":[{\"name\":\"southeastasia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast\",\"name\":\"japaneast\",\"displayName\":\"Japan - East\",\"regionalDisplayName\":\"(Asia Pacific) Japan East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia - Pacific\",\"longitude\":\"139.77\",\"latitude\":\"35.68\",\"physicalLocation\":\"Tokyo, - Saitama\",\"pairedRegion\":[{\"name\":\"japanwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral\",\"name\":\"koreacentral\",\"displayName\":\"Korea - Central\",\"regionalDisplayName\":\"(Asia Pacific) Korea Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia - Pacific\",\"longitude\":\"126.9780\",\"latitude\":\"37.5665\",\"physicalLocation\":\"Seoul\",\"pairedRegion\":[{\"name\":\"koreasouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral\",\"name\":\"canadacentral\",\"displayName\":\"Canada - Central\",\"regionalDisplayName\":\"(Canada) Canada Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Canada\",\"longitude\":\"-79.383\",\"latitude\":\"43.653\",\"physicalLocation\":\"Toronto\",\"pairedRegion\":[{\"name\":\"canadaeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral\",\"name\":\"francecentral\",\"displayName\":\"France - Central\",\"regionalDisplayName\":\"(Europe) France Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"2.3730\",\"latitude\":\"46.3772\",\"physicalLocation\":\"Paris\",\"pairedRegion\":[{\"name\":\"francesouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral\",\"name\":\"germanywestcentral\",\"displayName\":\"Germany - West Central\",\"regionalDisplayName\":\"(Europe) Germany West 
Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.682127\",\"latitude\":\"50.110924\",\"physicalLocation\":\"Frankfurt\",\"pairedRegion\":[{\"name\":\"germanynorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast\",\"name\":\"norwayeast\",\"displayName\":\"Norway - East\",\"regionalDisplayName\":\"(Europe) Norway East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"10.752245\",\"latitude\":\"59.913868\",\"physicalLocation\":\"Norway\",\"pairedRegion\":[{\"name\":\"norwaywest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth\",\"name\":\"switzerlandnorth\",\"displayName\":\"Switzerland - North\",\"regionalDisplayName\":\"(Europe) Switzerland North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.564572\",\"latitude\":\"47.451542\",\"physicalLocation\":\"Zurich\",\"pairedRegion\":[{\"name\":\"switzerlandwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth\",\"name\":\"uaenorth\",\"displayName\":\"UAE - North\",\"regionalDisplayName\":\"(Middle East) UAE North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Middle - East\",\"longitude\":\"55.316666\",\"latitude\":\"25.266666\",\"physicalLocation\":\"Dubai\",\"pairedRegion\":[{\"name\":\"uaecentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth\",\"name\":\"brazilsouth\",\"displayName\":\"Brazil - South\",\"regionalDisplayName\":\"(South America) Brazil South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"South - America\",\"longitude\":\"-46.633\",\"latitude\":\"-23.55\",\"physicalLocation\":\"Sao - Paulo State\",\"pairedRegion\":[{\"name\":\"southcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap\",\"name\":\"eastus2euap\",\"displayName\":\"East - US 2 EUAP\",\"regionalDisplayName\":\"(US) East US 2 EUAP\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-78.3889\",\"latitude\":\"36.6681\",\"pairedRegion\":[{\"name\":\"centraluseuap\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/qatarcentral\",\"name\":\"qatarcentral\",\"displayName\":\"Qatar - Central\",\"regionalDisplayName\":\"(Middle East) Qatar Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Middle - 
East\",\"longitude\":\"51.439327\",\"latitude\":\"25.551462\",\"physicalLocation\":\"Doha\",\"pairedRegion\":[{\"name\":\"westeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage\",\"name\":\"centralusstage\",\"displayName\":\"Central - US (Stage)\",\"regionalDisplayName\":\"(US) Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstage\",\"name\":\"eastusstage\",\"displayName\":\"East - US (Stage)\",\"regionalDisplayName\":\"(US) East US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2stage\",\"name\":\"eastus2stage\",\"displayName\":\"East - US 2 (Stage)\",\"regionalDisplayName\":\"(US) East US 2 (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralusstage\",\"name\":\"northcentralusstage\",\"displayName\":\"North - Central US (Stage)\",\"regionalDisplayName\":\"(US) North Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstage\",\"name\":\"southcentralusstage\",\"displayName\":\"South - Central US (Stage)\",\"regionalDisplayName\":\"(US) South Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westusstage\",\"name\":\"westusstage\",\"displayName\":\"West - US (Stage)\",\"regionalDisplayName\":\"(US) West US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2stage\",\"name\":\"westus2stage\",\"displayName\":\"West - US 2 (Stage)\",\"regionalDisplayName\":\"(US) West US 2 (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asia\",\"name\":\"asia\",\"displayName\":\"Asia\",\"regionalDisplayName\":\"Asia\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asiapacific\",\"name\":\"asiapacific\",\"displayName\":\"Asia - Pacific\",\"regionalDisplayName\":\"Asia 
Pacific\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia\",\"name\":\"australia\",\"displayName\":\"Australia\",\"regionalDisplayName\":\"Australia\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil\",\"name\":\"brazil\",\"displayName\":\"Brazil\",\"regionalDisplayName\":\"Brazil\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada\",\"name\":\"canada\",\"displayName\":\"Canada\",\"regionalDisplayName\":\"Canada\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe\",\"name\":\"europe\",\"displayName\":\"Europe\",\"regionalDisplayName\":\"Europe\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/france\",\"name\":\"france\",\"displayName\":\"France\",\"regionalDisplayName\":\"France\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germany\",\"name\":\"germany\",\"displayName\":\"Germany\",\"regionalDisplayName\":\"Germany\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global\",\"name\":\"global\",\"displayName\":\"Global\",\"regionalDisplayName\":\"Global\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india\",\"name\":\"india\",\"displayName\":\"India\",\"regionalDisplayName\":\"India\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan\",\"name\":\"japan\",\"displayName\":\"Japan\",\"regionalDisplayName\":\"Japan\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/korea\",\"name\":\"korea\",\"displayName\":\"Korea\",\"regionalDisplayName\":\"Korea\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norway\",\"name\":\"norway\",\"displayName\":\"Norway\",\"regionalDisplayName\":\"Norway\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/singapore\",\"name\":\"singapore\",\"displayName\":\"Singapore\",\"regionalDisplayName\":\"Singapore\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafrica\",\"name\":\"southafrica\",\"displayName\":\"South - Africa\",\"regionalDisplayName\":\"South Africa\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerland\",\"name\":\"switzerland\",\"displayName\":\"Switzerland\",\"regionalDisplayName\":\"Switzerland\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uae\",\"name\":\"uae\",\"displayName\":\"United - 
Arab Emirates\",\"regionalDisplayName\":\"United Arab Emirates\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk\",\"name\":\"uk\",\"displayName\":\"United - Kingdom\",\"regionalDisplayName\":\"United Kingdom\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstates\",\"name\":\"unitedstates\",\"displayName\":\"United - States\",\"regionalDisplayName\":\"United States\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstateseuap\",\"name\":\"unitedstateseuap\",\"displayName\":\"United - States EUAP\",\"regionalDisplayName\":\"United States EUAP\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage\",\"name\":\"eastasiastage\",\"displayName\":\"East - Asia (Stage)\",\"regionalDisplayName\":\"(Asia Pacific) East Asia (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia - Pacific\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasiastage\",\"name\":\"southeastasiastage\",\"displayName\":\"Southeast - Asia (Stage)\",\"regionalDisplayName\":\"(Asia Pacific) Southeast Asia (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia - Pacific\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus\",\"name\":\"northcentralus\",\"displayName\":\"North - Central US\",\"regionalDisplayName\":\"(US) North Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-87.6278\",\"latitude\":\"41.8819\",\"physicalLocation\":\"Illinois\",\"pairedRegion\":[{\"name\":\"southcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus\",\"name\":\"westus\",\"displayName\":\"West - US\",\"regionalDisplayName\":\"(US) West US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-122.417\",\"latitude\":\"37.783\",\"physicalLocation\":\"California\",\"pairedRegion\":[{\"name\":\"eastus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest\",\"name\":\"jioindiawest\",\"displayName\":\"Jio - India West\",\"regionalDisplayName\":\"(Asia Pacific) Jio India West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia - Pacific\",\"longitude\":\"70.05773\",\"latitude\":\"22.470701\",\"physicalLocation\":\"Jamnagar\",\"pairedRegion\":[{\"name\":\"jioindiacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap\",\"name\":\"centraluseuap\",\"displayName\":\"Central - US EUAP\",\"regionalDisplayName\":\"(US) Central US 
EUAP\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-93.6208\",\"latitude\":\"41.5908\",\"pairedRegion\":[{\"name\":\"eastus2euap\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus\",\"name\":\"westcentralus\",\"displayName\":\"West - Central US\",\"regionalDisplayName\":\"(US) West Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-110.234\",\"latitude\":\"40.890\",\"physicalLocation\":\"Wyoming\",\"pairedRegion\":[{\"name\":\"westus2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest\",\"name\":\"southafricawest\",\"displayName\":\"South - Africa West\",\"regionalDisplayName\":\"(Africa) South Africa West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Africa\",\"longitude\":\"18.843266\",\"latitude\":\"-34.075691\",\"physicalLocation\":\"Cape - Town\",\"pairedRegion\":[{\"name\":\"southafricanorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral\",\"name\":\"australiacentral\",\"displayName\":\"Australia - Central\",\"regionalDisplayName\":\"(Asia Pacific) Australia Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia - Pacific\",\"longitude\":\"149.1244\",\"latitude\":\"-35.3075\",\"physicalLocation\":\"Canberra\",\"pairedRegion\":[{\"name\":\"australiacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2\",\"name\":\"australiacentral2\",\"displayName\":\"Australia - Central 2\",\"regionalDisplayName\":\"(Asia Pacific) Australia Central 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia - Pacific\",\"longitude\":\"149.1244\",\"latitude\":\"-35.3075\",\"physicalLocation\":\"Canberra\",\"pairedRegion\":[{\"name\":\"australiacentral2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast\",\"name\":\"australiasoutheast\",\"displayName\":\"Australia - Southeast\",\"regionalDisplayName\":\"(Asia Pacific) Australia Southeast\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia - Pacific\",\"longitude\":\"144.9631\",\"latitude\":\"-37.8136\",\"physicalLocation\":\"Victoria\",\"pairedRegion\":[{\"name\":\"australiaeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest\",\"name\":\"japanwest\",\"displayName\":\"Japan - West\",\"regionalDisplayName\":\"(Asia Pacific) Japan West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia - 
Pacific\",\"longitude\":\"135.5022\",\"latitude\":\"34.6939\",\"physicalLocation\":\"Osaka\",\"pairedRegion\":[{\"name\":\"japaneast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral\",\"name\":\"jioindiacentral\",\"displayName\":\"Jio - India Central\",\"regionalDisplayName\":\"(Asia Pacific) Jio India Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia - Pacific\",\"longitude\":\"79.08886\",\"latitude\":\"21.146633\",\"physicalLocation\":\"Nagpur\",\"pairedRegion\":[{\"name\":\"jioindiawest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth\",\"name\":\"koreasouth\",\"displayName\":\"Korea - South\",\"regionalDisplayName\":\"(Asia Pacific) Korea South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia - Pacific\",\"longitude\":\"129.0756\",\"latitude\":\"35.1796\",\"physicalLocation\":\"Busan\",\"pairedRegion\":[{\"name\":\"koreacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\",\"name\":\"southindia\",\"displayName\":\"South - India\",\"regionalDisplayName\":\"(Asia Pacific) South India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia - Pacific\",\"longitude\":\"80.1636\",\"latitude\":\"12.9822\",\"physicalLocation\":\"Chennai\",\"pairedRegion\":[{\"name\":\"centralindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westindia\",\"name\":\"westindia\",\"displayName\":\"West - India\",\"regionalDisplayName\":\"(Asia Pacific) West India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia - Pacific\",\"longitude\":\"72.868\",\"latitude\":\"19.088\",\"physicalLocation\":\"Mumbai\",\"pairedRegion\":[{\"name\":\"southindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast\",\"name\":\"canadaeast\",\"displayName\":\"Canada - East\",\"regionalDisplayName\":\"(Canada) Canada East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Canada\",\"longitude\":\"-71.217\",\"latitude\":\"46.817\",\"physicalLocation\":\"Quebec\",\"pairedRegion\":[{\"name\":\"canadacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth\",\"name\":\"francesouth\",\"displayName\":\"France - South\",\"regionalDisplayName\":\"(Europe) France South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"2.1972\",\"latitude\":\"43.8345\",\"physicalLocation\":\"Marseille\",\"pairedRegion\":[{\"name\":\"francecentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth\",\"name\":\"germanynorth\",\"displayName\":\"Germany - North\",\"regionalDisplayName\":\"(Europe) 
Germany North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.806422\",\"latitude\":\"53.073635\",\"physicalLocation\":\"Berlin\",\"pairedRegion\":[{\"name\":\"germanywestcentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest\",\"name\":\"norwaywest\",\"displayName\":\"Norway - West\",\"regionalDisplayName\":\"(Europe) Norway West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"5.733107\",\"latitude\":\"58.969975\",\"physicalLocation\":\"Norway\",\"pairedRegion\":[{\"name\":\"norwayeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest\",\"name\":\"switzerlandwest\",\"displayName\":\"Switzerland - West\",\"regionalDisplayName\":\"(Europe) Switzerland West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"6.143158\",\"latitude\":\"46.204391\",\"physicalLocation\":\"Geneva\",\"pairedRegion\":[{\"name\":\"switzerlandnorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest\",\"name\":\"ukwest\",\"displayName\":\"UK - West\",\"regionalDisplayName\":\"(Europe) UK West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"-3.084\",\"latitude\":\"53.427\",\"physicalLocation\":\"Cardiff\",\"pairedRegion\":[{\"name\":\"uksouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral\",\"name\":\"uaecentral\",\"displayName\":\"UAE - Central\",\"regionalDisplayName\":\"(Middle East) UAE Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Middle - East\",\"longitude\":\"54.366669\",\"latitude\":\"24.466667\",\"physicalLocation\":\"Abu - Dhabi\",\"pairedRegion\":[{\"name\":\"uaenorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsoutheast\",\"name\":\"brazilsoutheast\",\"displayName\":\"Brazil - Southeast\",\"regionalDisplayName\":\"(South America) Brazil Southeast\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"South - America\",\"longitude\":\"-43.2075\",\"latitude\":\"-22.90278\",\"physicalLocation\":\"Rio\",\"pairedRegion\":[{\"name\":\"brazilsouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth\"}]}}]}" - headers: - cache-control: - - no-cache - content-length: - - '29470' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 26 Aug 2022 03:07:54 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: '{"name": "test-account-000002", "location": "westus2", "properties": {"sku": - {"name": "Basic"}}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - 
- automation account create - Connection: - - keep-alive - Content-Length: - - '96' - Content-Type: - - application/json - ParameterSetName: - - --resource-group --name --location - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002?api-version=2021-06-22 - response: - body: - string: '{"name":"test-account-000002","systemData":{"createdAt":"2022-08-26T03:08:02.76+00:00","lastModifiedAt":"2022-08-26T03:08:02.76+00:00"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002","type":"Microsoft.Automation/AutomationAccounts","location":"westus2","tags":{},"etag":null,"properties":{"disableLocalAuth":false,"sku":{"name":"Basic","family":null,"capacity":null},"state":"Ok","RegistrationUrl":"https://a8380925-d3cc-4d03-afc3-f5a88f2156e3.agentsvc.wus2.azure-automation.net/accounts/a8380925-d3cc-4d03-afc3-f5a88f2156e3","encryption":{"keySource":"Microsoft.Automation","identity":{"userAssignedIdentity":null}},"automationHybridServiceUrl":"https://a8380925-d3cc-4d03-afc3-f5a88f2156e3.jrds.wus2.azure-automation.net/automationAccounts/a8380925-d3cc-4d03-afc3-f5a88f2156e3","RuntimeConfiguration":{"powershell":{"builtinModules":{"Az":"8.0.0"}},"powershell7":{"builtinModules":{"Az":"8.0.0"}}},"creationTime":"2022-08-26T03:08:02.76+00:00","lastModifiedBy":null,"lastModifiedTime":"2022-08-26T03:08:02.76+00:00"}}' - headers: - cache-control: - - no-cache - content-length: - - '1143' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 26 Aug 2022 03:08:03 GMT - expires: - - '-1' - location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002?api-version=2021-06-22 - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - status: - code: 201 - message: Created -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation account update - Connection: - - keep-alive - ParameterSetName: - - --resource-group --name --tags - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002?api-version=2021-06-22 - response: - body: - string: 
'{"name":"test-account-000002","systemData":{"createdAt":"2022-08-26T03:08:02.76+00:00","lastModifiedAt":"2022-08-26T03:08:02.76+00:00"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002","type":"Microsoft.Automation/AutomationAccounts","location":"westus2","tags":{},"etag":null,"properties":{"disableLocalAuth":false,"sku":{"name":"Basic","family":null,"capacity":null},"state":"Ok","RegistrationUrl":"https://a8380925-d3cc-4d03-afc3-f5a88f2156e3.agentsvc.wus2.azure-automation.net/accounts/a8380925-d3cc-4d03-afc3-f5a88f2156e3","encryption":{"keySource":"Microsoft.Automation","identity":{"userAssignedIdentity":null}},"privateEndpointConnections":[],"automationHybridServiceUrl":"https://a8380925-d3cc-4d03-afc3-f5a88f2156e3.jrds.wus2.azure-automation.net/automationAccounts/a8380925-d3cc-4d03-afc3-f5a88f2156e3","RuntimeConfiguration":{"powershell":{"builtinModules":{"Az":"8.0.0"}},"powershell7":{"builtinModules":{"Az":"8.0.0"}}},"creationTime":"2022-08-26T03:08:02.76+00:00","lastModifiedBy":null,"lastModifiedTime":"2022-08-26T03:08:02.76+00:00"}}' - headers: - cache-control: - - no-cache - content-length: - - '1175' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 26 Aug 2022 03:08:04 GMT - expires: - - '-1' - ocp-automation-accountid: - - a8380925-d3cc-4d03-afc3-f5a88f2156e3 - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: '{"name": "test-account-000002", "tags": {"A": "a"}, "properties": {"sku": - {"name": "Basic"}}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation account update - Connection: - - keep-alive - Content-Length: - - '93' - Content-Type: - - application/json - ParameterSetName: - - --resource-group --name --tags - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: PATCH - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002?api-version=2021-06-22 - response: - body: - string: 
'{"name":"test-account-000002","systemData":{"createdAt":"2022-08-26T03:08:02.76+00:00","lastModifiedAt":"2022-08-26T03:08:07.7733333+00:00"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002","type":"Microsoft.Automation/AutomationAccounts","location":"westus2","tags":{"A":"a"},"etag":null,"properties":{"disableLocalAuth":false,"sku":{"name":"Basic","family":null,"capacity":null},"state":"Ok","RegistrationUrl":"https://a8380925-d3cc-4d03-afc3-f5a88f2156e3.agentsvc.wus2.azure-automation.net/accounts/a8380925-d3cc-4d03-afc3-f5a88f2156e3","encryption":{"keySource":"Microsoft.Automation","identity":{"userAssignedIdentity":null}},"automationHybridServiceUrl":"https://a8380925-d3cc-4d03-afc3-f5a88f2156e3.jrds.wus2.azure-automation.net/automationAccounts/a8380925-d3cc-4d03-afc3-f5a88f2156e3","RuntimeConfiguration":{"powershell":{"builtinModules":{"Az":"8.0.0"}},"powershell7":{"builtinModules":{"Az":"8.0.0"}}},"creationTime":"2022-08-26T03:08:02.76+00:00","lastModifiedBy":null,"lastModifiedTime":"2022-08-26T03:08:07.7733333+00:00"}}' - headers: - cache-control: - - no-cache - content-length: - - '1160' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 26 Aug 2022 03:08:10 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1198' - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation account show - Connection: - - keep-alive - ParameterSetName: - - --resource-group --name - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002?api-version=2021-06-22 - response: - body: - string: '{"name":"test-account-000002","systemData":{"createdAt":"2022-08-26T03:08:02.76+00:00","lastModifiedAt":"2022-08-26T03:08:07.7733333+00:00"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002","type":"Microsoft.Automation/AutomationAccounts","location":"westus2","tags":{"A":"a"},"etag":null,"properties":{"disableLocalAuth":false,"sku":{"name":"Basic","family":null,"capacity":null},"state":"Ok","RegistrationUrl":"https://a8380925-d3cc-4d03-afc3-f5a88f2156e3.agentsvc.wus2.azure-automation.net/accounts/a8380925-d3cc-4d03-afc3-f5a88f2156e3","encryption":{"keySource":"Microsoft.Automation","identity":{"userAssignedIdentity":null}},"privateEndpointConnections":[],"automationHybridServiceUrl":"https://a8380925-d3cc-4d03-afc3-f5a88f2156e3.jrds.wus2.azure-automation.net/automationAccounts/a8380925-d3cc-4d03-afc3-f5a88f2156e3","RuntimeConfiguration":{"powershell":{"builtinModules":{"Az":"8.0.0"}},"powershell7":{"builtinModules":{"Az":"8.0.0"}}},"creationTime":"2022-08-26T03:08:02.76+00:00","lastModifiedBy":null,"lastModifiedTime":"2022-08-26T03:08:07.7733333+00:00"}}' - headers: - cache-control: - - no-cache - content-length: - - '1192' - content-type: - 
- application/json; charset=utf-8 - date: - - Fri, 26 Aug 2022 03:08:11 GMT - expires: - - '-1' - ocp-automation-accountid: - - a8380925-d3cc-4d03-afc3-f5a88f2156e3 - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation account list - Connection: - - keep-alive - ParameterSetName: - - --resource-group - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts?api-version=2021-06-22 - response: - body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002","location":"westus2","name":"test-account-000002","type":"Microsoft.Automation/AutomationAccounts","tags":{"A":"a"},"properties":{"creationTime":"2022-08-26T03:08:02.76+00:00","lastModifiedTime":"2022-08-26T03:08:07.7733333+00:00","state":"Ok","disableLocalAuth":false}}]}' - headers: - cache-control: - - no-cache - content-length: - - '454' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 26 Aug 2022 03:08:13 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation runbook create - Connection: - - keep-alive - ParameterSetName: - - --resource-group --automation-account-name --name --type - User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_automation_000001?api-version=2021-04-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001","name":"cli_test_automation_000001","type":"Microsoft.Resources/resourceGroups","location":"westus2","tags":{"product":"azurecli","cause":"automation","date":"2022-08-26T03:07:48Z"},"properties":{"provisioningState":"Succeeded"}}' - headers: - cache-control: - - no-cache - content-length: - - '331' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 26 Aug 2022 03:08:13 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: '{"name": "test-runbook-000003", "location": "westus2", "properties": {"runbookType": - "PowerShell", "draft": {}}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation runbook create - Connection: - - keep-alive - Content-Length: - - '112' - Content-Type: - - application/json - 
ParameterSetName: - - --resource-group --automation-account-name --name --type - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003?api-version=2018-06-30 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003","name":"test-runbook-000003","type":"Microsoft.Automation/AutomationAccounts/Runbooks","location":"westus2","tags":{},"etag":"\"637970801000700000\"","properties":{"description":null,"logVerbose":false,"logProgress":false,"logActivityTrace":0,"runbookType":"PowerShell","parameters":{},"state":"New","jobCount":0,"provisioningState":"Succeeded","serviceManagementTags":null,"outputTypes":[],"creationTime":"2022-08-26T03:08:20.07+00:00","lastModifiedBy":null,"lastModifiedTime":"2022-08-26T03:08:20.07+00:00"}}' - headers: - cache-control: - - no-cache - content-length: - - '711' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 26 Aug 2022 03:08:20 GMT - etag: - - '"637970801000700000"' - expires: - - '-1' - location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003?api-version=2018-06-30 - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - status: - code: 201 - message: Created -- request: - body: '{"properties": {"logVerbose": true, "logProgress": true, "logActivityTrace": - 1}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation runbook update - Connection: - - keep-alive - Content-Length: - - '80' - Content-Type: - - application/json - ParameterSetName: - - --resource-group --automation-account-name --name --log-activity-trace --log-verbose - --log-progress - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: PATCH - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003?api-version=2018-06-30 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003","name":"test-runbook-000003","type":"Microsoft.Automation/AutomationAccounts/Runbooks","location":"westus2","tags":{},"etag":"\"637970801025333333\"","properties":{"description":null,"logVerbose":true,"logProgress":true,"logActivityTrace":1,"runbookType":"PowerShell","parameters":{},"state":"New","jobCount":0,"provisioningState":"Succeeded","serviceManagementTags":null,"outputTypes":[],"creationTime":"2022-08-26T03:08:20.07+00:00","lastModifiedBy":"{scrubbed}","lastModifiedTime":"2022-08-26T03:08:22.5333333+00:00"}}' - headers: - cache-control: 
- - no-cache - content-length: - - '722' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 26 Aug 2022 03:08:22 GMT - etag: - - '"637970801025333333"' - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - status: - code: 200 - message: OK -- request: - body: '@C:Usersv-jingszhangAppDataLocalTempPowerShell.ps' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation runbook replace-content - Connection: - - keep-alive - Content-Length: - - '49' - Content-Type: - - text/powershell - ParameterSetName: - - --resource-group --automation-account-name --name --content - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003/draft/content?api-version=2018-06-30 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Fri, 26 Aug 2022 03:08:23 GMT - expires: - - '-1' - location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003/operationResults/04e88edf-8cc0-4f74-83fe-621a1ccc182a?api-version=2018-06-30 - ocp-automation-operationresultid: - - 04e88edf-8cc0-4f74-83fe-621a1ccc182a - - 04e88edf-8cc0-4f74-83fe-621a1ccc182a - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - status: - code: 202 - message: Accepted -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - automation runbook replace-content - Connection: - - keep-alive - ParameterSetName: - - --resource-group --automation-account-name --name --content - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003/operationResults/04e88edf-8cc0-4f74-83fe-621a1ccc182a?api-version=2018-06-30 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - content-type: - - text/plain; charset=utf-8 - date: - - Fri, 26 Aug 2022 03:08:29 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation runbook publish - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - --resource-group --automation-account-name --name - User-Agent: - - 
AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003/publish?api-version=2018-06-30 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Fri, 26 Aug 2022 03:08:31 GMT - expires: - - '-1' - location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003/operationResults/2a5b4a99-b765-4413-9d79-ddace074e5f9?api-version=2018-06-30 - ocp-automation-operationresultid: - - 2a5b4a99-b765-4413-9d79-ddace074e5f9 - - 2a5b4a99-b765-4413-9d79-ddace074e5f9 - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - status: - code: 202 - message: Accepted -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - automation runbook publish - Connection: - - keep-alive - ParameterSetName: - - --resource-group --automation-account-name --name - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003/operationResults/2a5b4a99-b765-4413-9d79-ddace074e5f9?api-version=2018-06-30 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - content-type: - - text/plain; charset=utf-8 - date: - - Fri, 26 Aug 2022 03:08:37 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: '{"name": "hwg-000004"}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation hrwg create - Connection: - - keep-alive - Content-Length: - - '22' - Content-Type: - - application/json - ParameterSetName: - - --resource-group --automation-account-name --name - User-Agent: - - AZURECLI/2.39.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups/hwg-000004?api-version=2022-02-22 - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups/hwg-000004","name":"hwg-000004","type":"Microsoft.Automation/AutomationAccounts/HybridRunbookWorkerGroups","properties":{"groupType":"User","credential":{"name":null},"hybridRunbookWorkers":null},"systemData":{"createdAt":"2022-08-26T03:08:39.6229199+00:00","lastModifiedAt":"2022-08-26T03:08:39.6229199+00:00"}}' - headers: - cache-control: - - no-cache - content-length: - - '509' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 26 Aug 2022 03:08:38 GMT - expires: - - '-1' - location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups/hwg-000004?api-version=2022-02-22 - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1196' - status: - code: 201 - message: Created -- request: - body: '{"name": "hwg-000004"}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation hrwg create - Connection: - - keep-alive - Content-Length: - - '22' - Content-Type: - - application/json - ParameterSetName: - - --resource-group --automation-account-name --name - User-Agent: - - AZURECLI/2.39.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups/hwg-000004?api-version=2022-02-22 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups/hwg-000004","name":"hwg-000004","type":"Microsoft.Automation/AutomationAccounts/HybridRunbookWorkerGroups","properties":{"groupType":"User","credential":{"name":null},"hybridRunbookWorkers":null},"systemData":{"createdAt":"2022-08-26T03:08:39.6229199+00:00","lastModifiedAt":"2022-08-26T03:08:40.575999+00:00"}}' - headers: - cache-control: - - no-cache - content-length: - - '508' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 26 Aug 2022 03:08:40 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1198' - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation hrwg show - Connection: - - keep-alive - ParameterSetName: - - --resource-group --automation-account-name --name - User-Agent: - - AZURECLI/2.39.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups/hwg-000004?api-version=2022-02-22 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups/hwg-000004","name":"hwg-000004","type":"Microsoft.Automation/AutomationAccounts/HybridRunbookWorkerGroups","properties":{"groupType":"User","credential":{"name":null},"hybridRunbookWorkers":null},"systemData":{"createdAt":"2022-08-26T03:08:39.6229199+00:00","lastModifiedAt":"2022-08-26T03:08:40.575999+00:00"}}' - headers: - cache-control: - - no-cache - content-length: - - '508' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 26 Aug 2022 03:08:41 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation hrwg list - Connection: - - keep-alive - ParameterSetName: - - --resource-group --automation-account-name - User-Agent: - - AZURECLI/2.39.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups?api-version=2022-02-22 - response: - body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups/hwg-000004","name":"hwg-000004","type":"Microsoft.Automation/AutomationAccounts/HybridRunbookWorkerGroups","properties":{"groupType":"User","credential":{"name":null},"hybridRunbookWorkers":null},"systemData":{"createdAt":"2022-08-26T03:08:39.6229199+00:00","lastModifiedAt":"2022-08-26T03:08:40.575999+00:00"}}]}' - headers: - cache-control: - - no-cache - content-length: - - '520' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 26 Aug 2022 03:08:42 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation hrwg hrw list - Connection: - - keep-alive - ParameterSetName: - - --automation-account-name --hybrid-runbook-worker-group-name -g - User-Agent: - - AZURECLI/2.39.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups/hwg-000004/hybridRunbookWorkers?api-version=2021-06-22 - response: - body: - string: 
'{"value":[]}' - headers: - cache-control: - - no-cache - content-length: - - '12' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 26 Aug 2022 03:08:57 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - automation hrwg delete - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - --resource-group --automation-account-name --name --yes - User-Agent: - - AZURECLI/2.39.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups/hwg-000004?api-version=2022-02-22 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Fri, 26 Aug 2022 03:08:59 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-deletes: - - '14999' - status: - code: 200 - message: OK -- request: - body: '{"properties": {"runbook": {"name": "test-runbook-000003"}}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation runbook start - Connection: - - keep-alive - Content-Length: - - '60' - Content-Type: - - application/json - ParameterSetName: - - --resource-group --automation-account-name --name - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/jobs/ef6919bf-b827-4b18-8475-8d4a03a49d0e?api-version=2019-06-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/jobs/ef6919bf-b827-4b18-8475-8d4a03a49d0e","name":"ef6919bf-b827-4b18-8475-8d4a03a49d0e","type":"Microsoft.Automation/AutomationAccounts/Jobs","properties":{"jobId":"9ed4cf88-39bc-4bd1-9dc4-753832e24b17","creationTime":"2022-08-26T03:09:01.72+00:00","provisioningState":"Processing","status":"New","statusDetails":"None","startedBy":null,"startTime":null,"endTime":null,"lastModifiedTime":"2022-08-26T03:09:01.72+00:00","lastStatusModifiedTime":"2022-08-26T03:09:01.72+00:00","exception":null,"parameters":{},"runOn":null,"runbook":{"name":"test-runbook-000003"}}}' - headers: - cache-control: - - no-cache - content-length: - - '735' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 26 Aug 2022 03:09:01 GMT - expires: - - '-1' - location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/jobs/ef6919bf-b827-4b18-8475-8d4a03a49d0e?api-version=2019-06-01 - pragma: - - no-cache - 
server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-resource-requests: - - '1999' - status: - code: 201 - message: Created -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation job list - Connection: - - keep-alive - ParameterSetName: - - --resource-group --automation-account-name - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/jobs?api-version=2019-06-01 - response: - body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/jobs/ef6919bf-b827-4b18-8475-8d4a03a49d0e","name":"ef6919bf-b827-4b18-8475-8d4a03a49d0e","type":"Microsoft.Automation/AutomationAccounts/Jobs","properties":{"jobId":"9ed4cf88-39bc-4bd1-9dc4-753832e24b17","runbook":{"name":"test-runbook-000003"},"provisioningState":"Processing","status":"New","creationTime":"2022-08-26T03:09:01.7300573+00:00","startTime":null,"lastModifiedTime":"2022-08-26T03:09:01.7300573+00:00","endTime":null,"runOn":null}}]}' - headers: - cache-control: - - no-cache - content-length: - - '628' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 26 Aug 2022 03:09:02 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-resource-requests: - - '599' - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation job show - Connection: - - keep-alive - ParameterSetName: - - --resource-group --automation-account-name --name - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/jobs/ef6919bf-b827-4b18-8475-8d4a03a49d0e?api-version=2019-06-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/jobs/ef6919bf-b827-4b18-8475-8d4a03a49d0e","name":"ef6919bf-b827-4b18-8475-8d4a03a49d0e","type":"Microsoft.Automation/AutomationAccounts/Jobs","properties":{"jobId":"9ed4cf88-39bc-4bd1-9dc4-753832e24b17","creationTime":"2022-08-26T03:09:01.7300573+00:00","provisioningState":"Processing","status":"New","statusDetails":"None","startedBy":"{scrubbed}","startTime":null,"endTime":null,"lastModifiedTime":"2022-08-26T03:09:01.7300573+00:00","lastStatusModifiedTime":"2022-08-26T03:09:01.7300573+00:00","exception":null,"parameters":{},"runOn":null,"runbook":{"name":"test-runbook-000003"}}}' - headers: - cache-control: - - no-cache - content-length: - - '758' - content-type: - - application/json; 
charset=utf-8 - date: - - Fri, 26 Aug 2022 03:09:04 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-resource-requests: - - '599' - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation account delete - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - --resource-group --name -y - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002?api-version=2021-06-22 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Fri, 26 Aug 2022 03:09:12 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-deletes: - - '14999' - status: - code: 200 - message: OK -version: 1 +interactions: +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - unknown + Connection: + - keep-alive + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/locations?api-version=2019-11-01 + response: + body: + string: "{\"value\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\"\ + ,\"name\":\"eastus\",\"displayName\":\"East US\",\"regionalDisplayName\":\"\ + (US) East US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + :\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-79.8164\",\"latitude\"\ + :\"37.3719\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\"\ + :\"westus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2\"\ + ,\"name\":\"eastus2\",\"displayName\":\"East US 2\",\"regionalDisplayName\"\ + :\"(US) East US 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + :\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-78.3889\",\"latitude\"\ + :\"36.6681\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\"\ + :\"centralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\"\ + ,\"name\":\"southcentralus\",\"displayName\":\"South Central US\",\"regionalDisplayName\"\ + :\"(US) South Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + :\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-98.5\",\"latitude\"\ + :\"29.4167\",\"physicalLocation\":\"Texas\",\"pairedRegion\":[{\"name\":\"\ + northcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus\"\ + 
}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2\"\ + ,\"name\":\"westus2\",\"displayName\":\"West US 2\",\"regionalDisplayName\"\ + :\"(US) West US 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + :\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-119.852\",\"latitude\"\ + :\"47.233\",\"physicalLocation\":\"Washington\",\"pairedRegion\":[{\"name\"\ + :\"westcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus3\"\ + ,\"name\":\"westus3\",\"displayName\":\"West US 3\",\"regionalDisplayName\"\ + :\"(US) West US 3\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + :\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-112.074036\",\"\ + latitude\":\"33.448376\",\"physicalLocation\":\"Phoenix\",\"pairedRegion\"\ + :[{\"name\":\"eastus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast\"\ + ,\"name\":\"australiaeast\",\"displayName\":\"Australia East\",\"regionalDisplayName\"\ + :\"(Asia Pacific) Australia East\",\"metadata\":{\"regionType\":\"Physical\"\ + ,\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia Pacific\",\"\ + longitude\":\"151.2094\",\"latitude\":\"-33.86\",\"physicalLocation\":\"New\ + \ South Wales\",\"pairedRegion\":[{\"name\":\"australiasoutheast\",\"id\"\ + :\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia\"\ + ,\"name\":\"southeastasia\",\"displayName\":\"Southeast Asia\",\"regionalDisplayName\"\ + :\"(Asia Pacific) Southeast Asia\",\"metadata\":{\"regionType\":\"Physical\"\ + ,\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia Pacific\",\"\ + longitude\":\"103.833\",\"latitude\":\"1.283\",\"physicalLocation\":\"Singapore\"\ + ,\"pairedRegion\":[{\"name\":\"eastasia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope\"\ + ,\"name\":\"northeurope\",\"displayName\":\"North Europe\",\"regionalDisplayName\"\ + :\"(Europe) North Europe\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + :\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"-6.2597\",\"\ + latitude\":\"53.3478\",\"physicalLocation\":\"Ireland\",\"pairedRegion\":[{\"\ + name\":\"westeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedencentral\"\ + ,\"name\":\"swedencentral\",\"displayName\":\"Sweden Central\",\"regionalDisplayName\"\ + :\"(Europe) Sweden Central\",\"metadata\":{\"regionType\":\"Physical\",\"\ + regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\"\ + :\"17.14127\",\"latitude\":\"60.67488\",\"physicalLocation\":\"G\xE4vle\"\ + ,\"pairedRegion\":[{\"name\":\"swedensouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedensouth\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth\"\ + ,\"name\":\"uksouth\",\"displayName\":\"UK South\",\"regionalDisplayName\"\ + :\"(Europe) UK South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + 
:\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"-0.799\",\"\ + latitude\":\"50.941\",\"physicalLocation\":\"London\",\"pairedRegion\":[{\"\ + name\":\"ukwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\"\ + ,\"name\":\"westeurope\",\"displayName\":\"West Europe\",\"regionalDisplayName\"\ + :\"(Europe) West Europe\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + :\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"4.9\",\"latitude\"\ + :\"52.3667\",\"physicalLocation\":\"Netherlands\",\"pairedRegion\":[{\"name\"\ + :\"northeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus\"\ + ,\"name\":\"centralus\",\"displayName\":\"Central US\",\"regionalDisplayName\"\ + :\"(US) Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + :\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-93.6208\",\"latitude\"\ + :\"41.5908\",\"physicalLocation\":\"Iowa\",\"pairedRegion\":[{\"name\":\"\ + eastus2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth\"\ + ,\"name\":\"southafricanorth\",\"displayName\":\"South Africa North\",\"regionalDisplayName\"\ + :\"(Africa) South Africa North\",\"metadata\":{\"regionType\":\"Physical\"\ + ,\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Africa\",\"longitude\"\ + :\"28.218370\",\"latitude\":\"-25.731340\",\"physicalLocation\":\"Johannesburg\"\ + ,\"pairedRegion\":[{\"name\":\"southafricawest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia\"\ + ,\"name\":\"centralindia\",\"displayName\":\"Central India\",\"regionalDisplayName\"\ + :\"(Asia Pacific) Central India\",\"metadata\":{\"regionType\":\"Physical\"\ + ,\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia Pacific\",\"\ + longitude\":\"73.9197\",\"latitude\":\"18.5822\",\"physicalLocation\":\"Pune\"\ + ,\"pairedRegion\":[{\"name\":\"southindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia\"\ + ,\"name\":\"eastasia\",\"displayName\":\"East Asia\",\"regionalDisplayName\"\ + :\"(Asia Pacific) East Asia\",\"metadata\":{\"regionType\":\"Physical\",\"\ + regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia Pacific\",\"longitude\"\ + :\"114.188\",\"latitude\":\"22.267\",\"physicalLocation\":\"Hong Kong\",\"\ + pairedRegion\":[{\"name\":\"southeastasia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast\"\ + ,\"name\":\"japaneast\",\"displayName\":\"Japan East\",\"regionalDisplayName\"\ + :\"(Asia Pacific) Japan East\",\"metadata\":{\"regionType\":\"Physical\",\"\ + regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia Pacific\",\"longitude\"\ + :\"139.77\",\"latitude\":\"35.68\",\"physicalLocation\":\"Tokyo, Saitama\"\ + ,\"pairedRegion\":[{\"name\":\"japanwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest\"\ + 
}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral\"\ + ,\"name\":\"koreacentral\",\"displayName\":\"Korea Central\",\"regionalDisplayName\"\ + :\"(Asia Pacific) Korea Central\",\"metadata\":{\"regionType\":\"Physical\"\ + ,\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia Pacific\",\"\ + longitude\":\"126.9780\",\"latitude\":\"37.5665\",\"physicalLocation\":\"\ + Seoul\",\"pairedRegion\":[{\"name\":\"koreasouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral\"\ + ,\"name\":\"canadacentral\",\"displayName\":\"Canada Central\",\"regionalDisplayName\"\ + :\"(Canada) Canada Central\",\"metadata\":{\"regionType\":\"Physical\",\"\ + regionCategory\":\"Recommended\",\"geographyGroup\":\"Canada\",\"longitude\"\ + :\"-79.383\",\"latitude\":\"43.653\",\"physicalLocation\":\"Toronto\",\"pairedRegion\"\ + :[{\"name\":\"canadaeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral\"\ + ,\"name\":\"francecentral\",\"displayName\":\"France Central\",\"regionalDisplayName\"\ + :\"(Europe) France Central\",\"metadata\":{\"regionType\":\"Physical\",\"\ + regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\"\ + :\"2.3730\",\"latitude\":\"46.3772\",\"physicalLocation\":\"Paris\",\"pairedRegion\"\ + :[{\"name\":\"francesouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral\"\ + ,\"name\":\"germanywestcentral\",\"displayName\":\"Germany West Central\"\ + ,\"regionalDisplayName\":\"(Europe) Germany West Central\",\"metadata\":{\"\ + regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\"\ + :\"Europe\",\"longitude\":\"8.682127\",\"latitude\":\"50.110924\",\"physicalLocation\"\ + :\"Frankfurt\",\"pairedRegion\":[{\"name\":\"germanynorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast\"\ + ,\"name\":\"norwayeast\",\"displayName\":\"Norway East\",\"regionalDisplayName\"\ + :\"(Europe) Norway East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + :\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"10.752245\"\ + ,\"latitude\":\"59.913868\",\"physicalLocation\":\"Norway\",\"pairedRegion\"\ + :[{\"name\":\"norwaywest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth\"\ + ,\"name\":\"switzerlandnorth\",\"displayName\":\"Switzerland North\",\"regionalDisplayName\"\ + :\"(Europe) Switzerland North\",\"metadata\":{\"regionType\":\"Physical\"\ + ,\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\"\ + :\"8.564572\",\"latitude\":\"47.451542\",\"physicalLocation\":\"Zurich\",\"\ + pairedRegion\":[{\"name\":\"switzerlandwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth\"\ + ,\"name\":\"uaenorth\",\"displayName\":\"UAE North\",\"regionalDisplayName\"\ + :\"(Middle East) UAE 
North\",\"metadata\":{\"regionType\":\"Physical\",\"\ + regionCategory\":\"Recommended\",\"geographyGroup\":\"Middle East\",\"longitude\"\ + :\"55.316666\",\"latitude\":\"25.266666\",\"physicalLocation\":\"Dubai\",\"\ + pairedRegion\":[{\"name\":\"uaecentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth\"\ + ,\"name\":\"brazilsouth\",\"displayName\":\"Brazil South\",\"regionalDisplayName\"\ + :\"(South America) Brazil South\",\"metadata\":{\"regionType\":\"Physical\"\ + ,\"regionCategory\":\"Recommended\",\"geographyGroup\":\"South America\",\"\ + longitude\":\"-46.633\",\"latitude\":\"-23.55\",\"physicalLocation\":\"Sao\ + \ Paulo State\",\"pairedRegion\":[{\"name\":\"southcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap\"\ + ,\"name\":\"eastus2euap\",\"displayName\":\"East US 2 EUAP\",\"regionalDisplayName\"\ + :\"(US) East US 2 EUAP\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + :\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-78.3889\",\"latitude\"\ + :\"36.6681\",\"pairedRegion\":[{\"name\":\"centraluseuap\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/qatarcentral\"\ + ,\"name\":\"qatarcentral\",\"displayName\":\"Qatar Central\",\"regionalDisplayName\"\ + :\"(Middle East) Qatar Central\",\"metadata\":{\"regionType\":\"Physical\"\ + ,\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Middle East\",\"\ + longitude\":\"51.439327\",\"latitude\":\"25.551462\",\"physicalLocation\"\ + :\"Doha\",\"pairedRegion\":[{\"name\":\"westeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage\"\ + ,\"name\":\"centralusstage\",\"displayName\":\"Central US (Stage)\",\"regionalDisplayName\"\ + :\"(US) Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\"\ + :\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstage\"\ + ,\"name\":\"eastusstage\",\"displayName\":\"East US (Stage)\",\"regionalDisplayName\"\ + :\"(US) East US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\"\ + :\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2stage\"\ + ,\"name\":\"eastus2stage\",\"displayName\":\"East US 2 (Stage)\",\"regionalDisplayName\"\ + :\"(US) East US 2 (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\"\ + :\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralusstage\"\ + ,\"name\":\"northcentralusstage\",\"displayName\":\"North Central US (Stage)\"\ + ,\"regionalDisplayName\":\"(US) North Central US (Stage)\",\"metadata\":{\"\ + regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"\ + US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstage\"\ + ,\"name\":\"southcentralusstage\",\"displayName\":\"South Central US (Stage)\"\ + ,\"regionalDisplayName\":\"(US) South Central US (Stage)\",\"metadata\":{\"\ + 
regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"\ + US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westusstage\"\ + ,\"name\":\"westusstage\",\"displayName\":\"West US (Stage)\",\"regionalDisplayName\"\ + :\"(US) West US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\"\ + :\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2stage\"\ + ,\"name\":\"westus2stage\",\"displayName\":\"West US 2 (Stage)\",\"regionalDisplayName\"\ + :\"(US) West US 2 (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\"\ + :\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asia\"\ + ,\"name\":\"asia\",\"displayName\":\"Asia\",\"regionalDisplayName\":\"Asia\"\ + ,\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"\ + id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asiapacific\"\ + ,\"name\":\"asiapacific\",\"displayName\":\"Asia Pacific\",\"regionalDisplayName\"\ + :\"Asia Pacific\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\"\ + :\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia\"\ + ,\"name\":\"australia\",\"displayName\":\"Australia\",\"regionalDisplayName\"\ + :\"Australia\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\"\ + :\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil\"\ + ,\"name\":\"brazil\",\"displayName\":\"Brazil\",\"regionalDisplayName\":\"\ + Brazil\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"\ + }},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada\"\ + ,\"name\":\"canada\",\"displayName\":\"Canada\",\"regionalDisplayName\":\"\ + Canada\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"\ + }},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe\"\ + ,\"name\":\"europe\",\"displayName\":\"Europe\",\"regionalDisplayName\":\"\ + Europe\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"\ + }},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/france\"\ + ,\"name\":\"france\",\"displayName\":\"France\",\"regionalDisplayName\":\"\ + France\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"\ + }},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germany\"\ + ,\"name\":\"germany\",\"displayName\":\"Germany\",\"regionalDisplayName\"\ + :\"Germany\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"\ + Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global\"\ + ,\"name\":\"global\",\"displayName\":\"Global\",\"regionalDisplayName\":\"\ + Global\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"\ + }},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india\"\ + ,\"name\":\"india\",\"displayName\":\"India\",\"regionalDisplayName\":\"India\"\ + ,\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"\ + id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan\"\ + ,\"name\":\"japan\",\"displayName\":\"Japan\",\"regionalDisplayName\":\"Japan\"\ + ,\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"\ + id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/korea\"\ + 
,\"name\":\"korea\",\"displayName\":\"Korea\",\"regionalDisplayName\":\"Korea\"\ + ,\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"\ + id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norway\"\ + ,\"name\":\"norway\",\"displayName\":\"Norway\",\"regionalDisplayName\":\"\ + Norway\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"\ + }},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/singapore\"\ + ,\"name\":\"singapore\",\"displayName\":\"Singapore\",\"regionalDisplayName\"\ + :\"Singapore\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\"\ + :\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafrica\"\ + ,\"name\":\"southafrica\",\"displayName\":\"South Africa\",\"regionalDisplayName\"\ + :\"South Africa\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\"\ + :\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerland\"\ + ,\"name\":\"switzerland\",\"displayName\":\"Switzerland\",\"regionalDisplayName\"\ + :\"Switzerland\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\"\ + :\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uae\"\ + ,\"name\":\"uae\",\"displayName\":\"United Arab Emirates\",\"regionalDisplayName\"\ + :\"United Arab Emirates\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\"\ + :\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk\"\ + ,\"name\":\"uk\",\"displayName\":\"United Kingdom\",\"regionalDisplayName\"\ + :\"United Kingdom\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\"\ + :\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstates\"\ + ,\"name\":\"unitedstates\",\"displayName\":\"United States\",\"regionalDisplayName\"\ + :\"United States\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\"\ + :\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstateseuap\"\ + ,\"name\":\"unitedstateseuap\",\"displayName\":\"United States EUAP\",\"regionalDisplayName\"\ + :\"United States EUAP\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\"\ + :\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage\"\ + ,\"name\":\"eastasiastage\",\"displayName\":\"East Asia (Stage)\",\"regionalDisplayName\"\ + :\"(Asia Pacific) East Asia (Stage)\",\"metadata\":{\"regionType\":\"Logical\"\ + ,\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia Pacific\"}},{\"id\"\ + :\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasiastage\"\ + ,\"name\":\"southeastasiastage\",\"displayName\":\"Southeast Asia (Stage)\"\ + ,\"regionalDisplayName\":\"(Asia Pacific) Southeast Asia (Stage)\",\"metadata\"\ + :{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\"\ + :\"Asia Pacific\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstg\"\ + ,\"name\":\"eastusstg\",\"displayName\":\"East US STG\",\"regionalDisplayName\"\ + :\"(US) East US STG\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + :\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-79.8164\",\"latitude\"\ + :\"37.3719\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\"\ + :\"southcentralusstg\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstg\"\ + 
}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstg\"\ + ,\"name\":\"southcentralusstg\",\"displayName\":\"South Central US STG\",\"\ + regionalDisplayName\":\"(US) South Central US STG\",\"metadata\":{\"regionType\"\ + :\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\"\ + :\"-98.5\",\"latitude\":\"29.4167\",\"physicalLocation\":\"Texas\",\"pairedRegion\"\ + :[{\"name\":\"eastusstg\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstg\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus\"\ + ,\"name\":\"northcentralus\",\"displayName\":\"North Central US\",\"regionalDisplayName\"\ + :\"(US) North Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + :\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-87.6278\",\"latitude\"\ + :\"41.8819\",\"physicalLocation\":\"Illinois\",\"pairedRegion\":[{\"name\"\ + :\"southcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus\"\ + ,\"name\":\"westus\",\"displayName\":\"West US\",\"regionalDisplayName\":\"\ + (US) West US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + :\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-122.417\",\"latitude\"\ + :\"37.783\",\"physicalLocation\":\"California\",\"pairedRegion\":[{\"name\"\ + :\"eastus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest\"\ + ,\"name\":\"jioindiawest\",\"displayName\":\"Jio India West\",\"regionalDisplayName\"\ + :\"(Asia Pacific) Jio India West\",\"metadata\":{\"regionType\":\"Physical\"\ + ,\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia Pacific\",\"longitude\"\ + :\"70.05773\",\"latitude\":\"22.470701\",\"physicalLocation\":\"Jamnagar\"\ + ,\"pairedRegion\":[{\"name\":\"jioindiacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap\"\ + ,\"name\":\"centraluseuap\",\"displayName\":\"Central US EUAP\",\"regionalDisplayName\"\ + :\"(US) Central US EUAP\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + :\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-93.6208\",\"latitude\"\ + :\"41.5908\",\"pairedRegion\":[{\"name\":\"eastus2euap\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus\"\ + ,\"name\":\"westcentralus\",\"displayName\":\"West Central US\",\"regionalDisplayName\"\ + :\"(US) West Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + :\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-110.234\",\"latitude\"\ + :\"40.890\",\"physicalLocation\":\"Wyoming\",\"pairedRegion\":[{\"name\":\"\ + westus2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest\"\ + ,\"name\":\"southafricawest\",\"displayName\":\"South Africa West\",\"regionalDisplayName\"\ + :\"(Africa) South Africa West\",\"metadata\":{\"regionType\":\"Physical\"\ + ,\"regionCategory\":\"Other\",\"geographyGroup\":\"Africa\",\"longitude\"\ + 
:\"18.843266\",\"latitude\":\"-34.075691\",\"physicalLocation\":\"Cape Town\"\ + ,\"pairedRegion\":[{\"name\":\"southafricanorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral\"\ + ,\"name\":\"australiacentral\",\"displayName\":\"Australia Central\",\"regionalDisplayName\"\ + :\"(Asia Pacific) Australia Central\",\"metadata\":{\"regionType\":\"Physical\"\ + ,\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia Pacific\",\"longitude\"\ + :\"149.1244\",\"latitude\":\"-35.3075\",\"physicalLocation\":\"Canberra\"\ + ,\"pairedRegion\":[{\"name\":\"australiacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2\"\ + ,\"name\":\"australiacentral2\",\"displayName\":\"Australia Central 2\",\"\ + regionalDisplayName\":\"(Asia Pacific) Australia Central 2\",\"metadata\"\ + :{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\"\ + :\"Asia Pacific\",\"longitude\":\"149.1244\",\"latitude\":\"-35.3075\",\"\ + physicalLocation\":\"Canberra\",\"pairedRegion\":[{\"name\":\"australiacentral2\"\ + ,\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast\"\ + ,\"name\":\"australiasoutheast\",\"displayName\":\"Australia Southeast\",\"\ + regionalDisplayName\":\"(Asia Pacific) Australia Southeast\",\"metadata\"\ + :{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\"\ + :\"Asia Pacific\",\"longitude\":\"144.9631\",\"latitude\":\"-37.8136\",\"\ + physicalLocation\":\"Victoria\",\"pairedRegion\":[{\"name\":\"australiaeast\"\ + ,\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest\"\ + ,\"name\":\"japanwest\",\"displayName\":\"Japan West\",\"regionalDisplayName\"\ + :\"(Asia Pacific) Japan West\",\"metadata\":{\"regionType\":\"Physical\",\"\ + regionCategory\":\"Other\",\"geographyGroup\":\"Asia Pacific\",\"longitude\"\ + :\"135.5022\",\"latitude\":\"34.6939\",\"physicalLocation\":\"Osaka\",\"pairedRegion\"\ + :[{\"name\":\"japaneast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral\"\ + ,\"name\":\"jioindiacentral\",\"displayName\":\"Jio India Central\",\"regionalDisplayName\"\ + :\"(Asia Pacific) Jio India Central\",\"metadata\":{\"regionType\":\"Physical\"\ + ,\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia Pacific\",\"longitude\"\ + :\"79.08886\",\"latitude\":\"21.146633\",\"physicalLocation\":\"Nagpur\",\"\ + pairedRegion\":[{\"name\":\"jioindiawest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth\"\ + ,\"name\":\"koreasouth\",\"displayName\":\"Korea South\",\"regionalDisplayName\"\ + :\"(Asia Pacific) Korea South\",\"metadata\":{\"regionType\":\"Physical\"\ + ,\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia Pacific\",\"longitude\"\ + :\"129.0756\",\"latitude\":\"35.1796\",\"physicalLocation\":\"Busan\",\"pairedRegion\"\ + 
:[{\"name\":\"koreacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\"\ + ,\"name\":\"southindia\",\"displayName\":\"South India\",\"regionalDisplayName\"\ + :\"(Asia Pacific) South India\",\"metadata\":{\"regionType\":\"Physical\"\ + ,\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia Pacific\",\"longitude\"\ + :\"80.1636\",\"latitude\":\"12.9822\",\"physicalLocation\":\"Chennai\",\"\ + pairedRegion\":[{\"name\":\"centralindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westindia\"\ + ,\"name\":\"westindia\",\"displayName\":\"West India\",\"regionalDisplayName\"\ + :\"(Asia Pacific) West India\",\"metadata\":{\"regionType\":\"Physical\",\"\ + regionCategory\":\"Other\",\"geographyGroup\":\"Asia Pacific\",\"longitude\"\ + :\"72.868\",\"latitude\":\"19.088\",\"physicalLocation\":\"Mumbai\",\"pairedRegion\"\ + :[{\"name\":\"southindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast\"\ + ,\"name\":\"canadaeast\",\"displayName\":\"Canada East\",\"regionalDisplayName\"\ + :\"(Canada) Canada East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + :\"Other\",\"geographyGroup\":\"Canada\",\"longitude\":\"-71.217\",\"latitude\"\ + :\"46.817\",\"physicalLocation\":\"Quebec\",\"pairedRegion\":[{\"name\":\"\ + canadacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth\"\ + ,\"name\":\"francesouth\",\"displayName\":\"France South\",\"regionalDisplayName\"\ + :\"(Europe) France South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + :\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"2.1972\",\"latitude\"\ + :\"43.8345\",\"physicalLocation\":\"Marseille\",\"pairedRegion\":[{\"name\"\ + :\"francecentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth\"\ + ,\"name\":\"germanynorth\",\"displayName\":\"Germany North\",\"regionalDisplayName\"\ + :\"(Europe) Germany North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + :\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.806422\",\"latitude\"\ + :\"53.073635\",\"physicalLocation\":\"Berlin\",\"pairedRegion\":[{\"name\"\ + :\"germanywestcentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest\"\ + ,\"name\":\"norwaywest\",\"displayName\":\"Norway West\",\"regionalDisplayName\"\ + :\"(Europe) Norway West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + :\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"5.733107\",\"latitude\"\ + :\"58.969975\",\"physicalLocation\":\"Norway\",\"pairedRegion\":[{\"name\"\ + :\"norwayeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedensouth\"\ + ,\"name\":\"swedensouth\",\"displayName\":\"Sweden South\",\"regionalDisplayName\"\ + :\"(Europe) 
Sweden South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + :\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"13.0007\",\"latitude\"\ + :\"55.6059\",\"physicalLocation\":\"Malmo\",\"pairedRegion\":[{\"name\":\"\ + swedencentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedencentral\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest\"\ + ,\"name\":\"switzerlandwest\",\"displayName\":\"Switzerland West\",\"regionalDisplayName\"\ + :\"(Europe) Switzerland West\",\"metadata\":{\"regionType\":\"Physical\",\"\ + regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"\ + 6.143158\",\"latitude\":\"46.204391\",\"physicalLocation\":\"Geneva\",\"pairedRegion\"\ + :[{\"name\":\"switzerlandnorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest\"\ + ,\"name\":\"ukwest\",\"displayName\":\"UK West\",\"regionalDisplayName\":\"\ + (Europe) UK West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\"\ + :\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"-3.084\",\"latitude\"\ + :\"53.427\",\"physicalLocation\":\"Cardiff\",\"pairedRegion\":[{\"name\":\"\ + uksouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral\"\ + ,\"name\":\"uaecentral\",\"displayName\":\"UAE Central\",\"regionalDisplayName\"\ + :\"(Middle East) UAE Central\",\"metadata\":{\"regionType\":\"Physical\",\"\ + regionCategory\":\"Other\",\"geographyGroup\":\"Middle East\",\"longitude\"\ + :\"54.366669\",\"latitude\":\"24.466667\",\"physicalLocation\":\"Abu Dhabi\"\ + ,\"pairedRegion\":[{\"name\":\"uaenorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsoutheast\"\ + ,\"name\":\"brazilsoutheast\",\"displayName\":\"Brazil Southeast\",\"regionalDisplayName\"\ + :\"(South America) Brazil Southeast\",\"metadata\":{\"regionType\":\"Physical\"\ + ,\"regionCategory\":\"Other\",\"geographyGroup\":\"South America\",\"longitude\"\ + :\"-43.2075\",\"latitude\":\"-22.90278\",\"physicalLocation\":\"Rio\",\"pairedRegion\"\ + :[{\"name\":\"brazilsouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv\"\ + ,\"name\":\"eastusslv\",\"displayName\":\"East US SLV\",\"regionalDisplayName\"\ + :\"(South America) East US SLV\",\"metadata\":{\"regionType\":\"Physical\"\ + ,\"regionCategory\":\"Other\",\"geographyGroup\":\"South America\",\"longitude\"\ + :\"-43.2075\",\"latitude\":\"-22.90278\",\"physicalLocation\":\"Silverstone\"\ + ,\"pairedRegion\":[{\"name\":\"eastusslv\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/polandcentral\"\ + ,\"name\":\"polandcentral\",\"displayName\":\"Poland Central\",\"regionalDisplayName\"\ + :\"(Europe) Poland Central\",\"metadata\":{\"regionType\":\"Physical\",\"\ + regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"\ + 14.6512702\",\"latitude\":\"51.8685079\",\"physicalLocation\":\"Warsaw\",\"\ + 
pairedRegion\":[{\"name\":\"swedencentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedencentral\"\ + }]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/israelcentral\"\ + ,\"name\":\"israelcentral\",\"displayName\":\"Israel Central\",\"regionalDisplayName\"\ + :\"(Middle East) Israel Central\",\"metadata\":{\"regionType\":\"Physical\"\ + ,\"regionCategory\":\"Other\",\"geographyGroup\":\"Middle East\",\"longitude\"\ + :\"33.4506633\",\"latitude\":\"31.2655698\",\"physicalLocation\":\"Israel\"\ + ,\"pairedRegion\":[{\"name\":\"swedencentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedencentral\"\ + }]}}]}" + headers: + cache-control: + - no-cache + content-length: + - '32299' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:11:51 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"name": "test-account-000002", "location": "westus2", "properties": {"sku": + {"name": "Basic"}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation account create + Connection: + - keep-alive + Content-Length: + - '96' + Content-Type: + - application/json + ParameterSetName: + - --resource-group --name --location + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002?api-version=2021-06-22 + response: + body: + string: '{"name":"test-account-000002","systemData":{"createdAt":"2022-10-19T08:11:57.4033333+00:00","lastModifiedAt":"2022-10-19T08:11:57.4033333+00:00"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002","type":"Microsoft.Automation/AutomationAccounts","location":"westus2","tags":{},"etag":null,"properties":{"disableLocalAuth":false,"sku":{"name":"Basic","family":null,"capacity":null},"state":"Ok","RegistrationUrl":"https://0ff783fa-f9d8-49dd-9d0a-abd183083726.agentsvc.wus2.azure-automation.net/accounts/0ff783fa-f9d8-49dd-9d0a-abd183083726","encryption":{"keySource":"Microsoft.Automation","identity":{"userAssignedIdentity":null}},"automationHybridServiceUrl":"https://0ff783fa-f9d8-49dd-9d0a-abd183083726.jrds.wus2.azure-automation.net/automationAccounts/0ff783fa-f9d8-49dd-9d0a-abd183083726","RuntimeConfiguration":{"powershell":{"builtinModules":{"Az":"8.0.0"}},"powershell7":{"builtinModules":{"Az":"8.0.0"}}},"creationTime":"2022-10-19T08:11:57.4033333+00:00","lastModifiedBy":null,"lastModifiedTime":"2022-10-19T08:11:57.4033333+00:00"}}' + headers: + cache-control: + - no-cache + content-length: + - '1163' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:11:59 GMT + expires: + - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002?api-version=2021-06-22 + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; 
includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1195' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation account update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --tags + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002?api-version=2021-06-22 + response: + body: + string: '{"name":"test-account-000002","systemData":{"createdAt":"2022-10-19T08:11:57.4033333+00:00","lastModifiedAt":"2022-10-19T08:11:57.4033333+00:00"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002","type":"Microsoft.Automation/AutomationAccounts","location":"westus2","tags":{},"etag":null,"properties":{"disableLocalAuth":false,"sku":{"name":"Basic","family":null,"capacity":null},"state":"Ok","RegistrationUrl":"https://0ff783fa-f9d8-49dd-9d0a-abd183083726.agentsvc.wus2.azure-automation.net/accounts/0ff783fa-f9d8-49dd-9d0a-abd183083726","encryption":{"keySource":"Microsoft.Automation","identity":{"userAssignedIdentity":null}},"privateEndpointConnections":[],"automationHybridServiceUrl":"https://0ff783fa-f9d8-49dd-9d0a-abd183083726.jrds.wus2.azure-automation.net/automationAccounts/0ff783fa-f9d8-49dd-9d0a-abd183083726","RuntimeConfiguration":{"powershell":{"builtinModules":{"Az":"8.0.0"}},"powershell7":{"builtinModules":{"Az":"8.0.0"}}},"creationTime":"2022-10-19T08:11:57.4033333+00:00","lastModifiedBy":null,"lastModifiedTime":"2022-10-19T08:11:57.4033333+00:00"}}' + headers: + cache-control: + - no-cache + content-length: + - '1195' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:00 GMT + expires: + - '-1' + ocp-automation-accountid: + - 0ff783fa-f9d8-49dd-9d0a-abd183083726 + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"name": "test-account-000002", "tags": {"A": "a"}, "properties": {"sku": + {"name": "Basic"}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation account update + Connection: + - keep-alive + Content-Length: + - '93' + Content-Type: + - application/json + ParameterSetName: + - --resource-group --name --tags + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: PATCH + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002?api-version=2021-06-22 + response: + body: + string: 
'{"name":"test-account-000002","systemData":{"createdAt":"2022-10-19T08:11:57.4033333+00:00","lastModifiedAt":"2022-10-19T08:12:01.98+00:00"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002","type":"Microsoft.Automation/AutomationAccounts","location":"westus2","tags":{"A":"a"},"etag":null,"properties":{"disableLocalAuth":false,"sku":{"name":"Basic","family":null,"capacity":null},"state":"Ok","RegistrationUrl":"https://0ff783fa-f9d8-49dd-9d0a-abd183083726.agentsvc.wus2.azure-automation.net/accounts/0ff783fa-f9d8-49dd-9d0a-abd183083726","encryption":{"keySource":"Microsoft.Automation","identity":{"userAssignedIdentity":null}},"automationHybridServiceUrl":"https://0ff783fa-f9d8-49dd-9d0a-abd183083726.jrds.wus2.azure-automation.net/automationAccounts/0ff783fa-f9d8-49dd-9d0a-abd183083726","RuntimeConfiguration":{"powershell":{"builtinModules":{"Az":"8.0.0"}},"powershell7":{"builtinModules":{"Az":"8.0.0"}}},"creationTime":"2022-10-19T08:11:57.4033333+00:00","lastModifiedBy":null,"lastModifiedTime":"2022-10-19T08:12:01.98+00:00"}}' + headers: + cache-control: + - no-cache + content-length: + - '1160' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:03 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1197' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation account show + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002?api-version=2021-06-22 + response: + body: + string: '{"name":"test-account-000002","systemData":{"createdAt":"2022-10-19T08:11:57.4033333+00:00","lastModifiedAt":"2022-10-19T08:12:01.98+00:00"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002","type":"Microsoft.Automation/AutomationAccounts","location":"westus2","tags":{"A":"a"},"etag":null,"properties":{"disableLocalAuth":false,"sku":{"name":"Basic","family":null,"capacity":null},"state":"Ok","RegistrationUrl":"https://0ff783fa-f9d8-49dd-9d0a-abd183083726.agentsvc.wus2.azure-automation.net/accounts/0ff783fa-f9d8-49dd-9d0a-abd183083726","encryption":{"keySource":"Microsoft.Automation","identity":{"userAssignedIdentity":null}},"privateEndpointConnections":[],"automationHybridServiceUrl":"https://0ff783fa-f9d8-49dd-9d0a-abd183083726.jrds.wus2.azure-automation.net/automationAccounts/0ff783fa-f9d8-49dd-9d0a-abd183083726","RuntimeConfiguration":{"powershell":{"builtinModules":{"Az":"8.0.0"}},"powershell7":{"builtinModules":{"Az":"8.0.0"}}},"creationTime":"2022-10-19T08:11:57.4033333+00:00","lastModifiedBy":null,"lastModifiedTime":"2022-10-19T08:12:01.98+00:00"}}' + headers: + cache-control: + - no-cache + content-length: + - '1192' + content-type: + 
- application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:04 GMT + expires: + - '-1' + ocp-automation-accountid: + - 0ff783fa-f9d8-49dd-9d0a-abd183083726 + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation account list + Connection: + - keep-alive + ParameterSetName: + - --resource-group + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts?api-version=2021-06-22 + response: + body: + string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002","location":"westus2","name":"test-account-000002","type":"Microsoft.Automation/AutomationAccounts","tags":{"A":"a"},"properties":{"creationTime":"2022-10-19T08:11:57.4033333+00:00","lastModifiedTime":"2022-10-19T08:12:01.98+00:00","state":"Ok","disableLocalAuth":false}}]}' + headers: + cache-control: + - no-cache + content-length: + - '454' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:06 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation runbook create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --automation-account-name --name --type + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_automation_000001?api-version=2021-04-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001","name":"cli_test_automation_000001","type":"Microsoft.Resources/resourceGroups","location":"westus2","tags":{"product":"azurecli","cause":"automation","date":"2022-10-19T08:11:44Z"},"properties":{"provisioningState":"Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '331' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:06 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"name": "test-runbook-000003", "location": "westus2", "properties": {"runbookType": + "PowerShell", "draft": {}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation runbook create + Connection: + - keep-alive + Content-Length: + - '112' + Content-Type: + - application/json + 
ParameterSetName: + - --resource-group --automation-account-name --name --type + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003?api-version=2018-06-30 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003","name":"test-runbook-000003","type":"Microsoft.Automation/AutomationAccounts/Runbooks","location":"westus2","tags":{},"etag":"\"638017639321533333\"","properties":{"description":null,"logVerbose":false,"logProgress":false,"logActivityTrace":0,"runbookType":"PowerShell","parameters":{},"state":"New","jobCount":0,"provisioningState":"Succeeded","serviceManagementTags":null,"outputTypes":[],"creationTime":"2022-10-19T08:12:12.1533333+00:00","lastModifiedBy":null,"lastModifiedTime":"2022-10-19T08:12:12.1533333+00:00"}}' + headers: + cache-control: + - no-cache + content-length: + - '721' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:12 GMT + etag: + - '"638017639321533333"' + expires: + - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003?api-version=2018-06-30 + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 201 + message: Created +- request: + body: '{"properties": {"logVerbose": true, "logProgress": true, "logActivityTrace": + 1}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation runbook update + Connection: + - keep-alive + Content-Length: + - '80' + Content-Type: + - application/json + ParameterSetName: + - --resource-group --automation-account-name --name --log-activity-trace --log-verbose + --log-progress + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: PATCH + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003?api-version=2018-06-30 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003","name":"test-runbook-000003","type":"Microsoft.Automation/AutomationAccounts/Runbooks","location":"westus2","tags":{},"etag":"\"638017639348900000\"","properties":{"description":null,"logVerbose":true,"logProgress":true,"logActivityTrace":1,"runbookType":"PowerShell","parameters":{},"state":"New","jobCount":0,"provisioningState":"Succeeded","serviceManagementTags":null,"outputTypes":[],"creationTime":"2022-10-19T08:12:12.1533333+00:00","lastModifiedBy":"{scrubbed}","lastModifiedTime":"2022-10-19T08:12:14.89+00:00"}}' + headers: + 
cache-control: + - no-cache + content-length: + - '722' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:14 GMT + etag: + - '"638017639348900000"' + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1198' + status: + code: 200 + message: OK +- request: + body: '@C:UserssupadhyayAppDataLocalTempPowerShell.ps' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation runbook replace-content + Connection: + - keep-alive + Content-Length: + - '46' + Content-Type: + - text/powershell + ParameterSetName: + - --resource-group --automation-account-name --name --content + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003/draft/content?api-version=2018-06-30 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Wed, 19 Oct 2022 08:12:15 GMT + expires: + - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003/operationResults/9822976c-fa29-4199-a837-8dcf4ebeb129?api-version=2018-06-30 + ocp-automation-operationresultid: + - 9822976c-fa29-4199-a837-8dcf4ebeb129 + - 9822976c-fa29-4199-a837-8dcf4ebeb129 + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1198' + status: + code: 202 + message: Accepted +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - automation runbook replace-content + Connection: + - keep-alive + ParameterSetName: + - --resource-group --automation-account-name --name --content + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003/operationResults/9822976c-fa29-4199-a837-8dcf4ebeb129?api-version=2018-06-30 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + content-type: + - text/plain; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:22 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation runbook publish + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --resource-group --automation-account-name --name + User-Agent: + 
- AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003/publish?api-version=2018-06-30 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Wed, 19 Oct 2022 08:12:24 GMT + expires: + - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003/operationResults/d36523b8-0a10-483b-948b-86c157f24ee4?api-version=2018-06-30 + ocp-automation-operationresultid: + - d36523b8-0a10-483b-948b-86c157f24ee4 + - d36523b8-0a10-483b-948b-86c157f24ee4 + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 202 + message: Accepted +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - automation runbook publish + Connection: + - keep-alive + ParameterSetName: + - --resource-group --automation-account-name --name + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/runbooks/test-runbook-000003/operationResults/d36523b8-0a10-483b-948b-86c157f24ee4?api-version=2018-06-30 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + content-type: + - text/plain; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:30 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"name": "hwg-000004"}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation hrwg create + Connection: + - keep-alive + Content-Length: + - '22' + Content-Type: + - application/json + ParameterSetName: + - --resource-group --automation-account-name --name + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.24.0 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups/hwg-000004?api-version=2022-08-08 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups/hwg-000004","name":"hwg-000004","type":"Microsoft.Automation/AutomationAccounts/HybridRunbookWorkerGroups","properties":{"groupType":"User","credential":{"name":null},"hybridRunbookWorkers":null},"systemData":{"createdAt":"2022-10-19T08:12:33.4464471+00:00","lastModifiedAt":"2022-10-19T08:12:33.4464471+00:00"}}' + headers: + cache-control: + - no-cache + content-length: + - '509' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:33 GMT + expires: + - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups/hwg-000004?api-version=2022-08-08 + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 201 + message: Created +- request: + body: '{"name": "hwg-000004"}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation hrwg create + Connection: + - keep-alive + Content-Length: + - '22' + Content-Type: + - application/json + ParameterSetName: + - --resource-group --automation-account-name --name + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.24.0 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups/hwg-000004?api-version=2022-08-08 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups/hwg-000004","name":"hwg-000004","type":"Microsoft.Automation/AutomationAccounts/HybridRunbookWorkerGroups","properties":{"groupType":"User","credential":{"name":null},"hybridRunbookWorkers":null},"systemData":{"createdAt":"2022-10-19T08:12:33.4464471+00:00","lastModifiedAt":"2022-10-19T08:12:35.2433953+00:00"}}' + headers: + cache-control: + - no-cache + content-length: + - '509' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:35 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1196' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation hrwg show + Connection: + - keep-alive + ParameterSetName: + - --resource-group --automation-account-name --name + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.24.0 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups/hwg-000004?api-version=2022-08-08 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups/hwg-000004","name":"hwg-000004","type":"Microsoft.Automation/AutomationAccounts/HybridRunbookWorkerGroups","properties":{"groupType":"User","credential":{"name":null},"hybridRunbookWorkers":null},"systemData":{"createdAt":"2022-10-19T08:12:33.4464471+00:00","lastModifiedAt":"2022-10-19T08:12:35.2433953+00:00"}}' + headers: + cache-control: + - no-cache + content-length: + - '509' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:36 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation hrwg list + Connection: + - keep-alive + ParameterSetName: + - --resource-group --automation-account-name + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.24.0 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups?api-version=2022-08-08 + response: + body: + string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups/hwg-000004","name":"hwg-000004","type":"Microsoft.Automation/AutomationAccounts/HybridRunbookWorkerGroups","properties":{"groupType":"User","credential":{"name":null},"hybridRunbookWorkers":null},"systemData":{"createdAt":"2022-10-19T08:12:33.4464471+00:00","lastModifiedAt":"2022-10-19T08:12:35.2433953+00:00"}}]}' + headers: + cache-control: + - no-cache + content-length: + - '521' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:38 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation hrwg hrw list + Connection: + - keep-alive + ParameterSetName: + - --automation-account-name --hybrid-runbook-worker-group-name -g + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.24.0 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups/hwg-000004/hybridRunbookWorkers?api-version=2022-08-08 + response: + body: + string: 
'{"value":[]}' + headers: + cache-control: + - no-cache + content-length: + - '12' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:41 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - automation hrwg delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --resource-group --automation-account-name --name --yes + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.24.0 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/hybridRunbookWorkerGroups/hwg-000004?api-version=2022-08-08 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Wed, 19 Oct 2022 08:12:42 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-deletes: + - '14997' + status: + code: 200 + message: OK +- request: + body: '{"properties": {"contentLink": {"uri": "https://files.pythonhosted.org/packages/7f/e2/85dfb9f7364cbd7a9213caea0e91fc948da3c912a2b222a3e43bc9cc6432/requires.io-0.2.6-py2.py3-none-any.whl"}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation python3-package create + Connection: + - keep-alive + Content-Length: + - '189' + Content-Type: + - application/json + ParameterSetName: + - --resource-group --automation-account-name --name --content-link + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.24.0 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/python3Packages/py3-package-000005?api-version=2022-08-08 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/python3Packages/py3-package-000005","name":"py3-package-000005","type":"Microsoft.Automation/AutomationAccounts/Python3Packages","location":"westus2","tags":{},"etag":null,"properties":{"isGlobal":false,"version":null,"sizeInBytes":0,"activityCount":0,"creationTime":"2022-10-19T08:12:44.83+00:00","lastModifiedTime":"2022-10-19T08:12:44.8866667+00:00","error":{"code":null,"message":null},"provisioningState":"Creating","isComposite":false}}' + headers: + cache-control: + - no-cache + content-length: + - '613' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:44 GMT + expires: + - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/python3Packages/py3-package-000005?api-version=2022-08-08 + pragma: + - 
no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1197' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation python3-package update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --automation-account-name --name --content-link + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.24.0 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/python3Packages/py3-package-000005?api-version=2022-08-08 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/python3Packages/py3-package-000005","name":"py3-package-000005","type":"Microsoft.Automation/AutomationAccounts/Python3Packages","location":"westus2","tags":{},"etag":null,"properties":{"isGlobal":false,"version":null,"sizeInBytes":0,"activityCount":0,"creationTime":"2022-10-19T08:12:44.83+00:00","lastModifiedTime":"2022-10-19T08:12:44.8866667+00:00","error":{"code":null,"message":""},"provisioningState":"Creating","isComposite":false}}' + headers: + cache-control: + - no-cache + content-length: + - '611' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:46 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"location": "westus2", "properties": {"activityCount": 0, "contentLink": + {"uri": "https://files.pythonhosted.org/packages/7f/e2/85dfb9f7364cbd7a9213caea0e91fc948da3c912a2b222a3e43bc9cc6432/requires.io-0.2.6-py2.py3-none-any.whl"}, + "creationTime": "2022-10-19T08:12:44.83+00:00", "error": {"message": ""}, "isComposite": + false, "isGlobal": false, "lastModifiedTime": "2022-10-19T08:12:44.8866667+00:00", + "provisioningState": "Creating", "sizeInBytes": 0}, "tags": {}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation python3-package update + Connection: + - keep-alive + Content-Length: + - '467' + Content-Type: + - application/json + ParameterSetName: + - --resource-group --automation-account-name --name --content-link + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.24.0 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/python3Packages/py3-package-000005?api-version=2022-08-08 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/python3Packages/py3-package-000005","name":"py3-package-000005","type":"Microsoft.Automation/AutomationAccounts/Python3Packages","location":"westus2","tags":{},"etag":null,"properties":{"isGlobal":false,"version":null,"sizeInBytes":0,"activityCount":0,"creationTime":"2022-10-19T08:12:44.83+00:00","lastModifiedTime":"2022-10-19T08:12:47.3733333+00:00","error":{"code":null,"message":null},"provisioningState":"Creating","isComposite":false}}' + headers: + cache-control: + - no-cache + content-length: + - '613' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:46 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1197' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation python3-package show + Connection: + - keep-alive + ParameterSetName: + - --resource-group --automation-account-name --name + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.24.0 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/python3Packages/py3-package-000005?api-version=2022-08-08 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/python3Packages/py3-package-000005","name":"py3-package-000005","type":"Microsoft.Automation/AutomationAccounts/Python3Packages","location":"westus2","tags":{},"etag":null,"properties":{"isGlobal":false,"version":null,"sizeInBytes":0,"activityCount":0,"creationTime":"2022-10-19T08:12:44.83+00:00","lastModifiedTime":"2022-10-19T08:12:47.3733333+00:00","error":{"code":null,"message":""},"provisioningState":"Creating","isComposite":false}}' + headers: + cache-control: + - no-cache + content-length: + - '611' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:48 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation python3-package list + Connection: + - keep-alive + ParameterSetName: + - --resource-group --automation-account-name + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.24.0 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/python3Packages?api-version=2022-08-08 + response: + body: + string: 
'{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/python3Packages/py3-package-000005","name":"py3-package-000005","properties":{"isGlobal":false,"version":null,"sizeInBytes":0,"activityCount":0,"creationTime":"2022-10-19T08:12:44.83+00:00","lastModifiedTime":"2022-10-19T08:12:47.3733333+00:00","provisioningState":"Creating","isComposite":false}}]}' + headers: + cache-control: + - no-cache + content-length: + - '480' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:49 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - automation python3-package delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --resource-group --automation-account-name --name --yes + User-Agent: + - AZURECLI/2.41.0 (AAZ) azsdk-python-core/1.24.0 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/python3Packages/py3-package-000005?api-version=2022-08-08 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Wed, 19 Oct 2022 08:12:52 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + status: + code: 200 + message: OK +- request: + body: '{"properties": {"runbook": {"name": "test-runbook-000003"}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation runbook start + Connection: + - keep-alive + Content-Length: + - '60' + Content-Type: + - application/json + ParameterSetName: + - --resource-group --automation-account-name --name + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/jobs/ef6919bf-b827-4b18-8475-8d4a03a49d0e?api-version=2019-06-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/jobs/ef6919bf-b827-4b18-8475-8d4a03a49d0e","name":"ef6919bf-b827-4b18-8475-8d4a03a49d0e","type":"Microsoft.Automation/AutomationAccounts/Jobs","properties":{"jobId":"0f8e8dbc-426f-4dcf-beef-4aff5e8d3a63","creationTime":"2022-10-19T08:12:56.1766667+00:00","provisioningState":"Processing","status":"New","statusDetails":"None","startedBy":null,"startTime":null,"endTime":null,"lastModifiedTime":"2022-10-19T08:12:56.1766667+00:00","lastStatusModifiedTime":"2022-10-19T08:12:56.1766667+00:00","exception":null,"parameters":{},"runOn":null,"runbook":{"name":"test-runbook-000003"}}}' + headers: + cache-control: + - no-cache + content-length: + - '750' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:56 GMT + expires: + - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/jobs/ef6919bf-b827-4b18-8475-8d4a03a49d0e?api-version=2019-06-01 + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-resource-requests: + - '1999' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation job list + Connection: + - keep-alive + ParameterSetName: + - --resource-group --automation-account-name + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/jobs?api-version=2019-06-01 + response: + body: + string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/jobs/ef6919bf-b827-4b18-8475-8d4a03a49d0e","name":"ef6919bf-b827-4b18-8475-8d4a03a49d0e","type":"Microsoft.Automation/AutomationAccounts/Jobs","properties":{"jobId":"0f8e8dbc-426f-4dcf-beef-4aff5e8d3a63","runbook":{"name":"test-runbook-000003"},"provisioningState":"Processing","status":"New","creationTime":"2022-10-19T08:12:56.1906698+00:00","startTime":null,"lastModifiedTime":"2022-10-19T08:12:56.1906698+00:00","endTime":null,"runOn":null}}]}' + headers: + cache-control: + - no-cache + content-length: + - '628' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:57 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-resource-requests: + - '599' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation job show + Connection: + - keep-alive + ParameterSetName: + - --resource-group --automation-account-name --name + User-Agent: + - AZURECLI/2.41.0 
azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/jobs/ef6919bf-b827-4b18-8475-8d4a03a49d0e?api-version=2019-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002/jobs/ef6919bf-b827-4b18-8475-8d4a03a49d0e","name":"ef6919bf-b827-4b18-8475-8d4a03a49d0e","type":"Microsoft.Automation/AutomationAccounts/Jobs","properties":{"jobId":"0f8e8dbc-426f-4dcf-beef-4aff5e8d3a63","creationTime":"2022-10-19T08:12:56.1906698+00:00","provisioningState":"Processing","status":"New","statusDetails":"None","startedBy":"{scrubbed}","startTime":null,"endTime":null,"lastModifiedTime":"2022-10-19T08:12:56.1906698+00:00","lastStatusModifiedTime":"2022-10-19T08:12:56.1906698+00:00","exception":null,"parameters":{},"runOn":null,"runbook":{"name":"test-runbook-000003"}}}' + headers: + cache-control: + - no-cache + content-length: + - '758' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:59 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-resource-requests: + - '599' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation account delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --resource-group --name -y + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_000001/providers/Microsoft.Automation/automationAccounts/test-account-000002?api-version=2021-06-22 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Wed, 19 Oct 2022 08:13:12 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + status: + code: 200 + message: OK +version: 1 diff --git a/src/automation/azext_automation/tests/latest/recordings/test_automation_schedule.yaml b/src/automation/azext_automation/tests/latest/recordings/test_automation_schedule.yaml index 2165be28aaa..ec367de5216 100644 --- a/src/automation/azext_automation/tests/latest/recordings/test_automation_schedule.yaml +++ b/src/automation/azext_automation/tests/latest/recordings/test_automation_schedule.yaml @@ -1,337 +1,337 @@ -interactions: -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation account create - Connection: - - keep-alive - ParameterSetName: - - -n -g - User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_automation_schedule000001?api-version=2021-04-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001","name":"cli_test_automation_schedule000001","type":"Microsoft.Resources/resourceGroups","location":"westus","tags":{"product":"azurecli","cause":"automation","date":"2022-08-22T02:10:38Z"},"properties":{"provisioningState":"Succeeded"}}' - headers: - cache-control: - - no-cache - content-length: - - '346' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 02:10:40 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: '{"name": "account-000002", "location": "westus", "properties": {"sku": - {"name": "Basic"}}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation account create - Connection: - - keep-alive - Content-Length: - - '90' - Content-Type: - - application/json - ParameterSetName: - - -n -g - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002?api-version=2021-06-22 - response: - body: - string: '{"name":"account-000002","systemData":{"createdAt":"2022-08-22T02:10:48.5+00:00","lastModifiedAt":"2022-08-22T02:10:48.5+00:00"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002","type":"Microsoft.Automation/AutomationAccounts","location":"westus","tags":{},"etag":null,"properties":{"disableLocalAuth":false,"sku":{"name":"Basic","family":null,"capacity":null},"state":"Ok","RegistrationUrl":"https://87b4f14f-867a-40e9-bc31-280aafe76a13.agentsvc.wus.azure-automation.net/accounts/87b4f14f-867a-40e9-bc31-280aafe76a13","encryption":{"keySource":"Microsoft.Automation","identity":{"userAssignedIdentity":null}},"automationHybridServiceUrl":"https://87b4f14f-867a-40e9-bc31-280aafe76a13.jrds.wus.azure-automation.net/automationAccounts/87b4f14f-867a-40e9-bc31-280aafe76a13","RuntimeConfiguration":{"powershell":{"builtinModules":{"Az":"8.0.0"}},"powershell7":{"builtinModules":{"Az":"8.0.0"}}},"creationTime":"2022-08-22T02:10:48.5+00:00","lastModifiedBy":null,"lastModifiedTime":"2022-08-22T02:10:48.5+00:00"}}' - headers: - cache-control: - - no-cache - content-length: - - '1134' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 02:10:50 GMT - expires: - - '-1' - location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002?api-version=2021-06-22 - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - status: - code: 201 - message: Created -- request: - body: '{"name": "schedule-000003", "properties": {"description": 
"test", "startTime": - "2022-08-30T10:00:00.000Z", "interval": 1, "frequency": "Hour", "timeZone": - "UTC+08:00"}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation schedule create - Connection: - - keep-alive - Content-Length: - - '168' - Content-Type: - - application/json - ParameterSetName: - - -n -g --automation-account-name --description --frequency --interval --start-time - --time-zone - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002/schedules/schedule-000003?api-version=2020-01-13-preview - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002/schedules/schedule-000003","name":"schedule-000003","type":"Microsoft.Automation/AutomationAccounts/Schedules","properties":{"description":"test","startTime":"2022-08-30T18:00:00+08:00","startTimeOffsetMinutes":480.0,"expiryTime":"9999-12-31T23:59:59.9999999+00:00","expiryTimeOffsetMinutes":0.0,"isEnabled":true,"nextRun":"2022-08-30T18:00:00+08:00","nextRunOffsetMinutes":480.0,"interval":1,"frequency":"Hour","creationTime":"2022-08-22T02:10:51.9833333+00:00","lastModifiedTime":"2022-08-22T02:10:51.9833333+00:00","timeZone":"UTC+08:00","advancedSchedule":null}}' - headers: - cache-control: - - no-cache - content-length: - - '740' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 02:10:51 GMT - expires: - - '-1' - location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002/schedules/schedule-000003?api-version=2020-01-13-preview - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1198' - status: - code: 201 - message: Created -- request: - body: '{"name": "schedule-000003", "properties": {"description": "test1", "isEnabled": - false}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation schedule update - Connection: - - keep-alive - Content-Length: - - '87' - Content-Type: - - application/json - ParameterSetName: - - -n -g --automation-account-name --description --is-enabled - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: PATCH - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002/schedules/schedule-000003?api-version=2020-01-13-preview - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002/schedules/schedule-000003","name":"schedule-000003","type":"Microsoft.Automation/AutomationAccounts/Schedules","properties":{"description":"test1","startTime":"2022-08-30T18:00:00+08:00","startTimeOffsetMinutes":480.0,"expiryTime":"9999-12-31T23:59:59.9999999+00:00","expiryTimeOffsetMinutes":0.0,"isEnabled":false,"nextRun":"2022-08-30T18:00:00+08:00","nextRunOffsetMinutes":480.0,"interval":1,"frequency":"Hour","creationTime":"2022-08-22T02:10:51.9833333+00:00","lastModifiedTime":"2022-08-22T02:10:51.9833333+00:00","timeZone":"UTC+08:00","advancedSchedule":null}}' - headers: - cache-control: - - no-cache - content-length: - - '742' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 02:10:52 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation schedule list - Connection: - - keep-alive - ParameterSetName: - - -g --automation-account-name - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002/schedules?api-version=2020-01-13-preview - response: - body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002/schedules/schedule-000003","name":"schedule-000003","type":"Microsoft.Automation/AutomationAccounts/Schedules","properties":{"description":"test1","startTime":"2022-08-30T18:00:00+08:00","startTimeOffsetMinutes":480.0,"expiryTime":"9999-12-31T23:59:59.9999999+00:00","expiryTimeOffsetMinutes":0.0,"isEnabled":false,"nextRun":"2022-08-30T18:00:00+08:00","nextRunOffsetMinutes":480.0,"interval":1,"frequency":"Hour","creationTime":"2022-08-22T02:10:51.9833333+00:00","lastModifiedTime":"2022-08-22T02:10:52.7933333+00:00","timeZone":"UTC+08:00","advancedSchedule":null}}]}' - headers: - cache-control: - - no-cache - content-length: - - '754' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 02:10:53 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation schedule show - Connection: - - keep-alive - ParameterSetName: - - -n -g --automation-account-name - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002/schedules/schedule-000003?api-version=2020-01-13-preview - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002/schedules/schedule-000003","name":"schedule-000003","type":"Microsoft.Automation/AutomationAccounts/Schedules","properties":{"description":"test1","startTime":"2022-08-30T18:00:00+08:00","startTimeOffsetMinutes":480.0,"expiryTime":"9999-12-31T23:59:59.9999999+00:00","expiryTimeOffsetMinutes":0.0,"isEnabled":false,"nextRun":"2022-08-30T18:00:00+08:00","nextRunOffsetMinutes":480.0,"interval":1,"frequency":"Hour","creationTime":"2022-08-22T02:10:51.9833333+00:00","lastModifiedTime":"2022-08-22T02:10:52.7933333+00:00","timeZone":"UTC+08:00","advancedSchedule":null}}' - headers: - cache-control: - - no-cache - content-length: - - '742' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 02:10:55 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation schedule delete - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - -n -g --automation-account-name -y - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002/schedules/schedule-000003?api-version=2020-01-13-preview - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Mon, 22 Aug 2022 02:10:56 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-deletes: - - '14999' - status: - code: 200 - message: OK -version: 1 +interactions: +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation account create + Connection: + - keep-alive + ParameterSetName: + - -n -g + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_automation_schedule000001?api-version=2021-04-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001","name":"cli_test_automation_schedule000001","type":"Microsoft.Resources/resourceGroups","location":"westus","tags":{"product":"azurecli","cause":"automation","date":"2022-10-19T08:11:44Z"},"properties":{"provisioningState":"Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '346' + content-type: + - 
application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:11:47 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"name": "account-000002", "location": "westus", "properties": {"sku": + {"name": "Basic"}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation account create + Connection: + - keep-alive + Content-Length: + - '90' + Content-Type: + - application/json + ParameterSetName: + - -n -g + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002?api-version=2021-06-22 + response: + body: + string: '{"name":"account-000002","systemData":{"createdAt":"2022-10-19T08:11:54.79+00:00","lastModifiedAt":"2022-10-19T08:11:54.79+00:00"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002","type":"Microsoft.Automation/AutomationAccounts","location":"westus","tags":{},"etag":null,"properties":{"disableLocalAuth":false,"sku":{"name":"Basic","family":null,"capacity":null},"state":"Ok","RegistrationUrl":"https://7b780971-74f5-4e6a-b810-2678be066970.agentsvc.wus.azure-automation.net/accounts/7b780971-74f5-4e6a-b810-2678be066970","encryption":{"keySource":"Microsoft.Automation","identity":{"userAssignedIdentity":null}},"automationHybridServiceUrl":"https://7b780971-74f5-4e6a-b810-2678be066970.jrds.wus.azure-automation.net/automationAccounts/7b780971-74f5-4e6a-b810-2678be066970","RuntimeConfiguration":{"powershell":{"builtinModules":{"Az":"8.0.0"}},"powershell7":{"builtinModules":{"Az":"8.0.0"}}},"creationTime":"2022-10-19T08:11:54.79+00:00","lastModifiedBy":null,"lastModifiedTime":"2022-10-19T08:11:54.79+00:00"}}' + headers: + cache-control: + - no-cache + content-length: + - '1138' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:11:56 GMT + expires: + - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002?api-version=2021-06-22 + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1197' + status: + code: 201 + message: Created +- request: + body: '{"name": "schedule-000003", "properties": {"description": "test", "startTime": + "2022-10-19T10:08:00.000Z", "interval": 1, "frequency": "Hour", "timeZone": + "UTC+08:00"}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation schedule create + Connection: + - keep-alive + Content-Length: + - '168' + Content-Type: + - application/json + ParameterSetName: + - -n -g --automation-account-name --description --frequency --interval --start-time + --time-zone + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: PUT + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002/schedules/schedule-000003?api-version=2020-01-13-preview + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002/schedules/schedule-000003","name":"schedule-000003","type":"Microsoft.Automation/AutomationAccounts/Schedules","properties":{"description":"test","startTime":"2022-10-19T18:08:00+08:00","startTimeOffsetMinutes":480.0,"expiryTime":"9999-12-31T23:59:59.9999999+00:00","expiryTimeOffsetMinutes":0.0,"isEnabled":true,"nextRun":"2022-10-19T18:08:00+08:00","nextRunOffsetMinutes":480.0,"interval":1,"frequency":"Hour","creationTime":"2022-10-19T08:12:00.7566667+00:00","lastModifiedTime":"2022-10-19T08:12:00.7566667+00:00","timeZone":"UTC+08:00","advancedSchedule":null}}' + headers: + cache-control: + - no-cache + content-length: + - '740' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:11:59 GMT + expires: + - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002/schedules/schedule-000003?api-version=2020-01-13-preview + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1198' + status: + code: 201 + message: Created +- request: + body: '{"name": "schedule-000003", "properties": {"description": "test1", "isEnabled": + false}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation schedule update + Connection: + - keep-alive + Content-Length: + - '87' + Content-Type: + - application/json + ParameterSetName: + - -n -g --automation-account-name --description --is-enabled + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: PATCH + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002/schedules/schedule-000003?api-version=2020-01-13-preview + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002/schedules/schedule-000003","name":"schedule-000003","type":"Microsoft.Automation/AutomationAccounts/Schedules","properties":{"description":"test1","startTime":"2022-10-19T18:08:00+08:00","startTimeOffsetMinutes":480.0,"expiryTime":"9999-12-31T23:59:59.9999999+00:00","expiryTimeOffsetMinutes":0.0,"isEnabled":false,"nextRun":"2022-10-19T18:08:00+08:00","nextRunOffsetMinutes":480.0,"interval":1,"frequency":"Hour","creationTime":"2022-10-19T08:12:00.7566667+00:00","lastModifiedTime":"2022-10-19T08:12:00.7566667+00:00","timeZone":"UTC+08:00","advancedSchedule":null}}' + headers: + cache-control: + - no-cache + content-length: + - '742' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:01 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - 
Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1198' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation schedule list + Connection: + - keep-alive + ParameterSetName: + - -g --automation-account-name + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002/schedules?api-version=2020-01-13-preview + response: + body: + string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002/schedules/schedule-000003","name":"schedule-000003","type":"Microsoft.Automation/AutomationAccounts/Schedules","properties":{"description":"test1","startTime":"2022-10-19T18:08:00+08:00","startTimeOffsetMinutes":480.0,"expiryTime":"9999-12-31T23:59:59.9999999+00:00","expiryTimeOffsetMinutes":0.0,"isEnabled":false,"nextRun":"2022-10-19T18:08:00+08:00","nextRunOffsetMinutes":480.0,"interval":1,"frequency":"Hour","creationTime":"2022-10-19T08:12:00.7566667+00:00","lastModifiedTime":"2022-10-19T08:12:02.37+00:00","timeZone":"UTC+08:00","advancedSchedule":null}}]}' + headers: + cache-control: + - no-cache + content-length: + - '749' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:04 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation schedule show + Connection: + - keep-alive + ParameterSetName: + - -n -g --automation-account-name + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002/schedules/schedule-000003?api-version=2020-01-13-preview + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002/schedules/schedule-000003","name":"schedule-000003","type":"Microsoft.Automation/AutomationAccounts/Schedules","properties":{"description":"test1","startTime":"2022-10-19T18:08:00+08:00","startTimeOffsetMinutes":480.0,"expiryTime":"9999-12-31T23:59:59.9999999+00:00","expiryTimeOffsetMinutes":0.0,"isEnabled":false,"nextRun":"2022-10-19T18:08:00+08:00","nextRunOffsetMinutes":480.0,"interval":1,"frequency":"Hour","creationTime":"2022-10-19T08:12:00.7566667+00:00","lastModifiedTime":"2022-10-19T08:12:02.37+00:00","timeZone":"UTC+08:00","advancedSchedule":null}}' + headers: + cache-control: + - no-cache + 
content-length: + - '737' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:06 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation schedule delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -n -g --automation-account-name -y + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_schedule000001/providers/Microsoft.Automation/automationAccounts/account-000002/schedules/schedule-000003?api-version=2020-01-13-preview + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Wed, 19 Oct 2022 08:12:07 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + status: + code: 200 + message: OK +version: 1 diff --git a/src/automation/azext_automation/tests/latest/recordings/test_automation_software_update_configuration.yaml b/src/automation/azext_automation/tests/latest/recordings/test_automation_software_update_configuration.yaml index 82cee3964d1..4ca4908f183 100644 --- a/src/automation/azext_automation/tests/latest/recordings/test_automation_software_update_configuration.yaml +++ b/src/automation/azext_automation/tests/latest/recordings/test_automation_software_update_configuration.yaml @@ -1,1117 +1,1138 @@ -interactions: -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - vm create - Connection: - - keep-alive - ParameterSetName: - - -n -g --image --generate-ssh-key --nsg-rule - User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_automation_software_update_configuration000001?api-version=2021-04-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001","name":"cli_test_automation_software_update_configuration000001","type":"Microsoft.Resources/resourceGroups","location":"westus","tags":{"product":"azurecli","cause":"automation","date":"2022-08-22T05:39:51Z"},"properties":{"provisioningState":"Succeeded"}}' - headers: - cache-control: - - no-cache - content-length: - - '388' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 05:39:54 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - User-Agent: - - python-requests/2.26.0 - method: GET - uri: 
https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/arm-compute/quickstart-templates/aliases.json - response: - body: - string: "{\n \"$schema\": \"http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json\",\n - \ \"contentVersion\": \"1.0.0.0\",\n \"parameters\": {},\n \"variables\": - {},\n \"resources\": [],\n \"outputs\": {\n \"aliases\": {\n \"type\": - \"object\",\n \"value\": {\n \"Linux\": {\n \"CentOS\": - {\n \"publisher\": \"OpenLogic\",\n \"offer\": \"CentOS\",\n - \ \"sku\": \"7.5\",\n \"version\": \"latest\",\n \"architecture\": - \"x64\"\n },\n \"Debian\": {\n \"publisher\": - \"Debian\",\n \"offer\": \"debian-10\",\n \"sku\": \"10\",\n - \ \"version\": \"latest\",\n \"architecture\": \"x64\"\n - \ },\n \"Flatcar\": {\n \"publisher\": \"kinvolk\",\n - \ \"offer\": \"flatcar-container-linux-free\",\n \"sku\": - \"stable\",\n \"version\": \"latest\",\n \"architecture\": - \"x64\"\n },\n \"openSUSE-Leap\": {\n \"publisher\": - \"SUSE\",\n \"offer\": \"opensuse-leap-15-3\",\n \"sku\": - \"gen2\",\n \"version\": \"latest\",\n \"architecture\": - \"x64\"\n },\n \"RHEL\": {\n \"publisher\": \"RedHat\",\n - \ \"offer\": \"RHEL\",\n \"sku\": \"7-LVM\",\n \"version\": - \"latest\",\n \"architecture\": \"x64\"\n },\n \"SLES\": - {\n \"publisher\": \"SUSE\",\n \"offer\": \"sles-15-sp3\",\n - \ \"sku\": \"gen2\",\n \"version\": \"latest\",\n \"architecture\": - \"x64\"\n },\n \"UbuntuLTS\": {\n \"publisher\": - \"Canonical\",\n \"offer\": \"UbuntuServer\",\n \"sku\": - \"18.04-LTS\",\n \"version\": \"latest\",\n \"architecture\": - \"x64\"\n }\n },\n \"Windows\": {\n \"Win2022Datacenter\": - {\n \"publisher\": \"MicrosoftWindowsServer\",\n \"offer\": - \"WindowsServer\",\n \"sku\": \"2022-Datacenter\",\n \"version\": - \"latest\",\n \"architecture\": \"x64\"\n },\n \"Win2022AzureEditionCore\": - {\n \"publisher\": \"MicrosoftWindowsServer\",\n \"offer\": - \"WindowsServer\",\n \"sku\": \"2022-datacenter-azure-edition-core\",\n - \ \"version\": \"latest\",\n \"architecture\": \"x64\"\n - \ },\n \"Win2019Datacenter\": {\n \"publisher\": - \"MicrosoftWindowsServer\",\n \"offer\": \"WindowsServer\",\n \"sku\": - \"2019-Datacenter\",\n \"version\": \"latest\",\n \"architecture\": - \"x64\"\n },\n \"Win2016Datacenter\": {\n \"publisher\": - \"MicrosoftWindowsServer\",\n \"offer\": \"WindowsServer\",\n \"sku\": - \"2016-Datacenter\",\n \"version\": \"latest\",\n \"architecture\": - \"x64\"\n },\n \"Win2012R2Datacenter\": {\n \"publisher\": - \"MicrosoftWindowsServer\",\n \"offer\": \"WindowsServer\",\n \"sku\": - \"2012-R2-Datacenter\",\n \"version\": \"latest\",\n \"architecture\": - \"x64\"\n },\n \"Win2012Datacenter\": {\n \"publisher\": - \"MicrosoftWindowsServer\",\n \"offer\": \"WindowsServer\",\n \"sku\": - \"2012-Datacenter\",\n \"version\": \"latest\",\n \"architecture\": - \"x64\"\n },\n \"Win2008R2SP1\": {\n \"publisher\": - \"MicrosoftWindowsServer\",\n \"offer\": \"WindowsServer\",\n \"sku\": - \"2008-R2-SP1\",\n \"version\": \"latest\",\n \"architecture\": - \"x64\"\n }\n }\n }\n }\n }\n}" - headers: - accept-ranges: - - bytes - access-control-allow-origin: - - '*' - cache-control: - - max-age=300 - connection: - - keep-alive - content-length: - - '3463' - content-security-policy: - - default-src 'none'; style-src 'unsafe-inline'; sandbox - content-type: - - text/plain; charset=utf-8 - date: - - Mon, 22 Aug 2022 05:39:54 GMT - etag: - - W/"41b202f4dc5098d126019dc00721a4c5e30df0c5196794514fadc3710ee2a5cb" - expires: - - Mon, 22 Aug 2022 05:44:54 
GMT - source-age: - - '251' - strict-transport-security: - - max-age=31536000 - vary: - - Authorization,Accept-Encoding,Origin - via: - - 1.1 varnish - x-cache: - - HIT - x-cache-hits: - - '1' - x-content-type-options: - - nosniff - x-fastly-request-id: - - 12f92d5b2dd218f325ab8e4f300ecedbc968129e - x-frame-options: - - deny - x-github-request-id: - - 33FC:5BE0:2C6A2C:3D3004:63026BCF - x-served-by: - - cache-qpg1252-QPG - x-timer: - - S1661146795.997334,VS0,VE1 - x-xss-protection: - - 1; mode=block - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - vm create - Connection: - - keep-alive - ParameterSetName: - - -n -g --image --generate-ssh-key --nsg-rule - User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus/publishers/Canonical/artifacttypes/vmimage/offers/UbuntuServer/skus/18.04-LTS/versions?$top=1&$orderby=name%20desc&api-version=2022-03-01 - response: - body: - string: "[\r\n {\r\n \"location\": \"westus\",\r\n \"name\": \"18.04.202208100\",\r\n - \ \"id\": \"/Subscriptions/00000000-0000-0000-0000-000000000000/Providers/Microsoft.Compute/Locations/westus/Publishers/Canonical/ArtifactTypes/VMImage/Offers/UbuntuServer/Skus/18.04-LTS/Versions/18.04.202208100\"\r\n - \ }\r\n]" - headers: - cache-control: - - no-cache - content-length: - - '286' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 05:39:55 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/ListVMImagesVersionsFromLocation3Min;15999,Microsoft.Compute/ListVMImagesVersionsFromLocation30Min;43999 - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - vm create - Connection: - - keep-alive - ParameterSetName: - - -n -g --image --generate-ssh-key --nsg-rule - User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus/publishers/Canonical/artifacttypes/vmimage/offers/UbuntuServer/skus/18.04-LTS/versions/18.04.202208100?api-version=2022-03-01 - response: - body: - string: "{\r\n \"properties\": {\r\n \"hyperVGeneration\": \"V1\",\r\n \"architecture\": - \"x64\",\r\n \"replicaType\": \"Unmanaged\",\r\n \"disallowed\": {\r\n - \ \"vmDiskType\": \"None\"\r\n },\r\n \"automaticOSUpgradeProperties\": - {\r\n \"automaticOSUpgradeSupported\": true\r\n },\r\n \"imageDeprecationStatus\": - {\r\n \"imageState\": \"Active\"\r\n },\r\n \"features\": [\r\n - \ {\r\n \"name\": \"IsAcceleratedNetworkSupported\",\r\n \"value\": - \"True\"\r\n },\r\n {\r\n \"name\": \"DiskControllerTypes\",\r\n - \ \"value\": \"SCSI\"\r\n },\r\n {\r\n \"name\": \"IsHibernateSupported\",\r\n - \ \"value\": \"True\"\r\n }\r\n ],\r\n \"osDiskImage\": {\r\n - \ \"operatingSystem\": \"Linux\",\r\n \"sizeInGb\": 31,\r\n \"sizeInBytes\": - 32213303808\r\n },\r\n 
\"dataDiskImages\": []\r\n },\r\n \"location\": - \"westus\",\r\n \"name\": \"18.04.202208100\",\r\n \"id\": \"/Subscriptions/00000000-0000-0000-0000-000000000000/Providers/Microsoft.Compute/Locations/westus/Publishers/Canonical/ArtifactTypes/VMImage/Offers/UbuntuServer/Skus/18.04-LTS/Versions/18.04.202208100\"\r\n}" - headers: - cache-control: - - no-cache - content-length: - - '1042' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 05:39:57 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/GetVMImageFromLocation3Min;12999,Microsoft.Compute/GetVMImageFromLocation30Min;73999 - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json, text/json - Accept-Encoding: - - gzip, deflate - CommandName: - - vm create - Connection: - - keep-alive - ParameterSetName: - - -n -g --image --generate-ssh-key --nsg-rule - User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-network/20.0.0 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/virtualNetworks?api-version=2018-01-01 - response: - body: - string: '{"value":[]}' - headers: - cache-control: - - no-cache - content-length: - - '12' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 05:39:57 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: '{"properties": {"template": {"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", - "contentVersion": "1.0.0.0", "parameters": {}, "variables": {}, "resources": - [{"name": "vm-000004VNET", "type": "Microsoft.Network/virtualNetworks", "location": - "westus", "apiVersion": "2015-06-15", "dependsOn": [], "tags": {}, "properties": - {"addressSpace": {"addressPrefixes": ["10.0.0.0/16"]}, "subnets": [{"name": - "vm-000004Subnet", "properties": {"addressPrefix": "10.0.0.0/24"}}]}}, {"type": - "Microsoft.Network/networkSecurityGroups", "name": "vm-000004NSG", "apiVersion": - "2015-06-15", "location": "westus", "tags": {}, "dependsOn": []}, {"apiVersion": - "2018-01-01", "type": "Microsoft.Network/publicIPAddresses", "name": "vm-000004PublicIP", - "location": "westus", "tags": {}, "dependsOn": [], "properties": {"publicIPAllocationMethod": - null}}, {"apiVersion": "2015-06-15", "type": "Microsoft.Network/networkInterfaces", - "name": "vm-000004VMNic", "location": "westus", "tags": {}, "dependsOn": ["Microsoft.Network/virtualNetworks/vm-000004VNET", - "Microsoft.Network/networkSecurityGroups/vm-000004NSG", "Microsoft.Network/publicIpAddresses/vm-000004PublicIP"], - "properties": {"ipConfigurations": [{"name": "ipconfigvm-000004", "properties": - {"privateIPAllocationMethod": "Dynamic", "subnet": {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/virtualNetworks/vm-000004VNET/subnets/vm-000004Subnet"}, - 
"publicIPAddress": {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/publicIPAddresses/vm-000004PublicIP"}}}], - "networkSecurityGroup": {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkSecurityGroups/vm-000004NSG"}}}, - {"apiVersion": "2022-03-01", "type": "Microsoft.Compute/virtualMachines", "name": - "vm-000004", "location": "westus", "tags": {}, "dependsOn": ["Microsoft.Network/networkInterfaces/vm-000004VMNic"], - "properties": {"hardwareProfile": {"vmSize": "Standard_DS1_v2"}, "networkProfile": - {"networkInterfaces": [{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic", - "properties": {"deleteOption": null}}]}, "storageProfile": {"osDisk": {"createOption": - "fromImage", "name": null, "caching": "ReadWrite", "managedDisk": {"storageAccountType": - null}}, "imageReference": {"publisher": "Canonical", "offer": "UbuntuServer", - "sku": "18.04-LTS", "version": "latest"}}, "osProfile": {"computerName": "vm-000004", - "adminUsername": "v-jingszhang", "linuxConfiguration": {"disablePasswordAuthentication": - true, "ssh": {"publicKeys": [{"keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDL2PtV5sp++a43U/dRJ2Fyjso9qDFNbWEnbYk7mA4CfXy0gvxm65oYVd90JysNmGBF/89hIvCTN3ul4aEIuPkzywozRbdyWiJngSd/7OrNBJzpQQSjsGXwoVNDRAJSzlvuQVUR2vwBHeN2xMIvufSvzO3LGI3xcSIWIYlSvU9urnV+Pefd4T6x/OXgTpE02AgMWOspdZTzg0ZKsSU3sG5nYSNoq+8qrHQSXLbLLdWzz5lYKe8p64fQC/xhXrNa3/Nw5vy8YGsyqGueM/Rj6gCI+ivgBlQg908Aa50yQLvwsMLIKxhgPlj73Am8zm27PS3DKVjkr0nTjbEp/3FzZnyB", - "path": "/home/v-jingszhang/.ssh/authorized_keys"}]}}}}}], "outputs": {}}, "parameters": - {}, "mode": "incremental"}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - vm create - Connection: - - keep-alive - Content-Length: - - '3448' - Content-Type: - - application/json - ParameterSetName: - - -n -g --image --generate-ssh-key --nsg-rule - User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2021-04-01 - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Resources/deployments/vm_deploy_IOAZZ7McLeUT3c5qKWwSrOypoBRZRJfA","name":"vm_deploy_IOAZZ7McLeUT3c5qKWwSrOypoBRZRJfA","type":"Microsoft.Resources/deployments","properties":{"templateHash":"16179749164063259327","parameters":{},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2022-08-22T05:40:03.6785827Z","duration":"PT0.0007205S","correlationId":"0024d00d-2f01-4d2b-8ffb-545e7103edd9","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"virtualNetworks","locations":["westus"]},{"resourceType":"networkSecurityGroups","locations":["westus"]},{"resourceType":"publicIPAddresses","locations":["westus"]},{"resourceType":"networkInterfaces","locations":["westus"]}]},{"namespace":"Microsoft.Compute","resourceTypes":[{"resourceType":"virtualMachines","locations":["westus"]}]}],"dependencies":[{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/virtualNetworks/vm-000004VNET","resourceType":"Microsoft.Network/virtualNetworks","resourceName":"vm-000004VNET"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkSecurityGroups/vm-000004NSG","resourceType":"Microsoft.Network/networkSecurityGroups","resourceName":"vm-000004NSG"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/publicIPAddresses/vm-000004PublicIP","resourceType":"Microsoft.Network/publicIPAddresses","resourceName":"vm-000004PublicIP"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic","resourceType":"Microsoft.Network/networkInterfaces","resourceName":"vm-000004VMNic"},{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic","resourceType":"Microsoft.Network/networkInterfaces","resourceName":"vm-000004VMNic"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/virtualMachines/vm-000004","resourceType":"Microsoft.Compute/virtualMachines","resourceName":"vm-000004"}]}}' - headers: - azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Resources/deployments/vm_deploy_IOAZZ7McLeUT3c5qKWwSrOypoBRZRJfA/operationStatuses/08585404600848431998?api-version=2021-04-01 - cache-control: - - no-cache - content-length: - - '2675' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 05:40:04 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - status: - code: 201 - message: Created -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - vm create - Connection: 
- - keep-alive - ParameterSetName: - - -n -g --image --generate-ssh-key --nsg-rule - User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08585404600848431998?api-version=2021-04-01 - response: - body: - string: '{"status":"Running"}' - headers: - cache-control: - - no-cache - content-length: - - '20' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 05:40:34 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - vm create - Connection: - - keep-alive - ParameterSetName: - - -n -g --image --generate-ssh-key --nsg-rule - User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08585404600848431998?api-version=2021-04-01 - response: - body: - string: '{"status":"Succeeded"}' - headers: - cache-control: - - no-cache - content-length: - - '22' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 05:41:05 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - vm create - Connection: - - keep-alive - ParameterSetName: - - -n -g --image --generate-ssh-key --nsg-rule - User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2021-04-01 - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Resources/deployments/vm_deploy_IOAZZ7McLeUT3c5qKWwSrOypoBRZRJfA","name":"vm_deploy_IOAZZ7McLeUT3c5qKWwSrOypoBRZRJfA","type":"Microsoft.Resources/deployments","properties":{"templateHash":"16179749164063259327","parameters":{},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2022-08-22T05:40:45.7082565Z","duration":"PT42.0303943S","correlationId":"0024d00d-2f01-4d2b-8ffb-545e7103edd9","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"virtualNetworks","locations":["westus"]},{"resourceType":"networkSecurityGroups","locations":["westus"]},{"resourceType":"publicIPAddresses","locations":["westus"]},{"resourceType":"networkInterfaces","locations":["westus"]}]},{"namespace":"Microsoft.Compute","resourceTypes":[{"resourceType":"virtualMachines","locations":["westus"]}]}],"dependencies":[{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/virtualNetworks/vm-000004VNET","resourceType":"Microsoft.Network/virtualNetworks","resourceName":"vm-000004VNET"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkSecurityGroups/vm-000004NSG","resourceType":"Microsoft.Network/networkSecurityGroups","resourceName":"vm-000004NSG"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/publicIPAddresses/vm-000004PublicIP","resourceType":"Microsoft.Network/publicIPAddresses","resourceName":"vm-000004PublicIP"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic","resourceType":"Microsoft.Network/networkInterfaces","resourceName":"vm-000004VMNic"},{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic","resourceType":"Microsoft.Network/networkInterfaces","resourceName":"vm-000004VMNic"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/virtualMachines/vm-000004","resourceType":"Microsoft.Compute/virtualMachines","resourceName":"vm-000004"}],"outputs":{},"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/virtualMachines/vm-000004"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkSecurityGroups/vm-000004NSG"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/publicIPAddresses/vm-000004PublicIP"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cl
i_test_automation_software_update_configuration000001/providers/Microsoft.Network/virtualNetworks/vm-000004VNET"}]}}' - headers: - cache-control: - - no-cache - content-length: - - '3670' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 05:41:06 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - vm create - Connection: - - keep-alive - ParameterSetName: - - -n -g --image --generate-ssh-key --nsg-rule - User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-compute/27.1.0 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/virtualMachines/vm-000004?$expand=instanceView&api-version=2022-03-01 - response: - body: - string: "{\r\n \"name\": \"vm-000004\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/virtualMachines/vm-000004\",\r\n - \ \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\": \"westus\",\r\n - \ \"tags\": {},\r\n \"properties\": {\r\n \"vmId\": \"68f120c5-6029-4df3-875a-c571965e804f\",\r\n - \ \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\n },\r\n - \ \"storageProfile\": {\r\n \"imageReference\": {\r\n \"publisher\": - \"Canonical\",\r\n \"offer\": \"UbuntuServer\",\r\n \"sku\": - \"18.04-LTS\",\r\n \"version\": \"latest\",\r\n \"exactVersion\": - \"18.04.202208100\"\r\n },\r\n \"osDisk\": {\r\n \"osType\": - \"Linux\",\r\n \"name\": \"vm-000004_disk1_13000f9122ad4d23909fe5ecae19ae6d\",\r\n - \ \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\",\r\n - \ \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\",\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/disks/vm-000004_disk1_13000f9122ad4d23909fe5ecae19ae6d\"\r\n - \ },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\": - 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\": - {\r\n \"computerName\": \"vm-000004\",\r\n \"adminUsername\": \"v-jingszhang\",\r\n - \ \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\": - true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n {\r\n - \ \"path\": \"/home/v-jingszhang/.ssh/authorized_keys\",\r\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDL2PtV5sp++a43U/dRJ2Fyjso9qDFNbWEnbYk7mA4CfXy0gvxm65oYVd90JysNmGBF/89hIvCTN3ul4aEIuPkzywozRbdyWiJngSd/7OrNBJzpQQSjsGXwoVNDRAJSzlvuQVUR2vwBHeN2xMIvufSvzO3LGI3xcSIWIYlSvU9urnV+Pefd4T6x/OXgTpE02AgMWOspdZTzg0ZKsSU3sG5nYSNoq+8qrHQSXLbLLdWzz5lYKe8p64fQC/xhXrNa3/Nw5vy8YGsyqGueM/Rj6gCI+ivgBlQg908Aa50yQLvwsMLIKxhgPlj73Am8zm27PS3DKVjkr0nTjbEp/3FzZnyB\"\r\n - \ }\r\n ]\r\n },\r\n \"provisionVMAgent\": - true,\r\n \"patchSettings\": {\r\n \"patchMode\": \"ImageDefault\",\r\n - \ \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \"enableVMAgentPlatformUpdates\": - false\r\n },\r\n \"secrets\": [],\r\n \"allowExtensionOperations\": - true,\r\n \"requireGuestProvisionSignal\": true\r\n },\r\n \"networkProfile\": - 
{\"networkInterfaces\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic\"}]},\r\n - \ \"provisioningState\": \"Succeeded\",\r\n \"instanceView\": {\r\n \"computerName\": - \"vm-000004\",\r\n \"osName\": \"ubuntu\",\r\n \"osVersion\": \"18.04\",\r\n - \ \"vmAgent\": {\r\n \"vmAgentVersion\": \"2.8.0.9\",\r\n \"statuses\": - [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n - \ \"message\": \"Guest Agent is running\",\r\n \"time\": - \"2022-08-22T05:40:53+00:00\"\r\n }\r\n ],\r\n \"extensionHandlers\": - []\r\n },\r\n \"disks\": [\r\n {\r\n \"name\": \"vm-000004_disk1_13000f9122ad4d23909fe5ecae19ae6d\",\r\n - \ \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-22T05:40:29.4866822+00:00\"\r\n - \ }\r\n ]\r\n }\r\n ],\r\n \"hyperVGeneration\": - \"V1\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"Provisioning - succeeded\",\r\n \"time\": \"2022-08-22T05:40:38.752419+00:00\"\r\n - \ },\r\n {\r\n \"code\": \"PowerState/running\",\r\n - \ \"level\": \"Info\",\r\n \"displayStatus\": \"VM running\"\r\n - \ }\r\n ]\r\n },\r\n \"timeCreated\": \"2022-08-22T05:40:28.2366178+00:00\"\r\n - \ }\r\n}" - headers: - cache-control: - - no-cache - content-length: - - '4001' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 05:41:06 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-resource: - - Microsoft.Compute/LowCostGet3Min;3996,Microsoft.Compute/LowCostGet30Min;31989 - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json, text/json - Accept-Encoding: - - gzip, deflate - CommandName: - - vm create - Connection: - - keep-alive - ParameterSetName: - - -n -g --image --generate-ssh-key --nsg-rule - User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-network/20.0.0 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic?api-version=2018-01-01 - response: - body: - string: "{\r\n \"name\": \"vm-000004VMNic\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic\",\r\n - \ \"etag\": \"W/\\\"155461a8-a571-47c6-92e7-8aff2f06b28b\\\"\",\r\n \"tags\": - {},\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\",\r\n - \ \"resourceGuid\": \"d5d2f63f-6d6f-4e95-a194-bfc0c5e9a232\",\r\n \"ipConfigurations\": - [\r\n {\r\n \"name\": \"ipconfigvm-000004\",\r\n \"id\": - \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic/ipConfigurations/ipconfigvm-000004\",\r\n - \ \"etag\": 
\"W/\\\"155461a8-a571-47c6-92e7-8aff2f06b28b\\\"\",\r\n - \ \"type\": \"Microsoft.Network/networkInterfaces/ipConfigurations\",\r\n - \ \"properties\": {\r\n \"provisioningState\": \"Succeeded\",\r\n - \ \"privateIPAddress\": \"10.0.0.4\",\r\n \"privateIPAllocationMethod\": - \"Dynamic\",\r\n \"publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/publicIPAddresses/vm-000004PublicIP\"\r\n - \ },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/virtualNetworks/vm-000004VNET/subnets/vm-000004Subnet\"\r\n - \ },\r\n \"primary\": true,\r\n \"privateIPAddressVersion\": - \"IPv4\"\r\n }\r\n }\r\n ],\r\n \"dnsSettings\": {\r\n \"dnsServers\": - [],\r\n \"appliedDnsServers\": [],\r\n \"internalDomainNameSuffix\": - \"4or4apske5qudpbba1cbg520jh.dx.internal.cloudapp.net\"\r\n },\r\n \"macAddress\": - \"00-22-48-09-F9-A2\",\r\n \"enableIPForwarding\": false,\r\n \"networkSecurityGroup\": - {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkSecurityGroups/vm-000004NSG\"\r\n - \ },\r\n \"primary\": true,\r\n \"virtualMachine\": {\r\n \"id\": - \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/virtualMachines/vm-000004\"\r\n - \ }\r\n },\r\n \"type\": \"Microsoft.Network/networkInterfaces\",\r\n - \ \"location\": \"westus\"\r\n}" - headers: - cache-control: - - no-cache - content-length: - - '2465' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 05:41:08 GMT - etag: - - W/"155461a8-a571-47c6-92e7-8aff2f06b28b" - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-arm-service-request-id: - - 27b92bb6-5efd-45fa-a6ab-44ee39c15155 - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json, text/json - Accept-Encoding: - - gzip, deflate - CommandName: - - vm create - Connection: - - keep-alive - ParameterSetName: - - -n -g --image --generate-ssh-key --nsg-rule - User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-network/20.0.0 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/publicIPAddresses/vm-000004PublicIP?api-version=2018-01-01 - response: - body: - string: "{\r\n \"name\": \"vm-000004PublicIP\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/publicIPAddresses/vm-000004PublicIP\",\r\n - \ \"etag\": \"W/\\\"35e7b165-cc9c-46dd-95b2-077382326dd0\\\"\",\r\n \"location\": - \"westus\",\r\n \"tags\": {},\r\n \"properties\": {\r\n \"provisioningState\": - \"Succeeded\",\r\n \"resourceGuid\": \"2fa0cf63-b303-4b87-97cc-3e152076d054\",\r\n - \ \"ipAddress\": \"20.245.59.182\",\r\n \"publicIPAddressVersion\": \"IPv4\",\r\n - 
\ \"publicIPAllocationMethod\": \"Dynamic\",\r\n \"idleTimeoutInMinutes\": - 4,\r\n \"ipTags\": [],\r\n \"ipConfiguration\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic/ipConfigurations/ipconfigvm-000004\"\r\n - \ }\r\n },\r\n \"type\": \"Microsoft.Network/publicIPAddresses\",\r\n - \ \"sku\": {\r\n \"name\": \"Basic\"\r\n }\r\n}" - headers: - cache-control: - - no-cache - content-length: - - '981' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 05:41:08 GMT - etag: - - W/"35e7b165-cc9c-46dd-95b2-077382326dd0" - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-arm-service-request-id: - - 271f83fb-f68d-4114-b7e6-db7dc6034209 - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation account create - Connection: - - keep-alive - ParameterSetName: - - -n -g - User-Agent: - - AZURECLI/2.39.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_automation_software_update_configuration000001?api-version=2021-04-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001","name":"cli_test_automation_software_update_configuration000001","type":"Microsoft.Resources/resourceGroups","location":"westus","tags":{"product":"azurecli","cause":"automation","date":"2022-08-22T05:39:51Z"},"properties":{"provisioningState":"Succeeded"}}' - headers: - cache-control: - - no-cache - content-length: - - '388' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 05:41:08 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: '{"name": "account-000002", "location": "westus", "properties": {"sku": - {"name": "Basic"}}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation account create - Connection: - - keep-alive - Content-Length: - - '90' - Content-Type: - - application/json - ParameterSetName: - - -n -g - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002?api-version=2021-06-22 - response: - body: - string: 
'{"name":"account-000002","systemData":{"createdAt":"2022-08-22T05:41:14.2666667+00:00","lastModifiedAt":"2022-08-22T05:41:14.2666667+00:00"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002","type":"Microsoft.Automation/AutomationAccounts","location":"westus","tags":{},"etag":null,"properties":{"disableLocalAuth":false,"sku":{"name":"Basic","family":null,"capacity":null},"state":"Ok","RegistrationUrl":"https://f2174187-6b34-4d5c-9c5b-a16beb6fbbec.agentsvc.wus.azure-automation.net/accounts/f2174187-6b34-4d5c-9c5b-a16beb6fbbec","encryption":{"keySource":"Microsoft.Automation","identity":{"userAssignedIdentity":null}},"automationHybridServiceUrl":"https://f2174187-6b34-4d5c-9c5b-a16beb6fbbec.jrds.wus.azure-automation.net/automationAccounts/f2174187-6b34-4d5c-9c5b-a16beb6fbbec","RuntimeConfiguration":{"powershell":{"builtinModules":{"Az":"8.0.0"}},"powershell7":{"builtinModules":{"Az":"8.0.0"}}},"creationTime":"2022-08-22T05:41:14.2666667+00:00","lastModifiedBy":null,"lastModifiedTime":"2022-08-22T05:41:14.2666667+00:00"}}' - headers: - cache-control: - - no-cache - content-length: - - '1179' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 05:41:16 GMT - expires: - - '-1' - location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002?api-version=2021-06-22 - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - status: - code: 201 - message: Created -- request: - body: '{"properties": {"updateConfiguration": {"operatingSystem": "Windows", "windows": - {"includedUpdateClassifications": "Critical", "excludedKbNumbers": ["16800", - "16800"], "includedKbNumbers": ["15000", "15000"], "rebootSetting": "IfRequired"}, - "duration": "PT2H", "azureVirtualMachines": ["/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/virtualMachines/vm-000004"], - "nonAzureComputerNames": ["nonvm1", "nonvm2"], "targets": {"azureQueries": [{"scope": - ["/subscriptions/00000000-0000-0000-0000-000000000000"], "locations": ["eastus", - "westus"], "tagSettings": {"tags": {"tag": ["tag1", "tag2"]}}}]}}, "scheduleInfo": - {"startTime": "2022-08-23T10:00:00.000Z", "expiryTime": "2022-08-30T10:00:00.000Z", - "nextRun": "2022-08-25T10:00:00.000Z", "interval": 1, "frequency": "Hour", "timeZone": - "UTC+08:00", "description": "test"}}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation software-update-configuration create - Connection: - - keep-alive - Content-Length: - - '925' - Content-Type: - - application/json - ParameterSetName: - - -n -g --automation-account-name --description --frequency --interval --operating-system - --excluded-kb-numbers --included-kb-numbers --included-update-classifications - --duration --azure-virtual-machines --time-zone --start-time --expiry-time - --next-run --non-azure-computer-names --reboot-setting --azure-queries-scope - --azure-queries-location --tags - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 
Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002/softwareUpdateConfigurations/conf-000003?api-version=2019-06-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002/softwareUpdateConfigurations/conf-000003","name":"conf-000003","type":null,"properties":{"updateConfiguration":{"operatingSystem":"Windows","windows":{"includedUpdateClassifications":"Critical","excludedKbNumbers":["16800","16800"],"includedKbNumbers":["15000","15000"],"rebootSetting":"IfRequired"},"linux":null,"targets":{"azureQueries":[{"scope":["/subscriptions/00000000-0000-0000-0000-000000000000"],"tagSettings":{"tags":{"tag":["tag1","tag2"]},"filterOperator":0},"locations":["eastus","westus"]}],"nonAzureQueries":null},"duration":"PT2H","azureVirtualMachines":["/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/virtualMachines/vm-000004"],"nonAzureComputerNames":["nonvm1","nonvm2"]},"scheduleInfo":{"description":"test","startTime":"2022-08-23T10:00:00+08:00","startTimeOffsetMinutes":480.0,"expiryTime":"2022-08-30T10:00:00+08:00","expiryTimeOffsetMinutes":480.0,"isEnabled":true,"nextRun":"2022-08-23T10:00:00+08:00","nextRunOffsetMinutes":480.0,"interval":1,"frequency":"Hour","creationTime":"2022-08-22T05:41:17.8133333+00:00","lastModifiedTime":"2022-08-22T05:41:17.8133333+00:00","timeZone":"UTC+08:00","advancedSchedule":null},"provisioningState":"Provisioning","createdBy":"{scrubbed}","error":null,"tasks":null,"creationTime":"2022-08-22T05:41:17.8933333+00:00","lastModifiedBy":null,"lastModifiedTime":"2022-08-22T05:41:17.8933333+00:00"}}' - headers: - cache-control: - - no-cache - content-length: - - '1657' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 05:41:17 GMT - expires: - - '-1' - location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002/softwareUpdateConfigurations/conf-000003?api-version=2019-06-01 - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-resource-requests: - - '99' - status: - code: 201 - message: Created -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation software-update-configuration list - Connection: - - keep-alive - ParameterSetName: - - -g --automation-account-name - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002/softwareUpdateConfigurations?api-version=2019-06-01 - response: - body: - string: 
'{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002/softwareUpdateConfigurations/conf-000003","name":"conf-000003","properties":{"updateConfiguration":{"operatingSystem":"Windows","windows":{"includedUpdateClassifications":"Critical","excludedKbNumbers":["16800","16800"],"includedKbNumbers":["15000","15000"],"rebootSetting":"IfRequired"},"linux":null,"targets":{"azureQueries":[{"scope":["/subscriptions/00000000-0000-0000-0000-000000000000"],"tagSettings":{"tags":{"tag":["tag1","tag2"]},"filterOperator":0},"locations":["eastus","westus"]}],"nonAzureQueries":null},"duration":"PT2H","azureVirtualMachines":["/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/virtualMachines/vm-000004"],"nonAzureComputerNames":["nonvm1","nonvm2"]},"frequency":"Hour","startTime":"2022-08-23T10:00:00+08:00","creationTime":"2022-08-22T05:41:17.8933333+00:00","lastModifiedTime":"2022-08-22T05:41:17.8933333+00:00","provisioningState":"Provisioning","nextRun":"2022-08-23T10:00:00+08:00","tasks":null}}]}' - headers: - cache-control: - - no-cache - content-length: - - '1243' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 05:41:18 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation software-update-configuration show - Connection: - - keep-alive - ParameterSetName: - - -n -g --automation-account-name -n - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002/softwareUpdateConfigurations/conf-000003?api-version=2019-06-01 - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002/softwareUpdateConfigurations/conf-000003","name":"conf-000003","type":null,"properties":{"updateConfiguration":{"operatingSystem":"Windows","windows":{"includedUpdateClassifications":"Critical","excludedKbNumbers":["16800","16800"],"includedKbNumbers":["15000","15000"],"rebootSetting":"IfRequired"},"linux":null,"targets":{"azureQueries":[{"scope":["/subscriptions/00000000-0000-0000-0000-000000000000"],"tagSettings":{"tags":{"tag":["tag1","tag2"]},"filterOperator":0},"locations":["eastus","westus"]}],"nonAzureQueries":null},"duration":"PT2H","azureVirtualMachines":["/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/virtualMachines/vm-000004"],"nonAzureComputerNames":["nonvm1","nonvm2"]},"scheduleInfo":{"description":"test","startTime":"2022-08-23T10:00:00+08:00","startTimeOffsetMinutes":480.0,"expiryTime":"2022-08-30T10:00:00+08:00","expiryTimeOffsetMinutes":480.0,"isEnabled":true,"nextRun":"2022-08-23T10:00:00+08:00","nextRunOffsetMinutes":480.0,"interval":1,"frequency":"Hour","creationTime":"2022-08-22T05:41:17.8133333+00:00","lastModifiedTime":"2022-08-22T05:41:17.8133333+00:00","timeZone":"UTC+08:00","advancedSchedule":null},"provisioningState":"Provisioning","createdBy":"{scrubbed}","error":null,"tasks":null,"creationTime":"2022-08-22T05:41:17.8933333+00:00","lastModifiedBy":null,"lastModifiedTime":"2022-08-22T05:41:17.8933333+00:00"}}' - headers: - cache-control: - - no-cache - content-length: - - '1657' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 05:41:20 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation software-update-configuration runs list - Connection: - - keep-alive - ParameterSetName: - - -g --automation-account-name - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002/softwareUpdateConfigurationRuns?api-version=2019-06-01 - response: - body: - string: '{"value":[]}' - headers: - cache-control: - - no-cache - content-length: - - '12' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 05:41:21 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation software-update-configuration machine-runs list - Connection: - - keep-alive - ParameterSetName: - - -g --automation-account-name - User-Agent: - - AZURECLI/2.39.0 
azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002/softwareUpdateConfigurationMachineRuns?api-version=2019-06-01 - response: - body: - string: '{"value":[]}' - headers: - cache-control: - - no-cache - content-length: - - '12' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 22 Aug 2022 05:41:23 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - automation software-update-configuration delete - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - -n -g --automation-account-name -y - User-Agent: - - AZURECLI/2.39.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.8.9 (Windows-10-10.0.22000-SP0) - method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002/softwareUpdateConfigurations/conf-000003?api-version=2019-06-01 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Mon, 22 Aug 2022 05:41:24 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-HTTPAPI/2.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-resource-requests: - - '99' - status: - code: 200 - message: OK -version: 1 +interactions: +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - vm create + Connection: + - keep-alive + ParameterSetName: + - -n -g --image --generate-ssh-key --nsg-rule + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_automation_software_update_configuration000001?api-version=2021-04-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001","name":"cli_test_automation_software_update_configuration000001","type":"Microsoft.Resources/resourceGroups","location":"westus","tags":{"product":"azurecli","cause":"automation","date":"2022-10-19T08:11:44Z"},"properties":{"provisioningState":"Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '388' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:11:49 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.26.0 + method: GET + uri: 
https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/arm-compute/quickstart-templates/aliases.json + response: + body: + string: "{\n \"$schema\": \"http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json\"\ + ,\n \"contentVersion\": \"1.0.0.0\",\n \"parameters\": {},\n \"variables\"\ + : {},\n \"resources\": [],\n \"outputs\": {\n \"aliases\": {\n \"\ + type\": \"object\",\n \"value\": {\n \"Linux\": {\n \"\ + CentOS\": {\n \"publisher\": \"OpenLogic\",\n \"offer\"\ + : \"CentOS\",\n \"sku\": \"7.5\",\n \"version\": \"\ + latest\",\n \"architecture\": \"x64\"\n },\n \ + \ \"Debian\": {\n \"publisher\": \"Debian\",\n \"offer\"\ + : \"debian-10\",\n \"sku\": \"10\",\n \"version\": \"\ + latest\",\n \"architecture\": \"x64\"\n },\n \ + \ \"Flatcar\": {\n \"publisher\": \"kinvolk\",\n \"\ + offer\": \"flatcar-container-linux-free\",\n \"sku\": \"stable\"\ + ,\n \"version\": \"latest\",\n \"architecture\": \"\ + x64\"\n },\n \"openSUSE-Leap\": {\n \"publisher\"\ + : \"SUSE\",\n \"offer\": \"opensuse-leap-15-3\",\n \"\ + sku\": \"gen2\",\n \"version\": \"latest\",\n \"architecture\"\ + : \"x64\"\n },\n \"RHEL\": {\n \"publisher\"\ + : \"RedHat\",\n \"offer\": \"RHEL\",\n \"sku\": \"7-LVM\"\ + ,\n \"version\": \"latest\",\n \"architecture\": \"\ + x64\"\n },\n \"SLES\": {\n \"publisher\": \"\ + SUSE\",\n \"offer\": \"sles-15-sp3\",\n \"sku\": \"\ + gen2\",\n \"version\": \"latest\",\n \"architecture\"\ + : \"x64\"\n },\n \"UbuntuLTS\": {\n \"publisher\"\ + : \"Canonical\",\n \"offer\": \"UbuntuServer\",\n \"\ + sku\": \"18.04-LTS\",\n \"version\": \"latest\",\n \"\ + architecture\": \"x64\"\n }\n },\n \"Windows\": {\n\ + \ \"Win2022Datacenter\": {\n \"publisher\": \"MicrosoftWindowsServer\"\ + ,\n \"offer\": \"WindowsServer\",\n \"sku\": \"2022-Datacenter\"\ + ,\n \"version\": \"latest\",\n \"architecture\": \"\ + x64\"\n },\n \"Win2022AzureEditionCore\": {\n \ + \ \"publisher\": \"MicrosoftWindowsServer\",\n \"offer\": \"WindowsServer\"\ + ,\n \"sku\": \"2022-datacenter-azure-edition-core\",\n \ + \ \"version\": \"latest\",\n \"architecture\": \"x64\"\n \ + \ },\n \"Win2019Datacenter\": {\n \"publisher\"\ + : \"MicrosoftWindowsServer\",\n \"offer\": \"WindowsServer\",\n\ + \ \"sku\": \"2019-Datacenter\",\n \"version\": \"latest\"\ + ,\n \"architecture\": \"x64\"\n },\n \"Win2016Datacenter\"\ + : {\n \"publisher\": \"MicrosoftWindowsServer\",\n \"\ + offer\": \"WindowsServer\",\n \"sku\": \"2016-Datacenter\",\n \ + \ \"version\": \"latest\",\n \"architecture\": \"x64\"\ + \n },\n \"Win2012R2Datacenter\": {\n \"publisher\"\ + : \"MicrosoftWindowsServer\",\n \"offer\": \"WindowsServer\",\n\ + \ \"sku\": \"2012-R2-Datacenter\",\n \"version\": \"\ + latest\",\n \"architecture\": \"x64\"\n },\n \ + \ \"Win2012Datacenter\": {\n \"publisher\": \"MicrosoftWindowsServer\"\ + ,\n \"offer\": \"WindowsServer\",\n \"sku\": \"2012-Datacenter\"\ + ,\n \"version\": \"latest\",\n \"architecture\": \"\ + x64\"\n },\n \"Win2008R2SP1\": {\n \"publisher\"\ + : \"MicrosoftWindowsServer\",\n \"offer\": \"WindowsServer\",\n\ + \ \"sku\": \"2008-R2-SP1\",\n \"version\": \"latest\"\ + ,\n \"architecture\": \"x64\"\n }\n }\n }\n\ + \ }\n }\n}" + headers: + accept-ranges: + - bytes + access-control-allow-origin: + - '*' + cache-control: + - max-age=300 + connection: + - keep-alive + content-length: + - '3463' + content-security-policy: + - default-src 'none'; style-src 'unsafe-inline'; sandbox + content-type: + - text/plain; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:11:50 GMT + etag: 
+ - W/"41b202f4dc5098d126019dc00721a4c5e30df0c5196794514fadc3710ee2a5cb" + expires: + - Wed, 19 Oct 2022 08:16:50 GMT + source-age: + - '129' + strict-transport-security: + - max-age=31536000 + vary: + - Authorization,Accept-Encoding,Origin + via: + - 1.1 varnish + x-cache: + - HIT + x-cache-hits: + - '1' + x-content-type-options: + - nosniff + x-fastly-request-id: + - 303979f6af52df95605c456c80ed2c63178fe06c + x-frame-options: + - deny + x-github-request-id: + - 0802:27E7:2B5028:3A34D4:634E3D7C + x-served-by: + - cache-ccu830044-CCU + x-timer: + - S1666167110.162744,VS0,VE1 + x-xss-protection: + - 1; mode=block + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - vm create + Connection: + - keep-alive + ParameterSetName: + - -n -g --image --generate-ssh-key --nsg-rule + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus/publishers/Canonical/artifacttypes/vmimage/offers/UbuntuServer/skus/18.04-LTS/versions?$top=1&$orderby=name%20desc&api-version=2022-08-01 + response: + body: + string: "[\r\n {\r\n \"location\": \"westus\",\r\n \"name\": \"18.04.202210180\"\ + ,\r\n \"id\": \"/Subscriptions/00000000-0000-0000-0000-000000000000/Providers/Microsoft.Compute/Locations/westus/Publishers/Canonical/ArtifactTypes/VMImage/Offers/UbuntuServer/Skus/18.04-LTS/Versions/18.04.202210180\"\ + \r\n }\r\n]" + headers: + cache-control: + - no-cache + content-length: + - '286' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:11:50 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/ListVMImagesVersionsFromLocation3Min;15997,Microsoft.Compute/ListVMImagesVersionsFromLocation30Min;43989 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - vm create + Connection: + - keep-alive + ParameterSetName: + - -n -g --image --generate-ssh-key --nsg-rule + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus/publishers/Canonical/artifacttypes/vmimage/offers/UbuntuServer/skus/18.04-LTS/versions/18.04.202210180?api-version=2022-08-01 + response: + body: + string: "{\r\n \"properties\": {\r\n \"hyperVGeneration\": \"V1\",\r\n \ + \ \"architecture\": \"x64\",\r\n \"replicaType\": \"Unmanaged\",\r\n\ + \ \"disallowed\": {\r\n \"vmDiskType\": \"None\"\r\n },\r\n \ + \ \"automaticOSUpgradeProperties\": {\r\n \"automaticOSUpgradeSupported\"\ + : true\r\n },\r\n \"imageDeprecationStatus\": {\r\n \"imageState\"\ + : \"Active\"\r\n },\r\n \"features\": [\r\n {\r\n \"name\"\ + : \"IsAcceleratedNetworkSupported\",\r\n \"value\": \"True\"\r\n \ + \ },\r\n {\r\n \"name\": \"DiskControllerTypes\",\r\n \ + \ \"value\": \"SCSI, NVMe\"\r\n },\r\n {\r\n \"name\":\ + \ \"IsHibernateSupported\",\r\n \"value\": 
\"True\"\r\n }\r\n\ + \ ],\r\n \"osDiskImage\": {\r\n \"operatingSystem\": \"Linux\"\ + ,\r\n \"sizeInGb\": 31,\r\n \"sizeInBytes\": 32213303808\r\n \ + \ },\r\n \"dataDiskImages\": []\r\n },\r\n \"location\": \"westus\",\r\ + \n \"name\": \"18.04.202210180\",\r\n \"id\": \"/Subscriptions/00000000-0000-0000-0000-000000000000/Providers/Microsoft.Compute/Locations/westus/Publishers/Canonical/ArtifactTypes/VMImage/Offers/UbuntuServer/Skus/18.04-LTS/Versions/18.04.202210180\"\ + \r\n}" + headers: + cache-control: + - no-cache + content-length: + - '1048' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:11:52 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/GetVMImageFromLocation3Min;12997,Microsoft.Compute/GetVMImageFromLocation30Min;73991 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - vm create + Connection: + - keep-alive + ParameterSetName: + - -n -g --image --generate-ssh-key --nsg-rule + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-network/21.0.1 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/virtualNetworks?api-version=2022-01-01 + response: + body: + string: '{"value":[]}' + headers: + cache-control: + - no-cache + content-length: + - '12' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:11:52 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"properties": {"template": {"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", "parameters": {}, "variables": {}, "resources": + [{"name": "vm-000004VNET", "type": "Microsoft.Network/virtualNetworks", "location": + "westus", "apiVersion": "2015-06-15", "dependsOn": [], "tags": {}, "properties": + {"addressSpace": {"addressPrefixes": ["10.0.0.0/16"]}, "subnets": [{"name": + "vm-000004Subnet", "properties": {"addressPrefix": "10.0.0.0/24"}}]}}, {"type": + "Microsoft.Network/networkSecurityGroups", "name": "vm-000004NSG", "apiVersion": + "2015-06-15", "location": "westus", "tags": {}, "dependsOn": []}, {"apiVersion": + "2022-01-01", "type": "Microsoft.Network/publicIPAddresses", "name": "vm-000004PublicIP", + "location": "westus", "tags": {}, "dependsOn": [], "properties": {"publicIPAllocationMethod": + null}}, {"apiVersion": "2015-06-15", "type": "Microsoft.Network/networkInterfaces", + "name": "vm-000004VMNic", "location": "westus", "tags": {}, "dependsOn": ["Microsoft.Network/virtualNetworks/vm-000004VNET", + "Microsoft.Network/networkSecurityGroups/vm-000004NSG", "Microsoft.Network/publicIpAddresses/vm-000004PublicIP"], + "properties": {"ipConfigurations": [{"name": "ipconfigvm-000004", "properties": + {"privateIPAllocationMethod": "Dynamic", "subnet": {"id": 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/virtualNetworks/vm-000004VNET/subnets/vm-000004Subnet"}, + "publicIPAddress": {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/publicIPAddresses/vm-000004PublicIP"}}}], + "networkSecurityGroup": {"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkSecurityGroups/vm-000004NSG"}}}, + {"apiVersion": "2022-08-01", "type": "Microsoft.Compute/virtualMachines", "name": + "vm-000004", "location": "westus", "tags": {}, "dependsOn": ["Microsoft.Network/networkInterfaces/vm-000004VMNic"], + "properties": {"hardwareProfile": {"vmSize": "Standard_DS1_v2"}, "networkProfile": + {"networkInterfaces": [{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic", + "properties": {"deleteOption": null}}]}, "storageProfile": {"osDisk": {"createOption": + "fromImage", "name": null, "caching": "ReadWrite", "managedDisk": {"storageAccountType": + null}}, "imageReference": {"publisher": "Canonical", "offer": "UbuntuServer", + "sku": "18.04-LTS", "version": "latest"}}, "osProfile": {"computerName": "vm-000004", + "adminUsername": "supadhyay", "linuxConfiguration": {"disablePasswordAuthentication": + true, "ssh": {"publicKeys": [{"keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC7DmFgToC3d0MlE8SOOFXZCK6SJ04QBWWBRvspL+shVcWsv0ufvwDZ74ewurSf23eWNs+NgWazMeg9GEjVuQVGS+1+UBHZZMm1bE7WTymA6Opck730pz5EEXj5+l0oSJVFqChEqYVZH9Umc9TThF0s+jqpajA8mFuAYPwT/sxK8xbz9/zglGUaGSeQ2xyyZh7APJkJVTkRrgHf61YX6v3NfkJKjnfy2bU8Lw/N1fLhcrtMLOYT55dDEMLvIrDD+SvYp3uA8pXsQm/UXzVtu+88v0lVgVXv09FAKzC1lDDYUbo3NYKIYJNRlnTt6M6sVEszQH23p7V+6erOC6syejCv", + "path": "/home/supadhyay/.ssh/authorized_keys"}]}}}}}], "outputs": {}}, "parameters": + {}, "mode": "incremental"}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - vm create + Connection: + - keep-alive + Content-Length: + - '3442' + Content-Type: + - application/json + ParameterSetName: + - -n -g --image --generate-ssh-key --nsg-rule + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2021-04-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Resources/deployments/vm_deploy_32b1BvaCQGJrrsW9Of4i9R9YI5GV1lKp","name":"vm_deploy_32b1BvaCQGJrrsW9Of4i9R9YI5GV1lKp","type":"Microsoft.Resources/deployments","properties":{"templateHash":"9949138699710208634","parameters":{},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2022-10-19T08:12:01.2424159Z","duration":"PT0.0004949S","correlationId":"9844566b-59ca-40f1-9db4-67039879c29c","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"virtualNetworks","locations":["westus"]},{"resourceType":"networkSecurityGroups","locations":["westus"]},{"resourceType":"publicIPAddresses","locations":["westus"]},{"resourceType":"networkInterfaces","locations":["westus"]}]},{"namespace":"Microsoft.Compute","resourceTypes":[{"resourceType":"virtualMachines","locations":["westus"]}]}],"dependencies":[{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/virtualNetworks/vm-000004VNET","resourceType":"Microsoft.Network/virtualNetworks","resourceName":"vm-000004VNET"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkSecurityGroups/vm-000004NSG","resourceType":"Microsoft.Network/networkSecurityGroups","resourceName":"vm-000004NSG"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/publicIPAddresses/vm-000004PublicIP","resourceType":"Microsoft.Network/publicIPAddresses","resourceName":"vm-000004PublicIP"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic","resourceType":"Microsoft.Network/networkInterfaces","resourceName":"vm-000004VMNic"},{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic","resourceType":"Microsoft.Network/networkInterfaces","resourceName":"vm-000004VMNic"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/virtualMachines/vm-000004","resourceType":"Microsoft.Compute/virtualMachines","resourceName":"vm-000004"}]}}' + headers: + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Resources/deployments/vm_deploy_32b1BvaCQGJrrsW9Of4i9R9YI5GV1lKp/operationStatuses/08585354397684869414?api-version=2021-04-01 + cache-control: + - no-cache + content-length: + - '2674' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:02 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1197' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - vm create + Connection: + 
- keep-alive + ParameterSetName: + - -n -g --image --generate-ssh-key --nsg-rule + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08585354397684869414?api-version=2021-04-01 + response: + body: + string: '{"status":"Running"}' + headers: + cache-control: + - no-cache + content-length: + - '20' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:12:33 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - vm create + Connection: + - keep-alive + ParameterSetName: + - -n -g --image --generate-ssh-key --nsg-rule + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08585354397684869414?api-version=2021-04-01 + response: + body: + string: '{"status":"Succeeded"}' + headers: + cache-control: + - no-cache + content-length: + - '22' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:13:04 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - vm create + Connection: + - keep-alive + ParameterSetName: + - -n -g --image --generate-ssh-key --nsg-rule + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2021-04-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Resources/deployments/vm_deploy_32b1BvaCQGJrrsW9Of4i9R9YI5GV1lKp","name":"vm_deploy_32b1BvaCQGJrrsW9Of4i9R9YI5GV1lKp","type":"Microsoft.Resources/deployments","properties":{"templateHash":"9949138699710208634","parameters":{},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2022-10-19T08:12:48.265627Z","duration":"PT47.023706S","correlationId":"9844566b-59ca-40f1-9db4-67039879c29c","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"virtualNetworks","locations":["westus"]},{"resourceType":"networkSecurityGroups","locations":["westus"]},{"resourceType":"publicIPAddresses","locations":["westus"]},{"resourceType":"networkInterfaces","locations":["westus"]}]},{"namespace":"Microsoft.Compute","resourceTypes":[{"resourceType":"virtualMachines","locations":["westus"]}]}],"dependencies":[{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/virtualNetworks/vm-000004VNET","resourceType":"Microsoft.Network/virtualNetworks","resourceName":"vm-000004VNET"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkSecurityGroups/vm-000004NSG","resourceType":"Microsoft.Network/networkSecurityGroups","resourceName":"vm-000004NSG"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/publicIPAddresses/vm-000004PublicIP","resourceType":"Microsoft.Network/publicIPAddresses","resourceName":"vm-000004PublicIP"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic","resourceType":"Microsoft.Network/networkInterfaces","resourceName":"vm-000004VMNic"},{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic","resourceType":"Microsoft.Network/networkInterfaces","resourceName":"vm-000004VMNic"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/virtualMachines/vm-000004","resourceType":"Microsoft.Compute/virtualMachines","resourceName":"vm-000004"}],"outputs":{},"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/virtualMachines/vm-000004"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkSecurityGroups/vm-000004NSG"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/publicIPAddresses/vm-000004PublicIP"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_t
est_automation_software_update_configuration000001/providers/Microsoft.Network/virtualNetworks/vm-000004VNET"}]}}' + headers: + cache-control: + - no-cache + content-length: + - '3667' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:13:04 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - vm create + Connection: + - keep-alive + ParameterSetName: + - -n -g --image --generate-ssh-key --nsg-rule + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-compute/28.0.0 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/virtualMachines/vm-000004?$expand=instanceView&api-version=2022-08-01 + response: + body: + string: "{\r\n \"name\": \"vm-000004\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/virtualMachines/vm-000004\"\ + ,\r\n \"type\": \"Microsoft.Compute/virtualMachines\",\r\n \"location\"\ + : \"westus\",\r\n \"tags\": {\r\n \"azsecpack\": \"nonprod\",\r\n \"\ + platformsettings.host_environment.service.platform_optedin_for_rootcerts\"\ + : \"true\"\r\n },\r\n \"properties\": {\r\n \"vmId\": \"137faf30-d19f-4d56-927b-4602e372090b\"\ + ,\r\n \"hardwareProfile\": {\r\n \"vmSize\": \"Standard_DS1_v2\"\r\ + \n },\r\n \"storageProfile\": {\r\n \"imageReference\": {\r\n \ + \ \"publisher\": \"Canonical\",\r\n \"offer\": \"UbuntuServer\"\ + ,\r\n \"sku\": \"18.04-LTS\",\r\n \"version\": \"latest\",\r\ + \n \"exactVersion\": \"18.04.202210180\"\r\n },\r\n \"osDisk\"\ + : {\r\n \"osType\": \"Linux\",\r\n \"name\": \"vm-000004_disk1_5135dbb675364ec5915a8922e897103f\"\ + ,\r\n \"createOption\": \"FromImage\",\r\n \"caching\": \"ReadWrite\"\ + ,\r\n \"managedDisk\": {\r\n \"storageAccountType\": \"Premium_LRS\"\ + ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/disks/vm-000004_disk1_5135dbb675364ec5915a8922e897103f\"\ + \r\n },\r\n \"deleteOption\": \"Detach\",\r\n \"diskSizeGB\"\ + : 30\r\n },\r\n \"dataDisks\": []\r\n },\r\n \"osProfile\"\ + : {\r\n \"computerName\": \"vm-000004\",\r\n \"adminUsername\":\ + \ \"supadhyay\",\r\n \"linuxConfiguration\": {\r\n \"disablePasswordAuthentication\"\ + : true,\r\n \"ssh\": {\r\n \"publicKeys\": [\r\n \ + \ {\r\n \"path\": \"/home/supadhyay/.ssh/authorized_keys\"\ + ,\r\n \"keyData\": \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC7DmFgToC3d0MlE8SOOFXZCK6SJ04QBWWBRvspL+shVcWsv0ufvwDZ74ewurSf23eWNs+NgWazMeg9GEjVuQVGS+1+UBHZZMm1bE7WTymA6Opck730pz5EEXj5+l0oSJVFqChEqYVZH9Umc9TThF0s+jqpajA8mFuAYPwT/sxK8xbz9/zglGUaGSeQ2xyyZh7APJkJVTkRrgHf61YX6v3NfkJKjnfy2bU8Lw/N1fLhcrtMLOYT55dDEMLvIrDD+SvYp3uA8pXsQm/UXzVtu+88v0lVgVXv09FAKzC1lDDYUbo3NYKIYJNRlnTt6M6sVEszQH23p7V+6erOC6syejCv\"\ + \r\n }\r\n ]\r\n },\r\n \"provisionVMAgent\"\ + : true,\r\n \"patchSettings\": {\r\n \"patchMode\": \"ImageDefault\"\ + ,\r\n \"assessmentMode\": \"ImageDefault\"\r\n },\r\n \ + \ \"enableVMAgentPlatformUpdates\": false\r\n },\r\n \"secrets\"\ + : [],\r\n 
\"allowExtensionOperations\": true,\r\n \"requireGuestProvisionSignal\"\ + : true\r\n },\r\n \"networkProfile\": {\"networkInterfaces\":[{\"id\"\ + :\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic\"\ + }]},\r\n \"provisioningState\": \"Succeeded\",\r\n \"instanceView\"\ + : {\r\n \"computerName\": \"vm-000004\",\r\n \"osName\": \"ubuntu\"\ + ,\r\n \"osVersion\": \"18.04\",\r\n \"vmAgent\": {\r\n \"\ + vmAgentVersion\": \"2.8.0.11\",\r\n \"statuses\": [\r\n {\r\ + \n \"code\": \"ProvisioningState/succeeded\",\r\n \"\ + level\": \"Info\",\r\n \"displayStatus\": \"Ready\",\r\n \ + \ \"message\": \"Guest Agent is running\",\r\n \"time\": \"\ + 2022-10-19T08:12:56+00:00\"\r\n }\r\n ],\r\n \"extensionHandlers\"\ + : []\r\n },\r\n \"disks\": [\r\n {\r\n \"name\"\ + : \"vm-000004_disk1_5135dbb675364ec5915a8922e897103f\",\r\n \"statuses\"\ + : [\r\n {\r\n \"code\": \"ProvisioningState/succeeded\"\ + ,\r\n \"level\": \"Info\",\r\n \"displayStatus\"\ + : \"Provisioning succeeded\",\r\n \"time\": \"2022-10-19T08:12:31.4725121+00:00\"\ + \r\n }\r\n ]\r\n }\r\n ],\r\n \"hyperVGeneration\"\ + : \"V1\",\r\n \"statuses\": [\r\n {\r\n \"code\": \"\ + ProvisioningState/succeeded\",\r\n \"level\": \"Info\",\r\n \ + \ \"displayStatus\": \"Provisioning succeeded\",\r\n \"time\"\ + : \"2022-10-19T08:12:44.2226469+00:00\"\r\n },\r\n {\r\n \ + \ \"code\": \"PowerState/running\",\r\n \"level\": \"Info\"\ + ,\r\n \"displayStatus\": \"VM running\"\r\n }\r\n ]\r\ + \n },\r\n \"timeCreated\": \"2022-10-19T08:12:30.2537515+00:00\"\r\n\ + \ }\r\n}" + headers: + cache-control: + - no-cache + content-length: + - '4118' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:13:06 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-resource: + - Microsoft.Compute/LowCostGet3Min;3990,Microsoft.Compute/LowCostGet30Min;31947 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - vm create + Connection: + - keep-alive + ParameterSetName: + - -n -g --image --generate-ssh-key --nsg-rule + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-network/21.0.1 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic?api-version=2022-01-01 + response: + body: + string: "{\r\n \"name\": \"vm-000004VMNic\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic\"\ + ,\r\n \"etag\": \"W/\\\"d3a2eae0-7a80-4220-a29b-a0bd2e71a899\\\"\",\r\n \ + \ \"tags\": {},\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\ + ,\r\n \"resourceGuid\": \"35efe0bd-6651-4d62-8f7c-c012005df769\",\r\n \ + \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"ipconfigvm-000004\"\ + ,\r\n \"id\": 
\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic/ipConfigurations/ipconfigvm-000004\"\ + ,\r\n \"etag\": \"W/\\\"d3a2eae0-7a80-4220-a29b-a0bd2e71a899\\\"\"\ + ,\r\n \"type\": \"Microsoft.Network/networkInterfaces/ipConfigurations\"\ + ,\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\ + ,\r\n \"privateIPAddress\": \"10.0.0.4\",\r\n \"privateIPAllocationMethod\"\ + : \"Dynamic\",\r\n \"publicIPAddress\": {\r\n \"id\":\ + \ \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/publicIPAddresses/vm-000004PublicIP\"\ + \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/virtualNetworks/vm-000004VNET/subnets/vm-000004Subnet\"\ + \r\n },\r\n \"primary\": true,\r\n \"privateIPAddressVersion\"\ + : \"IPv4\"\r\n }\r\n }\r\n ],\r\n \"dnsSettings\": {\r\n\ + \ \"dnsServers\": [],\r\n \"appliedDnsServers\": [],\r\n \"\ + internalDomainNameSuffix\": \"1ukvbqp11cuele5hwoqwj514wh.dx.internal.cloudapp.net\"\ + \r\n },\r\n \"macAddress\": \"00-22-48-08-E7-53\",\r\n \"vnetEncryptionSupported\"\ + : false,\r\n \"enableIPForwarding\": false,\r\n \"networkSecurityGroup\"\ + : {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkSecurityGroups/vm-000004NSG\"\ + \r\n },\r\n \"primary\": true,\r\n \"virtualMachine\": {\r\n \ + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/virtualMachines/vm-000004\"\ + \r\n },\r\n \"hostedWorkloads\": [],\r\n \"tapConfigurations\": [],\r\ + \n \"nicType\": \"Standard\",\r\n \"allowPort25Out\": true\r\n },\r\ + \n \"type\": \"Microsoft.Network/networkInterfaces\",\r\n \"location\":\ + \ \"westus\",\r\n \"kind\": \"Regular\"\r\n}" + headers: + cache-control: + - no-cache + content-length: + - '2641' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:13:08 GMT + etag: + - W/"d3a2eae0-7a80-4220-a29b-a0bd2e71a899" + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-arm-service-request-id: + - 1e59a065-f210-4284-9c4f-08e687bd270b + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - vm create + Connection: + - keep-alive + ParameterSetName: + - -n -g --image --generate-ssh-key --nsg-rule + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-network/21.0.1 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/publicIPAddresses/vm-000004PublicIP?api-version=2022-01-01 + response: + body: + string: "{\r\n \"name\": \"vm-000004PublicIP\",\r\n \"id\": 
\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/publicIPAddresses/vm-000004PublicIP\"\ + ,\r\n \"etag\": \"W/\\\"d84373da-b1e2-4916-8cf4-e4ce89bcd646\\\"\",\r\n \ + \ \"location\": \"westus\",\r\n \"tags\": {},\r\n \"properties\": {\r\n\ + \ \"provisioningState\": \"Succeeded\",\r\n \"resourceGuid\": \"9e84ed36-3452-4d69-8957-b317e105e542\"\ + ,\r\n \"ipAddress\": \"20.253.130.91\",\r\n \"publicIPAddressVersion\"\ + : \"IPv4\",\r\n \"publicIPAllocationMethod\": \"Dynamic\",\r\n \"idleTimeoutInMinutes\"\ + : 4,\r\n \"ipTags\": [],\r\n \"ipConfiguration\": {\r\n \"id\"\ + : \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Network/networkInterfaces/vm-000004VMNic/ipConfigurations/ipconfigvm-000004\"\ + \r\n }\r\n },\r\n \"type\": \"Microsoft.Network/publicIPAddresses\",\r\ + \n \"sku\": {\r\n \"name\": \"Basic\",\r\n \"tier\": \"Regional\"\r\ + \n }\r\n}" + headers: + cache-control: + - no-cache + content-length: + - '1006' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:13:08 GMT + etag: + - W/"d84373da-b1e2-4916-8cf4-e4ce89bcd646" + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-arm-service-request-id: + - bca317d7-e114-4358-8f46-ee572c70c605 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation account create + Connection: + - keep-alive + ParameterSetName: + - -n -g + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_automation_software_update_configuration000001?api-version=2021-04-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001","name":"cli_test_automation_software_update_configuration000001","type":"Microsoft.Resources/resourceGroups","location":"westus","tags":{"product":"azurecli","cause":"automation","date":"2022-10-19T08:11:44Z"},"properties":{"provisioningState":"Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '388' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:13:09 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"name": "account-000002", "location": "westus", "properties": {"sku": + {"name": "Basic"}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation account create + Connection: + - keep-alive + Content-Length: + - '90' + Content-Type: + - application/json + ParameterSetName: + - -n -g + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: PUT + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002?api-version=2021-06-22 + response: + body: + string: '{"name":"account-000002","systemData":{"createdAt":"2022-10-19T08:13:16.3533333+00:00","lastModifiedAt":"2022-10-19T08:13:16.3533333+00:00"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002","type":"Microsoft.Automation/AutomationAccounts","location":"westus","tags":{},"etag":null,"properties":{"disableLocalAuth":false,"sku":{"name":"Basic","family":null,"capacity":null},"state":"Ok","RegistrationUrl":"https://d5b5a4bb-3247-4f99-b63d-2d13097def43.agentsvc.wus.azure-automation.net/accounts/d5b5a4bb-3247-4f99-b63d-2d13097def43","encryption":{"keySource":"Microsoft.Automation","identity":{"userAssignedIdentity":null}},"automationHybridServiceUrl":"https://d5b5a4bb-3247-4f99-b63d-2d13097def43.jrds.wus.azure-automation.net/automationAccounts/d5b5a4bb-3247-4f99-b63d-2d13097def43","RuntimeConfiguration":{"powershell":{"builtinModules":{"Az":"8.0.0"}},"powershell7":{"builtinModules":{"Az":"8.0.0"}}},"creationTime":"2022-10-19T08:13:16.3533333+00:00","lastModifiedBy":null,"lastModifiedTime":"2022-10-19T08:13:16.3533333+00:00"}}' + headers: + cache-control: + - no-cache + content-length: + - '1179' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:13:18 GMT + expires: + - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002?api-version=2021-06-22 + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 201 + message: Created +- request: + body: '{"properties": {"updateConfiguration": {"operatingSystem": "Windows", "windows": + {"includedUpdateClassifications": "Critical", "excludedKbNumbers": ["16800", + "16800"], "includedKbNumbers": ["15000", "15000"], "rebootSetting": "IfRequired"}, + "duration": "PT2H", "azureVirtualMachines": ["/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/virtualMachines/vm-000004"], + "nonAzureComputerNames": ["nonvm1", "nonvm2"], "targets": {"azureQueries": [{"scope": + ["/subscriptions/00000000-0000-0000-0000-000000000000"], "locations": ["eastus", + "westus"], "tagSettings": {"tags": {"tag": ["tag1", "tag2"]}}}]}}, "scheduleInfo": + {"startTime": "2022-10-23T12:30:00.000Z", "expiryTime": "2022-10-30T12:30:00.000Z", + "nextRun": "2022-10-25T12:30:00.000Z", "interval": 1, "frequency": "Hour", "timeZone": + "UTC+08:00", "description": "test"}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation software-update-configuration create + Connection: + - keep-alive + Content-Length: + - '925' + Content-Type: + - application/json + ParameterSetName: + - -n -g --automation-account-name --description --frequency --interval --operating-system + --excluded-kb-numbers --included-kb-numbers --included-update-classifications 
+ --duration --azure-virtual-machines --time-zone --start-time --expiry-time + --next-run --non-azure-computer-names --reboot-setting --azure-queries-scope + --azure-queries-location --azure-queries-tags + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002/softwareUpdateConfigurations/conf-000003?api-version=2019-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002/softwareUpdateConfigurations/conf-000003","name":"conf-000003","type":null,"properties":{"updateConfiguration":{"operatingSystem":"Windows","windows":{"includedUpdateClassifications":"Critical","excludedKbNumbers":["16800","16800"],"includedKbNumbers":["15000","15000"],"rebootSetting":"IfRequired"},"linux":null,"targets":{"azureQueries":[{"scope":["/subscriptions/00000000-0000-0000-0000-000000000000"],"tagSettings":{"tags":{"tag":["tag1","tag2"]},"filterOperator":0},"locations":["eastus","westus"]}],"nonAzureQueries":null},"duration":"PT2H","azureVirtualMachines":["/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/virtualMachines/vm-000004"],"nonAzureComputerNames":["nonvm1","nonvm2"]},"scheduleInfo":{"description":"test","startTime":"2022-10-23T12:30:00+08:00","startTimeOffsetMinutes":480.0,"expiryTime":"2022-10-30T12:30:00+08:00","expiryTimeOffsetMinutes":480.0,"isEnabled":true,"nextRun":"2022-10-23T12:30:00+08:00","nextRunOffsetMinutes":480.0,"interval":1,"frequency":"Hour","creationTime":"2022-10-19T08:13:21.3133333+00:00","lastModifiedTime":"2022-10-19T08:13:21.3133333+00:00","timeZone":"UTC+08:00","advancedSchedule":null},"provisioningState":"Provisioning","createdBy":"{scrubbed}","error":null,"tasks":null,"creationTime":"2022-10-19T08:13:21.4+00:00","lastModifiedBy":null,"lastModifiedTime":"2022-10-19T08:13:21.4+00:00"}}' + headers: + cache-control: + - no-cache + content-length: + - '1645' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:13:21 GMT + expires: + - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002/softwareUpdateConfigurations/conf-000003?api-version=2019-06-01 + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-resource-requests: + - '99' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation software-update-configuration list + Connection: + - keep-alive + ParameterSetName: + - -g --automation-account-name + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002/softwareUpdateConfigurations?api-version=2019-06-01 + response: + body: + string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002/softwareUpdateConfigurations/conf-000003","name":"conf-000003","properties":{"updateConfiguration":{"operatingSystem":"Windows","windows":{"includedUpdateClassifications":"Critical","excludedKbNumbers":["16800","16800"],"includedKbNumbers":["15000","15000"],"rebootSetting":"IfRequired"},"linux":null,"targets":{"azureQueries":[{"scope":["/subscriptions/00000000-0000-0000-0000-000000000000"],"tagSettings":{"tags":{"tag":["tag1","tag2"]},"filterOperator":0},"locations":["eastus","westus"]}],"nonAzureQueries":null},"duration":"PT2H","azureVirtualMachines":["/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/virtualMachines/vm-000004"],"nonAzureComputerNames":["nonvm1","nonvm2"]},"frequency":"Hour","startTime":"2022-10-23T12:30:00+08:00","creationTime":"2022-10-19T08:13:21.4+00:00","lastModifiedTime":"2022-10-19T08:13:21.4+00:00","provisioningState":"Provisioning","nextRun":"2022-10-23T12:30:00+08:00","tasks":null}}]}' + headers: + cache-control: + - no-cache + content-length: + - '1231' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:13:22 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation software-update-configuration show + Connection: + - keep-alive + ParameterSetName: + - -n -g --automation-account-name -n + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002/softwareUpdateConfigurations/conf-000003?api-version=2019-06-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002/softwareUpdateConfigurations/conf-000003","name":"conf-000003","type":null,"properties":{"updateConfiguration":{"operatingSystem":"Windows","windows":{"includedUpdateClassifications":"Critical","excludedKbNumbers":["16800","16800"],"includedKbNumbers":["15000","15000"],"rebootSetting":"IfRequired"},"linux":null,"targets":{"azureQueries":[{"scope":["/subscriptions/00000000-0000-0000-0000-000000000000"],"tagSettings":{"tags":{"tag":["tag1","tag2"]},"filterOperator":0},"locations":["eastus","westus"]}],"nonAzureQueries":null},"duration":"PT2H","azureVirtualMachines":["/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Compute/virtualMachines/vm-000004"],"nonAzureComputerNames":["nonvm1","nonvm2"]},"scheduleInfo":{"description":"test","startTime":"2022-10-23T12:30:00+08:00","startTimeOffsetMinutes":480.0,"expiryTime":"2022-10-30T12:30:00+08:00","expiryTimeOffsetMinutes":480.0,"isEnabled":true,"nextRun":"2022-10-23T12:30:00+08:00","nextRunOffsetMinutes":480.0,"interval":1,"frequency":"Hour","creationTime":"2022-10-19T08:13:21.3133333+00:00","lastModifiedTime":"2022-10-19T08:13:21.3133333+00:00","timeZone":"UTC+08:00","advancedSchedule":null},"provisioningState":"Provisioning","createdBy":"{scrubbed}","error":null,"tasks":null,"creationTime":"2022-10-19T08:13:21.4+00:00","lastModifiedBy":null,"lastModifiedTime":"2022-10-19T08:13:21.4+00:00"}}' + headers: + cache-control: + - no-cache + content-length: + - '1645' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:13:24 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation software-update-configuration runs list + Connection: + - keep-alive + ParameterSetName: + - -g --automation-account-name + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002/softwareUpdateConfigurationRuns?api-version=2019-06-01 + response: + body: + string: '{"value":[]}' + headers: + cache-control: + - no-cache + content-length: + - '12' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:13:26 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation software-update-configuration machine-runs list + Connection: + - keep-alive + ParameterSetName: + - -g --automation-account-name + User-Agent: + - AZURECLI/2.41.0 
azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002/softwareUpdateConfigurationMachineRuns?api-version=2019-06-01 + response: + body: + string: '{"value":[]}' + headers: + cache-control: + - no-cache + content-length: + - '12' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 19 Oct 2022 08:13:28 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - automation software-update-configuration delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -n -g --automation-account-name -y + User-Agent: + - AZURECLI/2.41.0 azsdk-python-mgmt-automation/1.1.0b2 Python/3.10.5 (Windows-10-10.0.22621-SP0) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_automation_software_update_configuration000001/providers/Microsoft.Automation/automationAccounts/account-000002/softwareUpdateConfigurations/conf-000003?api-version=2019-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Wed, 19 Oct 2022 08:13:30 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-HTTPAPI/2.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-resource-requests: + - '99' + status: + code: 200 + message: OK +version: 1 diff --git a/src/automation/azext_automation/tests/latest/test_automation_scenario_manual.py b/src/automation/azext_automation/tests/latest/test_automation_scenario_manual.py index 6274e211e3e..10246a35798 100644 --- a/src/automation/azext_automation/tests/latest/test_automation_scenario_manual.py +++ b/src/automation/azext_automation/tests/latest/test_automation_scenario_manual.py @@ -6,6 +6,7 @@ import os from unittest import mock +from azure.cli.testsdk.scenario_tests import AllowLargeResponse import tempfile from azure.cli.testsdk import ScenarioTest, ResourceGroupPreparer @@ -64,14 +65,16 @@ def _uuid(): class AutomationScenarioTest(ScenarioTest): - + @AllowLargeResponse() @ResourceGroupPreparer(name_prefix='cli_test_automation_', key='rg', location='westus2') def test_automation(self, resource_group): self.kwargs.update({ 'account_name': self.create_random_name(prefix='test-account-', length=24), 'runbook_name': self.create_random_name(prefix='test-runbook-', length=24), 'runbook_content': RUNBOOK_CONTENT, - 'hybrid_runbook_worker_group_name' : self.create_random_name(prefix='hwg-', length=10) + 'hybrid_runbook_worker_group_name' : self.create_random_name(prefix='hwg-', length=10), + 'python3Package_name': self.create_random_name(prefix='py3-package-', length=24), + 'python3PackageContentUri':'uri=https://files.pythonhosted.org/packages/7f/e2/85dfb9f7364cbd7a9213caea0e91fc948da3c912a2b222a3e43bc9cc6432/requires.io-0.2.6-py2.py3-none-any.whl' }) self.cmd('automation account create --resource-group {rg} --name 
{account_name} --location "West US 2"', checks=[self.check('name', '{account_name}')]) @@ -126,6 +129,20 @@ def test_automation(self, resource_group): self.cmd('automation hrwg delete --resource-group {rg} --automation-account-name {account_name} --name {hybrid_runbook_worker_group_name} --yes') + self.cmd('automation python3-package create --resource-group {rg} --automation-account-name {account_name} --name {python3Package_name} --content-link {python3PackageContentUri}', + checks=[self.check('name', '{python3Package_name}')]) + + self.cmd('automation python3-package update --resource-group {rg} --automation-account-name {account_name} --name {python3Package_name} --content-link {python3PackageContentUri}', + checks=[self.check('name', '{python3Package_name}')]) + + self.cmd('automation python3-package show --resource-group {rg} --automation-account-name {account_name} --name {python3Package_name}', + checks=[self.check('name', '{python3Package_name}')]) + + self.cmd('automation python3-package list --resource-group {rg} --automation-account-name {account_name} ', + checks=[self.check('length(@)', 1)]) + + self.cmd('automation python3-package delete --resource-group {rg} --automation-account-name {account_name} --name {python3Package_name} --yes') + with mock.patch('azext_automation.manual.custom._uuid', side_effect=_uuid): job = self.cmd('automation runbook start --resource-group {rg} --automation-account-name {account_name} ' '--name {runbook_name}').get_output_in_json() @@ -142,6 +159,7 @@ def test_automation(self, resource_group): self.cmd('automation account delete --resource-group {rg} --name {account_name} -y') @ResourceGroupPreparer(name_prefix='cli_test_automation_schedule') + @AllowLargeResponse() def test_automation_schedule(self, resource_group): self.kwargs.update({ 'account_name': self.create_random_name('account-', 15), @@ -149,10 +167,10 @@ def test_automation_schedule(self, resource_group): }) self.cmd('automation account create -n {account_name} -g {rg}') - self.cmd('automation schedule create -n {schedule_name} -g {rg} --automation-account-name {account_name} --description test --frequency Hour --interval 1 --start-time 2022-08-30 18:00:00 --time-zone UTC+08:00', checks=[ + self.cmd('automation schedule create -n {schedule_name} -g {rg} --automation-account-name {account_name} --description test --frequency Hour --interval 1 --start-time 2022-10-19 15:38:00 --time-zone UTC+08:00', checks=[ self.check('frequency', 'Hour'), self.check('interval', '1'), - self.check('startTime', '2022-08-30T18:00:00+08:00'), + self.check('startTime', '2022-10-19T18:08:00+08:00'), self.check('timeZone', 'UTC+08:00'), self.check('description', 'test'), self.check('isEnabled', True) @@ -160,7 +178,7 @@ def test_automation_schedule(self, resource_group): self.cmd('automation schedule update -n {schedule_name} -g {rg} --automation-account-name {account_name} --description test1 --is-enabled false', checks=[ self.check('frequency', 'Hour'), self.check('interval', '1'), - self.check('startTime', '2022-08-30T18:00:00+08:00'), + self.check('startTime', '2022-10-19T18:08:00+08:00'), self.check('timeZone', 'UTC+08:00'), self.check('description', 'test1'), self.check('isEnabled', False) @@ -168,7 +186,7 @@ def test_automation_schedule(self, resource_group): self.cmd('automation schedule list -g {rg} --automation-account-name {account_name} ', checks=[ self.check('[0].frequency', 'Hour'), self.check('[0].interval', '1'), - self.check('[0].startTime', '2022-08-30T18:00:00+08:00'), + 
self.check('[0].startTime', '2022-10-19T18:08:00+08:00'), self.check('[0].timeZone', 'UTC+08:00'), self.check('[0].description', 'test1'), self.check('[0].isEnabled', False) @@ -176,7 +194,7 @@ def test_automation_schedule(self, resource_group): self.cmd('automation schedule show -n {schedule_name} -g {rg} --automation-account-name {account_name} ', checks=[ self.check('frequency', 'Hour'), self.check('interval', '1'), - self.check('startTime', '2022-08-30T18:00:00+08:00'), + self.check('startTime', '2022-10-19T18:08:00+08:00'), self.check('timeZone', 'UTC+08:00'), self.check('description', 'test1'), self.check('isEnabled', False), @@ -184,6 +202,7 @@ def test_automation_schedule(self, resource_group): self.cmd('automation schedule delete -n {schedule_name} -g {rg} --automation-account-name {account_name} -y') @ResourceGroupPreparer(name_prefix='cli_test_automation_software_update_configuration') + @AllowLargeResponse() def test_automation_software_update_configuration(self, resource_group): self.kwargs.update({ 'account_name': self.create_random_name('account-', 15), @@ -191,17 +210,19 @@ def test_automation_software_update_configuration(self, resource_group): 'vm_name':self.create_random_name('vm-', 15), }) + sub = '/subscriptions/' + self.get_subscription_id() vm_id = self.cmd('vm create -n {vm_name} -g {rg} --image ubuntults --generate-ssh-key --nsg-rule NONE').get_output_in_json()['id'] self.kwargs.update({ - 'vm_id': vm_id + 'vm_id': vm_id, + 'sub': sub }) self.cmd('automation account create -n {account_name} -g {rg}') - self.cmd('automation software-update-configuration create -n {conf_name} -g {rg} --automation-account-name {account_name} --description test --frequency Hour --interval 1 --operating-system windows --excluded-kb-numbers 16800 16800 --included-kb-numbers 15000 15000 --included-update-classifications Critical --duration pT2H0M --azure-virtual-machines {vm_id} --time-zone UTC+08:00 --start-time 2022-08-23 18:00:00 --expiry-time 2022-08-30 18:00:00 --next-run 2022-08-25 18:00:00 --non-azure-computer-names nonvm1 nonvm2 --reboot-setting IfRequired --azure-queries-scope /subscriptions/00000000-0000-0000-0000-000000000000 --azure-queries-location eastus westus --azure-queries-tags tag1 tag2', checks=[ + self.cmd('automation software-update-configuration create -n {conf_name} -g {rg} --automation-account-name {account_name} --description test --frequency Hour --interval 1 --operating-system windows --excluded-kb-numbers 16800 16800 --included-kb-numbers 15000 15000 --included-update-classifications Critical --duration pT2H0M --azure-virtual-machines {vm_id} --time-zone UTC+08:00 --start-time 2022-10-23 18:00:00 --expiry-time 2022-10-30 18:00:00 --next-run 2022-10-25 18:00:00 --non-azure-computer-names nonvm1 nonvm2 --reboot-setting IfRequired --azure-queries-scope {sub} --azure-queries-location eastus westus --azure-queries-tags tag1 tag2', checks=[ self.check('name', '{conf_name}'), self.check('scheduleInfo.description', 'test'), self.check('scheduleInfo.frequency', 'Hour'), self.check('scheduleInfo.interval', '1'), - self.check('scheduleInfo.startTime', '2022-08-23T10:00:00+08:00'), + self.check('scheduleInfo.startTime', '2022-10-23T12:30:00+08:00'), self.check('scheduleInfo.timeZone', 'UTC+08:00'), self.check('scheduleInfo.description', 'test'), self.check('scheduleInfo.isEnabled', True), @@ -210,7 +231,7 @@ def test_automation_software_update_configuration(self, resource_group): self.check('updateConfiguration.nonAzureComputerNames', ['nonvm1', 'nonvm2']), 
self.check('updateConfiguration.operatingSystem', 'Windows'), self.check('updateConfiguration.targets.azureQueries[0].locations', ['eastus', 'westus']), - self.check('updateConfiguration.targets.azureQueries[0].scope', ['/subscriptions/00000000-0000-0000-0000-000000000000']), + self.check('updateConfiguration.targets.azureQueries[0].scope', [sub]), self.check('updateConfiguration.targets.azureQueries[0].tagSettings.tags.tag', ['tag1','tag2']), self.check('updateConfiguration.windows.excludedKbNumbers', ['16800', '16800']), self.check('updateConfiguration.windows.includedKbNumbers', ['15000', '15000']), @@ -224,7 +245,7 @@ def test_automation_software_update_configuration(self, resource_group): self.check('value[0].updateConfiguration.nonAzureComputerNames', ['nonvm1', 'nonvm2']), self.check('value[0].updateConfiguration.operatingSystem', 'Windows'), self.check('value[0].updateConfiguration.targets.azureQueries[0].locations', ['eastus', 'westus']), - self.check('value[0].updateConfiguration.targets.azureQueries[0].scope', ['/subscriptions/00000000-0000-0000-0000-000000000000']), + self.check('value[0].updateConfiguration.targets.azureQueries[0].scope', [sub]), self.check('value[0].updateConfiguration.targets.azureQueries[0].tagSettings.tags.tag', ['tag1', 'tag2']), self.check('value[0].updateConfiguration.windows.excludedKbNumbers', ['16800', '16800']), self.check('value[0].updateConfiguration.windows.includedKbNumbers', ['15000', '15000']), @@ -236,7 +257,7 @@ def test_automation_software_update_configuration(self, resource_group): self.check('scheduleInfo.description', 'test'), self.check('scheduleInfo.frequency', 'Hour'), self.check('scheduleInfo.interval', '1'), - self.check('scheduleInfo.startTime', '2022-08-23T10:00:00+08:00'), + self.check('scheduleInfo.startTime', '2022-10-23T12:30:00+08:00'), self.check('scheduleInfo.timeZone', 'UTC+08:00'), self.check('scheduleInfo.description', 'test'), self.check('scheduleInfo.isEnabled', True), @@ -245,7 +266,7 @@ def test_automation_software_update_configuration(self, resource_group): self.check('updateConfiguration.nonAzureComputerNames', ['nonvm1', 'nonvm2']), self.check('updateConfiguration.operatingSystem', 'Windows'), self.check('updateConfiguration.targets.azureQueries[0].locations', ['eastus', 'westus']), - self.check('updateConfiguration.targets.azureQueries[0].scope', ['/subscriptions/00000000-0000-0000-0000-000000000000']), + self.check('updateConfiguration.targets.azureQueries[0].scope', [sub]), self.check('updateConfiguration.targets.azureQueries[0].tagSettings.tags.tag', ['tag1', 'tag2']), self.check('updateConfiguration.windows.excludedKbNumbers', ['16800', '16800']), self.check('updateConfiguration.windows.includedKbNumbers', ['15000', '15000']), diff --git a/src/automation/linter_exclusions.yml b/src/automation/linter_exclusions.yml index daf01e19db3..e46caf3b331 100644 --- a/src/automation/linter_exclusions.yml +++ b/src/automation/linter_exclusions.yml @@ -253,3 +253,28 @@ automation software-update-configuration machine-runs list: software_update_configuration_machine_run_id: rule_exclusions: - option_length_too_long +automation python3-package create: + parameters: + automation_account_name: + rule_exclusions: + - option_length_too_long +automation python3-package delete: + parameters: + automation_account_name: + rule_exclusions: + - option_length_too_long +automation python3-package list: + parameters: + automation_account_name: + rule_exclusions: + - option_length_too_long +automation python3-package show: + parameters: 
+ automation_account_name: + rule_exclusions: + - option_length_too_long +automation python3-package update: + parameters: + automation_account_name: + rule_exclusions: + - option_length_too_long \ No newline at end of file diff --git a/src/automation/setup.py b/src/automation/setup.py index ae416c9d979..d1ad108c545 100644 --- a/src/automation/setup.py +++ b/src/automation/setup.py @@ -10,7 +10,7 @@ from setuptools import setup, find_packages # HISTORY.rst entry. -VERSION = '0.2.0' +VERSION = '0.2.1' try: from azext_automation.manual.version import VERSION except ImportError: From 33e409363cfffe0f09fe3256ed03e62798d80e73 Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Tue, 25 Oct 2022 07:30:52 +0000 Subject: [PATCH 28/85] [Release] Update index.json for extension [ automation ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=11062&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/d855cc232c7733159579389c98cc5d4286cb069f --- src/index.json | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/src/index.json b/src/index.json index 5cbfa956c1f..80e7845daee 100644 --- a/src/index.json +++ b/src/index.json @@ -10534,6 +10534,49 @@ "version": "0.1.4" }, "sha256Digest": "cf27c3366e48ad1758a30745d1cd05703b15c84d0fa95369c4a098620ff93df8" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/automation-0.2.1-py3-none-any.whl", + "filename": "automation-0.2.1-py3-none-any.whl", + "metadata": { + "azext.isExperimental": true, + "azext.minCliCoreVersion": "2.40.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/automation" + } + } + }, + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "automation", + "summary": "Microsoft Azure Command-Line Tools AutomationClient Extension", + "version": "0.2.1" + }, + "sha256Digest": "18cb0bfa7c7121bd7afdceea2182ac0fe3b174933bbb897b023c61e9dc05459b" } ], "azure-batch-cli-extensions": [ From e99e7e51219f223448e316678bd7b7f24259c14d Mon Sep 17 00:00:00 2001 From: Vivian Thiebaut <81188381+vthiebaut10@users.noreply.github.com> Date: Tue, 25 Oct 2022 22:05:01 -0400 Subject: [PATCH 29/85] [ssh] Upgrade Version for Release (#5485) --- src/ssh/azext_ssh/custom.py | 2 +- src/ssh/azext_ssh/ssh_utils.py | 6 ++---- src/ssh/setup.py | 2 +- 3 files changed, 4 insertions(+), 6 deletions(-) diff --git a/src/ssh/azext_ssh/custom.py b/src/ssh/azext_ssh/custom.py index 39df248a006..678a32ac9ff 100644 --- a/src/ssh/azext_ssh/custom.py +++ b/src/ssh/azext_ssh/custom.py @@ -274,7 +274,7 @@ def _assert_args(resource_group, vm_name, ssh_ip, resource_type, cert_file, user raise azclierror.InvalidArgumentValueError("--resource-type must be either " "\"Microsoft.Compute/virtualMachines\", " 
"\"Microsoft.HybridCompute/machines\", " - "or \"Microsoft.ConnectedVMwareSphere/virtualMachines\".") + "or \"Microsoft.ConnectedVMwarevSphere/virtualMachines\".") if not (resource_group or vm_name or ssh_ip): raise azclierror.RequiredArgumentMissingError( diff --git a/src/ssh/azext_ssh/ssh_utils.py b/src/ssh/azext_ssh/ssh_utils.py index b889ee7cbc6..f35395fb133 100644 --- a/src/ssh/azext_ssh/ssh_utils.py +++ b/src/ssh/azext_ssh/ssh_utils.py @@ -32,12 +32,10 @@ def start_ssh_connection(op_info, delete_keys, delete_cert): # Redirecting stderr: # 1. Read SSH logs to determine if authentication was successful so credentials can be deleted # 2. Read SSHProxy error messages to print friendly error messages for well known errors. - # On Linux when connecting to a local user on a host with a banner, output gets messed up if stderr redirected. + # When connecting to a local user on a host with a banner, output gets messed up if stderr redirected. # If user expects logs to be printed, do not redirect logs. In some ocasions output gets messed up. - is_local_user_on_linux = (platform.system() != 'Windows' and not delete_cert) redirect_stderr = set(['-v', '-vv', '-vvv']).isdisjoint(ssh_arg_list) and \ - (op_info.is_arc or delete_cert or op_info.delete_credentials) and \ - not is_local_user_on_linux + (delete_cert or op_info.delete_credentials) if redirect_stderr: ssh_arg_list = ['-v'] + ssh_arg_list diff --git a/src/ssh/setup.py b/src/ssh/setup.py index a080cbec415..c58433bc6e8 100644 --- a/src/ssh/setup.py +++ b/src/ssh/setup.py @@ -7,7 +7,7 @@ from setuptools import setup, find_packages -VERSION = "1.1.2" +VERSION = "1.1.3" CLASSIFIERS = [ 'Development Status :: 4 - Beta', From 328ae24ead7c307c9f4ad48e7a088f405f26e158 Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Wed, 26 Oct 2022 02:12:25 +0000 Subject: [PATCH 30/85] [Release] Update index.json for extension [ ssh ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=11234&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/e99e7e51219f223448e316678bd7b7f24259c14d --- src/index.json | 51 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/src/index.json b/src/index.json index 80e7845daee..8425dc83bfa 100644 --- a/src/index.json +++ b/src/index.json @@ -38625,6 +38625,57 @@ "version": "1.1.2" }, "sha256Digest": "e50513e5985024936c5f65ae564817e5705ccda69046abcbabd2418f5753d48e" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/ssh-1.1.3-py3-none-any.whl", + "filename": "ssh-1.1.3-py3-none-any.whl", + "metadata": { + "azext.isPreview": false, + "azext.minCliCoreVersion": "2.4.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/ssh" + } + } + }, + "extras": [], + "generator": "bdist_wheel (0.30.0)", + 
"license": "MIT", + "metadata_version": "2.0", + "name": "ssh", + "run_requires": [ + { + "requires": [ + "oschmod (==0.3.12)" + ] + } + ], + "summary": "SSH into Azure VMs using RBAC and AAD OpenSSH Certificates", + "version": "1.1.3" + }, + "sha256Digest": "7cc245377b2a287b6ca4d2b47119ee48a1c5aa18443db00e6ea0e071825f7adf" } ], "stack-hci": [ From 7bf4b9b91752780d26e5d4899477933df850b0e0 Mon Sep 17 00:00:00 2001 From: FumingZhang <81607949+FumingZhang@users.noreply.github.com> Date: Wed, 26 Oct 2022 14:22:48 +0800 Subject: [PATCH 31/85] update env setup steps (#5487) --- src/aks-preview/azcli_aks_live_test/scripts/setup_venv.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/aks-preview/azcli_aks_live_test/scripts/setup_venv.sh b/src/aks-preview/azcli_aks_live_test/scripts/setup_venv.sh index d01c4ca5aff..5e7496e18c7 100755 --- a/src/aks-preview/azcli_aks_live_test/scripts/setup_venv.sh +++ b/src/aks-preview/azcli_aks_live_test/scripts/setup_venv.sh @@ -48,7 +48,7 @@ setupAZ(){ # need to be executed in a venv installTestPackages(){ # install pytest plugins - pip install pytest-json-report pytest-rerunfailures pytest-cov --upgrade + pip install pytest-json-report pytest-rerunfailures pytest-cov pytest-forked --upgrade # install coverage for measuring code coverage pip install coverage From 227faf9b5c6bd313b256dc36ba1dc7cb49172722 Mon Sep 17 00:00:00 2001 From: Qingyi Liu <34502364+smile37773@users.noreply.github.com> Date: Wed, 26 Oct 2022 14:25:21 +0800 Subject: [PATCH 32/85] add warning logs when updating builder and bindings (#5454) --- src/spring/HISTORY.md | 4 ++++ src/spring/azext_spring/_build_service.py | 6 ++++++ src/spring/azext_spring/buildpack_binding.py | 6 ++++++ src/spring/setup.py | 2 +- 4 files changed, 17 insertions(+), 1 deletion(-) diff --git a/src/spring/HISTORY.md b/src/spring/HISTORY.md index 111889261d6..17182d619e0 100644 --- a/src/spring/HISTORY.md +++ b/src/spring/HISTORY.md @@ -1,5 +1,9 @@ Release History =============== +1.1.12 +--- +* Add warning logs when editing builders and buildpack bindings. + 1.1.11 --- * Add command `az spring app deployment enable-remote-debugging`. diff --git a/src/spring/azext_spring/_build_service.py b/src/spring/azext_spring/_build_service.py index 3a1908f8ecf..3ac9bd78bd4 100644 --- a/src/spring/azext_spring/_build_service.py +++ b/src/spring/azext_spring/_build_service.py @@ -8,6 +8,9 @@ import json from azure.cli.core.util import sdk_no_wait from .vendored_sdks.appplatform.v2022_01_01_preview import models +from knack.log import get_logger + +logger = get_logger(__name__) DEFAULT_BUILD_SERVICE_NAME = "default" DEFAULT_BUILD_AGENT_POOL_NAME = "default" @@ -25,6 +28,9 @@ def _update_default_build_agent_pool(cmd, client, resource_group, name, build_po def create_or_update_builder(cmd, client, resource_group, service, name, builder_json=None, builder_file=None, no_wait=False): + logger.warning('Editing builder will regenerate images for all app deployments using this builder. These new images will ' + + 'be used after app restart either manually by yourself or automatically by Azure Spring Apps in regular maintenance tasks. 
' + + 'Use CLI command --"az spring build-service builder show-deployments" to view the app deployment list of the builder.') builder = _update_builder(builder_file, builder_json) builder_resource = models.BuilderResource( properties=builder diff --git a/src/spring/azext_spring/buildpack_binding.py b/src/spring/azext_spring/buildpack_binding.py index 82d5f02b3ba..0588f7123ef 100644 --- a/src/spring/azext_spring/buildpack_binding.py +++ b/src/spring/azext_spring/buildpack_binding.py @@ -23,6 +23,9 @@ def create_or_update_buildpack_binding(cmd, client, resource_group, service, name, type, builder_name=None, properties=None, secrets=None): + logger.warning('Editing bindings will regenerate images for all app deployments using this builder. These new images will ' + + 'be used after app restart either manually by yourself or automatically by Azure Spring Apps in regular maintenance tasks. ' + + 'Use CLI command --"az spring build-service builder show-deployments" to view the app deployment list of the builder.') if not builder_name: builder_name = DEFAULT_BUILDER_NAME logger.warning('Option --builder-name is not provided, will use default builder name "{}".'.format(builder_name)) @@ -52,6 +55,9 @@ def buildpack_binding_list(cmd, client, resource_group, service, builder_name=No def buildpack_binding_delete(cmd, client, resource_group, service, name, builder_name=None): + logger.warning('Deleting bindings will regenerate images for all app deployments using this builder. These new images will ' + + 'be used after app restart either manually by yourself or automatically by Azure Spring Apps in regular maintenance tasks. ' + + 'Use CLI command --"az spring build-service builder show-deployments" to view the app deployment list of the builder.') if not builder_name: builder_name = DEFAULT_BUILDER_NAME logger.warning('Option --builder-name is not provided, will use default builder name "{}".'.format(builder_name)) diff --git a/src/spring/setup.py b/src/spring/setup.py index aa89b509d53..c657fd65fb8 100644 --- a/src/spring/setup.py +++ b/src/spring/setup.py @@ -16,7 +16,7 @@ # TODO: Confirm this is the right version number you want and it matches your # HISTORY.rst entry. 
-VERSION = '1.1.11' +VERSION = '1.1.12' # The full list of classifiers is available at # https://pypi.python.org/pypi?%3Aaction=list_classifiers From 5b3d04ce56e66b8778e181c857c875bf7e829d6d Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Wed, 26 Oct 2022 06:35:26 +0000 Subject: [PATCH 33/85] [Release] Update index.json for extension [ spring ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=11370&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/227faf9b5c6bd313b256dc36ba1dc7cb49172722 --- src/index.json | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/src/index.json b/src/index.json index 8425dc83bfa..a2f239a1ebd 100644 --- a/src/index.json +++ b/src/index.json @@ -35869,6 +35869,49 @@ "version": "1.1.11" }, "sha256Digest": "be1471eb6aa6a462d13f352f643f4267fa8411c73f17d2b6d0f842229860dde8" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/spring-1.1.12-py3-none-any.whl", + "filename": "spring-1.1.12-py3-none-any.whl", + "metadata": { + "azext.isPreview": false, + "azext.minCliCoreVersion": "2.38.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/spring" + } + } + }, + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "spring", + "summary": "Microsoft Azure Command-Line Tools spring Extension", + "version": "1.1.12" + }, + "sha256Digest": "ae1c41d2d09150c600e51d6de3a085a04d079b6803bdf4fcba6b041460f46a8d" } ], "spring-cloud": [ From c91f2dfed6c48ae0be9acb4a8e8ed350308c1883 Mon Sep 17 00:00:00 2001 From: MartinForReal Date: Wed, 26 Oct 2022 18:14:55 +0900 Subject: [PATCH 34/85] aks: add --outbound-type option in update command (#5379) Signed-off-by: MartinForReal Signed-off-by: MartinForReal --- src/aks-preview/HISTORY.rst | 5 + .../configs/ext_matrix_default.json | 3 +- src/aks-preview/azext_aks_preview/_consts.py | 2 +- src/aks-preview/azext_aks_preview/_help.py | 6 + .../azext_aks_preview/_loadbalancer.py | 8 +- .../azext_aks_preview/_natgateway.py | 6 +- src/aks-preview/azext_aks_preview/_params.py | 1 + src/aks-preview/azext_aks_preview/custom.py | 1 + .../managed_cluster_decorator.py | 118 +- ...pdate_outbound_from_slb_to_natgateway.yaml | 1325 +++++++++++++++++ .../tests/latest/test_aks_commands.py | 33 + .../latest/test_managed_cluster_decorator.py | 29 + src/aks-preview/setup.py | 2 +- 13 files changed, 1515 insertions(+), 24 deletions(-) create mode 100644 src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_update_outbound_from_slb_to_natgateway.yaml diff --git a/src/aks-preview/HISTORY.rst b/src/aks-preview/HISTORY.rst index 699f2169e02..e5f4b9584e4 100644 --- a/src/aks-preview/HISTORY.rst +++ b/src/aks-preview/HISTORY.rst @@ -12,6 +12,11 @@ To release a new 
version, please select a new version number (usually plus 1 to
 Pending
 +++++++
+0.5.112
++++++++
+
+* Add --outbound-type to update managed cluster command.
+
 0.5.111
 +++++++
diff --git a/src/aks-preview/azcli_aks_live_test/configs/ext_matrix_default.json b/src/aks-preview/azcli_aks_live_test/configs/ext_matrix_default.json
index 015b0b84990..ec81fa34ea0 100644
--- a/src/aks-preview/azcli_aks_live_test/configs/ext_matrix_default.json
+++ b/src/aks-preview/azcli_aks_live_test/configs/ext_matrix_default.json
@@ -22,7 +22,8 @@
 "test_aks_custom_ca_trust_flow",
 "test_aks_create_with_csi_driver_v2",
 "test_aks_create_and_update_csi_driver_to_v2",
- "test_aks_nodepool_abort"
+ "test_aks_nodepool_abort",
+ "test_aks_update_outbound_from_slb_to_natgateway"
 ],
 "missing namespace registration (AME)": [
 "test_aks_create_with_monitoring_aad_auth_msi",
diff --git a/src/aks-preview/azext_aks_preview/_consts.py b/src/aks-preview/azext_aks_preview/_consts.py
index 58b55b38231..3201d9ef6aa 100644
--- a/src/aks-preview/azext_aks_preview/_consts.py
+++ b/src/aks-preview/azext_aks_preview/_consts.py
@@ -63,7 +63,7 @@
 CONST_OUTBOUND_TYPE_USER_DEFINED_ROUTING = "userDefinedRouting"
 CONST_OUTBOUND_TYPE_MANAGED_NAT_GATEWAY = "managedNATGateway"
 CONST_OUTBOUND_TYPE_USER_ASSIGNED_NAT_GATEWAY = "userAssignedNATGateway"
-
+CONST_OUTBOUND_MIGRATION_MULTIZONE_TO_NATGATEWAY_MSG = "Warning: this AKS cluster has multi-zonal nodepools, but NAT Gateway is not currently zone redundant. Migrating outbound connectivity to NAT Gateway could lead to a reduction in zone redundancy for this cluster. Continue?"
 # load balancer backend pool type
 CONST_LOAD_BALANCER_BACKEND_POOL_TYPE_NODE_IP = "nodeIP"
 CONST_LOAD_BALANCER_BACKEND_POOL_TYPE_NODE_IPCONFIGURATION = "nodeIPConfiguration"
diff --git a/src/aks-preview/azext_aks_preview/_help.py b/src/aks-preview/azext_aks_preview/_help.py
index 2de4e84b396..046728db666 100644
--- a/src/aks-preview/azext_aks_preview/_help.py
+++ b/src/aks-preview/azext_aks_preview/_help.py
@@ -657,6 +657,10 @@
 type: int
 short-summary: NAT gateway idle timeout in minutes.
 long-summary: Desired idle timeout for NAT gateway outbound flows, default is 4 minutes. Please specify a value in the range of [4, 120]. Valid for Standard SKU load balancer cluster with managedNATGateway outbound type only.
+ - name: --outbound-type
+ type: string
+ short-summary: How outbound traffic will be configured for a cluster.
+ long-summary: This option changes the way outbound connections are managed in the AKS cluster. Default is loadBalancer; other available options are managedNATGateway, userAssignedNATGateway and userDefinedRouting.
 - name: --enable-pod-security-policy
 type: bool
 short-summary: (PREVIEW) Enable pod security policy.
@@ -917,6 +921,8 @@
 text: az aks update -g MyResourceGroup -n MyManagedCluster --load-balancer-outbound-ips
 - name: Update a kubernetes cluster with standard SKU load balancer to use the provided public IP prefixes for the load balancer outbound connection usage.
text: az aks update -g MyResourceGroup -n MyManagedCluster --load-balancer-outbound-ip-prefixes + - name: Update a kubernetes cluster with new outbound type + text: az aks update -g MyResourceGroup -n MyManagedCluster --outbound-type managedNATGateway - name: Update a kubernetes cluster with two outbound AKS managed IPs an idle flow timeout of 5 minutes and 8000 allocated ports per machine text: az aks update -g MyResourceGroup -n MyManagedCluster --load-balancer-managed-outbound-ip-count 2 --load-balancer-idle-timeout 5 --load-balancer-outbound-ports 8000 - name: Update a kubernetes cluster of managedNATGateway outbound type with two outbound AKS managed IPs an idle flow timeout of 4 minutes diff --git a/src/aks-preview/azext_aks_preview/_loadbalancer.py b/src/aks-preview/azext_aks_preview/_loadbalancer.py index 25f4daca122..9d863353f97 100644 --- a/src/aks-preview/azext_aks_preview/_loadbalancer.py +++ b/src/aks-preview/azext_aks_preview/_loadbalancer.py @@ -28,6 +28,12 @@ def update_load_balancer_profile(managed_outbound_ip_count, managed_outbound_ipv if not is_load_balancer_profile_provided(managed_outbound_ip_count, managed_outbound_ipv6_count, outbound_ips, outbound_ip_prefixes, outbound_ports, backend_pool_type, idle_timeout): return profile + if not profile: + if isinstance(models, SimpleNamespace): + ManagedClusterLoadBalancerProfile = models.ManagedClusterLoadBalancerProfile + else: + ManagedClusterLoadBalancerProfile = models.get("ManagedClusterLoadBalancerProfile") + profile = ManagedClusterLoadBalancerProfile() return configure_load_balancer_profile(managed_outbound_ip_count, managed_outbound_ipv6_count, outbound_ips, outbound_ip_prefixes, outbound_ports, idle_timeout, backend_pool_type, profile, models) @@ -51,8 +57,6 @@ def create_load_balancer_profile(managed_outbound_ip_count, managed_outbound_ipv def configure_load_balancer_profile(managed_outbound_ip_count, managed_outbound_ipv6_count, outbound_ips, outbound_ip_prefixes, outbound_ports, idle_timeout, backend_pool_type, profile, models): """configure a load balancer with customer supplied values""" - if not profile: - return profile outbound_ip_resources = _get_load_balancer_outbound_ips(outbound_ips, models) outbound_ip_prefix_resources = _get_load_balancer_outbound_ip_prefixes(outbound_ip_prefixes, models) diff --git a/src/aks-preview/azext_aks_preview/_natgateway.py b/src/aks-preview/azext_aks_preview/_natgateway.py index bffc77847ad..e34c043e9cd 100644 --- a/src/aks-preview/azext_aks_preview/_natgateway.py +++ b/src/aks-preview/azext_aks_preview/_natgateway.py @@ -19,7 +19,8 @@ def update_nat_gateway_profile(managed_outbound_ip_count, idle_timeout, profile, """parse and update an existing NAT gateway profile""" if not is_nat_gateway_profile_provided(managed_outbound_ip_count, idle_timeout): return profile - + if not profile: + profile = models.ManagedClusterNATGatewayProfile() return configure_nat_gateway_profile(managed_outbound_ip_count, idle_timeout, profile, models) @@ -29,9 +30,6 @@ def is_nat_gateway_profile_provided(managed_outbound_ip_count, idle_timeout): def configure_nat_gateway_profile(managed_outbound_ip_count, idle_timeout, profile, models: SimpleNamespace): """configure a NAT Gateway with customer supplied values""" - if not profile: - return profile - if managed_outbound_ip_count: ManagedClusterManagedOutboundIPProfile = models.ManagedClusterManagedOutboundIPProfile profile.managed_outbound_ip_profile = ManagedClusterManagedOutboundIPProfile( diff --git a/src/aks-preview/azext_aks_preview/_params.py 
b/src/aks-preview/azext_aks_preview/_params.py
index fde0f75cdfc..5a9ae9cd685 100644
--- a/src/aks-preview/azext_aks_preview/_params.py
+++ b/src/aks-preview/azext_aks_preview/_params.py
@@ -411,6 +411,7 @@ def load_arguments(self, _):
 # managed cluster
 c.argument('http_proxy_config')
 c.argument('load_balancer_managed_outbound_ipv6_count', type=int)
+ c.argument('outbound_type', arg_type=get_enum_type(outbound_types))
 c.argument('enable_pod_security_policy', action='store_true')
 c.argument('disable_pod_security_policy', action='store_true')
 c.argument('enable_pod_identity', action='store_true')
diff --git a/src/aks-preview/azext_aks_preview/custom.py b/src/aks-preview/azext_aks_preview/custom.py
index 44d34267d0b..3aca771be67 100644
--- a/src/aks-preview/azext_aks_preview/custom.py
+++ b/src/aks-preview/azext_aks_preview/custom.py
@@ -768,6 +768,7 @@ def aks_update(
 # managed cluster
 http_proxy_config=None,
 load_balancer_managed_outbound_ipv6_count=None,
+ outbound_type=None,
 enable_pod_security_policy=False,
 disable_pod_security_policy=False,
 enable_pod_identity=False,
diff --git a/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py b/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py
index 7d8decd9425..46a6618110b 100644
--- a/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py
+++ b/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py
@@ -48,6 +48,9 @@
 CONST_AZURE_KEYVAULT_NETWORK_ACCESS_PRIVATE,
 CONST_AZURE_KEYVAULT_NETWORK_ACCESS_PUBLIC,
 CONST_LOAD_BALANCER_SKU_BASIC,
+ CONST_OUTBOUND_TYPE_LOAD_BALANCER,
+ CONST_OUTBOUND_TYPE_MANAGED_NAT_GATEWAY,
+ CONST_OUTBOUND_MIGRATION_MULTIZONE_TO_NATGATEWAY_MSG,
 CONST_PRIVATE_DNS_ZONE_NONE,
 CONST_PRIVATE_DNS_ZONE_SYSTEM,
 CONST_EBPF_DATAPLANE_CILIUM,
@@ -61,6 +64,8 @@
 from azext_aks_preview._loadbalancer import (
 update_load_balancer_profile as _update_load_balancer_profile,
 )
+from azext_aks_preview._natgateway import update_nat_gateway_profile as _update_nat_gateway_profile
+
 from azext_aks_preview._podidentity import (
 _fill_defaults_for_pod_identity_profile,
 _is_pod_identity_addon_enabled,
@@ -92,6 +97,7 @@
 ManagedClusterSecurityProfileDefender = TypeVar("ManagedClusterSecurityProfileDefender")
 ManagedClusterSecurityProfileNodeRestriction = TypeVar("ManagedClusterSecurityProfileNodeRestriction")
 ManagedClusterWorkloadProfileVerticalPodAutoscaler = TypeVar("ManagedClusterWorkloadProfileVerticalPodAutoscaler")
+ManagedClusterLoadBalancerProfile = TypeVar("ManagedClusterLoadBalancerProfile")
 # pylint: disable=too-few-public-methods
@@ -270,6 +276,33 @@ def get_ip_families(self) -> Union[List[str], None]:
 # this parameter does not need validation
 return ip_families
+ def get_outbound_type(self, load_balancer_profile: ManagedClusterLoadBalancerProfile = None) -> Union[str, None]:
+ """Internal function to dynamically obtain the value of outbound_type according to the context.
+
+ Note: All the external parameters involved in the validation are not verified in their own getters.
+
+ When outbound_type is not assigned, dynamic completion will be triggered. By default, the value is set to
+ CONST_OUTBOUND_TYPE_LOAD_BALANCER.
+
+ This function supports the option of enable_validation. When enabled, if the value of outbound_type is
+ CONST_OUTBOUND_TYPE_MANAGED_NAT_GATEWAY, CONST_OUTBOUND_TYPE_USER_ASSIGNED_NAT_GATEWAY or
+ CONST_OUTBOUND_TYPE_USER_DEFINED_ROUTING, the following checks will be performed. If load_balancer_sku is set
+ to basic, an InvalidArgumentValueError will be raised.
If vnet_subnet_id is not assigned, + a RequiredArgumentMissingError will be raised. If any of load_balancer_managed_outbound_ip_count, + load_balancer_outbound_ips or load_balancer_outbound_ip_prefixes is assigned, a MutuallyExclusiveArgumentError + will be raised. + This function supports the option of read_only. When enabled, it will skip dynamic completion and validation. + This function supports the option of load_balancer_profile, if provided, when verifying loadbalancer-related + parameters, the value in load_balancer_profile will be used for validation. + + :return: string or None + """ + outbound_type = super().get_outbound_type(load_balancer_profile) + user_assigned_outbound_type = self.raw_param.get("outbound_type") + if user_assigned_outbound_type: + return user_assigned_outbound_type + return outbound_type + def get_load_balancer_managed_outbound_ip_count(self) -> Union[int, None]: """Obtain the value of load_balancer_managed_outbound_ip_count. @@ -2608,6 +2641,53 @@ def check_raw_parameters(self): ) raise RequiredArgumentMissingError(error_msg) + def update_outbound_type_in_network_profile(self, mc: ManagedCluster) -> ManagedCluster: + """Set up network profile for the ManagedCluster object. + Build load balancer profile, verify outbound type and load balancer sku first, then set up network profile. + :return: the ManagedCluster object + """ + self._ensure_mc(mc) + + # verify outbound type + # Note: Validation internally depends on load_balancer_sku, which is a temporary value that is + # dynamically completed. + if not mc.network_profile: + raise UnknownError( + "Unexpectedly get an empty network profile in the process of updating outbound type." + ) + if mc.agent_pool_profiles is not None and len(mc.agent_pool_profiles) > 1: + multizoned = False + for ap in mc.agent_pool_profiles: + if ap.availability_zones: + multizoned = True + if multizoned and not self.get_yes() and not prompt_y_n("\n" + CONST_OUTBOUND_MIGRATION_MULTIZONE_TO_NATGATEWAY_MSG, default="y"): + raise DecoratorEarlyExitException() + outboundType = self.context.get_outbound_type() + if outboundType: + mc.network_profile.outbound_type = outboundType + return mc + + def update_nat_gateway_profile(self, mc: ManagedCluster) -> ManagedCluster: + """Update nat gateway profile for the ManagedCluster object. + :return: the ManagedCluster object + """ + self._ensure_mc(mc) + + if not mc.network_profile: + raise UnknownError( + "Unexpectedly get an empty network profile in the process of updating nat gateway profile." + ) + if mc.network_profile.outbound_type != CONST_OUTBOUND_TYPE_MANAGED_NAT_GATEWAY: + mc.network_profile.nat_gateway_profile = None + else: + mc.network_profile.nat_gateway_profile = _update_nat_gateway_profile( + self.context.get_nat_gateway_managed_outbound_ip_count(), + self.context.get_nat_gateway_idle_timeout(), + mc.network_profile.nat_gateway_profile, + models=self.models.nat_gateway_models, + ) + return mc + def update_load_balancer_profile(self, mc: ManagedCluster) -> ManagedCluster: """Update load balancer profile for the ManagedCluster object. @@ -2621,21 +2701,23 @@ def update_load_balancer_profile(self, mc: ManagedCluster) -> ManagedCluster: raise UnknownError( "Unexpectedly get an empty network profile in the process of updating load balancer profile." ) - - # In the internal function "_update_load_balancer_profile", it will check whether the provided parameters - # have been assigned, and if there are any, the corresponding profile will be modified; otherwise, it will - # remain unchanged. 
- mc.network_profile.load_balancer_profile = _update_load_balancer_profile( - managed_outbound_ip_count=self.context.get_load_balancer_managed_outbound_ip_count(), - managed_outbound_ipv6_count=self.context.get_load_balancer_managed_outbound_ipv6_count(), - outbound_ips=self.context.get_load_balancer_outbound_ips(), - outbound_ip_prefixes=self.context.get_load_balancer_outbound_ip_prefixes(), - outbound_ports=self.context.get_load_balancer_outbound_ports(), - idle_timeout=self.context.get_load_balancer_idle_timeout(), - backend_pool_type=self.context.get_load_balancer_backend_pool_type(), - profile=mc.network_profile.load_balancer_profile, - models=self.models.load_balancer_models, - ) + if mc.network_profile.outbound_type != CONST_OUTBOUND_TYPE_LOAD_BALANCER: + mc.network_profile.load_balancer_profile = None + else: + # In the internal function "_update_load_balancer_profile", it will check whether the provided parameters + # have been assigned, and if there are any, the corresponding profile will be modified; otherwise, it will + # remain unchanged. + mc.network_profile.load_balancer_profile = _update_load_balancer_profile( + managed_outbound_ip_count=self.context.get_load_balancer_managed_outbound_ip_count(), + managed_outbound_ipv6_count=self.context.get_load_balancer_managed_outbound_ipv6_count(), + outbound_ips=self.context.get_load_balancer_outbound_ips(), + outbound_ip_prefixes=self.context.get_load_balancer_outbound_ip_prefixes(), + outbound_ports=self.context.get_load_balancer_outbound_ports(), + idle_timeout=self.context.get_load_balancer_idle_timeout(), + backend_pool_type=self.context.get_load_balancer_backend_pool_type(), + profile=mc.network_profile.load_balancer_profile, + models=self.models.load_balancer_models, + ) return mc def update_api_server_access_profile(self, mc: ManagedCluster) -> ManagedCluster: @@ -3074,5 +3156,11 @@ def update_mc_profile_preview(self) -> ManagedCluster: mc = self.update_creation_data(mc) # update linux profile mc = self.update_linux_profile(mc) + # update outbound type + mc = self.update_outbound_type_in_network_profile(mc) + # update nat gateway profile + mc = self.update_nat_gateway_profile(mc) + # update load balancer profile + mc = self.update_load_balancer_profile(mc) return mc diff --git a/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_update_outbound_from_slb_to_natgateway.yaml b/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_update_outbound_from_slb_to_natgateway.yaml new file mode 100644 index 00000000000..85f228fb336 --- /dev/null +++ b/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_update_outbound_from_slb_to_natgateway.yaml @@ -0,0 +1,1325 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001?api-version=2021-04-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001","name":"clitest000001","type":"Microsoft.Resources/resourceGroups","location":"westus2","tags":{"product":"azurecli","cause":"automation","date":"2022-10-26T07:27:53Z"},"properties":{"provisioningState":"Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '305' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 26 Oct 2022 07:27:54 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"location": "westus2", "identity": {"type": "SystemAssigned"}, "properties": + {"kubernetesVersion": "", "dnsPrefix": "cliakstest-clitesttj4rejxwd-79a739", + "agentPoolProfiles": [{"count": 1, "vmSize": "Standard_DS2_v2", "osDiskSizeGB": + 0, "workloadRuntime": "OCIContainer", "osType": "Linux", "enableAutoScaling": + false, "type": "VirtualMachineScaleSets", "mode": "System", "orchestratorVersion": + "", "upgradeSettings": {}, "enableNodePublicIP": false, "enableCustomCATrust": + false, "scaleSetPriority": "Regular", "scaleSetEvictionPolicy": "Delete", "spotMaxPrice": + -1.0, "nodeTaints": [], "enableEncryptionAtHost": false, "enableUltraSSD": false, + "enableFIPS": false, "name": "nodepool1"}], "linuxProfile": {"adminUsername": + "azureuser", "ssh": {"publicKeys": [{"keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDFZag72JUdUPjufTC+9bGJ6+03D4WKC5qcYu2xbnzAEjW0CGYdezDhyqogngxvXgEzfhg7yUDUMYKjfg70oTN7w5t4YFiouN50Zlwra73QZ71Y9uNWM7qd5Uolaw1drHPLEbTcFloZTQM8gwWainBxkd6xRJK7A/otKFpN+TdV4T1dpNH826z+HC/ZpTavYZI+hqc7twWIj7hDQtUs2k4NDWJzPwMTAuIj4aNrmBYv4z8rK/ZtNIK07UbFlWDHFz1eZrDjOEBoSaakTY5j3vZXwe/dj5SmQypxg3AY+Xxboe4YxU2Iv1r0mcr7ST0wDEgUsdCUXO/lvNO3vGGV8qzf + azcli_aks_live_test@example.com\n"}]}}, "addonProfiles": {}, "enableRBAC": true, + "enablePodSecurityPolicy": false, "networkProfile": {"networkPlugin": "kubenet", + "podCidr": "10.244.0.0/16", "serviceCidr": "10.0.0.0/16", "dnsServiceIP": "10.0.0.10", + "dockerBridgeCidr": "172.17.0.1/16", "outboundType": "loadBalancer", "loadBalancerSku": + "standard"}, "disableLocalAccounts": false, "storageProfile": {}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + Content-Length: + - '1558' + Content-Type: + - application/json + ParameterSetName: + - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002\",\n + \ \"location\": \"westus2\",\n \"name\": \"cliakstest000002\",\n \"type\": + \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \"provisioningState\": + \"Creating\",\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"kubernetesVersion\": + \"1.23.12\",\n \"currentKubernetesVersion\": \"1.23.12\",\n \"dnsPrefix\": + 
\"cliakstest-clitesttj4rejxwd-79a739\",\n \"fqdn\": \"cliakstest-clitesttj4rejxwd-79a739-ab6f17da.hcp.westus2.azmk8s.io\",\n + \ \"azurePortalFQDN\": \"cliakstest-clitesttj4rejxwd-79a739-ab6f17da.portal.hcp.westus2.azmk8s.io\",\n + \ \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \"count\": + 1,\n \"vmSize\": \"Standard_DS2_v2\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": + \"Managed\",\n \"kubeletDiskType\": \"OS\",\n \"workloadRuntime\": + \"OCIContainer\",\n \"maxPods\": 110,\n \"type\": \"VirtualMachineScaleSets\",\n + \ \"enableAutoScaling\": false,\n \"provisioningState\": \"Creating\",\n + \ \"powerState\": {\n \"code\": \"Running\"\n },\n \"orchestratorVersion\": + \"1.23.12\",\n \"currentOrchestratorVersion\": \"1.23.12\",\n \"enableNodePublicIP\": + false,\n \"enableCustomCATrust\": false,\n \"mode\": \"System\",\n + \ \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n + \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\": + \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\": {},\n + \ \"enableFIPS\": false\n }\n ],\n \"linuxProfile\": {\n \"adminUsername\": + \"azureuser\",\n \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDFZag72JUdUPjufTC+9bGJ6+03D4WKC5qcYu2xbnzAEjW0CGYdezDhyqogngxvXgEzfhg7yUDUMYKjfg70oTN7w5t4YFiouN50Zlwra73QZ71Y9uNWM7qd5Uolaw1drHPLEbTcFloZTQM8gwWainBxkd6xRJK7A/otKFpN+TdV4T1dpNH826z+HC/ZpTavYZI+hqc7twWIj7hDQtUs2k4NDWJzPwMTAuIj4aNrmBYv4z8rK/ZtNIK07UbFlWDHFz1eZrDjOEBoSaakTY5j3vZXwe/dj5SmQypxg3AY+Xxboe4YxU2Iv1r0mcr7ST0wDEgUsdCUXO/lvNO3vGGV8qzf + azcli_aks_live_test@example.com\\n\"\n }\n ]\n }\n },\n \"servicePrincipalProfile\": + {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n },\n \"nodeResourceGroup\": + \"MC_clitest000001_cliakstest000002_westus2\",\n \"enableRBAC\": true,\n + \ \"enablePodSecurityPolicy\": false,\n \"networkProfile\": {\n \"networkPlugin\": + \"kubenet\",\n \"loadBalancerSku\": \"standard\",\n \"loadBalancerProfile\": + {\n \"managedOutboundIPs\": {\n \"count\": 1\n },\n \"backendPoolType\": + \"nodeIPConfiguration\"\n },\n \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": + \"10.0.0.0/16\",\n \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": + \"172.17.0.1/16\",\n \"outboundType\": \"loadBalancer\",\n \"podCidrs\": + [\n \"10.244.0.0/16\"\n ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n + \ ],\n \"ipFamilies\": [\n \"IPv4\"\n ]\n },\n \"maxAgentPools\": + 100,\n \"disableLocalAccounts\": false,\n \"securityProfile\": {},\n \"storageProfile\": + {\n \"diskCSIDriver\": {\n \"enabled\": true,\n \"version\": \"v1\"\n + \ },\n \"fileCSIDriver\": {\n \"enabled\": true\n },\n \"snapshotController\": + {\n \"enabled\": true\n }\n },\n \"oidcIssuerProfile\": {\n \"enabled\": + false\n },\n \"workloadAutoScalerProfile\": {}\n },\n \"identity\": + {\n \"type\": \"SystemAssigned\",\n \"principalId\":\"00000000-0000-0000-0000-000000000001\",\n + \ \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\": + {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" + headers: + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 + cache-control: + - no-cache + content-length: + - '3453' + content-type: + - application/json + date: + - Wed, 26 Oct 2022 07:27:58 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + 
strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"1b8fd278-feb7-af40-bf21-303dd1dab078\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-26T07:27:59.3884289Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 26 Oct 2022 07:28:28 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"1b8fd278-feb7-af40-bf21-303dd1dab078\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-26T07:27:59.3884289Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 26 Oct 2022 07:28:59 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"1b8fd278-feb7-af40-bf21-303dd1dab078\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-26T07:27:59.3884289Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 26 
Oct 2022 07:29:29 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"1b8fd278-feb7-af40-bf21-303dd1dab078\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-26T07:27:59.3884289Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 26 Oct 2022 07:29:59 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"1b8fd278-feb7-af40-bf21-303dd1dab078\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-26T07:27:59.3884289Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 26 Oct 2022 07:30:29 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"1b8fd278-feb7-af40-bf21-303dd1dab078\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-26T07:27:59.3884289Z\"\n }" + headers: + cache-control: + - no-cache + 
content-length: + - '126' + content-type: + - application/json + date: + - Wed, 26 Oct 2022 07:30:59 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"1b8fd278-feb7-af40-bf21-303dd1dab078\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-26T07:27:59.3884289Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 26 Oct 2022 07:31:29 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"1b8fd278-feb7-af40-bf21-303dd1dab078\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-26T07:27:59.3884289Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 26 Oct 2022 07:31:59 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"1b8fd278-feb7-af40-bf21-303dd1dab078\",\n \"status\": + \"InProgress\",\n \"startTime\": 
\"2022-10-26T07:27:59.3884289Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 26 Oct 2022 07:32:29 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"1b8fd278-feb7-af40-bf21-303dd1dab078\",\n \"status\": + \"Succeeded\",\n \"startTime\": \"2022-10-26T07:27:59.3884289Z\",\n \"endTime\": + \"2022-10-26T07:32:39.0357785Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '170' + content-type: + - application/json + date: + - Wed, 26 Oct 2022 07:32:59 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks create + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002\",\n + \ \"location\": \"westus2\",\n \"name\": \"cliakstest000002\",\n \"type\": + \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \"provisioningState\": + \"Succeeded\",\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"kubernetesVersion\": + \"1.23.12\",\n \"currentKubernetesVersion\": \"1.23.12\",\n \"dnsPrefix\": + \"cliakstest-clitesttj4rejxwd-79a739\",\n \"fqdn\": \"cliakstest-clitesttj4rejxwd-79a739-ab6f17da.hcp.westus2.azmk8s.io\",\n + \ \"azurePortalFQDN\": \"cliakstest-clitesttj4rejxwd-79a739-ab6f17da.portal.hcp.westus2.azmk8s.io\",\n + \ \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \"count\": + 1,\n \"vmSize\": \"Standard_DS2_v2\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": + \"Managed\",\n \"kubeletDiskType\": \"OS\",\n \"workloadRuntime\": + \"OCIContainer\",\n \"maxPods\": 110,\n \"type\": \"VirtualMachineScaleSets\",\n + \ \"enableAutoScaling\": false,\n \"provisioningState\": \"Succeeded\",\n + \ \"powerState\": {\n \"code\": \"Running\"\n },\n 
\"orchestratorVersion\": + \"1.23.12\",\n \"currentOrchestratorVersion\": \"1.23.12\",\n \"enableNodePublicIP\": + false,\n \"enableCustomCATrust\": false,\n \"mode\": \"System\",\n + \ \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n + \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\": + \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\": {},\n + \ \"enableFIPS\": false\n }\n ],\n \"linuxProfile\": {\n \"adminUsername\": + \"azureuser\",\n \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDFZag72JUdUPjufTC+9bGJ6+03D4WKC5qcYu2xbnzAEjW0CGYdezDhyqogngxvXgEzfhg7yUDUMYKjfg70oTN7w5t4YFiouN50Zlwra73QZ71Y9uNWM7qd5Uolaw1drHPLEbTcFloZTQM8gwWainBxkd6xRJK7A/otKFpN+TdV4T1dpNH826z+HC/ZpTavYZI+hqc7twWIj7hDQtUs2k4NDWJzPwMTAuIj4aNrmBYv4z8rK/ZtNIK07UbFlWDHFz1eZrDjOEBoSaakTY5j3vZXwe/dj5SmQypxg3AY+Xxboe4YxU2Iv1r0mcr7ST0wDEgUsdCUXO/lvNO3vGGV8qzf + azcli_aks_live_test@example.com\\n\"\n }\n ]\n }\n },\n \"servicePrincipalProfile\": + {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n },\n \"nodeResourceGroup\": + \"MC_clitest000001_cliakstest000002_westus2\",\n \"enableRBAC\": true,\n + \ \"enablePodSecurityPolicy\": false,\n \"networkProfile\": {\n \"networkPlugin\": + \"kubenet\",\n \"loadBalancerSku\": \"Standard\",\n \"loadBalancerProfile\": + {\n \"managedOutboundIPs\": {\n \"count\": 1\n },\n \"effectiveOutboundIPs\": + [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/66497f43-1896-40bb-8814-3cd8d65f6abe\"\n + \ }\n ],\n \"backendPoolType\": \"nodeIPConfiguration\"\n },\n + \ \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\",\n + \ \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\",\n + \ \"outboundType\": \"loadBalancer\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\n + \ ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\": + [\n \"IPv4\"\n ]\n },\n \"maxAgentPools\": 100,\n \"identityProfile\": + {\n \"kubeletidentity\": {\n \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/cliakstest000002-agentpool\",\n + \ \"clientId\":\"00000000-0000-0000-0000-000000000001\",\n \"objectId\":\"00000000-0000-0000-0000-000000000001\"\n + \ }\n },\n \"disableLocalAccounts\": false,\n \"securityProfile\": + {},\n \"storageProfile\": {\n \"diskCSIDriver\": {\n \"enabled\": + true,\n \"version\": \"v1\"\n },\n \"fileCSIDriver\": {\n \"enabled\": + true\n },\n \"snapshotController\": {\n \"enabled\": true\n }\n + \ },\n \"oidcIssuerProfile\": {\n \"enabled\": false\n },\n \"workloadAutoScalerProfile\": + {}\n },\n \"identity\": {\n \"type\": \"SystemAssigned\",\n \"principalId\":\"00000000-0000-0000-0000-000000000001\",\n + \ \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\": + {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" + headers: + cache-control: + - no-cache + content-length: + - '4106' + content-type: + - application/json + date: + - Wed, 26 Oct 2022 07:33:00 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - 
application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --nat-gateway-managed-outbound-ip-count --nat-gateway-idle-timeout + --outbound-type --aks-custom-header + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002\",\n + \ \"location\": \"westus2\",\n \"name\": \"cliakstest000002\",\n \"type\": + \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \"provisioningState\": + \"Succeeded\",\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"kubernetesVersion\": + \"1.23.12\",\n \"currentKubernetesVersion\": \"1.23.12\",\n \"dnsPrefix\": + \"cliakstest-clitesttj4rejxwd-79a739\",\n \"fqdn\": \"cliakstest-clitesttj4rejxwd-79a739-ab6f17da.hcp.westus2.azmk8s.io\",\n + \ \"azurePortalFQDN\": \"cliakstest-clitesttj4rejxwd-79a739-ab6f17da.portal.hcp.westus2.azmk8s.io\",\n + \ \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \"count\": + 1,\n \"vmSize\": \"Standard_DS2_v2\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": + \"Managed\",\n \"kubeletDiskType\": \"OS\",\n \"workloadRuntime\": + \"OCIContainer\",\n \"maxPods\": 110,\n \"type\": \"VirtualMachineScaleSets\",\n + \ \"enableAutoScaling\": false,\n \"provisioningState\": \"Succeeded\",\n + \ \"powerState\": {\n \"code\": \"Running\"\n },\n \"orchestratorVersion\": + \"1.23.12\",\n \"currentOrchestratorVersion\": \"1.23.12\",\n \"enableNodePublicIP\": + false,\n \"enableCustomCATrust\": false,\n \"mode\": \"System\",\n + \ \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n + \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\": + \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\": {},\n + \ \"enableFIPS\": false\n }\n ],\n \"linuxProfile\": {\n \"adminUsername\": + \"azureuser\",\n \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDFZag72JUdUPjufTC+9bGJ6+03D4WKC5qcYu2xbnzAEjW0CGYdezDhyqogngxvXgEzfhg7yUDUMYKjfg70oTN7w5t4YFiouN50Zlwra73QZ71Y9uNWM7qd5Uolaw1drHPLEbTcFloZTQM8gwWainBxkd6xRJK7A/otKFpN+TdV4T1dpNH826z+HC/ZpTavYZI+hqc7twWIj7hDQtUs2k4NDWJzPwMTAuIj4aNrmBYv4z8rK/ZtNIK07UbFlWDHFz1eZrDjOEBoSaakTY5j3vZXwe/dj5SmQypxg3AY+Xxboe4YxU2Iv1r0mcr7ST0wDEgUsdCUXO/lvNO3vGGV8qzf + azcli_aks_live_test@example.com\\n\"\n }\n ]\n }\n },\n \"servicePrincipalProfile\": + {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n },\n \"nodeResourceGroup\": + \"MC_clitest000001_cliakstest000002_westus2\",\n \"enableRBAC\": true,\n + \ \"enablePodSecurityPolicy\": false,\n \"networkProfile\": {\n \"networkPlugin\": + \"kubenet\",\n \"loadBalancerSku\": \"Standard\",\n \"loadBalancerProfile\": + {\n \"managedOutboundIPs\": {\n \"count\": 1\n },\n \"effectiveOutboundIPs\": + [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/66497f43-1896-40bb-8814-3cd8d65f6abe\"\n + \ }\n ],\n \"backendPoolType\": 
\"nodeIPConfiguration\"\n },\n + \ \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\",\n + \ \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\",\n + \ \"outboundType\": \"loadBalancer\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\n + \ ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\": + [\n \"IPv4\"\n ]\n },\n \"maxAgentPools\": 100,\n \"identityProfile\": + {\n \"kubeletidentity\": {\n \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/cliakstest000002-agentpool\",\n + \ \"clientId\":\"00000000-0000-0000-0000-000000000001\",\n \"objectId\":\"00000000-0000-0000-0000-000000000001\"\n + \ }\n },\n \"disableLocalAccounts\": false,\n \"securityProfile\": + {},\n \"storageProfile\": {\n \"diskCSIDriver\": {\n \"enabled\": + true,\n \"version\": \"v1\"\n },\n \"fileCSIDriver\": {\n \"enabled\": + true\n },\n \"snapshotController\": {\n \"enabled\": true\n }\n + \ },\n \"oidcIssuerProfile\": {\n \"enabled\": false\n },\n \"workloadAutoScalerProfile\": + {}\n },\n \"identity\": {\n \"type\": \"SystemAssigned\",\n \"principalId\":\"00000000-0000-0000-0000-000000000001\",\n + \ \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\": + {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" + headers: + cache-control: + - no-cache + content-length: + - '4106' + content-type: + - application/json + date: + - Wed, 26 Oct 2022 07:33:01 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"location": "westus2", "sku": {"name": "Basic", "tier": "Free"}, "identity": + {"type": "SystemAssigned"}, "properties": {"kubernetesVersion": "1.23.12", "dnsPrefix": + "cliakstest-clitesttj4rejxwd-79a739", "agentPoolProfiles": [{"count": 1, "vmSize": + "Standard_DS2_v2", "osDiskSizeGB": 128, "osDiskType": "Managed", "kubeletDiskType": + "OS", "workloadRuntime": "OCIContainer", "maxPods": 110, "osType": "Linux", + "osSKU": "Ubuntu", "enableAutoScaling": false, "type": "VirtualMachineScaleSets", + "mode": "System", "orchestratorVersion": "1.23.12", "upgradeSettings": {}, "powerState": + {"code": "Running"}, "enableNodePublicIP": false, "enableCustomCATrust": false, + "enableEncryptionAtHost": false, "enableUltraSSD": false, "enableFIPS": false, + "name": "nodepool1"}], "linuxProfile": {"adminUsername": "azureuser", "ssh": + {"publicKeys": [{"keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDFZag72JUdUPjufTC+9bGJ6+03D4WKC5qcYu2xbnzAEjW0CGYdezDhyqogngxvXgEzfhg7yUDUMYKjfg70oTN7w5t4YFiouN50Zlwra73QZ71Y9uNWM7qd5Uolaw1drHPLEbTcFloZTQM8gwWainBxkd6xRJK7A/otKFpN+TdV4T1dpNH826z+HC/ZpTavYZI+hqc7twWIj7hDQtUs2k4NDWJzPwMTAuIj4aNrmBYv4z8rK/ZtNIK07UbFlWDHFz1eZrDjOEBoSaakTY5j3vZXwe/dj5SmQypxg3AY+Xxboe4YxU2Iv1r0mcr7ST0wDEgUsdCUXO/lvNO3vGGV8qzf + azcli_aks_live_test@example.com\n"}]}}, "servicePrincipalProfile": {"clientId":"00000000-0000-0000-0000-000000000001"}, + "nodeResourceGroup": "MC_clitest000001_cliakstest000002_westus2", "enableRBAC": + true, "enablePodSecurityPolicy": false, "networkProfile": {"networkPlugin": + "kubenet", "podCidr": "10.244.0.0/16", "serviceCidr": "10.0.0.0/16", "dnsServiceIP": + "10.0.0.10", "dockerBridgeCidr": "172.17.0.1/16", "outboundType": "managedNATGateway", + 
"loadBalancerSku": "Standard", "natGatewayProfile": {"managedOutboundIPProfile": + {"count": 2}, "idleTimeoutInMinutes": 30}, "podCidrs": ["10.244.0.0/16"], "serviceCidrs": + ["10.0.0.0/16"], "ipFamilies": ["IPv4"]}, "identityProfile": {"kubeletidentity": + {"resourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/cliakstest000002-agentpool", + "clientId":"00000000-0000-0000-0000-000000000001", "objectId":"00000000-0000-0000-0000-000000000001"}}, + "disableLocalAccounts": false, "securityProfile": {}, "storageProfile": {}, + "workloadAutoScalerProfile": {}}}' + headers: + AKSHTTPCustomFeatures: + - Microsoft.ContainerService/AKS-OutBoundTypeMigrationPreview + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + Content-Length: + - '2352' + Content-Type: + - application/json + ParameterSetName: + - --resource-group --name --nat-gateway-managed-outbound-ip-count --nat-gateway-idle-timeout + --outbound-type --aks-custom-header + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002\",\n + \ \"location\": \"westus2\",\n \"name\": \"cliakstest000002\",\n \"type\": + \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \"provisioningState\": + \"Updating\",\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"kubernetesVersion\": + \"1.23.12\",\n \"currentKubernetesVersion\": \"1.23.12\",\n \"dnsPrefix\": + \"cliakstest-clitesttj4rejxwd-79a739\",\n \"fqdn\": \"cliakstest-clitesttj4rejxwd-79a739-ab6f17da.hcp.westus2.azmk8s.io\",\n + \ \"azurePortalFQDN\": \"cliakstest-clitesttj4rejxwd-79a739-ab6f17da.portal.hcp.westus2.azmk8s.io\",\n + \ \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \"count\": + 1,\n \"vmSize\": \"Standard_DS2_v2\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": + \"Managed\",\n \"kubeletDiskType\": \"OS\",\n \"workloadRuntime\": + \"OCIContainer\",\n \"maxPods\": 110,\n \"type\": \"VirtualMachineScaleSets\",\n + \ \"enableAutoScaling\": false,\n \"provisioningState\": \"Updating\",\n + \ \"powerState\": {\n \"code\": \"Running\"\n },\n \"orchestratorVersion\": + \"1.23.12\",\n \"currentOrchestratorVersion\": \"1.23.12\",\n \"enableNodePublicIP\": + false,\n \"enableCustomCATrust\": false,\n \"mode\": \"System\",\n + \ \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n + \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\": + \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\": {},\n + \ \"enableFIPS\": false\n }\n ],\n \"linuxProfile\": {\n \"adminUsername\": + \"azureuser\",\n \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": + \"ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABAQDFZag72JUdUPjufTC+9bGJ6+03D4WKC5qcYu2xbnzAEjW0CGYdezDhyqogngxvXgEzfhg7yUDUMYKjfg70oTN7w5t4YFiouN50Zlwra73QZ71Y9uNWM7qd5Uolaw1drHPLEbTcFloZTQM8gwWainBxkd6xRJK7A/otKFpN+TdV4T1dpNH826z+HC/ZpTavYZI+hqc7twWIj7hDQtUs2k4NDWJzPwMTAuIj4aNrmBYv4z8rK/ZtNIK07UbFlWDHFz1eZrDjOEBoSaakTY5j3vZXwe/dj5SmQypxg3AY+Xxboe4YxU2Iv1r0mcr7ST0wDEgUsdCUXO/lvNO3vGGV8qzf + azcli_aks_live_test@example.com\\n\"\n }\n ]\n }\n },\n \"servicePrincipalProfile\": + {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n },\n \"nodeResourceGroup\": + \"MC_clitest000001_cliakstest000002_westus2\",\n \"enableRBAC\": true,\n + \ \"enablePodSecurityPolicy\": false,\n \"networkProfile\": {\n \"networkPlugin\": + \"kubenet\",\n \"loadBalancerSku\": \"Standard\",\n \"loadBalancerProfile\": + {\n \"managedOutboundIPs\": {\n \"count\": 1\n },\n \"effectiveOutboundIPs\": + [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/66497f43-1896-40bb-8814-3cd8d65f6abe\"\n + \ }\n ],\n \"backendPoolType\": \"nodeIPConfiguration\"\n },\n + \ \"natGatewayProfile\": {\n \"managedOutboundIPProfile\": {\n \"count\": + 2\n },\n \"idleTimeoutInMinutes\": 30\n },\n \"podCidr\": \"10.244.0.0/16\",\n + \ \"serviceCidr\": \"10.0.0.0/16\",\n \"dnsServiceIP\": \"10.0.0.10\",\n + \ \"dockerBridgeCidr\": \"172.17.0.1/16\",\n \"outboundType\": \"managedNATGateway\",\n + \ \"podCidrs\": [\n \"10.244.0.0/16\"\n ],\n \"serviceCidrs\": + [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\": [\n \"IPv4\"\n ]\n + \ },\n \"maxAgentPools\": 100,\n \"identityProfile\": {\n \"kubeletidentity\": + {\n \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/cliakstest000002-agentpool\",\n + \ \"clientId\":\"00000000-0000-0000-0000-000000000001\",\n \"objectId\":\"00000000-0000-0000-0000-000000000001\"\n + \ }\n },\n \"disableLocalAccounts\": false,\n \"securityProfile\": + {},\n \"storageProfile\": {\n \"diskCSIDriver\": {\n \"enabled\": + true,\n \"version\": \"v1\"\n },\n \"fileCSIDriver\": {\n \"enabled\": + true\n },\n \"snapshotController\": {\n \"enabled\": true\n }\n + \ },\n \"oidcIssuerProfile\": {\n \"enabled\": false\n },\n \"workloadAutoScalerProfile\": + {}\n },\n \"identity\": {\n \"type\": \"SystemAssigned\",\n \"principalId\":\"00000000-0000-0000-0000-000000000001\",\n + \ \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\": + {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" + headers: + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/86d1700a-f14c-4068-879b-fefe3d8bc7ba?api-version=2016-03-30 + cache-control: + - no-cache + content-length: + - '4235' + content-type: + - application/json + date: + - Wed, 26 Oct 2022 07:33:04 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name 
--nat-gateway-managed-outbound-ip-count --nat-gateway-idle-timeout + --outbound-type --aks-custom-header + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/86d1700a-f14c-4068-879b-fefe3d8bc7ba?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"0a70d186-4cf1-6840-879b-fefe3d8bc7ba\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-26T07:33:04.1423202Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 26 Oct 2022 07:33:34 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --nat-gateway-managed-outbound-ip-count --nat-gateway-idle-timeout + --outbound-type --aks-custom-header + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/86d1700a-f14c-4068-879b-fefe3d8bc7ba?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"0a70d186-4cf1-6840-879b-fefe3d8bc7ba\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-26T07:33:04.1423202Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 26 Oct 2022 07:34:04 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --nat-gateway-managed-outbound-ip-count --nat-gateway-idle-timeout + --outbound-type --aks-custom-header + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/86d1700a-f14c-4068-879b-fefe3d8bc7ba?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"0a70d186-4cf1-6840-879b-fefe3d8bc7ba\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-26T07:33:04.1423202Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 26 Oct 2022 07:34:34 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + 
x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --nat-gateway-managed-outbound-ip-count --nat-gateway-idle-timeout + --outbound-type --aks-custom-header + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/86d1700a-f14c-4068-879b-fefe3d8bc7ba?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"0a70d186-4cf1-6840-879b-fefe3d8bc7ba\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-26T07:33:04.1423202Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 26 Oct 2022 07:35:04 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --nat-gateway-managed-outbound-ip-count --nat-gateway-idle-timeout + --outbound-type --aks-custom-header + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/86d1700a-f14c-4068-879b-fefe3d8bc7ba?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"0a70d186-4cf1-6840-879b-fefe3d8bc7ba\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-26T07:33:04.1423202Z\"\n }" + headers: + cache-control: + - no-cache + content-length: + - '126' + content-type: + - application/json + date: + - Wed, 26 Oct 2022 07:35:33 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --nat-gateway-managed-outbound-ip-count --nat-gateway-idle-timeout + --outbound-type --aks-custom-header + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/86d1700a-f14c-4068-879b-fefe3d8bc7ba?api-version=2016-03-30 + response: + body: + string: "{\n \"name\": \"0a70d186-4cf1-6840-879b-fefe3d8bc7ba\",\n \"status\": + \"Succeeded\",\n \"startTime\": \"2022-10-26T07:33:04.1423202Z\",\n \"endTime\": + \"2022-10-26T07:35:48.1753038Z\"\n }" + headers: + cache-control: + - no-cache + 
content-length: + - '170' + content-type: + - application/json + date: + - Wed, 26 Oct 2022 07:36:04 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - aks update + Connection: + - keep-alive + ParameterSetName: + - --resource-group --name --nat-gateway-managed-outbound-ip-count --nat-gateway-idle-timeout + --outbound-type --aks-custom-header + User-Agent: + - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002\",\n + \ \"location\": \"westus2\",\n \"name\": \"cliakstest000002\",\n \"type\": + \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \"provisioningState\": + \"Succeeded\",\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"kubernetesVersion\": + \"1.23.12\",\n \"currentKubernetesVersion\": \"1.23.12\",\n \"dnsPrefix\": + \"cliakstest-clitesttj4rejxwd-79a739\",\n \"fqdn\": \"cliakstest-clitesttj4rejxwd-79a739-ab6f17da.hcp.westus2.azmk8s.io\",\n + \ \"azurePortalFQDN\": \"cliakstest-clitesttj4rejxwd-79a739-ab6f17da.portal.hcp.westus2.azmk8s.io\",\n + \ \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \"count\": + 1,\n \"vmSize\": \"Standard_DS2_v2\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": + \"Managed\",\n \"kubeletDiskType\": \"OS\",\n \"workloadRuntime\": + \"OCIContainer\",\n \"maxPods\": 110,\n \"type\": \"VirtualMachineScaleSets\",\n + \ \"enableAutoScaling\": false,\n \"provisioningState\": \"Succeeded\",\n + \ \"powerState\": {\n \"code\": \"Running\"\n },\n \"orchestratorVersion\": + \"1.23.12\",\n \"currentOrchestratorVersion\": \"1.23.12\",\n \"enableNodePublicIP\": + false,\n \"enableCustomCATrust\": false,\n \"mode\": \"System\",\n + \ \"enableEncryptionAtHost\": false,\n \"enableUltraSSD\": false,\n + \ \"osType\": \"Linux\",\n \"osSKU\": \"Ubuntu\",\n \"nodeImageVersion\": + \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\": {},\n + \ \"enableFIPS\": false\n }\n ],\n \"linuxProfile\": {\n \"adminUsername\": + \"azureuser\",\n \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDFZag72JUdUPjufTC+9bGJ6+03D4WKC5qcYu2xbnzAEjW0CGYdezDhyqogngxvXgEzfhg7yUDUMYKjfg70oTN7w5t4YFiouN50Zlwra73QZ71Y9uNWM7qd5Uolaw1drHPLEbTcFloZTQM8gwWainBxkd6xRJK7A/otKFpN+TdV4T1dpNH826z+HC/ZpTavYZI+hqc7twWIj7hDQtUs2k4NDWJzPwMTAuIj4aNrmBYv4z8rK/ZtNIK07UbFlWDHFz1eZrDjOEBoSaakTY5j3vZXwe/dj5SmQypxg3AY+Xxboe4YxU2Iv1r0mcr7ST0wDEgUsdCUXO/lvNO3vGGV8qzf + azcli_aks_live_test@example.com\\n\"\n }\n ]\n }\n },\n \"servicePrincipalProfile\": + {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n },\n \"nodeResourceGroup\": + \"MC_clitest000001_cliakstest000002_westus2\",\n \"enableRBAC\": true,\n + \ \"enablePodSecurityPolicy\": false,\n \"networkProfile\": {\n \"networkPlugin\": + \"kubenet\",\n 
\"loadBalancerSku\": \"Standard\",\n \"loadBalancerProfile\": + {\n \"managedOutboundIPs\": {\n \"count\": 1\n },\n \"effectiveOutboundIPs\": + [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/66497f43-1896-40bb-8814-3cd8d65f6abe\"\n + \ }\n ],\n \"backendPoolType\": \"nodeIPConfiguration\"\n },\n + \ \"natGatewayProfile\": {\n \"managedOutboundIPProfile\": {\n \"count\": + 2\n },\n \"effectiveOutboundIPs\": [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/901ca738-9f18-4ca0-9bbd-e3d3943b481a\"\n + \ },\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/0e91132d-cfa8-4b09-ba34-0bed69292769\"\n + \ }\n ],\n \"idleTimeoutInMinutes\": 30\n },\n \"podCidr\": + \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\",\n \"dnsServiceIP\": + \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\",\n \"outboundType\": + \"managedNATGateway\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\n ],\n + \ \"serviceCidrs\": [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\": + [\n \"IPv4\"\n ]\n },\n \"maxAgentPools\": 100,\n \"identityProfile\": + {\n \"kubeletidentity\": {\n \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/cliakstest000002-agentpool\",\n + \ \"clientId\":\"00000000-0000-0000-0000-000000000001\",\n \"objectId\":\"00000000-0000-0000-0000-000000000001\"\n + \ }\n },\n \"disableLocalAccounts\": false,\n \"securityProfile\": + {},\n \"storageProfile\": {\n \"diskCSIDriver\": {\n \"enabled\": + true,\n \"version\": \"v1\"\n },\n \"fileCSIDriver\": {\n \"enabled\": + true\n },\n \"snapshotController\": {\n \"enabled\": true\n }\n + \ },\n \"oidcIssuerProfile\": {\n \"enabled\": false\n },\n \"workloadAutoScalerProfile\": + {}\n },\n \"identity\": {\n \"type\": \"SystemAssigned\",\n \"principalId\":\"00000000-0000-0000-0000-000000000001\",\n + \ \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\": + {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" + headers: + cache-control: + - no-cache + content-length: + - '4723' + content-type: + - application/json + date: + - Wed, 26 Oct 2022 07:36:05 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - nginx + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +version: 1 diff --git a/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py b/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py index e49e03996d0..5896cc39017 100644 --- a/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py +++ b/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py @@ -119,6 +119,39 @@ def test_aks_create_and_update_with_managed_nat_gateway_outbound(self, resource_ 'networkProfile.natGatewayProfile.managedOutboundIpProfile.count', 2), ]) + @AllowLargeResponse() + @AKSCustomResourceGroupPreparer(random_name_length=17, name_prefix='clitest', location='eastus') + def test_aks_update_outbound_from_slb_to_natgateway(self, resource_group, resource_group_location): + 
aks_name = self.create_random_name('cliakstest', 16) + self.kwargs.update({ + 'resource_group': resource_group, + 'name': aks_name, + 'ssh_key_value': self.generate_ssh_keys() + }) + + create_cmd = 'aks create --resource-group={resource_group} --name={name} ' \ + '--vm-set-type VirtualMachineScaleSets -c 1 ' \ + '--outbound-type=loadbalancer ' \ + '--ssh-key-value={ssh_key_value}' + self.cmd(create_cmd, checks=[ + self.check('provisioningState', 'Succeeded'), + self.check('networkProfile.outboundType', 'loadBalancer'), + ]) + + update_cmd = 'aks update --resource-group={resource_group} --name={name} ' \ + '--nat-gateway-managed-outbound-ip-count 2 ' \ + '--nat-gateway-idle-timeout 30 ' \ + '--outbound-type managedNATGateway ' \ + '--aks-custom-header AKSHTTPCustomFeatures=Microsoft.ContainerService/AKS-OutBoundTypeMigrationPreview' + self.cmd(update_cmd, checks=[ + self.check('provisioningState', 'Succeeded'), + self.check('networkProfile.outboundType', 'managedNATGateway'), + self.check( + 'networkProfile.natGatewayProfile.idleTimeoutInMinutes', 30), + self.check( + 'networkProfile.natGatewayProfile.managedOutboundIpProfile.count', 2), + ]) + @AllowLargeResponse() @AKSCustomResourceGroupPreparer(random_name_length=17, name_prefix='clitest', location='westus2') def test_aks_create_and_update_with_managed_aad(self, resource_group, resource_group_location): diff --git a/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py b/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py index 0289f6bd25b..31ded0aef56 100644 --- a/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py +++ b/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py @@ -4639,6 +4639,35 @@ def test_update_load_balancer_profile(self): # fail on incomplete mc object (no network profile) with self.assertRaises(UnknownError): dec_9.update_load_balancer_profile(mc_9) + def test_update_outbound_type(self): + # default value in `aks_update` + dec_1 = AKSPreviewManagedClusterUpdateDecorator( + self.cmd, + self.client, + { + "outbound_type": "managedNATGateway", + "nat_gateway_managed_outbound_ip_count": 2, + }, + CUSTOM_MGMT_AKS_PREVIEW, + ) + mc_1 = self.models.ManagedCluster( + location="test_location", + network_profile=self.models.ContainerServiceNetworkProfile(), + ) + dec_1.context.attach_mc(mc_1) + # fail on passing the wrong mc object + with self.assertRaises(CLIInternalError): + dec_1.update_outbound_type_in_network_profile(None) + dec_mc_1 = dec_1.update_outbound_type_in_network_profile(mc_1) + + ground_truth_mc_1 = self.models.ManagedCluster( + location="test_location", + network_profile=self.models.ContainerServiceNetworkProfile(), + ) + ground_truth_mc_1.network_profile.outbound_type = "managedNATGateway" + self.assertEqual(dec_mc_1, ground_truth_mc_1) + + def test_update_api_server_access_profile(self): dec_1 = AKSPreviewManagedClusterUpdateDecorator( diff --git a/src/aks-preview/setup.py b/src/aks-preview/setup.py index 42914e2ffba..0134a88c419 100644 --- a/src/aks-preview/setup.py +++ b/src/aks-preview/setup.py @@ -9,7 +9,7 @@ from setuptools import setup, find_packages -VERSION = "0.5.111" +VERSION = "0.5.112" CLASSIFIERS = [ "Development Status :: 4 - Beta", From 4d9ad315dbf80f0b4016347b9267491d2ca70261 Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Wed, 26 Oct 2022 09:21:25 +0000 Subject: [PATCH 35/85] [Release] Update index.json for extension [ aks-preview ] Triggered by Azure CLI Extensions Release 
Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=11519&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/c91f2dfed6c48ae0be9acb4a8e8ed350308c1883 --- src/index.json | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/src/index.json b/src/index.json index a2f239a1ebd..50994c1dd6d 100644 --- a/src/index.json +++ b/src/index.json @@ -6954,6 +6954,49 @@ "version": "0.5.111" }, "sha256Digest": "7da4a7815d1f192f1e5c1172e732956ddfaa9d218856188b9ce5906040b21fbe" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/aks_preview-0.5.112-py2.py3-none-any.whl", + "filename": "aks_preview-0.5.112-py2.py3-none-any.whl", + "metadata": { + "azext.isPreview": true, + "azext.minCliCoreVersion": "2.38.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/aks-preview" + } + } + }, + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "aks-preview", + "summary": "Provides a preview for upcoming AKS features", + "version": "0.5.112" + }, + "sha256Digest": "b33d01bc5ba13d5932951e577ede76d97aa6d3fad88dcabe1b786b3f12476eff" } ], "alertsmanagement": [ From f4d6e23bb51b18b6c859ba18d96552a0c4bd71ec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Damjan=20Ko=C5=A1ir?= Date: Thu, 27 Oct 2022 15:33:34 +1300 Subject: [PATCH 36/85] QBS extension (#5402) --- src/index.json | 47 +++++++++++++++++++++++++++++++++++++++++++ src/service_name.json | 5 +++++ 2 files changed, 52 insertions(+) diff --git a/src/index.json b/src/index.json index 50994c1dd6d..2f639dbea4f 100644 --- a/src/index.json +++ b/src/index.json @@ -32598,6 +32598,53 @@ "sha256Digest": "706cc2550fbd07b8b676345c2f26c5ba66550905bc8ec224c6c4e5637c497266" } ], + "qbs": [ + { + "downloadUrl": "https://qbsazcliextension.blob.core.windows.net/qbs/qbs-0.1.0-py3-none-any.whl", + "filename": "qbs-0.1.0-py3-none-any.whl", + "metadata": { + "azext.isPreview": true, + "azext.minCliCoreVersion": "2.39.0", + "classifiers": [ + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "qbs-team@consensys.net", + "name": "ConsenSys", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://consensys.net/quorum/qbs/" + } + } + }, + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + 
"metadata_version": "2.0", + "name": "qbs", + "summary": "Support for Quorum Blockchain Service.", + "version": "0.1.0" + }, + "sha256Digest": "f8ef34fb62837e16bafe4ce4e141f1d35e7a494516614d8a54a050b4527da3f6" + } + ], "quantum": [ { "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/quantum-0.1.0-py3-none-any.whl", diff --git a/src/service_name.json b/src/service_name.json index 0e80eb1edfc..137294c522d 100644 --- a/src/service_name.json +++ b/src/service_name.json @@ -629,6 +629,11 @@ "AzureServiceName": "Azure Orbital", "URL": "https://docs.microsoft.com/en-us/azure/orbital/" }, + { + "Command": "az qbs", + "AzureServiceName": "ConsenSys Quorum Blockchain Service", + "URL": "https://consensys.net/quorum/qbs/" + }, { "Command": "az nginx", "AzureServiceName": "Nginx for Azure", From 5fd6621206a8974f663cb53efb17f4cbd83f4a0a Mon Sep 17 00:00:00 2001 From: Mason Chen Date: Thu, 27 Oct 2022 16:28:11 +0800 Subject: [PATCH 37/85] throw exception when operation status is Failed when create service instance. (#5471) --- src/spring/HISTORY.md | 4 ++++ src/spring/azext_spring/spring_instance.py | 5 ++--- src/spring/setup.py | 2 +- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/src/spring/HISTORY.md b/src/spring/HISTORY.md index 17182d619e0..c89cec36b59 100644 --- a/src/spring/HISTORY.md +++ b/src/spring/HISTORY.md @@ -1,5 +1,9 @@ Release History =============== +1.1.13 +--- +* Stop execution and throw exception when operation status is `Failed`. + 1.1.12 --- * Add warning logs when editing builders and buildpack bindings. diff --git a/src/spring/azext_spring/spring_instance.py b/src/spring/azext_spring/spring_instance.py index a47bcdbdf96..447ff226474 100644 --- a/src/spring/azext_spring/spring_instance.py +++ b/src/spring/azext_spring/spring_instance.py @@ -15,8 +15,8 @@ create_gateway, create_api_portal) - from ._validators import (_parse_sku_name, validate_instance_not_existed) +from azure.cli.core.commands import LongRunningOperation from knack.log import get_logger logger = get_logger(__name__) @@ -95,8 +95,7 @@ def create_service(self, poller = self.client.services.begin_create_or_update( self.resource_group, self.name, resource) logger.warning(" - Creating Service ..") - wait_till_end(self.cmd, poller) - return poller + return LongRunningOperation(self.cmd.cli_ctx)(poller) class EnterpriseSpringCloud(DefaultSpringCloud): diff --git a/src/spring/setup.py b/src/spring/setup.py index c657fd65fb8..111c3806299 100644 --- a/src/spring/setup.py +++ b/src/spring/setup.py @@ -16,7 +16,7 @@ # TODO: Confirm this is the right version number you want and it matches your # HISTORY.rst entry. 
-VERSION = '1.1.12' +VERSION = '1.1.13' # The full list of classifiers is available at # https://pypi.python.org/pypi?%3Aaction=list_classifiers From cc1c120b297819819eb4eff3661e9199cd5781ab Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Thu, 27 Oct 2022 08:35:37 +0000 Subject: [PATCH 38/85] [Release] Update index.json for extension [ spring ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=11806&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/5fd6621206a8974f663cb53efb17f4cbd83f4a0a --- src/index.json | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/src/index.json b/src/index.json index 2f639dbea4f..a0e94baf8c6 100644 --- a/src/index.json +++ b/src/index.json @@ -36002,6 +36002,49 @@ "version": "1.1.12" }, "sha256Digest": "ae1c41d2d09150c600e51d6de3a085a04d079b6803bdf4fcba6b041460f46a8d" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/spring-1.1.13-py3-none-any.whl", + "filename": "spring-1.1.13-py3-none-any.whl", + "metadata": { + "azext.isPreview": false, + "azext.minCliCoreVersion": "2.38.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/spring" + } + } + }, + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "spring", + "summary": "Microsoft Azure Command-Line Tools spring Extension", + "version": "1.1.13" + }, + "sha256Digest": "c3d968114b8e68b522962dccba958bc43b0235af55c417ad5cf6b6a25f1a9444" } ], "spring-cloud": [ From 789659a92c40d7690f58790ba6de9aebdc67330e Mon Sep 17 00:00:00 2001 From: Ryan K Date: Thu, 27 Oct 2022 11:21:59 -0700 Subject: [PATCH 39/85] [IoT] IoT Extension Release v0.18.0 (#5494) * Adds IoT Extension v0.18.0 * Linter exclusions update --- linter_exclusions.yml | 2 +- src/index.json | 75 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 76 insertions(+), 1 deletion(-) diff --git a/linter_exclusions.yml b/linter_exclusions.yml index 491f0359ff1..ac884f0fc7b 100644 --- a/linter_exclusions.yml +++ b/linter_exclusions.yml @@ -1212,7 +1212,7 @@ iot central user update: central_dns_suffix: rule_exclusions: - no_parameter_defaults_for_update_commands -iot device-update update: +iot du update: rule_exclusions: - require_wait_command_if_no_wait iot dps enrollment update: diff --git a/src/index.json b/src/index.json index a0e94baf8c6..4b04e0e8b83 100644 --- a/src/index.json +++ b/src/index.json @@ -13240,6 +13240,81 @@ "version": "0.17.3" }, "sha256Digest": "bf343936bd2f515c46d8cff9b59cc340eb88c0ee2f4be93fe9d2a9d4ba863346" + }, + { + "downloadUrl": "https://github.com/Azure/azure-iot-cli-extension/releases/download/v0.18.0/azure_iot-0.18.0-py3-none-any.whl", + "filename": "azure_iot-0.18.0-py3-none-any.whl", + "metadata": { + 
"azext.minCliCoreVersion": "2.32.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "iotupx@microsoft.com", + "name": "Microsoft", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/azure/azure-iot-cli-extension" + } + } + }, + "extras": [ + "uamqp" + ], + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "azure-iot", + "requires_python": ">=3.7", + "run_requires": [ + { + "requires": [ + "azure-core (<2.0.0,>=1.24.0)", + "azure-identity (<2.0.0,>=1.6.1)", + "azure-iot-device (~=2.11)", + "azure-mgmt-core (<2.0.0,>=1.3.0)", + "jsonschema (~=3.2.0)", + "msrest (>=0.6.21)", + "msrestazure (<2.0.0,>=0.6.3)", + "packaging", + "tqdm (~=4.62)" + ] + }, + { + "extra": "uamqp", + "requires": [ + "uamqp (~=1.2)" + ] + }, + { + "environment": "python_version < \"3.8\"", + "requires": [ + "importlib-metadata" + ] + } + ], + "summary": "The Azure IoT extension for Azure CLI.", + "version": "0.18.0" + }, + "sha256Digest": "b57987fe6b4627d4184db54715e71f5a28d7b16fb346352a37c0bf1ea5f4640e" } ], "azurestackhci": [ From 93de2fba68b40ff3d50799b8d3a9c34aaedfd195 Mon Sep 17 00:00:00 2001 From: Saisankar Gochhayat Date: Thu, 27 Oct 2022 19:09:05 -0700 Subject: [PATCH 40/85] Added ArcApliance October Release (#5495) Co-authored-by: Sai Sankar Gochhayat --- src/index.json | 52 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/src/index.json b/src/index.json index 4b04e0e8b83..c66f32e99de 100644 --- a/src/index.json +++ b/src/index.json @@ -8899,6 +8899,58 @@ "version": "0.2.27" }, "sha256Digest": "ac083e353e6b9a308df6723b9e759fbf8f4cec4694a4779d722ddd42c0132ada" + }, + { + "downloadUrl": "https://arcplatformcliextprod.blob.core.windows.net/arcappliance/arcappliance-0.2.28-py2.py3-none-any.whl", + "filename": "arcappliance-0.2.28-py2.py3-none-any.whl", + "metadata": { + "azext.isPreview": true, + "azext.minCliCoreVersion": "2.41.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "appliance@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://msazure.visualstudio.com/AzureArcPlatform/_git/arcappliance-cli-extensions" + } + } + }, + "extras": [], + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "arcappliance", + "run_requires": [ + { + "requires": [ + "jsonschema (==3.2.0)", + "kubernetes (==11.0.0)" + ] + } + ], + "summary": "Microsoft Azure 
Command-Line Tools Arcappliance Extension", + "version": "0.2.28" + }, + "sha256Digest": "d3958a72a58c21092b3a01f04cf8fbb418a34db6c9310e16f41af3447052ed80" } ], "arcdata": [ From 3442a090a06b91a3ac117aa8eb77e36afb974f68 Mon Sep 17 00:00:00 2001 From: Hannah K <7254163+mindlessroman@users.noreply.github.com> Date: Fri, 28 Oct 2022 01:12:17 -0400 Subject: [PATCH 41/85] [datafactory] Adding support for ADF data flows with`az datafactory data-flow` (#5407) * Adding in data flows support; updating the vendored_sdk; commiting latests tests. * Update extension version, the HISTORY.rst file, missing checks from a test scenario, readding new integration test recordings. Co-authored-by: Hannah Kennedy --- src/datafactory/HISTORY.rst | 4 + src/datafactory/azext_datafactory/__init__.py | 27 +- src/datafactory/azext_datafactory/_help.py | 3 +- src/datafactory/azext_datafactory/action.py | 3 +- src/datafactory/azext_datafactory/custom.py | 3 +- .../azext_datafactory/generated/__init__.py | 2 +- .../generated/_client_factory.py | 8 +- .../azext_datafactory/generated/_help.py | 358 +- .../azext_datafactory/generated/_params.py | 1997 +- .../azext_datafactory/generated/action.py | 126 +- .../azext_datafactory/generated/commands.py | 307 +- .../azext_datafactory/generated/custom.py | 1783 +- .../azext_datafactory/manual/__init__.py | 2 +- .../manual/_client_factory.py | 20 + .../azext_datafactory/manual/_help.py | 110 +- .../azext_datafactory/manual/_params.py | 168 +- .../azext_datafactory/manual/commands.py | 33 + .../azext_datafactory/manual/custom.py | 231 +- .../manual/tests/__init__.py | 2 +- .../manual/tests/latest/__init__.py | 2 +- .../tests/latest/test_datafactory_scenario.py | 396 +- .../azext_datafactory/manual/version.py | 2 +- .../azext_datafactory/tests/__init__.py | 59 +- .../tests/latest/__init__.py | 2 +- .../tests/latest/example_steps.py | 904 +- .../latest/recordings/test_datafactory.yaml | 5130 -- .../recordings/test_datafactory_main.yaml | 8647 ++- ...st_datafactory_managedPrivateEndpoint.yaml | 182 +- .../tests/latest/test_datafactory_scenario.py | 104 +- .../test_datafactory_scenario_coverage.md | 53 +- .../vendored_sdks/datafactory/__init__.py | 3 +- .../datafactory/_configuration.py | 42 +- .../_data_factory_management_client.py | 87 +- .../vendored_sdks/datafactory/_patch.py | 2 +- .../datafactory/_serialization.py | 1970 + .../vendored_sdks/datafactory/_vendor.py | 6 +- .../vendored_sdks/datafactory/_version.py | 2 +- .../vendored_sdks/datafactory/aio/__init__.py | 3 +- .../datafactory/aio/_configuration.py | 44 +- .../aio/_data_factory_management_client.py | 87 +- .../vendored_sdks/datafactory/aio/_patch.py | 2 +- .../datafactory/aio/operations/__init__.py | 45 +- .../operations/_activity_runs_operations.py | 130 +- .../_data_flow_debug_session_operations.py | 629 +- .../aio/operations/_data_flows_operations.py | 283 +- .../aio/operations/_datasets_operations.py | 283 +- .../_exposure_control_operations.py | 323 +- .../aio/operations/_factories_operations.py | 730 +- .../_global_parameters_operations.py | 277 +- .../_integration_runtime_nodes_operations.py | 277 +- ...tion_runtime_object_metadata_operations.py | 221 +- .../_integration_runtimes_operations.py | 1215 +- .../operations/_linked_services_operations.py | 285 +- .../_managed_private_endpoints_operations.py | 298 +- .../_managed_virtual_networks_operations.py | 246 +- .../datafactory/aio/operations/_operations.py | 66 +- .../datafactory/aio/operations/_patch.py | 1 + .../operations/_pipeline_runs_operations.py | 206 +- 
.../aio/operations/_pipelines_operations.py | 450 +- ...rivate_end_point_connections_operations.py | 77 +- ..._private_endpoint_connection_operations.py | 224 +- .../_private_link_resources_operations.py | 51 +- .../operations/_trigger_runs_operations.py | 212 +- .../aio/operations/_triggers_operations.py | 771 +- .../datafactory/models/__init__.py | 1880 +- .../_data_factory_management_client_enums.py | 435 +- .../datafactory/models/_models.py | 37651 ------------ .../datafactory/models/_models_py3.py | 47787 +++++++++------- .../datafactory/models/_patch.py | 1 + .../datafactory/operations/__init__.py | 45 +- .../operations/_activity_runs_operations.py | 189 +- .../_data_flow_debug_session_operations.py | 881 +- .../operations/_data_flows_operations.py | 480 +- .../operations/_datasets_operations.py | 479 +- .../_exposure_control_operations.py | 463 +- .../operations/_factories_operations.py | 1104 +- .../_global_parameters_operations.py | 480 +- .../_integration_runtime_nodes_operations.py | 487 +- ...tion_runtime_object_metadata_operations.py | 338 +- .../_integration_runtimes_operations.py | 2105 +- .../operations/_linked_services_operations.py | 482 +- .../_managed_private_endpoints_operations.py | 528 +- .../_managed_virtual_networks_operations.py | 393 +- .../datafactory/operations/_operations.py | 91 +- .../datafactory/operations/_patch.py | 1 + .../operations/_pipeline_runs_operations.py | 340 +- .../operations/_pipelines_operations.py | 713 +- ...rivate_end_point_connections_operations.py | 124 +- ..._private_endpoint_connection_operations.py | 360 +- .../_private_link_resources_operations.py | 100 +- .../operations/_trigger_runs_operations.py | 370 +- .../operations/_triggers_operations.py | 1274 +- src/datafactory/report.md | 379 +- src/datafactory/setup.py | 40 +- 94 files changed, 52687 insertions(+), 77529 deletions(-) create mode 100644 src/datafactory/azext_datafactory/manual/_client_factory.py create mode 100644 src/datafactory/azext_datafactory/manual/commands.py delete mode 100644 src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory.yaml create mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/_serialization.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py diff --git a/src/datafactory/HISTORY.rst b/src/datafactory/HISTORY.rst index 44113c83af3..076aa12f91e 100644 --- a/src/datafactory/HISTORY.rst +++ b/src/datafactory/HISTORY.rst @@ -2,6 +2,10 @@ Release History =============== +0.7.0 ++++++ +* az datafactory data-flow: Support create/update/list/show/delete data flows. + 0.6.0 +++++ * Bug fix for `az datafactory pipeline list`. 
diff --git a/src/datafactory/azext_datafactory/__init__.py b/src/datafactory/azext_datafactory/__init__.py index 68dc1a4c888..50e6d93d070 100644 --- a/src/datafactory/azext_datafactory/__init__.py +++ b/src/datafactory/azext_datafactory/__init__.py @@ -9,29 +9,34 @@ # -------------------------------------------------------------------------- # pylint: disable=unused-import -import azext_datafactory._help from azure.cli.core import AzCommandsLoader +import azext_datafactory._help class DataFactoryManagementClientCommandsLoader(AzCommandsLoader): - def __init__(self, cli_ctx=None): from azure.cli.core.commands import CliCommandType from azext_datafactory.generated._client_factory import cf_datafactory_cl + datafactory_custom = CliCommandType( - operations_tmpl='azext_datafactory.custom#{}', - client_factory=cf_datafactory_cl) - parent = super(DataFactoryManagementClientCommandsLoader, self) + operations_tmpl="azext_datafactory.custom#{}", + client_factory=cf_datafactory_cl, + ) + parent = super() parent.__init__(cli_ctx=cli_ctx, custom_command_type=datafactory_custom) def load_command_table(self, args): from azext_datafactory.generated.commands import load_command_table + load_command_table(self, args) try: - from azext_datafactory.manual.commands import load_command_table as load_command_table_manual + from azext_datafactory.manual.commands import ( + load_command_table as load_command_table_manual, + ) + load_command_table_manual(self, args) except ImportError as e: - if e.name.endswith('manual.commands'): + if e.name.endswith("manual.commands"): pass else: raise e @@ -39,12 +44,16 @@ def load_command_table(self, args): def load_arguments(self, command): from azext_datafactory.generated._params import load_arguments + load_arguments(self, command) try: - from azext_datafactory.manual._params import load_arguments as load_arguments_manual + from azext_datafactory.manual._params import ( + load_arguments as load_arguments_manual, + ) + load_arguments_manual(self, command) except ImportError as e: - if e.name.endswith('manual._params'): + if e.name.endswith("manual._params"): pass else: raise e diff --git a/src/datafactory/azext_datafactory/_help.py b/src/datafactory/azext_datafactory/_help.py index 9b93f87a6e9..b683f6136df 100644 --- a/src/datafactory/azext_datafactory/_help.py +++ b/src/datafactory/azext_datafactory/_help.py @@ -11,10 +11,11 @@ # pylint: disable=unused-wildcard-import # pylint: disable=unused-import from .generated._help import helps # pylint: disable=reimported + try: from .manual._help import helps # pylint: disable=reimported except ImportError as e: - if e.name.endswith('manual._help'): + if e.name.endswith("manual._help"): pass else: raise e diff --git a/src/datafactory/azext_datafactory/action.py b/src/datafactory/azext_datafactory/action.py index 9b3d0a8a78c..d6f2d095789 100644 --- a/src/datafactory/azext_datafactory/action.py +++ b/src/datafactory/azext_datafactory/action.py @@ -11,10 +11,11 @@ # pylint: disable=unused-wildcard-import from .generated.action import * # noqa: F403 + try: from .manual.action import * # noqa: F403 except ImportError as e: - if e.name.endswith('manual.action'): + if e.name.endswith("manual.action"): pass else: raise e diff --git a/src/datafactory/azext_datafactory/custom.py b/src/datafactory/azext_datafactory/custom.py index 885447229d6..264ba5f368b 100644 --- a/src/datafactory/azext_datafactory/custom.py +++ b/src/datafactory/azext_datafactory/custom.py @@ -11,10 +11,11 @@ # pylint: disable=unused-wildcard-import from .generated.custom 
import * # noqa: F403 + try: from .manual.custom import * # noqa: F403 except ImportError as e: - if e.name.endswith('manual.custom'): + if e.name.endswith("manual.custom"): pass else: raise e diff --git a/src/datafactory/azext_datafactory/generated/__init__.py b/src/datafactory/azext_datafactory/generated/__init__.py index c9cfdc73e77..93202b7013d 100644 --- a/src/datafactory/azext_datafactory/generated/__init__.py +++ b/src/datafactory/azext_datafactory/generated/__init__.py @@ -9,4 +9,4 @@ # regenerated. # -------------------------------------------------------------------------- -__path__ = __import__('pkgutil').extend_path(__path__, __name__) +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/src/datafactory/azext_datafactory/generated/_client_factory.py b/src/datafactory/azext_datafactory/generated/_client_factory.py index 7f3f4f6fc12..710f7dc9162 100644 --- a/src/datafactory/azext_datafactory/generated/_client_factory.py +++ b/src/datafactory/azext_datafactory/generated/_client_factory.py @@ -12,8 +12,8 @@ def cf_datafactory_cl(cli_ctx, *_): from azure.cli.core.commands.client_factory import get_mgmt_service_client from azext_datafactory.vendored_sdks.datafactory import DataFactoryManagementClient - return get_mgmt_service_client(cli_ctx, - DataFactoryManagementClient) + + return get_mgmt_service_client(cli_ctx, DataFactoryManagementClient) def cf_factory(cli_ctx, *_): @@ -36,6 +36,10 @@ def cf_dataset(cli_ctx, *_): return cf_datafactory_cl(cli_ctx).datasets +def cf_data_flow(cli_ctx, *_): + return cf_datafactory_cl(cli_ctx).data_flows + + def cf_pipeline(cli_ctx, *_): return cf_datafactory_cl(cli_ctx).pipelines diff --git a/src/datafactory/azext_datafactory/generated/_help.py b/src/datafactory/azext_datafactory/generated/_help.py index b48ae278922..5bc1bfc9c75 100644 --- a/src/datafactory/azext_datafactory/generated/_help.py +++ b/src/datafactory/azext_datafactory/generated/_help.py @@ -12,12 +12,16 @@ from knack.help_files import helps -helps['datafactory'] = ''' +helps[ + "datafactory" +] = """ type: group short-summary: Manage Data Factory -''' +""" -helps['datafactory list'] = """ +helps[ + "datafactory list" +] = """ type: command short-summary: "Lists factories. And Lists factories under the specified subscription." examples: @@ -29,7 +33,9 @@ az datafactory list """ -helps['datafactory show'] = """ +helps[ + "datafactory show" +] = """ type: command short-summary: "Gets a factory." examples: @@ -38,7 +44,9 @@ az datafactory show --name "exampleFactoryName" --resource-group "exampleResourceGroup" """ -helps['datafactory create'] = """ +helps[ + "datafactory create" +] = """ type: command short-summary: "Create a factory." parameters: @@ -76,7 +84,9 @@ "exampleResourceGroup" """ -helps['datafactory update'] = """ +helps[ + "datafactory update" +] = """ type: command short-summary: "Updates a factory." examples: @@ -86,7 +96,9 @@ "exampleResourceGroup" """ -helps['datafactory delete'] = """ +helps[ + "datafactory delete" +] = """ type: command short-summary: "Deletes a factory." examples: @@ -95,7 +107,9 @@ az datafactory delete --name "exampleFactoryName" --resource-group "exampleResourceGroup" """ -helps['datafactory configure-factory-repo'] = """ +helps[ + "datafactory configure-factory-repo" +] = """ type: command short-summary: "Updates a factory's repo information." 
parameters: @@ -135,7 +149,9 @@ repository-name="repo" root-folder="/" tenant-id="" --location "East US" """ -helps['datafactory get-data-plane-access'] = """ +helps[ + "datafactory get-data-plane-access" +] = """ type: command short-summary: "Get Data Plane access." examples: @@ -146,7 +162,9 @@ "2018-11-10T02:46:20.2659347Z" --resource-group "exampleResourceGroup" """ -helps['datafactory get-git-hub-access-token'] = """ +helps[ + "datafactory get-git-hub-access-token" +] = """ type: command short-summary: "Get GitHub Access Token." examples: @@ -156,12 +174,16 @@ --git-hub-access-token-base-url "some" --git-hub-client-id "some" --resource-group "exampleResourceGroup" """ -helps['datafactory integration-runtime'] = """ +helps[ + "datafactory integration-runtime" +] = """ type: group short-summary: Manage integration runtime with datafactory """ -helps['datafactory integration-runtime list'] = """ +helps[ + "datafactory integration-runtime list" +] = """ type: command short-summary: "Lists integration runtimes." examples: @@ -171,7 +193,9 @@ "exampleResourceGroup" """ -helps['datafactory integration-runtime show'] = """ +helps[ + "datafactory integration-runtime show" +] = """ type: command short-summary: "Gets an integration runtime." examples: @@ -181,12 +205,16 @@ "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ -helps['datafactory integration-runtime linked-integration-runtime'] = """ +helps[ + "datafactory integration-runtime linked-integration-runtime" +] = """ type: group short-summary: Manage integration runtime with datafactory sub group linked-integration-runtime """ -helps['datafactory integration-runtime linked-integration-runtime create'] = """ +helps[ + "datafactory integration-runtime linked-integration-runtime create" +] = """ type: command short-summary: "Create a linked integration runtime entry in a shared integration runtime." examples: @@ -198,22 +226,30 @@ --integration-runtime-name "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ -helps['datafactory integration-runtime managed'] = """ +helps[ + "datafactory integration-runtime managed" +] = """ type: group short-summary: Manage integration runtime with datafactory sub group managed """ -helps['datafactory integration-runtime managed create'] = """ +helps[ + "datafactory integration-runtime managed create" +] = """ type: command short-summary: "Create an integration runtime." """ -helps['datafactory integration-runtime self-hosted'] = """ +helps[ + "datafactory integration-runtime self-hosted" +] = """ type: group short-summary: Manage integration runtime with datafactory sub group self-hosted """ -helps['datafactory integration-runtime self-hosted create'] = """ +helps[ + "datafactory integration-runtime self-hosted create" +] = """ type: command short-summary: "Create an integration runtime." examples: @@ -223,7 +259,9 @@ "A selfhosted integration runtime" --name "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ -helps['datafactory integration-runtime update'] = """ +helps[ + "datafactory integration-runtime update" +] = """ type: command short-summary: "Updates an integration runtime." examples: @@ -234,7 +272,9 @@ "\\"PT3H\\"" """ -helps['datafactory integration-runtime delete'] = """ +helps[ + "datafactory integration-runtime delete" +] = """ type: command short-summary: "Deletes an integration runtime." 
examples: @@ -244,7 +284,9 @@ "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ -helps['datafactory integration-runtime get-connection-info'] = """ +helps[ + "datafactory integration-runtime get-connection-info" +] = """ type: command short-summary: "Gets the on-premises integration runtime connection information for encrypting the on-premises \ data source credentials." @@ -255,7 +297,9 @@ "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ -helps['datafactory integration-runtime get-monitoring-data'] = """ +helps[ + "datafactory integration-runtime get-monitoring-data" +] = """ type: command short-summary: "Get the integration runtime monitoring data, which includes the monitor data for all the nodes \ under this integration runtime." @@ -266,7 +310,9 @@ "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ -helps['datafactory integration-runtime get-status'] = """ +helps[ + "datafactory integration-runtime get-status" +] = """ type: command short-summary: "Gets detailed status information for an integration runtime." examples: @@ -276,7 +322,9 @@ "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ -helps['datafactory integration-runtime list-auth-key'] = """ +helps[ + "datafactory integration-runtime list-auth-key" +] = """ type: command short-summary: "Retrieves the authentication keys for an integration runtime." examples: @@ -286,7 +334,9 @@ "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ -helps['datafactory integration-runtime regenerate-auth-key'] = """ +helps[ + "datafactory integration-runtime regenerate-auth-key" +] = """ type: command short-summary: "Regenerates the authentication key for an integration runtime." examples: @@ -296,7 +346,9 @@ "exampleIntegrationRuntime" --key-name "authKey2" --resource-group "exampleResourceGroup" """ -helps['datafactory integration-runtime remove-link'] = """ +helps[ + "datafactory integration-runtime remove-link" +] = """ type: command short-summary: "Remove all linked integration runtimes under specific data factory in a self-hosted integration \ runtime." @@ -307,7 +359,9 @@ "exampleIntegrationRuntime" --linked-factory-name "exampleFactoryName-linked" --resource-group "exampleResourceGroup" """ -helps['datafactory integration-runtime start'] = """ +helps[ + "datafactory integration-runtime start" +] = """ type: command short-summary: "Starts a ManagedReserved type integration runtime." examples: @@ -317,7 +371,9 @@ "exampleManagedIntegrationRuntime" --resource-group "exampleResourceGroup" """ -helps['datafactory integration-runtime stop'] = """ +helps[ + "datafactory integration-runtime stop" +] = """ type: command short-summary: "Stops a ManagedReserved type integration runtime." examples: @@ -327,7 +383,9 @@ "exampleManagedIntegrationRuntime" --resource-group "exampleResourceGroup" """ -helps['datafactory integration-runtime sync-credentials'] = """ +helps[ + "datafactory integration-runtime sync-credentials" +] = """ type: command short-summary: "Force the integration runtime to synchronize credentials across integration runtime nodes, and \ this will override the credentials across all worker nodes with those available on the dispatcher node. 
If you already \ @@ -340,7 +398,9 @@ "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ -helps['datafactory integration-runtime upgrade'] = """ +helps[ + "datafactory integration-runtime upgrade" +] = """ type: command short-summary: "Upgrade self-hosted integration runtime to latest version if availability." examples: @@ -350,7 +410,9 @@ "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ -helps['datafactory integration-runtime wait'] = """ +helps[ + "datafactory integration-runtime wait" +] = """ type: command short-summary: Place the CLI in a waiting state until a condition of the datafactory integration-runtime is met. examples: @@ -361,12 +423,16 @@ "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" --created """ -helps['datafactory integration-runtime-node'] = """ +helps[ + "datafactory integration-runtime-node" +] = """ type: group short-summary: Manage integration runtime node with datafactory """ -helps['datafactory integration-runtime-node show'] = """ +helps[ + "datafactory integration-runtime-node show" +] = """ type: command short-summary: "Gets a self-hosted integration runtime node." examples: @@ -376,7 +442,9 @@ --integration-runtime-name "exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" """ -helps['datafactory integration-runtime-node update'] = """ +helps[ + "datafactory integration-runtime-node update" +] = """ type: command short-summary: "Updates a self-hosted integration runtime node." examples: @@ -387,7 +455,9 @@ --concurrent-jobs-limit 2 """ -helps['datafactory integration-runtime-node delete'] = """ +helps[ + "datafactory integration-runtime-node delete" +] = """ type: command short-summary: "Deletes a self-hosted integration runtime node." examples: @@ -397,7 +467,9 @@ --integration-runtime-name "exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" """ -helps['datafactory integration-runtime-node get-ip-address'] = """ +helps[ + "datafactory integration-runtime-node get-ip-address" +] = """ type: command short-summary: "Get the IP address of self-hosted integration runtime node." examples: @@ -407,12 +479,16 @@ --integration-runtime-name "exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" """ -helps['datafactory linked-service'] = """ +helps[ + "datafactory linked-service" +] = """ type: group short-summary: Manage linked service with datafactory """ -helps['datafactory linked-service list'] = """ +helps[ + "datafactory linked-service list" +] = """ type: command short-summary: "Lists linked services." examples: @@ -422,7 +498,9 @@ "exampleResourceGroup" """ -helps['datafactory linked-service show'] = """ +helps[ + "datafactory linked-service show" +] = """ type: command short-summary: "Gets a linked service." examples: @@ -432,7 +510,9 @@ --resource-group "exampleResourceGroup" """ -helps['datafactory linked-service create'] = """ +helps[ + "datafactory linked-service create" +] = """ type: command short-summary: "Create a linked service." examples: @@ -444,12 +524,16 @@ "exampleLinkedService" --resource-group "exampleResourceGroup" """ -helps['datafactory linked-service update'] = """ +helps[ + "datafactory linked-service update" +] = """ type: command short-summary: "Update a linked service." """ -helps['datafactory linked-service delete'] = """ +helps[ + "datafactory linked-service delete" +] = """ type: command short-summary: "Deletes a linked service." 
examples: @@ -459,12 +543,16 @@ --resource-group "exampleResourceGroup" """ -helps['datafactory dataset'] = """ +helps[ + "datafactory dataset" +] = """ type: group short-summary: Manage dataset with datafactory """ -helps['datafactory dataset list'] = """ +helps[ + "datafactory dataset list" +] = """ type: command short-summary: "Lists datasets." examples: @@ -473,7 +561,9 @@ az datafactory dataset list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" """ -helps['datafactory dataset show'] = """ +helps[ + "datafactory dataset show" +] = """ type: command short-summary: "Gets a dataset." examples: @@ -483,7 +573,9 @@ --resource-group "exampleResourceGroup" """ -helps['datafactory dataset create'] = """ +helps[ + "datafactory dataset create" +] = """ type: command short-summary: "Create a dataset." examples: @@ -497,7 +589,9 @@ "exampleFactoryName" --resource-group "exampleResourceGroup" """ -helps['datafactory dataset update'] = """ +helps[ + "datafactory dataset update" +] = """ type: command short-summary: "Update a dataset." parameters: @@ -509,7 +603,9 @@ name: The name of the folder that this Dataset is in. """ -helps['datafactory dataset delete'] = """ +helps[ + "datafactory dataset delete" +] = """ type: command short-summary: "Deletes a dataset." examples: @@ -519,12 +615,16 @@ --resource-group "exampleResourceGroup" """ -helps['datafactory pipeline'] = """ +helps[ + "datafactory pipeline" +] = """ type: group short-summary: Manage pipeline with datafactory """ -helps['datafactory pipeline list'] = """ +helps[ + "datafactory pipeline list" +] = """ type: command short-summary: "Lists pipelines." examples: @@ -534,7 +634,9 @@ "exampleResourceGroup" """ -helps['datafactory pipeline show'] = """ +helps[ + "datafactory pipeline show" +] = """ type: command short-summary: "Gets a pipeline." examples: @@ -544,7 +646,9 @@ --resource-group "exampleResourceGroup" """ -helps['datafactory pipeline create'] = """ +helps[ + "datafactory pipeline create" +] = """ type: command short-summary: "Create a pipeline." examples: @@ -564,7 +668,9 @@ \\"}" --name "examplePipeline" --resource-group "exampleResourceGroup" """ -helps['datafactory pipeline update'] = """ +helps[ + "datafactory pipeline update" +] = """ type: command short-summary: "Update a pipeline." examples: @@ -582,7 +688,9 @@ "0.00:10:00" --name "examplePipeline" --resource-group "exampleResourceGroup" """ -helps['datafactory pipeline delete'] = """ +helps[ + "datafactory pipeline delete" +] = """ type: command short-summary: "Deletes a pipeline." examples: @@ -592,7 +700,9 @@ --resource-group "exampleResourceGroup" """ -helps['datafactory pipeline create-run'] = """ +helps[ + "datafactory pipeline create-run" +] = """ type: command short-summary: "Creates a run of a pipeline." examples: @@ -603,12 +713,16 @@ "exampleResourceGroup" """ -helps['datafactory pipeline-run'] = """ +helps[ + "datafactory pipeline-run" +] = """ type: group short-summary: Manage pipeline run with datafactory """ -helps['datafactory pipeline-run show'] = """ +helps[ + "datafactory pipeline-run show" +] = """ type: command short-summary: "Get a pipeline run by its run ID." examples: @@ -618,7 +732,9 @@ "exampleResourceGroup" --run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" """ -helps['datafactory pipeline-run cancel'] = """ +helps[ + "datafactory pipeline-run cancel" +] = """ type: command short-summary: "Cancel a pipeline run by its run ID." 
examples: @@ -628,7 +744,9 @@ "exampleResourceGroup" --run-id "16ac5348-ff82-4f95-a80d-638c1d47b721" """ -helps['datafactory pipeline-run query-by-factory'] = """ +helps[ + "datafactory pipeline-run query-by-factory" +] = """ type: command short-summary: "Query pipeline runs in the factory based on input filter conditions." parameters: @@ -663,12 +781,16 @@ --last-updated-before "2018-06-16T00:49:48.3686473Z" --resource-group "exampleResourceGroup" """ -helps['datafactory activity-run'] = """ +helps[ + "datafactory activity-run" +] = """ type: group short-summary: Manage activity run with datafactory """ -helps['datafactory activity-run query-by-pipeline-run'] = """ +helps[ + "datafactory activity-run query-by-pipeline-run" +] = """ type: command short-summary: "Query activity runs based on input filter conditions." parameters: @@ -703,12 +825,16 @@ --resource-group "exampleResourceGroup" --run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" """ -helps['datafactory trigger'] = """ +helps[ + "datafactory trigger" +] = """ type: group short-summary: Manage trigger with datafactory """ -helps['datafactory trigger list'] = """ +helps[ + "datafactory trigger list" +] = """ type: command short-summary: "Lists triggers." examples: @@ -717,7 +843,9 @@ az datafactory trigger list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" """ -helps['datafactory trigger show'] = """ +helps[ + "datafactory trigger show" +] = """ type: command short-summary: "Gets a trigger." examples: @@ -727,7 +855,9 @@ --name "exampleTrigger" """ -helps['datafactory trigger create'] = """ +helps[ + "datafactory trigger create" +] = """ type: command short-summary: "Create a trigger." examples: @@ -741,12 +871,16 @@ }" --name "exampleTrigger" """ -helps['datafactory trigger update'] = """ +helps[ + "datafactory trigger update" +] = """ type: command short-summary: "Update a trigger." """ -helps['datafactory trigger delete'] = """ +helps[ + "datafactory trigger delete" +] = """ type: command short-summary: "Deletes a trigger." examples: @@ -756,7 +890,9 @@ "exampleResourceGroup" --name "exampleTrigger" """ -helps['datafactory trigger get-event-subscription-status'] = """ +helps[ + "datafactory trigger get-event-subscription-status" +] = """ type: command short-summary: "Get a trigger's event subscription status." examples: @@ -766,7 +902,9 @@ --resource-group "exampleResourceGroup" --name "exampleTrigger" """ -helps['datafactory trigger query-by-factory'] = """ +helps[ + "datafactory trigger query-by-factory" +] = """ type: command short-summary: "Query triggers." examples: @@ -776,7 +914,9 @@ "exampleTrigger" --resource-group "exampleResourceGroup" """ -helps['datafactory trigger start'] = """ +helps[ + "datafactory trigger start" +] = """ type: command short-summary: "Starts a trigger." examples: @@ -786,7 +926,9 @@ "exampleResourceGroup" --name "exampleTrigger" """ -helps['datafactory trigger stop'] = """ +helps[ + "datafactory trigger stop" +] = """ type: command short-summary: "Stops a trigger." examples: @@ -796,7 +938,9 @@ --name "exampleTrigger" """ -helps['datafactory trigger subscribe-to-event'] = """ +helps[ + "datafactory trigger subscribe-to-event" +] = """ type: command short-summary: "Subscribe event trigger to events." examples: @@ -806,7 +950,9 @@ "exampleResourceGroup" --name "exampleTrigger" """ -helps['datafactory trigger unsubscribe-from-event'] = """ +helps[ + "datafactory trigger unsubscribe-from-event" +] = """ type: command short-summary: "Unsubscribe event trigger from events." 
examples: @@ -816,7 +962,9 @@ "exampleResourceGroup" --name "exampleTrigger" """ -helps['datafactory trigger wait'] = """ +helps[ + "datafactory trigger wait" +] = """ type: command short-summary: Place the CLI in a waiting state until a condition of the datafactory trigger is met. examples: @@ -826,12 +974,16 @@ --name "exampleTrigger" --created """ -helps['datafactory trigger-run'] = """ +helps[ + "datafactory trigger-run" +] = """ type: group short-summary: Manage trigger run with datafactory """ -helps['datafactory trigger-run cancel'] = """ +helps[ + "datafactory trigger-run cancel" +] = """ type: command short-summary: "Cancel a single trigger instance by runId." examples: @@ -841,7 +993,9 @@ "exampleResourceGroup" --run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" --trigger-name "exampleTrigger" """ -helps['datafactory trigger-run query-by-factory'] = """ +helps[ + "datafactory trigger-run query-by-factory" +] = """ type: command short-summary: "Query trigger runs." parameters: @@ -876,7 +1030,9 @@ --last-updated-before "2018-06-16T00:49:48.3686473Z" --resource-group "exampleResourceGroup" """ -helps['datafactory trigger-run rerun'] = """ +helps[ + "datafactory trigger-run rerun" +] = """ type: command short-summary: "Rerun single trigger instance by runId." examples: @@ -886,12 +1042,16 @@ "exampleResourceGroup" --run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" --trigger-name "exampleTrigger" """ -helps['datafactory managed-virtual-network'] = """ +helps[ + "datafactory managed-virtual-network" +] = """ type: group short-summary: Manage managed virtual network with datafactory """ -helps['datafactory managed-virtual-network list'] = """ +helps[ + "datafactory managed-virtual-network list" +] = """ type: command short-summary: "Lists managed Virtual Networks." examples: @@ -901,7 +1061,9 @@ "exampleResourceGroup" """ -helps['datafactory managed-virtual-network show'] = """ +helps[ + "datafactory managed-virtual-network show" +] = """ type: command short-summary: "Gets a managed Virtual Network." examples: @@ -911,7 +1073,9 @@ "exampleManagedVirtualNetworkName" --resource-group "exampleResourceGroup" """ -helps['datafactory managed-virtual-network create'] = """ +helps[ + "datafactory managed-virtual-network create" +] = """ type: command short-summary: "Create a managed Virtual Network." examples: @@ -921,17 +1085,23 @@ "exampleManagedVirtualNetworkName" --resource-group "exampleResourceGroup" """ -helps['datafactory managed-virtual-network update'] = """ +helps[ + "datafactory managed-virtual-network update" +] = """ type: command short-summary: "Update a managed Virtual Network." """ -helps['datafactory managed-private-endpoint'] = """ +helps[ + "datafactory managed-private-endpoint" +] = """ type: group short-summary: Manage managed private endpoint with datafactory """ -helps['datafactory managed-private-endpoint list'] = """ +helps[ + "datafactory managed-private-endpoint list" +] = """ type: command short-summary: "Lists managed private endpoints." examples: @@ -941,7 +1111,9 @@ --managed-virtual-network-name "exampleManagedVirtualNetworkName" --resource-group "exampleResourceGroup" """ -helps['datafactory managed-private-endpoint show'] = """ +helps[ + "datafactory managed-private-endpoint show" +] = """ type: command short-summary: "Gets a managed private endpoint." 
examples: @@ -952,7 +1124,9 @@ "exampleResourceGroup" """ -helps['datafactory managed-private-endpoint create'] = """ +helps[ + "datafactory managed-private-endpoint create" +] = """ type: command short-summary: "Create a managed private endpoint." examples: @@ -964,12 +1138,16 @@ --managed-virtual-network-name "exampleManagedVirtualNetworkName" --resource-group "exampleResourceGroup" """ -helps['datafactory managed-private-endpoint update'] = """ +helps[ + "datafactory managed-private-endpoint update" +] = """ type: command short-summary: "Update a managed private endpoint." """ -helps['datafactory managed-private-endpoint delete'] = """ +helps[ + "datafactory managed-private-endpoint delete" +] = """ type: command short-summary: "Deletes a managed private endpoint." examples: diff --git a/src/datafactory/azext_datafactory/generated/_params.py b/src/datafactory/azext_datafactory/generated/_params.py index 76633d40136..3f9ccf0a8d5 100644 --- a/src/datafactory/azext_datafactory/generated/_params.py +++ b/src/datafactory/azext_datafactory/generated/_params.py @@ -15,652 +15,1379 @@ get_three_state_flag, get_enum_type, resource_group_name_type, - get_location_type + get_location_type, ) from azure.cli.core.commands.validators import ( get_default_location_from_resource_group, - validate_file_or_dict + validate_file_or_dict, ) from azext_datafactory.action import ( AddFactoryVstsConfiguration, AddFactoryGitHubConfiguration, AddFolder, AddFilters, - AddOrderBy + AddOrderBy, ) def load_arguments(self, _): - with self.argument_context('datafactory list') as c: - c.argument('resource_group_name', resource_group_name_type) - - with self.argument_context('datafactory show') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', - id_part='name') - c.argument('if_none_match', type=str, help='ETag of the factory entity. Should only be specified for get. If ' - 'the ETag matches the existing entity tag, or if * was provided, then no content will be returned.') - - with self.argument_context('datafactory create') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, - help='The factory name.') - c.argument('if_match', type=str, help='ETag of the factory entity. Should only be specified for update, for ' - 'which it should match existing entity or can be * for unconditional update.') - c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, - validator=get_default_location_from_resource_group) - c.argument('tags', tags_type) - c.argument('factory_vsts_configuration', action=AddFactoryVstsConfiguration, nargs='+', help='Factory\'s VSTS ' - 'repo information.', arg_group='RepoConfiguration') - c.argument('factory_git_hub_configuration', action=AddFactoryGitHubConfiguration, nargs='+', help='Factory\'s ' - 'GitHub repo information.', arg_group='RepoConfiguration') - c.argument('global_parameters', type=validate_file_or_dict, help='List of parameters for factory. 
Expected ' - 'value: json-string/json-file/@json-file.') - - with self.argument_context('datafactory update') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', - id_part='name') - c.argument('tags', tags_type) - - with self.argument_context('datafactory delete') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', - id_part='name') - - with self.argument_context('datafactory configure-factory-repo') as c: - c.argument('location', arg_type=get_location_type(self.cli_ctx), id_part='name') - c.argument('factory_resource_id', type=str, help='The factory resource id.') - c.argument('factory_vsts_configuration', action=AddFactoryVstsConfiguration, nargs='+', help='Factory\'s VSTS ' - 'repo information.', arg_group='RepoConfiguration') - c.argument('factory_git_hub_configuration', action=AddFactoryGitHubConfiguration, nargs='+', help='Factory\'s ' - 'GitHub repo information.', arg_group='RepoConfiguration') - - with self.argument_context('datafactory get-data-plane-access') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', - id_part='name') - c.argument('permissions', type=str, help='The string with permissions for Data Plane access. Currently only ' - '\'r\' is supported which grants read only access.') - c.argument('access_resource_path', type=str, help='The resource path to get access relative to factory. ' - 'Currently only empty string is supported which corresponds to the factory resource.') - c.argument('profile_name', type=str, help='The name of the profile. Currently only the default is supported. ' - 'The default value is DefaultProfile.') - c.argument('start_time', type=str, help='Start time for the token. If not specified the current time will be ' - 'used.') - c.argument('expire_time', type=str, help='Expiration time for the token. Maximum duration for the token is ' - 'eight hours and by default the token will expire in eight hours.') - - with self.argument_context('datafactory get-git-hub-access-token') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', - id_part='name') - c.argument('git_hub_access_code', type=str, help='GitHub access code.') - c.argument('git_hub_client_id', type=str, help='GitHub application client ID.') - c.argument('git_hub_access_token_base_url', type=str, help='GitHub access token base URL.') - - with self.argument_context('datafactory integration-runtime list') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.') - - with self.argument_context('datafactory integration-runtime show') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, - help='The integration runtime name.', id_part='child_name_1') - c.argument('if_none_match', type=str, help='ETag of the integration runtime entity. Should only be specified ' - 'for get. 
If the ETag matches the existing entity tag, or if * was provided, then no content will ' - 'be returned.') - - with self.argument_context('datafactory integration-runtime linked-integration-runtime create') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.') - c.argument('integration_runtime_name', type=str, help='The integration runtime name.') - c.argument('name', type=str, help='The name of the linked integration runtime.') - c.argument('subscription_id', type=str, help='The ID of the subscription that the linked integration runtime ' - 'belongs to.') - c.argument('data_factory_name', type=str, help='The name of the data factory that the linked integration ' - 'runtime belongs to.') - c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, - validator=get_default_location_from_resource_group) - - with self.argument_context('datafactory integration-runtime managed create') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.') - c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, - help='The integration runtime name.') - c.argument('if_match', type=str, help='ETag of the integration runtime entity. Should only be specified for ' - 'update, for which it should match existing entity or can be * for unconditional update.') - c.argument('description', type=str, help='Integration runtime description.') - c.argument('compute_properties', type=validate_file_or_dict, help='The compute resource for managed ' - 'integration runtime. Expected value: json-string/json-file/@json-file.', arg_group='Type ' - 'Properties') - c.argument('ssis_properties', type=validate_file_or_dict, help='SSIS properties for managed integration ' - 'runtime. Expected value: json-string/json-file/@json-file.', arg_group='Type Properties') - - with self.argument_context('datafactory integration-runtime self-hosted create') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.') - c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, - help='The integration runtime name.') - c.argument('if_match', type=str, help='ETag of the integration runtime entity. Should only be specified for ' - 'update, for which it should match existing entity or can be * for unconditional update.') - c.argument('description', type=str, help='Integration runtime description.') - c.argument('linked_info', type=validate_file_or_dict, help='The base definition of a linked integration ' - 'runtime. Expected value: json-string/json-file/@json-file.', arg_group='Type Properties') - - with self.argument_context('datafactory integration-runtime update') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, - help='The integration runtime name.', id_part='child_name_1') - c.argument('auto_update', arg_type=get_enum_type(['On', 'Off']), help='Enables or disables the auto-update ' - 'feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189.' 
- '') - c.argument('update_delay_offset', type=str, help='The time offset (in hours) in the day, e.g., PT03H is 3 ' - 'hours. The integration runtime auto update will happen on that time.') - - with self.argument_context('datafactory integration-runtime delete') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, - help='The integration runtime name.', id_part='child_name_1') - - with self.argument_context('datafactory integration-runtime get-connection-info') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, - help='The integration runtime name.', id_part='child_name_1') - - with self.argument_context('datafactory integration-runtime get-monitoring-data') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, - help='The integration runtime name.', id_part='child_name_1') - - with self.argument_context('datafactory integration-runtime get-status') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, - help='The integration runtime name.', id_part='child_name_1') - - with self.argument_context('datafactory integration-runtime list-auth-key') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.') - c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, - help='The integration runtime name.') - - with self.argument_context('datafactory integration-runtime regenerate-auth-key') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, - help='The integration runtime name.', id_part='child_name_1') - c.argument('key_name', arg_type=get_enum_type(['authKey1', 'authKey2']), help='The name of the authentication ' - 'key to regenerate.') - - with self.argument_context('datafactory integration-runtime remove-link') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, - help='The integration runtime name.', id_part='child_name_1') - c.argument('linked_factory_name', type=str, help='The data factory name for linked integration runtime.') - - with self.argument_context('datafactory integration-runtime start') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n', 
'--integration-runtime-name'], type=str, - help='The integration runtime name.', id_part='child_name_1') - - with self.argument_context('datafactory integration-runtime stop') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, - help='The integration runtime name.', id_part='child_name_1') - - with self.argument_context('datafactory integration-runtime sync-credentials') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, - help='The integration runtime name.', id_part='child_name_1') - - with self.argument_context('datafactory integration-runtime upgrade') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, - help='The integration runtime name.', id_part='child_name_1') - - with self.argument_context('datafactory integration-runtime wait') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, - help='The integration runtime name.', id_part='child_name_1') - c.argument('if_none_match', type=str, help='ETag of the integration runtime entity. Should only be specified ' - 'for get. If the ETag matches the existing entity tag, or if * was provided, then no content will ' - 'be returned.') - - with self.argument_context('datafactory integration-runtime-node show') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('integration_runtime_name', type=str, help='The integration runtime name.', id_part='child_name_1') - c.argument('node_name', type=str, help='The integration runtime node name.', id_part='child_name_2') - - with self.argument_context('datafactory integration-runtime-node update') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('integration_runtime_name', type=str, help='The integration runtime name.', id_part='child_name_1') - c.argument('node_name', type=str, help='The integration runtime node name.', id_part='child_name_2') - c.argument('concurrent_jobs_limit', type=int, help='The number of concurrent jobs permitted to run on the ' - 'integration runtime node. 
Values between 1 and maxConcurrentJobs(inclusive) are allowed.') - - with self.argument_context('datafactory integration-runtime-node delete') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('integration_runtime_name', type=str, help='The integration runtime name.', id_part='child_name_1') - c.argument('node_name', type=str, help='The integration runtime node name.', id_part='child_name_2') - - with self.argument_context('datafactory integration-runtime-node get-ip-address') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('integration_runtime_name', type=str, help='The integration runtime name.', id_part='child_name_1') - c.argument('node_name', type=str, help='The integration runtime node name.', id_part='child_name_2') - - with self.argument_context('datafactory linked-service list') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.') - - with self.argument_context('datafactory linked-service show') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('linked_service_name', options_list=['--name', '-n', '--linked-service-name'], type=str, help='The ' - 'linked service name.', id_part='child_name_1') - c.argument('if_none_match', type=str, help='ETag of the linked service entity. Should only be specified for ' - 'get. If the ETag matches the existing entity tag, or if * was provided, then no content will be ' - 'returned.') - - with self.argument_context('datafactory linked-service create') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.') - c.argument('linked_service_name', options_list=['--name', '-n', '--linked-service-name'], type=str, help='The ' - 'linked service name.') - c.argument('if_match', type=str, help='ETag of the linkedService entity. Should only be specified for update, ' - 'for which it should match existing entity or can be * for unconditional update.') - c.argument('properties', type=validate_file_or_dict, help='Properties of linked service. Expected value: ' - 'json-string/json-file/@json-file.') - - with self.argument_context('datafactory linked-service update') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('linked_service_name', options_list=['--name', '-n', '--linked-service-name'], type=str, help='The ' - 'linked service name.', id_part='child_name_1') - c.argument('if_match', type=str, help='ETag of the linkedService entity. Should only be specified for update, ' - 'for which it should match existing entity or can be * for unconditional update.') - c.argument('connect_via', type=validate_file_or_dict, help='The integration runtime reference. Expected value: ' - 'json-string/json-file/@json-file.') - c.argument('description', type=str, help='Linked service description.') - c.argument('parameters', type=validate_file_or_dict, help='Parameters for linked service. Expected value: ' - 'json-string/json-file/@json-file.') - c.argument('annotations', type=validate_file_or_dict, help='List of tags that can be used for describing the ' - 'linked service. 
Expected value: json-string/json-file/@json-file.') - c.ignore('linked_service') - - with self.argument_context('datafactory linked-service delete') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('linked_service_name', options_list=['--name', '-n', '--linked-service-name'], type=str, help='The ' - 'linked service name.', id_part='child_name_1') - - with self.argument_context('datafactory dataset list') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.') - - with self.argument_context('datafactory dataset show') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('dataset_name', options_list=['--name', '-n', '--dataset-name'], type=str, help='The dataset name.', - id_part='child_name_1') - c.argument('if_none_match', type=str, help='ETag of the dataset entity. Should only be specified for get. If ' - 'the ETag matches the existing entity tag, or if * was provided, then no content will be returned.') - - with self.argument_context('datafactory dataset create') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.') - c.argument('dataset_name', options_list=['--name', '-n', '--dataset-name'], type=str, - help='The dataset name.') - c.argument('if_match', type=str, help='ETag of the dataset entity. Should only be specified for update, for ' - 'which it should match existing entity or can be * for unconditional update.') - c.argument('properties', type=validate_file_or_dict, help='Dataset properties. Expected value: ' - 'json-string/json-file/@json-file.') - - with self.argument_context('datafactory dataset update') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('dataset_name', options_list=['--name', '-n', '--dataset-name'], type=str, help='The dataset name.', - id_part='child_name_1') - c.argument('if_match', type=str, help='ETag of the dataset entity. Should only be specified for update, for ' - 'which it should match existing entity or can be * for unconditional update.') - c.argument('description', type=str, help='Dataset description.') - c.argument('structure', type=validate_file_or_dict, help='Columns that define the structure of the dataset. ' - 'Type: array (or Expression with resultType array), itemType: DatasetDataElement. Expected value: ' - 'json-string/json-file/@json-file.') - c.argument('schema', type=validate_file_or_dict, help='Columns that define the physical type schema of the ' - 'dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. ' - 'Expected value: json-string/json-file/@json-file.') - c.argument('linked_service_name', type=validate_file_or_dict, help='Linked service reference. Expected value: ' - 'json-string/json-file/@json-file.') - c.argument('parameters', type=validate_file_or_dict, help='Parameters for dataset. Expected value: ' - 'json-string/json-file/@json-file.') - c.argument('annotations', type=validate_file_or_dict, help='List of tags that can be used for describing the ' - 'Dataset. Expected value: json-string/json-file/@json-file.') - c.argument('folder', action=AddFolder, nargs='+', help='The folder that this Dataset is in. 
If not specified, ' - 'Dataset will appear at the root level.') - c.ignore('dataset') - - with self.argument_context('datafactory dataset delete') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('dataset_name', options_list=['--name', '-n', '--dataset-name'], type=str, help='The dataset name.', - id_part='child_name_1') - - with self.argument_context('datafactory pipeline list') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.') - - with self.argument_context('datafactory pipeline show') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('pipeline_name', options_list=['--name', '-n', '--pipeline-name'], type=str, help='The pipeline ' - 'name.', id_part='child_name_1') - c.argument('if_none_match', type=str, help='ETag of the pipeline entity. Should only be specified for get. If ' - 'the ETag matches the existing entity tag, or if * was provided, then no content will be returned.') - - with self.argument_context('datafactory pipeline create') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.') - c.argument('pipeline_name', options_list=['--name', '-n', '--pipeline-name'], type=str, help='The pipeline ' - 'name.') - c.argument('if_match', type=str, help='ETag of the pipeline entity. Should only be specified for update, for ' - 'which it should match existing entity or can be * for unconditional update.') - c.argument('pipeline', type=validate_file_or_dict, help='Pipeline resource definition. Expected value: ' - 'json-string/json-file/@json-file.') - - with self.argument_context('datafactory pipeline update') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('pipeline_name', options_list=['--name', '-n', '--pipeline-name'], type=str, help='The pipeline ' - 'name.', id_part='child_name_1') - c.argument('if_match', type=str, help='ETag of the pipeline entity. Should only be specified for update, for ' - 'which it should match existing entity or can be * for unconditional update.') - c.argument('description', type=str, help='The description of the pipeline.') - c.argument('activities', type=validate_file_or_dict, help='List of activities in pipeline. Expected value: ' - 'json-string/json-file/@json-file.') - c.argument('parameters', type=validate_file_or_dict, help='List of parameters for pipeline. Expected value: ' - 'json-string/json-file/@json-file.') - c.argument('variables', type=validate_file_or_dict, help='List of variables for pipeline. Expected value: ' - 'json-string/json-file/@json-file.') - c.argument('concurrency', type=int, help='The max number of concurrent runs for the pipeline.') - c.argument('annotations', type=validate_file_or_dict, help='List of tags that can be used for describing the ' - 'Pipeline. Expected value: json-string/json-file/@json-file.') - c.argument('run_dimensions', type=validate_file_or_dict, help='Dimensions emitted by Pipeline. Expected value: ' - 'json-string/json-file/@json-file.') - c.argument('duration', type=validate_file_or_dict, help='TimeSpan value, after which an Azure Monitoring ' - 'Metric is fired. 
Expected value: json-string/json-file/@json-file.', arg_group='Policy Elapsed ' - 'Time Metric') - c.argument('folder_name', type=str, help='The name of the folder that this Pipeline is in.', - arg_group='Folder') - c.ignore('pipeline') - - with self.argument_context('datafactory pipeline delete') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('pipeline_name', options_list=['--name', '-n', '--pipeline-name'], type=str, help='The pipeline ' - 'name.', id_part='child_name_1') - - with self.argument_context('datafactory pipeline create-run') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.') - c.argument('pipeline_name', options_list=['--name', '-n', '--pipeline-name'], type=str, help='The pipeline ' - 'name.') - c.argument('reference_pipeline_run_id', type=str, help='The pipeline run identifier. If run ID is specified ' - 'the parameters of the specified run will be used to create a new run.') - c.argument('is_recovery', arg_type=get_three_state_flag(), help='Recovery mode flag. If recovery mode is set ' - 'to true, the specified referenced pipeline run and the new run will be grouped under the same ' - 'groupId.') - c.argument('start_activity_name', type=str, help='In recovery mode, the rerun will start from this activity. ' - 'If not specified, all activities will run.') - c.argument('start_from_failure', arg_type=get_three_state_flag(), help='In recovery mode, if set to true, the ' - 'rerun will start from failed activities. The property will be used only if startActivityName is ' - 'not specified.') - c.argument('parameters', type=validate_file_or_dict, help='Parameters of the pipeline run. These parameters ' - 'will be used only if the runId is not specified. Expected value: json-string/json-file/@json-file.') - - with self.argument_context('datafactory pipeline-run show') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('run_id', type=str, help='The pipeline run identifier.', id_part='child_name_1') - - with self.argument_context('datafactory pipeline-run cancel') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('run_id', type=str, help='The pipeline run identifier.', id_part='child_name_1') - c.argument('is_recursive', arg_type=get_three_state_flag(), help='If true, cancel all the Child pipelines that ' - 'are triggered by the current pipeline.') - - with self.argument_context('datafactory pipeline-run query-by-factory') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('continuation_token', type=str, help='The continuation token for getting the next page of results. 
' - 'Null for first page.') - c.argument('last_updated_after', help='The time at or after which the run event was updated in \'ISO 8601\' ' - 'format.') - c.argument('last_updated_before', help='The time at or before which the run event was updated in \'ISO 8601\' ' - 'format.') - c.argument('filters', action=AddFilters, nargs='+', help='List of filters.') - c.argument('order_by', action=AddOrderBy, nargs='+', help='List of OrderBy option.') - - with self.argument_context('datafactory activity-run query-by-pipeline-run') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('run_id', type=str, help='The pipeline run identifier.', id_part='child_name_1') - c.argument('continuation_token', type=str, help='The continuation token for getting the next page of results. ' - 'Null for first page.') - c.argument('last_updated_after', help='The time at or after which the run event was updated in \'ISO 8601\' ' - 'format.') - c.argument('last_updated_before', help='The time at or before which the run event was updated in \'ISO 8601\' ' - 'format.') - c.argument('filters', action=AddFilters, nargs='+', help='List of filters.') - c.argument('order_by', action=AddOrderBy, nargs='+', help='List of OrderBy option.') - - with self.argument_context('datafactory trigger list') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.') - - with self.argument_context('datafactory trigger show') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.', - id_part='child_name_1') - c.argument('if_none_match', type=str, help='ETag of the trigger entity. Should only be specified for get. If ' - 'the ETag matches the existing entity tag, or if * was provided, then no content will be returned.') - - with self.argument_context('datafactory trigger create') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.') - c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, - help='The trigger name.') - c.argument('if_match', type=str, help='ETag of the trigger entity. Should only be specified for update, for ' - 'which it should match existing entity or can be * for unconditional update.') - c.argument('properties', type=validate_file_or_dict, help='Properties of the trigger. Expected value: ' - 'json-string/json-file/@json-file.') - - with self.argument_context('datafactory trigger update') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.', - id_part='child_name_1') - c.argument('if_match', type=str, help='ETag of the trigger entity. Should only be specified for update, for ' - 'which it should match existing entity or can be * for unconditional update.') - c.argument('description', type=str, help='Trigger description.') - c.argument('annotations', type=validate_file_or_dict, help='List of tags that can be used for describing the ' - 'trigger. 
Expected value: json-string/json-file/@json-file.') - c.ignore('trigger') - - with self.argument_context('datafactory trigger delete') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.', - id_part='child_name_1') - - with self.argument_context('datafactory trigger get-event-subscription-status') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.', - id_part='child_name_1') - - with self.argument_context('datafactory trigger query-by-factory') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('continuation_token', type=str, help='The continuation token for getting the next page of results. ' - 'Null for first page.') - c.argument('parent_trigger_name', type=str, help='The name of the parent TumblingWindowTrigger to get the ' - 'child rerun triggers') - - with self.argument_context('datafactory trigger start') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.', - id_part='child_name_1') - - with self.argument_context('datafactory trigger stop') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.', - id_part='child_name_1') - - with self.argument_context('datafactory trigger subscribe-to-event') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.', - id_part='child_name_1') - - with self.argument_context('datafactory trigger unsubscribe-from-event') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.', - id_part='child_name_1') - - with self.argument_context('datafactory trigger wait') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.', - id_part='child_name_1') - c.argument('if_none_match', type=str, help='ETag of the trigger entity. Should only be specified for get. 
If ' - 'the ETag matches the existing entity tag, or if * was provided, then no content will be returned.') - - with self.argument_context('datafactory trigger-run cancel') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('trigger_name', type=str, help='The trigger name.', id_part='child_name_1') - c.argument('run_id', type=str, help='The pipeline run identifier.', id_part='child_name_2') - - with self.argument_context('datafactory trigger-run query-by-factory') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('continuation_token', type=str, help='The continuation token for getting the next page of results. ' - 'Null for first page.') - c.argument('last_updated_after', help='The time at or after which the run event was updated in \'ISO 8601\' ' - 'format.') - c.argument('last_updated_before', help='The time at or before which the run event was updated in \'ISO 8601\' ' - 'format.') - c.argument('filters', action=AddFilters, nargs='+', help='List of filters.') - c.argument('order_by', action=AddOrderBy, nargs='+', help='List of OrderBy option.') - - with self.argument_context('datafactory trigger-run rerun') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('trigger_name', type=str, help='The trigger name.', id_part='child_name_1') - c.argument('run_id', type=str, help='The pipeline run identifier.', id_part='child_name_2') - - with self.argument_context('datafactory managed-virtual-network list') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.') - - with self.argument_context('datafactory managed-virtual-network show') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('managed_virtual_network_name', options_list=['--name', '-n', '--managed-virtual-network-name'], - type=str, help='Managed virtual network name', id_part='child_name_1') - c.argument('if_none_match', type=str, help='ETag of the managed Virtual Network entity. Should only be ' - 'specified for get. If the ETag matches the existing entity tag, or if * was provided, then no ' - 'content will be returned.') - - with self.argument_context('datafactory managed-virtual-network create') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.') - c.argument('managed_virtual_network_name', options_list=['--name', '-n', '--managed-virtual-network-name'], - type=str, help='Managed virtual network name') - c.argument('if_match', type=str, help='ETag of the managed Virtual Network entity. 
Should only be specified ' - 'for update, for which it should match existing entity or can be * for unconditional update.') - - with self.argument_context('datafactory managed-virtual-network update') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('managed_virtual_network_name', options_list=['--name', '-n', '--managed-virtual-network-name'], - type=str, help='Managed virtual network name', id_part='child_name_1') - c.argument('if_match', type=str, help='ETag of the managed Virtual Network entity. Should only be specified ' - 'for update, for which it should match existing entity or can be * for unconditional update.') - c.ignore('managed_virtual_network') - - with self.argument_context('datafactory managed-private-endpoint list') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.') - c.argument('managed_virtual_network_name', options_list=['--managed-virtual-network-name', '--mvnet-name'], - type=str, help='Managed virtual network name') - - with self.argument_context('datafactory managed-private-endpoint show') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('managed_virtual_network_name', options_list=['--managed-virtual-network-name', '--mvnet-name'], - type=str, help='Managed virtual network name', id_part='child_name_1') - c.argument('managed_private_endpoint_name', options_list=['--name', '-n', '--managed-private-endpoint-name'], - type=str, help='Managed private endpoint name', id_part='child_name_2') - c.argument('if_none_match', type=str, help='ETag of the managed private endpoint entity. Should only be ' - 'specified for get. If the ETag matches the existing entity tag, or if * was provided, then no ' - 'content will be returned.') - - with self.argument_context('datafactory managed-private-endpoint create') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.') - c.argument('managed_virtual_network_name', options_list=['--managed-virtual-network-name', '--mvnet-name'], - type=str, help='Managed virtual network name') - c.argument('managed_private_endpoint_name', options_list=['--name', '-n', '--managed-private-endpoint-name'], - type=str, help='Managed private endpoint name') - c.argument('if_match', type=str, help='ETag of the managed private endpoint entity. 
Should only be specified ' - 'for update, for which it should match existing entity or can be * for unconditional update.') - c.argument('fqdns', nargs='+', help='Fully qualified domain names') - c.argument('group_id', type=str, help='The groupId to which the managed private endpoint is created') - c.argument('private_link_resource_id', options_list=['--private-link-resource-id', '--private-link'], type=str, - help='The ARM resource ID of the resource to which the managed private endpoint is created') - - with self.argument_context('datafactory managed-private-endpoint update') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('managed_virtual_network_name', options_list=['--managed-virtual-network-name', '--mvnet-name'], - type=str, help='Managed virtual network name', id_part='child_name_1') - c.argument('managed_private_endpoint_name', options_list=['--name', '-n', '--managed-private-endpoint-name'], - type=str, help='Managed private endpoint name', id_part='child_name_2') - c.argument('if_match', type=str, help='ETag of the managed private endpoint entity. Should only be specified ' - 'for update, for which it should match existing entity or can be * for unconditional update.') - c.argument('fqdns', nargs='+', help='Fully qualified domain names') - c.argument('group_id', type=str, help='The groupId to which the managed private endpoint is created') - c.argument('private_link_resource_id', options_list=['--private-link-resource-id', '--private-link'], type=str, - help='The ARM resource ID of the resource to which the managed private endpoint is created') - c.ignore('managed_private_endpoint') - - with self.argument_context('datafactory managed-private-endpoint delete') as c: - c.argument('resource_group_name', resource_group_name_type) - c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('managed_virtual_network_name', options_list=['--managed-virtual-network-name', '--mvnet-name'], - type=str, help='Managed virtual network name', id_part='child_name_1') - c.argument('managed_private_endpoint_name', options_list=['--name', '-n', '--managed-private-endpoint-name'], - type=str, help='Managed private endpoint name', id_part='child_name_2') + with self.argument_context("datafactory list") as c: + c.argument("resource_group_name", resource_group_name_type) + + with self.argument_context("datafactory show") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument( + "factory_name", + options_list=["--name", "-n", "--factory-name"], + type=str, + help="The factory name.", + id_part="name", + ) + c.argument( + "if_none_match", + type=str, + help="ETag of the factory entity. Should only be specified for get. If " + "the ETag matches the existing entity tag, or if * was provided, then no content will be returned.", + ) + + with self.argument_context("datafactory create") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument( + "factory_name", + options_list=["--name", "-n", "--factory-name"], + type=str, + help="The factory name.", + ) + c.argument( + "if_match", + type=str, + help="ETag of the factory entity. 
Should only be specified for update, for " + "which it should match existing entity or can be * for unconditional update.", + ) + c.argument( + "location", + arg_type=get_location_type(self.cli_ctx), + required=False, + validator=get_default_location_from_resource_group, + ) + c.argument("tags", tags_type) + c.argument( + "factory_vsts_configuration", + action=AddFactoryVstsConfiguration, + nargs="+", + help="Factory's VSTS " "repo information.", + arg_group="RepoConfiguration", + ) + c.argument( + "factory_git_hub_configuration", + action=AddFactoryGitHubConfiguration, + nargs="+", + help="Factory's " "GitHub repo information.", + arg_group="RepoConfiguration", + ) + c.argument( + "global_parameters", + type=validate_file_or_dict, + help="List of parameters for factory. Expected " + "value: json-string/json-file/@json-file.", + ) + + with self.argument_context("datafactory update") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument( + "factory_name", + options_list=["--name", "-n", "--factory-name"], + type=str, + help="The factory name.", + id_part="name", + ) + c.argument("tags", tags_type) + + with self.argument_context("datafactory delete") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument( + "factory_name", + options_list=["--name", "-n", "--factory-name"], + type=str, + help="The factory name.", + id_part="name", + ) + + with self.argument_context("datafactory configure-factory-repo") as c: + c.argument("location", arg_type=get_location_type(self.cli_ctx), id_part="name") + c.argument("factory_resource_id", type=str, help="The factory resource id.") + c.argument( + "factory_vsts_configuration", + action=AddFactoryVstsConfiguration, + nargs="+", + help="Factory's VSTS " "repo information.", + arg_group="RepoConfiguration", + ) + c.argument( + "factory_git_hub_configuration", + action=AddFactoryGitHubConfiguration, + nargs="+", + help="Factory's " "GitHub repo information.", + arg_group="RepoConfiguration", + ) + + with self.argument_context("datafactory get-data-plane-access") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument( + "factory_name", + options_list=["--name", "-n", "--factory-name"], + type=str, + help="The factory name.", + id_part="name", + ) + c.argument( + "permissions", + type=str, + help="The string with permissions for Data Plane access. Currently only " + "'r' is supported which grants read only access.", + ) + c.argument( + "access_resource_path", + type=str, + help="The resource path to get access relative to factory. " + "Currently only empty string is supported which corresponds to the factory resource.", + ) + c.argument( + "profile_name", + type=str, + help="The name of the profile. Currently only the default is supported. " + "The default value is DefaultProfile.", + ) + c.argument( + "start_time", + type=str, + help="Start time for the token. If not specified the current time will be " + "used.", + ) + c.argument( + "expire_time", + type=str, + help="Expiration time for the token. 
Maximum duration for the token is " + "eight hours and by default the token will expire in eight hours.", + ) + + with self.argument_context("datafactory get-git-hub-access-token") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument( + "factory_name", + options_list=["--name", "-n", "--factory-name"], + type=str, + help="The factory name.", + id_part="name", + ) + c.argument("git_hub_access_code", type=str, help="GitHub access code.") + c.argument("git_hub_client_id", type=str, help="GitHub application client ID.") + c.argument( + "git_hub_access_token_base_url", + type=str, + help="GitHub access token base URL.", + ) + + with self.argument_context("datafactory integration-runtime list") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.") + + with self.argument_context("datafactory integration-runtime show") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "integration_runtime_name", + options_list=["--name", "-n", "--integration-runtime-name"], + type=str, + help="The integration runtime name.", + id_part="child_name_1", + ) + c.argument( + "if_none_match", + type=str, + help="ETag of the integration runtime entity. Should only be specified " + "for get. If the ETag matches the existing entity tag, or if * was provided, then no content will " + "be returned.", + ) + + with self.argument_context( + "datafactory integration-runtime linked-integration-runtime create" + ) as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.") + c.argument( + "integration_runtime_name", type=str, help="The integration runtime name." + ) + c.argument("name", type=str, help="The name of the linked integration runtime.") + c.argument( + "subscription_id", + type=str, + help="The ID of the subscription that the linked integration runtime " + "belongs to.", + ) + c.argument( + "data_factory_name", + type=str, + help="The name of the data factory that the linked integration " + "runtime belongs to.", + ) + c.argument( + "location", + arg_type=get_location_type(self.cli_ctx), + required=False, + validator=get_default_location_from_resource_group, + ) + + with self.argument_context("datafactory integration-runtime managed create") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.") + c.argument( + "integration_runtime_name", + options_list=["--name", "-n", "--integration-runtime-name"], + type=str, + help="The integration runtime name.", + ) + c.argument( + "if_match", + type=str, + help="ETag of the integration runtime entity. Should only be specified for " + "update, for which it should match existing entity or can be * for unconditional update.", + ) + c.argument("description", type=str, help="Integration runtime description.") + c.argument( + "compute_properties", + type=validate_file_or_dict, + help="The compute resource for managed " + "integration runtime. Expected value: json-string/json-file/@json-file.", + arg_group="Type " "Properties", + ) + c.argument( + "ssis_properties", + type=validate_file_or_dict, + help="SSIS properties for managed integration " + "runtime. 
Expected value: json-string/json-file/@json-file.", + arg_group="Type Properties", + ) + + with self.argument_context( + "datafactory integration-runtime self-hosted create" + ) as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.") + c.argument( + "integration_runtime_name", + options_list=["--name", "-n", "--integration-runtime-name"], + type=str, + help="The integration runtime name.", + ) + c.argument( + "if_match", + type=str, + help="ETag of the integration runtime entity. Should only be specified for " + "update, for which it should match existing entity or can be * for unconditional update.", + ) + c.argument("description", type=str, help="Integration runtime description.") + c.argument( + "linked_info", + type=validate_file_or_dict, + help="The base definition of a linked integration " + "runtime. Expected value: json-string/json-file/@json-file.", + arg_group="Type Properties", + ) + + with self.argument_context("datafactory integration-runtime update") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "integration_runtime_name", + options_list=["--name", "-n", "--integration-runtime-name"], + type=str, + help="The integration runtime name.", + id_part="child_name_1", + ) + c.argument( + "auto_update", + arg_type=get_enum_type(["On", "Off"]), + help="Enables or disables the auto-update " + "feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189." + "", + ) + c.argument( + "update_delay_offset", + type=str, + help="The time offset (in hours) in the day, e.g., PT03H is 3 " + "hours. The integration runtime auto update will happen on that time.", + ) + + with self.argument_context("datafactory integration-runtime delete") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "integration_runtime_name", + options_list=["--name", "-n", "--integration-runtime-name"], + type=str, + help="The integration runtime name.", + id_part="child_name_1", + ) + + with self.argument_context( + "datafactory integration-runtime get-connection-info" + ) as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "integration_runtime_name", + options_list=["--name", "-n", "--integration-runtime-name"], + type=str, + help="The integration runtime name.", + id_part="child_name_1", + ) + + with self.argument_context( + "datafactory integration-runtime get-monitoring-data" + ) as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "integration_runtime_name", + options_list=["--name", "-n", "--integration-runtime-name"], + type=str, + help="The integration runtime name.", + id_part="child_name_1", + ) + + with self.argument_context("datafactory integration-runtime get-status") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "integration_runtime_name", + options_list=["--name", "-n", "--integration-runtime-name"], + type=str, + help="The integration runtime name.", + id_part="child_name_1", + ) + + with self.argument_context("datafactory 
integration-runtime list-auth-key") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.") + c.argument( + "integration_runtime_name", + options_list=["--name", "-n", "--integration-runtime-name"], + type=str, + help="The integration runtime name.", + ) + + with self.argument_context( + "datafactory integration-runtime regenerate-auth-key" + ) as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "integration_runtime_name", + options_list=["--name", "-n", "--integration-runtime-name"], + type=str, + help="The integration runtime name.", + id_part="child_name_1", + ) + c.argument( + "key_name", + arg_type=get_enum_type(["authKey1", "authKey2"]), + help="The name of the authentication " "key to regenerate.", + ) + + with self.argument_context("datafactory integration-runtime remove-link") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "integration_runtime_name", + options_list=["--name", "-n", "--integration-runtime-name"], + type=str, + help="The integration runtime name.", + id_part="child_name_1", + ) + c.argument( + "linked_factory_name", + type=str, + help="The data factory name for linked integration runtime.", + ) + + with self.argument_context("datafactory integration-runtime start") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "integration_runtime_name", + options_list=["--name", "-n", "--integration-runtime-name"], + type=str, + help="The integration runtime name.", + id_part="child_name_1", + ) + + with self.argument_context("datafactory integration-runtime stop") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "integration_runtime_name", + options_list=["--name", "-n", "--integration-runtime-name"], + type=str, + help="The integration runtime name.", + id_part="child_name_1", + ) + + with self.argument_context("datafactory integration-runtime sync-credentials") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "integration_runtime_name", + options_list=["--name", "-n", "--integration-runtime-name"], + type=str, + help="The integration runtime name.", + id_part="child_name_1", + ) + + with self.argument_context("datafactory integration-runtime upgrade") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "integration_runtime_name", + options_list=["--name", "-n", "--integration-runtime-name"], + type=str, + help="The integration runtime name.", + id_part="child_name_1", + ) + + with self.argument_context("datafactory integration-runtime wait") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "integration_runtime_name", + options_list=["--name", "-n", "--integration-runtime-name"], + type=str, + help="The integration runtime name.", + id_part="child_name_1", + ) + c.argument( + "if_none_match", + type=str, + 
help="ETag of the integration runtime entity. Should only be specified " + "for get. If the ETag matches the existing entity tag, or if * was provided, then no content will " + "be returned.", + ) + + with self.argument_context("datafactory integration-runtime-node show") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "integration_runtime_name", + type=str, + help="The integration runtime name.", + id_part="child_name_1", + ) + c.argument( + "node_name", + type=str, + help="The integration runtime node name.", + id_part="child_name_2", + ) + + with self.argument_context("datafactory integration-runtime-node update") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "integration_runtime_name", + type=str, + help="The integration runtime name.", + id_part="child_name_1", + ) + c.argument( + "node_name", + type=str, + help="The integration runtime node name.", + id_part="child_name_2", + ) + c.argument( + "concurrent_jobs_limit", + type=int, + help="The number of concurrent jobs permitted to run on the " + "integration runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed.", + ) + + with self.argument_context("datafactory integration-runtime-node delete") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "integration_runtime_name", + type=str, + help="The integration runtime name.", + id_part="child_name_1", + ) + c.argument( + "node_name", + type=str, + help="The integration runtime node name.", + id_part="child_name_2", + ) + + with self.argument_context( + "datafactory integration-runtime-node get-ip-address" + ) as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "integration_runtime_name", + type=str, + help="The integration runtime name.", + id_part="child_name_1", + ) + c.argument( + "node_name", + type=str, + help="The integration runtime node name.", + id_part="child_name_2", + ) + + with self.argument_context("datafactory linked-service list") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.") + + with self.argument_context("datafactory linked-service show") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "linked_service_name", + options_list=["--name", "-n", "--linked-service-name"], + type=str, + help="The " "linked service name.", + id_part="child_name_1", + ) + c.argument( + "if_none_match", + type=str, + help="ETag of the linked service entity. Should only be specified for " + "get. 
If the ETag matches the existing entity tag, or if * was provided, then no content will be " + "returned.", + ) + + with self.argument_context("datafactory linked-service create") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.") + c.argument( + "linked_service_name", + options_list=["--name", "-n", "--linked-service-name"], + type=str, + help="The " "linked service name.", + ) + c.argument( + "if_match", + type=str, + help="ETag of the linkedService entity. Should only be specified for update, " + "for which it should match existing entity or can be * for unconditional update.", + ) + c.argument( + "properties", + type=validate_file_or_dict, + help="Properties of linked service. Expected value: " + "json-string/json-file/@json-file.", + ) + + with self.argument_context("datafactory linked-service update") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "linked_service_name", + options_list=["--name", "-n", "--linked-service-name"], + type=str, + help="The " "linked service name.", + id_part="child_name_1", + ) + c.argument( + "if_match", + type=str, + help="ETag of the linkedService entity. Should only be specified for update, " + "for which it should match existing entity or can be * for unconditional update.", + ) + c.argument( + "connect_via", + type=validate_file_or_dict, + help="The integration runtime reference. Expected value: " + "json-string/json-file/@json-file.", + ) + c.argument("description", type=str, help="Linked service description.") + c.argument( + "parameters", + type=validate_file_or_dict, + help="Parameters for linked service. Expected value: " + "json-string/json-file/@json-file.", + ) + c.argument( + "annotations", + type=validate_file_or_dict, + help="List of tags that can be used for describing the " + "linked service. Expected value: json-string/json-file/@json-file.", + ) + c.ignore("linked_service") + + with self.argument_context("datafactory linked-service delete") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "linked_service_name", + options_list=["--name", "-n", "--linked-service-name"], + type=str, + help="The " "linked service name.", + id_part="child_name_1", + ) + + with self.argument_context("datafactory dataset list") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.") + + with self.argument_context("datafactory dataset show") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "dataset_name", + options_list=["--name", "-n", "--dataset-name"], + type=str, + help="The dataset name.", + id_part="child_name_1", + ) + c.argument( + "if_none_match", + type=str, + help="ETag of the dataset entity. Should only be specified for get. 
If " + "the ETag matches the existing entity tag, or if * was provided, then no content will be returned.", + ) + + with self.argument_context("datafactory dataset create") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.") + c.argument( + "dataset_name", + options_list=["--name", "-n", "--dataset-name"], + type=str, + help="The dataset name.", + ) + c.argument( + "if_match", + type=str, + help="ETag of the dataset entity. Should only be specified for update, for " + "which it should match existing entity or can be * for unconditional update.", + ) + c.argument( + "properties", + type=validate_file_or_dict, + help="Dataset properties. Expected value: " + "json-string/json-file/@json-file.", + ) + + with self.argument_context("datafactory dataset update") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "dataset_name", + options_list=["--name", "-n", "--dataset-name"], + type=str, + help="The dataset name.", + id_part="child_name_1", + ) + c.argument( + "if_match", + type=str, + help="ETag of the dataset entity. Should only be specified for update, for " + "which it should match existing entity or can be * for unconditional update.", + ) + c.argument("description", type=str, help="Dataset description.") + c.argument( + "structure", + type=validate_file_or_dict, + help="Columns that define the structure of the dataset. " + "Type: array (or Expression with resultType array), itemType: DatasetDataElement. Expected value: " + "json-string/json-file/@json-file.", + ) + c.argument( + "schema", + type=validate_file_or_dict, + help="Columns that define the physical type schema of the " + "dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. " + "Expected value: json-string/json-file/@json-file.", + ) + c.argument( + "linked_service_name", + type=validate_file_or_dict, + help="Linked service reference. Expected value: " + "json-string/json-file/@json-file.", + ) + c.argument( + "parameters", + type=validate_file_or_dict, + help="Parameters for dataset. Expected value: " + "json-string/json-file/@json-file.", + ) + c.argument( + "annotations", + type=validate_file_or_dict, + help="List of tags that can be used for describing the " + "Dataset. Expected value: json-string/json-file/@json-file.", + ) + c.argument( + "folder", + action=AddFolder, + nargs="+", + help="The folder that this Dataset is in. 
If not specified, " + "Dataset will appear at the root level.", + ) + c.ignore("dataset") + + with self.argument_context("datafactory dataset delete") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "dataset_name", + options_list=["--name", "-n", "--dataset-name"], + type=str, + help="The dataset name.", + id_part="child_name_1", + ) + + with self.argument_context("datafactory pipeline list") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.") + + with self.argument_context("datafactory pipeline show") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "pipeline_name", + options_list=["--name", "-n", "--pipeline-name"], + type=str, + help="The pipeline " "name.", + id_part="child_name_1", + ) + c.argument( + "if_none_match", + type=str, + help="ETag of the pipeline entity. Should only be specified for get. If " + "the ETag matches the existing entity tag, or if * was provided, then no content will be returned.", + ) + + with self.argument_context("datafactory pipeline create") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.") + c.argument( + "pipeline_name", + options_list=["--name", "-n", "--pipeline-name"], + type=str, + help="The pipeline " "name.", + ) + c.argument( + "if_match", + type=str, + help="ETag of the pipeline entity. Should only be specified for update, for " + "which it should match existing entity or can be * for unconditional update.", + ) + c.argument( + "pipeline", + type=validate_file_or_dict, + help="Pipeline resource definition. Expected value: " + "json-string/json-file/@json-file.", + ) + + with self.argument_context("datafactory pipeline update") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "pipeline_name", + options_list=["--name", "-n", "--pipeline-name"], + type=str, + help="The pipeline " "name.", + id_part="child_name_1", + ) + c.argument( + "if_match", + type=str, + help="ETag of the pipeline entity. Should only be specified for update, for " + "which it should match existing entity or can be * for unconditional update.", + ) + c.argument("description", type=str, help="The description of the pipeline.") + c.argument( + "activities", + type=validate_file_or_dict, + help="List of activities in pipeline. Expected value: " + "json-string/json-file/@json-file.", + ) + c.argument( + "parameters", + type=validate_file_or_dict, + help="List of parameters for pipeline. Expected value: " + "json-string/json-file/@json-file.", + ) + c.argument( + "variables", + type=validate_file_or_dict, + help="List of variables for pipeline. Expected value: " + "json-string/json-file/@json-file.", + ) + c.argument( + "concurrency", + type=int, + help="The max number of concurrent runs for the pipeline.", + ) + c.argument( + "annotations", + type=validate_file_or_dict, + help="List of tags that can be used for describing the " + "Pipeline. Expected value: json-string/json-file/@json-file.", + ) + c.argument( + "run_dimensions", + type=validate_file_or_dict, + help="Dimensions emitted by Pipeline. 
Expected value: " + "json-string/json-file/@json-file.", + ) + c.argument( + "duration", + type=validate_file_or_dict, + help="TimeSpan value, after which an Azure Monitoring " + "Metric is fired. Expected value: json-string/json-file/@json-file.", + arg_group="Policy Elapsed " "Time Metric", + ) + c.argument( + "folder_name", + type=str, + help="The name of the folder that this Pipeline is in.", + arg_group="Folder", + ) + c.ignore("pipeline") + + with self.argument_context("datafactory pipeline delete") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "pipeline_name", + options_list=["--name", "-n", "--pipeline-name"], + type=str, + help="The pipeline " "name.", + id_part="child_name_1", + ) + + with self.argument_context("datafactory pipeline create-run") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.") + c.argument( + "pipeline_name", + options_list=["--name", "-n", "--pipeline-name"], + type=str, + help="The pipeline " "name.", + ) + c.argument( + "reference_pipeline_run_id", + type=str, + help="The pipeline run identifier. If run ID is specified " + "the parameters of the specified run will be used to create a new run.", + ) + c.argument( + "is_recovery", + arg_type=get_three_state_flag(), + help="Recovery mode flag. If recovery mode is set " + "to true, the specified referenced pipeline run and the new run will be grouped under the same " + "groupId.", + ) + c.argument( + "start_activity_name", + type=str, + help="In recovery mode, the rerun will start from this activity. " + "If not specified, all activities will run.", + ) + c.argument( + "start_from_failure", + arg_type=get_three_state_flag(), + help="In recovery mode, if set to true, the " + "rerun will start from failed activities. The property will be used only if startActivityName is " + "not specified.", + ) + c.argument( + "parameters", + type=validate_file_or_dict, + help="Parameters of the pipeline run. These parameters " + "will be used only if the runId is not specified. Expected value: json-string/json-file/@json-file.", + ) + + with self.argument_context("datafactory pipeline-run show") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "run_id", + type=str, + help="The pipeline run identifier.", + id_part="child_name_1", + ) + + with self.argument_context("datafactory pipeline-run cancel") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "run_id", + type=str, + help="The pipeline run identifier.", + id_part="child_name_1", + ) + c.argument( + "is_recursive", + arg_type=get_three_state_flag(), + help="If true, cancel all the Child pipelines that " + "are triggered by the current pipeline.", + ) + + with self.argument_context("datafactory pipeline-run query-by-factory") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "continuation_token", + type=str, + help="The continuation token for getting the next page of results. 
" + "Null for first page.", + ) + c.argument( + "last_updated_after", + help="The time at or after which the run event was updated in 'ISO 8601' " + "format.", + ) + c.argument( + "last_updated_before", + help="The time at or before which the run event was updated in 'ISO 8601' " + "format.", + ) + c.argument("filters", action=AddFilters, nargs="+", help="List of filters.") + c.argument( + "order_by", action=AddOrderBy, nargs="+", help="List of OrderBy option." + ) + + with self.argument_context("datafactory activity-run query-by-pipeline-run") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "run_id", + type=str, + help="The pipeline run identifier.", + id_part="child_name_1", + ) + c.argument( + "continuation_token", + type=str, + help="The continuation token for getting the next page of results. " + "Null for first page.", + ) + c.argument( + "last_updated_after", + help="The time at or after which the run event was updated in 'ISO 8601' " + "format.", + ) + c.argument( + "last_updated_before", + help="The time at or before which the run event was updated in 'ISO 8601' " + "format.", + ) + c.argument("filters", action=AddFilters, nargs="+", help="List of filters.") + c.argument( + "order_by", action=AddOrderBy, nargs="+", help="List of OrderBy option." + ) + + with self.argument_context("datafactory trigger list") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.") + + with self.argument_context("datafactory trigger show") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "trigger_name", + options_list=["--name", "-n", "--trigger-name"], + type=str, + help="The trigger name.", + id_part="child_name_1", + ) + c.argument( + "if_none_match", + type=str, + help="ETag of the trigger entity. Should only be specified for get. If " + "the ETag matches the existing entity tag, or if * was provided, then no content will be returned.", + ) + + with self.argument_context("datafactory trigger create") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.") + c.argument( + "trigger_name", + options_list=["--name", "-n", "--trigger-name"], + type=str, + help="The trigger name.", + ) + c.argument( + "if_match", + type=str, + help="ETag of the trigger entity. Should only be specified for update, for " + "which it should match existing entity or can be * for unconditional update.", + ) + c.argument( + "properties", + type=validate_file_or_dict, + help="Properties of the trigger. Expected value: " + "json-string/json-file/@json-file.", + ) + + with self.argument_context("datafactory trigger update") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "trigger_name", + options_list=["--name", "-n", "--trigger-name"], + type=str, + help="The trigger name.", + id_part="child_name_1", + ) + c.argument( + "if_match", + type=str, + help="ETag of the trigger entity. 
Should only be specified for update, for " + "which it should match existing entity or can be * for unconditional update.", + ) + c.argument("description", type=str, help="Trigger description.") + c.argument( + "annotations", + type=validate_file_or_dict, + help="List of tags that can be used for describing the " + "trigger. Expected value: json-string/json-file/@json-file.", + ) + c.ignore("trigger") + + with self.argument_context("datafactory trigger delete") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "trigger_name", + options_list=["--name", "-n", "--trigger-name"], + type=str, + help="The trigger name.", + id_part="child_name_1", + ) + + with self.argument_context( + "datafactory trigger get-event-subscription-status" + ) as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "trigger_name", + options_list=["--name", "-n", "--trigger-name"], + type=str, + help="The trigger name.", + id_part="child_name_1", + ) + + with self.argument_context("datafactory trigger query-by-factory") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "continuation_token", + type=str, + help="The continuation token for getting the next page of results. " + "Null for first page.", + ) + c.argument( + "parent_trigger_name", + type=str, + help="The name of the parent TumblingWindowTrigger to get the " + "child rerun triggers", + ) + + with self.argument_context("datafactory trigger start") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "trigger_name", + options_list=["--name", "-n", "--trigger-name"], + type=str, + help="The trigger name.", + id_part="child_name_1", + ) + + with self.argument_context("datafactory trigger stop") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "trigger_name", + options_list=["--name", "-n", "--trigger-name"], + type=str, + help="The trigger name.", + id_part="child_name_1", + ) + + with self.argument_context("datafactory trigger subscribe-to-event") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "trigger_name", + options_list=["--name", "-n", "--trigger-name"], + type=str, + help="The trigger name.", + id_part="child_name_1", + ) + + with self.argument_context("datafactory trigger unsubscribe-from-event") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "trigger_name", + options_list=["--name", "-n", "--trigger-name"], + type=str, + help="The trigger name.", + id_part="child_name_1", + ) + + with self.argument_context("datafactory trigger wait") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "trigger_name", + options_list=["--name", "-n", "--trigger-name"], + type=str, + help="The trigger name.", + id_part="child_name_1", + ) + c.argument( + "if_none_match", + 
type=str, + help="ETag of the trigger entity. Should only be specified for get. If " + "the ETag matches the existing entity tag, or if * was provided, then no content will be returned.", + ) + + with self.argument_context("datafactory trigger-run cancel") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "trigger_name", type=str, help="The trigger name.", id_part="child_name_1" + ) + c.argument( + "run_id", + type=str, + help="The pipeline run identifier.", + id_part="child_name_2", + ) + + with self.argument_context("datafactory trigger-run query-by-factory") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "continuation_token", + type=str, + help="The continuation token for getting the next page of results. " + "Null for first page.", + ) + c.argument( + "last_updated_after", + help="The time at or after which the run event was updated in 'ISO 8601' " + "format.", + ) + c.argument( + "last_updated_before", + help="The time at or before which the run event was updated in 'ISO 8601' " + "format.", + ) + c.argument("filters", action=AddFilters, nargs="+", help="List of filters.") + c.argument( + "order_by", action=AddOrderBy, nargs="+", help="List of OrderBy option." + ) + + with self.argument_context("datafactory trigger-run rerun") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "trigger_name", type=str, help="The trigger name.", id_part="child_name_1" + ) + c.argument( + "run_id", + type=str, + help="The pipeline run identifier.", + id_part="child_name_2", + ) + + with self.argument_context("datafactory managed-virtual-network list") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.") + + with self.argument_context("datafactory managed-virtual-network show") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "managed_virtual_network_name", + options_list=["--name", "-n", "--managed-virtual-network-name"], + type=str, + help="Managed virtual network name", + id_part="child_name_1", + ) + c.argument( + "if_none_match", + type=str, + help="ETag of the managed Virtual Network entity. Should only be " + "specified for get. If the ETag matches the existing entity tag, or if * was provided, then no " + "content will be returned.", + ) + + with self.argument_context("datafactory managed-virtual-network create") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.") + c.argument( + "managed_virtual_network_name", + options_list=["--name", "-n", "--managed-virtual-network-name"], + type=str, + help="Managed virtual network name", + ) + c.argument( + "if_match", + type=str, + help="ETag of the managed Virtual Network entity. 
Should only be specified " + "for update, for which it should match existing entity or can be * for unconditional update.", + ) + + with self.argument_context("datafactory managed-virtual-network update") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "managed_virtual_network_name", + options_list=["--name", "-n", "--managed-virtual-network-name"], + type=str, + help="Managed virtual network name", + id_part="child_name_1", + ) + c.argument( + "if_match", + type=str, + help="ETag of the managed Virtual Network entity. Should only be specified " + "for update, for which it should match existing entity or can be * for unconditional update.", + ) + c.ignore("managed_virtual_network") + + with self.argument_context("datafactory managed-private-endpoint list") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.") + c.argument( + "managed_virtual_network_name", + options_list=["--managed-virtual-network-name", "--mvnet-name"], + type=str, + help="Managed virtual network name", + ) + + with self.argument_context("datafactory managed-private-endpoint show") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "managed_virtual_network_name", + options_list=["--managed-virtual-network-name", "--mvnet-name"], + type=str, + help="Managed virtual network name", + id_part="child_name_1", + ) + c.argument( + "managed_private_endpoint_name", + options_list=["--name", "-n", "--managed-private-endpoint-name"], + type=str, + help="Managed private endpoint name", + id_part="child_name_2", + ) + c.argument( + "if_none_match", + type=str, + help="ETag of the managed private endpoint entity. Should only be " + "specified for get. If the ETag matches the existing entity tag, or if * was provided, then no " + "content will be returned.", + ) + + with self.argument_context("datafactory managed-private-endpoint create") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.") + c.argument( + "managed_virtual_network_name", + options_list=["--managed-virtual-network-name", "--mvnet-name"], + type=str, + help="Managed virtual network name", + ) + c.argument( + "managed_private_endpoint_name", + options_list=["--name", "-n", "--managed-private-endpoint-name"], + type=str, + help="Managed private endpoint name", + ) + c.argument( + "if_match", + type=str, + help="ETag of the managed private endpoint entity. 
Should only be specified " + "for update, for which it should match existing entity or can be * for unconditional update.", + ) + c.argument("fqdns", nargs="+", help="Fully qualified domain names") + c.argument( + "group_id", + type=str, + help="The groupId to which the managed private endpoint is created", + ) + c.argument( + "private_link_resource_id", + options_list=["--private-link-resource-id", "--private-link"], + type=str, + help="The ARM resource ID of the resource to which the managed private endpoint is created", + ) + + with self.argument_context("datafactory managed-private-endpoint update") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "managed_virtual_network_name", + options_list=["--managed-virtual-network-name", "--mvnet-name"], + type=str, + help="Managed virtual network name", + id_part="child_name_1", + ) + c.argument( + "managed_private_endpoint_name", + options_list=["--name", "-n", "--managed-private-endpoint-name"], + type=str, + help="Managed private endpoint name", + id_part="child_name_2", + ) + c.argument( + "if_match", + type=str, + help="ETag of the managed private endpoint entity. Should only be specified " + "for update, for which it should match existing entity or can be * for unconditional update.", + ) + c.argument("fqdns", nargs="+", help="Fully qualified domain names") + c.argument( + "group_id", + type=str, + help="The groupId to which the managed private endpoint is created", + ) + c.argument( + "private_link_resource_id", + options_list=["--private-link-resource-id", "--private-link"], + type=str, + help="The ARM resource ID of the resource to which the managed private endpoint is created", + ) + c.ignore("managed_private_endpoint") + + with self.argument_context("datafactory managed-private-endpoint delete") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument("factory_name", type=str, help="The factory name.", id_part="name") + c.argument( + "managed_virtual_network_name", + options_list=["--managed-virtual-network-name", "--mvnet-name"], + type=str, + help="Managed virtual network name", + id_part="child_name_1", + ) + c.argument( + "managed_private_endpoint_name", + options_list=["--name", "-n", "--managed-private-endpoint-name"], + type=str, + help="Managed private endpoint name", + id_part="child_name_2", + ) diff --git a/src/datafactory/azext_datafactory/generated/action.py b/src/datafactory/azext_datafactory/generated/action.py index 8737ce3fbb2..6fa3d4979f8 100644 --- a/src/datafactory/azext_datafactory/generated/action.py +++ b/src/datafactory/azext_datafactory/generated/action.py @@ -27,45 +27,45 @@ def __call__(self, parser, namespace, values, option_string=None): def get_action(self, values, option_string): try: properties = defaultdict(list) - for (k, v) in (x.split('=', 1) for x in values): + for (k, v) in (x.split("=", 1) for x in values): properties[k].append(v) properties = dict(properties) except ValueError: - raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + raise CLIError("usage error: {} [KEY=VALUE ...]".format(option_string)) d = {} for k in properties: kl = k.lower() v = properties[k] - if kl == 'project-name': - d['project_name'] = v[0] + if kl == "project-name": + d["project_name"] = v[0] - elif kl == 'tenant-id': - d['tenant_id'] = v[0] + elif kl == "tenant-id": + d["tenant_id"] = v[0] - elif kl == 'account-name': - d['account_name'] = v[0] + elif kl == 
"account-name": + d["account_name"] = v[0] - elif kl == 'repository-name': - d['repository_name'] = v[0] + elif kl == "repository-name": + d["repository_name"] = v[0] - elif kl == 'collaboration-branch': - d['collaboration_branch'] = v[0] + elif kl == "collaboration-branch": + d["collaboration_branch"] = v[0] - elif kl == 'root-folder': - d['root_folder'] = v[0] + elif kl == "root-folder": + d["root_folder"] = v[0] - elif kl == 'last-commit-id': - d['last_commit_id'] = v[0] + elif kl == "last-commit-id": + d["last_commit_id"] = v[0] else: raise CLIError( - 'Unsupported Key {} is provided for parameter factory-vsts-configuration. All possible keys are:' - ' project-name, tenant-id, account-name, repository-name, collaboration-branch, root-folder,' - ' last-commit-id'.format(k) + "Unsupported Key {} is provided for parameter factory-vsts-configuration. All possible keys are:" + " project-name, tenant-id, account-name, repository-name, collaboration-branch, root-folder," + " last-commit-id".format(k) ) - d['type'] = 'FactoryVSTSConfiguration' + d["type"] = "FactoryVSTSConfiguration" return d @@ -78,42 +78,43 @@ def __call__(self, parser, namespace, values, option_string=None): def get_action(self, values, option_string): try: properties = defaultdict(list) - for (k, v) in (x.split('=', 1) for x in values): + for (k, v) in (x.split("=", 1) for x in values): properties[k].append(v) properties = dict(properties) except ValueError: - raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + raise CLIError("usage error: {} [KEY=VALUE ...]".format(option_string)) d = {} for k in properties: kl = k.lower() v = properties[k] - if kl == 'host-name': - d['host_name'] = v[0] + if kl == "host-name": + d["host_name"] = v[0] - elif kl == 'account-name': - d['account_name'] = v[0] + elif kl == "account-name": + d["account_name"] = v[0] - elif kl == 'repository-name': - d['repository_name'] = v[0] + elif kl == "repository-name": + d["repository_name"] = v[0] - elif kl == 'collaboration-branch': - d['collaboration_branch'] = v[0] + elif kl == "collaboration-branch": + d["collaboration_branch"] = v[0] - elif kl == 'root-folder': - d['root_folder'] = v[0] + elif kl == "root-folder": + d["root_folder"] = v[0] - elif kl == 'last-commit-id': - d['last_commit_id'] = v[0] + elif kl == "last-commit-id": + d["last_commit_id"] = v[0] else: raise CLIError( - 'Unsupported Key {} is provided for parameter factory-git-hub-configuration. All possible keys are:' - ' host-name, account-name, repository-name, collaboration-branch, root-folder, last-commit-id' - .format(k) + "Unsupported Key {} is provided for parameter factory-git-hub-configuration. 
All possible keys are:" + " host-name, account-name, repository-name, collaboration-branch, root-folder, last-commit-id".format( + k + ) ) - d['type'] = 'FactoryGitHubConfiguration' + d["type"] = "FactoryGitHubConfiguration" return d @@ -126,22 +127,24 @@ def __call__(self, parser, namespace, values, option_string=None): def get_action(self, values, option_string): try: properties = defaultdict(list) - for (k, v) in (x.split('=', 1) for x in values): + for (k, v) in (x.split("=", 1) for x in values): properties[k].append(v) properties = dict(properties) except ValueError: - raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + raise CLIError("usage error: {} [KEY=VALUE ...]".format(option_string)) d = {} for k in properties: kl = k.lower() v = properties[k] - if kl == 'name': - d['name'] = v[0] + if kl == "name": + d["name"] = v[0] else: raise CLIError( - 'Unsupported Key {} is provided for parameter folder. All possible keys are: name'.format(k) + "Unsupported Key {} is provided for parameter folder. All possible keys are: name".format( + k + ) ) return d @@ -155,29 +158,29 @@ def __call__(self, parser, namespace, values, option_string=None): def get_action(self, values, option_string): try: properties = defaultdict(list) - for (k, v) in (x.split('=', 1) for x in values): + for (k, v) in (x.split("=", 1) for x in values): properties[k].append(v) properties = dict(properties) except ValueError: - raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + raise CLIError("usage error: {} [KEY=VALUE ...]".format(option_string)) d = {} for k in properties: kl = k.lower() v = properties[k] - if kl == 'operand': - d['operand'] = v[0] + if kl == "operand": + d["operand"] = v[0] - elif kl == 'operator': - d['operator'] = v[0] + elif kl == "operator": + d["operator"] = v[0] - elif kl == 'values': - d['values'] = v + elif kl == "values": + d["values"] = v else: raise CLIError( - 'Unsupported Key {} is provided for parameter filters. All possible keys are: operand, operator,' - ' values'.format(k) + "Unsupported Key {} is provided for parameter filters. All possible keys are: operand, operator," + " values".format(k) ) return d @@ -191,26 +194,27 @@ def __call__(self, parser, namespace, values, option_string=None): def get_action(self, values, option_string): try: properties = defaultdict(list) - for (k, v) in (x.split('=', 1) for x in values): + for (k, v) in (x.split("=", 1) for x in values): properties[k].append(v) properties = dict(properties) except ValueError: - raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + raise CLIError("usage error: {} [KEY=VALUE ...]".format(option_string)) d = {} for k in properties: kl = k.lower() v = properties[k] - if kl == 'order-by': - d['order_by'] = v[0] + if kl == "order-by": + d["order_by"] = v[0] - elif kl == 'order': - d['order'] = v[0] + elif kl == "order": + d["order"] = v[0] else: raise CLIError( - 'Unsupported Key {} is provided for parameter order-by. All possible keys are: order-by, order' - .format(k) + "Unsupported Key {} is provided for parameter order-by. 
All possible keys are: order-by, order".format( + k + ) ) return d diff --git a/src/datafactory/azext_datafactory/generated/commands.py b/src/datafactory/azext_datafactory/generated/commands.py index 027d5c6638a..90b0748bc43 100644 --- a/src/datafactory/azext_datafactory/generated/commands.py +++ b/src/datafactory/azext_datafactory/generated/commands.py @@ -9,7 +9,6 @@ # -------------------------------------------------------------------------- # pylint: disable=too-many-statements # pylint: disable=too-many-locals -# pylint: disable=bad-continuation # pylint: disable=line-too-long from azure.cli.core.commands import CliCommandType @@ -31,7 +30,7 @@ datafactory_factory = CliCommandType( operations_tmpl=( - 'azext_datafactory.vendored_sdks.datafactory.operations._factories_operations#FactoriesOperations.{}' + "azext_datafactory.vendored_sdks.datafactory.operations._factories_operations#FactoriesOperations.{}" ), client_factory=cf_factory, ) @@ -39,53 +38,53 @@ datafactory_activity_run = CliCommandType( operations_tmpl=( - 'azext_datafactory.vendored_sdks.datafactory.operations._activity_runs_operations#ActivityRunsOperations.{}' + "azext_datafactory.vendored_sdks.datafactory.operations._activity_runs_operations#ActivityRunsOperations.{}" ), client_factory=cf_activity_run, ) datafactory_dataset = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._datasets_operations#DatasetsOperations.{}', + operations_tmpl="azext_datafactory.vendored_sdks.datafactory.operations._datasets_operations#DatasetsOperations.{}", client_factory=cf_dataset, ) datafactory_integration_runtime = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._integration_runtimes_operations#IntegrationRuntimesOperations.{}', + operations_tmpl="azext_datafactory.vendored_sdks.datafactory.operations._integration_runtimes_operations#IntegrationRuntimesOperations.{}", client_factory=cf_integration_runtime, ) datafactory_integration_runtime_node = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._integration_runtime_nodes_operations#IntegrationRuntimeNodesOperations.{}', + operations_tmpl="azext_datafactory.vendored_sdks.datafactory.operations._integration_runtime_nodes_operations#IntegrationRuntimeNodesOperations.{}", client_factory=cf_integration_runtime_node, ) datafactory_linked_service = CliCommandType( operations_tmpl=( - 'azext_datafactory.vendored_sdks.datafactory.operations._linked_services_operations#LinkedServicesOperations.{}' + "azext_datafactory.vendored_sdks.datafactory.operations._linked_services_operations#LinkedServicesOperations.{}" ), client_factory=cf_linked_service, ) datafactory_managed_private_endpoint = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._managed_private_endpoints_operations#ManagedPrivateEndpointsOperations.{}', + operations_tmpl="azext_datafactory.vendored_sdks.datafactory.operations._managed_private_endpoints_operations#ManagedPrivateEndpointsOperations.{}", client_factory=cf_managed_private_endpoint, ) datafactory_managed_virtual_network = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._managed_virtual_networks_operations#ManagedVirtualNetworksOperations.{}', + operations_tmpl="azext_datafactory.vendored_sdks.datafactory.operations._managed_virtual_networks_operations#ManagedVirtualNetworksOperations.{}", client_factory=cf_managed_virtual_network, ) datafactory_pipeline = CliCommandType( 
operations_tmpl=( - 'azext_datafactory.vendored_sdks.datafactory.operations._pipelines_operations#PipelinesOperations.{}' + "azext_datafactory.vendored_sdks.datafactory.operations._pipelines_operations#PipelinesOperations.{}" ), client_factory=cf_pipeline, ) @@ -93,21 +92,21 @@ datafactory_pipeline_run = CliCommandType( operations_tmpl=( - 'azext_datafactory.vendored_sdks.datafactory.operations._pipeline_runs_operations#PipelineRunsOperations.{}' + "azext_datafactory.vendored_sdks.datafactory.operations._pipeline_runs_operations#PipelineRunsOperations.{}" ), client_factory=cf_pipeline_run, ) datafactory_trigger = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._triggers_operations#TriggersOperations.{}', + operations_tmpl="azext_datafactory.vendored_sdks.datafactory.operations._triggers_operations#TriggersOperations.{}", client_factory=cf_trigger, ) datafactory_trigger_run = CliCommandType( operations_tmpl=( - 'azext_datafactory.vendored_sdks.datafactory.operations._trigger_runs_operations#TriggerRunsOperations.{}' + "azext_datafactory.vendored_sdks.datafactory.operations._trigger_runs_operations#TriggerRunsOperations.{}" ), client_factory=cf_trigger_run, ) @@ -115,130 +114,214 @@ def load_command_table(self, _): - with self.command_group('datafactory', datafactory_factory, client_factory=cf_factory) as g: - g.custom_command('list', 'datafactory_list') - g.custom_show_command('show', 'datafactory_show') - g.custom_command('create', 'datafactory_create') - g.custom_command('update', 'datafactory_update') - g.custom_command('delete', 'datafactory_delete', confirmation=True) - g.custom_command('configure-factory-repo', 'datafactory_configure_factory_repo') - g.custom_command('get-data-plane-access', 'datafactory_get_data_plane_access') - g.custom_command('get-git-hub-access-token', 'datafactory_get_git_hub_access_token') - - with self.command_group('datafactory activity-run', datafactory_activity_run, client_factory=cf_activity_run) as g: - g.custom_command('query-by-pipeline-run', 'datafactory_activity_run_query_by_pipeline_run') - - with self.command_group('datafactory dataset', datafactory_dataset, client_factory=cf_dataset) as g: - g.custom_command('list', 'datafactory_dataset_list') - g.custom_show_command('show', 'datafactory_dataset_show') - g.custom_command('create', 'datafactory_dataset_create') - g.generic_update_command('update', custom_func_name='datafactory_dataset_update', setter_arg_name='dataset') - g.custom_command('delete', 'datafactory_dataset_delete', confirmation=True) + with self.command_group( + "datafactory", datafactory_factory, client_factory=cf_factory + ) as g: + g.custom_command("list", "datafactory_list") + g.custom_show_command("show", "datafactory_show") + g.custom_command("create", "datafactory_create") + g.custom_command("update", "datafactory_update") + g.custom_command("delete", "datafactory_delete", confirmation=True) + g.custom_command("configure-factory-repo", "datafactory_configure_factory_repo") + g.custom_command("get-data-plane-access", "datafactory_get_data_plane_access") + g.custom_command( + "get-git-hub-access-token", "datafactory_get_git_hub_access_token" + ) + + with self.command_group( + "datafactory activity-run", + datafactory_activity_run, + client_factory=cf_activity_run, + ) as g: + g.custom_command( + "query-by-pipeline-run", "datafactory_activity_run_query_by_pipeline_run" + ) with self.command_group( - 'datafactory integration-runtime', datafactory_integration_runtime, 
client_factory=cf_integration_runtime + "datafactory dataset", datafactory_dataset, client_factory=cf_dataset ) as g: - g.custom_command('list', 'datafactory_integration_runtime_list') - g.custom_show_command('show', 'datafactory_integration_runtime_show') - g.custom_command( - 'linked-integration-runtime create', 'datafactory_integration_runtime_linked_integration_runtime_create' - ) - g.custom_command('managed create', 'datafactory_integration_runtime_managed_create') - g.custom_command('self-hosted create', 'datafactory_integration_runtime_self_hosted_create') - g.custom_command('update', 'datafactory_integration_runtime_update') - g.custom_command('delete', 'datafactory_integration_runtime_delete', confirmation=True) - g.custom_command('get-connection-info', 'datafactory_integration_runtime_get_connection_info') - g.custom_command('get-monitoring-data', 'datafactory_integration_runtime_get_monitoring_data') - g.custom_command('get-status', 'datafactory_integration_runtime_get_status') - g.custom_command('list-auth-key', 'datafactory_integration_runtime_list_auth_key') - g.custom_command('regenerate-auth-key', 'datafactory_integration_runtime_regenerate_auth_key') - g.custom_command('remove-link', 'datafactory_integration_runtime_remove_link') - g.custom_command('start', 'datafactory_integration_runtime_start', supports_no_wait=True) - g.custom_command('stop', 'datafactory_integration_runtime_stop', supports_no_wait=True) - g.custom_command('sync-credentials', 'datafactory_integration_runtime_sync_credentials') - g.custom_command('upgrade', 'datafactory_integration_runtime_upgrade') - g.custom_wait_command('wait', 'datafactory_integration_runtime_show') + g.custom_command("list", "datafactory_dataset_list") + g.custom_show_command("show", "datafactory_dataset_show") + g.custom_command("create", "datafactory_dataset_create") + g.generic_update_command( + "update", + custom_func_name="datafactory_dataset_update", + setter_arg_name="dataset", + ) + g.custom_command("delete", "datafactory_dataset_delete", confirmation=True) with self.command_group( - 'datafactory integration-runtime-node', + "datafactory integration-runtime", + datafactory_integration_runtime, + client_factory=cf_integration_runtime, + ) as g: + g.custom_command("list", "datafactory_integration_runtime_list") + g.custom_show_command("show", "datafactory_integration_runtime_show") + g.custom_command( + "linked-integration-runtime create", + "datafactory_integration_runtime_linked_integration_runtime_create", + ) + g.custom_command( + "managed create", "datafactory_integration_runtime_managed_create" + ) + g.custom_command( + "self-hosted create", "datafactory_integration_runtime_self_hosted_create" + ) + g.custom_command("update", "datafactory_integration_runtime_update") + g.custom_command( + "delete", "datafactory_integration_runtime_delete", confirmation=True + ) + g.custom_command( + "get-connection-info", "datafactory_integration_runtime_get_connection_info" + ) + g.custom_command( + "get-monitoring-data", "datafactory_integration_runtime_get_monitoring_data" + ) + g.custom_command("get-status", "datafactory_integration_runtime_get_status") + g.custom_command( + "list-auth-key", "datafactory_integration_runtime_list_auth_key" + ) + g.custom_command( + "regenerate-auth-key", "datafactory_integration_runtime_regenerate_auth_key" + ) + g.custom_command("remove-link", "datafactory_integration_runtime_remove_link") + g.custom_command( + "start", "datafactory_integration_runtime_start", supports_no_wait=True + ) + 
g.custom_command( + "stop", "datafactory_integration_runtime_stop", supports_no_wait=True + ) + g.custom_command( + "sync-credentials", "datafactory_integration_runtime_sync_credentials" + ) + g.custom_command("upgrade", "datafactory_integration_runtime_upgrade") + g.custom_wait_command("wait", "datafactory_integration_runtime_show") + + with self.command_group( + "datafactory integration-runtime-node", datafactory_integration_runtime_node, client_factory=cf_integration_runtime_node, ) as g: - g.custom_show_command('show', 'datafactory_integration_runtime_node_show') - g.custom_command('update', 'datafactory_integration_runtime_node_update') - g.custom_command('delete', 'datafactory_integration_runtime_node_delete', confirmation=True) - g.custom_command('get-ip-address', 'datafactory_integration_runtime_node_get_ip_address') + g.custom_show_command("show", "datafactory_integration_runtime_node_show") + g.custom_command("update", "datafactory_integration_runtime_node_update") + g.custom_command( + "delete", "datafactory_integration_runtime_node_delete", confirmation=True + ) + g.custom_command( + "get-ip-address", "datafactory_integration_runtime_node_get_ip_address" + ) with self.command_group( - 'datafactory linked-service', datafactory_linked_service, client_factory=cf_linked_service + "datafactory linked-service", + datafactory_linked_service, + client_factory=cf_linked_service, ) as g: - g.custom_command('list', 'datafactory_linked_service_list') - g.custom_show_command('show', 'datafactory_linked_service_show') - g.custom_command('create', 'datafactory_linked_service_create') + g.custom_command("list", "datafactory_linked_service_list") + g.custom_show_command("show", "datafactory_linked_service_show") + g.custom_command("create", "datafactory_linked_service_create") g.generic_update_command( - 'update', custom_func_name='datafactory_linked_service_update', setter_arg_name='linked_service' + "update", + custom_func_name="datafactory_linked_service_update", + setter_arg_name="linked_service", + ) + g.custom_command( + "delete", "datafactory_linked_service_delete", confirmation=True ) - g.custom_command('delete', 'datafactory_linked_service_delete', confirmation=True) with self.command_group( - 'datafactory managed-private-endpoint', + "datafactory managed-private-endpoint", datafactory_managed_private_endpoint, client_factory=cf_managed_private_endpoint, is_preview=True, ) as g: - g.custom_command('list', 'datafactory_managed_private_endpoint_list') - g.custom_show_command('show', 'datafactory_managed_private_endpoint_show') - g.custom_command('create', 'datafactory_managed_private_endpoint_create') + g.custom_command("list", "datafactory_managed_private_endpoint_list") + g.custom_show_command("show", "datafactory_managed_private_endpoint_show") + g.custom_command("create", "datafactory_managed_private_endpoint_create") g.generic_update_command( - 'update', - custom_func_name='datafactory_managed_private_endpoint_update', - setter_arg_name='managed_private_endpoint', + "update", + custom_func_name="datafactory_managed_private_endpoint_update", + setter_arg_name="managed_private_endpoint", + ) + g.custom_command( + "delete", "datafactory_managed_private_endpoint_delete", confirmation=True ) - g.custom_command('delete', 'datafactory_managed_private_endpoint_delete', confirmation=True) with self.command_group( - 'datafactory managed-virtual-network', + "datafactory managed-virtual-network", datafactory_managed_virtual_network, client_factory=cf_managed_virtual_network, is_preview=True, ) 
as g: - g.custom_command('list', 'datafactory_managed_virtual_network_list') - g.custom_show_command('show', 'datafactory_managed_virtual_network_show') - g.custom_command('create', 'datafactory_managed_virtual_network_create') + g.custom_command("list", "datafactory_managed_virtual_network_list") + g.custom_show_command("show", "datafactory_managed_virtual_network_show") + g.custom_command("create", "datafactory_managed_virtual_network_create") + g.generic_update_command( + "update", + custom_func_name="datafactory_managed_virtual_network_update", + setter_arg_name="managed_virtual_network", + ) + + with self.command_group( + "datafactory pipeline", datafactory_pipeline, client_factory=cf_pipeline + ) as g: + g.custom_command("list", "datafactory_pipeline_list") + g.custom_show_command("show", "datafactory_pipeline_show") + g.custom_command("create", "datafactory_pipeline_create") + g.generic_update_command( + "update", + custom_func_name="datafactory_pipeline_update", + setter_arg_name="pipeline", + ) + g.custom_command("delete", "datafactory_pipeline_delete", confirmation=True) + g.custom_command("create-run", "datafactory_pipeline_create_run") + + with self.command_group( + "datafactory pipeline-run", + datafactory_pipeline_run, + client_factory=cf_pipeline_run, + ) as g: + g.custom_show_command("show", "datafactory_pipeline_run_show") + g.custom_command("cancel", "datafactory_pipeline_run_cancel") + g.custom_command( + "query-by-factory", "datafactory_pipeline_run_query_by_factory" + ) + + with self.command_group( + "datafactory trigger", datafactory_trigger, client_factory=cf_trigger + ) as g: + g.custom_command("list", "datafactory_trigger_list") + g.custom_show_command("show", "datafactory_trigger_show") + g.custom_command("create", "datafactory_trigger_create") g.generic_update_command( - 'update', - custom_func_name='datafactory_managed_virtual_network_update', - setter_arg_name='managed_virtual_network', - ) - - with self.command_group('datafactory pipeline', datafactory_pipeline, client_factory=cf_pipeline) as g: - g.custom_command('list', 'datafactory_pipeline_list') - g.custom_show_command('show', 'datafactory_pipeline_show') - g.custom_command('create', 'datafactory_pipeline_create') - g.generic_update_command('update', custom_func_name='datafactory_pipeline_update', setter_arg_name='pipeline') - g.custom_command('delete', 'datafactory_pipeline_delete', confirmation=True) - g.custom_command('create-run', 'datafactory_pipeline_create_run') - - with self.command_group('datafactory pipeline-run', datafactory_pipeline_run, client_factory=cf_pipeline_run) as g: - g.custom_show_command('show', 'datafactory_pipeline_run_show') - g.custom_command('cancel', 'datafactory_pipeline_run_cancel') - g.custom_command('query-by-factory', 'datafactory_pipeline_run_query_by_factory') - - with self.command_group('datafactory trigger', datafactory_trigger, client_factory=cf_trigger) as g: - g.custom_command('list', 'datafactory_trigger_list') - g.custom_show_command('show', 'datafactory_trigger_show') - g.custom_command('create', 'datafactory_trigger_create') - g.generic_update_command('update', custom_func_name='datafactory_trigger_update', setter_arg_name='trigger') - g.custom_command('delete', 'datafactory_trigger_delete', confirmation=True) - g.custom_command('get-event-subscription-status', 'datafactory_trigger_get_event_subscription_status') - g.custom_command('query-by-factory', 'datafactory_trigger_query_by_factory') - g.custom_command('start', 'datafactory_trigger_start', 
supports_no_wait=True) - g.custom_command('stop', 'datafactory_trigger_stop', supports_no_wait=True) - g.custom_command('subscribe-to-event', 'datafactory_trigger_subscribe_to_event', supports_no_wait=True) - g.custom_command('unsubscribe-from-event', 'datafactory_trigger_unsubscribe_from_event', supports_no_wait=True) - g.custom_wait_command('wait', 'datafactory_trigger_show') - - with self.command_group('datafactory trigger-run', datafactory_trigger_run, client_factory=cf_trigger_run) as g: - g.custom_command('cancel', 'datafactory_trigger_run_cancel') - g.custom_command('query-by-factory', 'datafactory_trigger_run_query_by_factory') - g.custom_command('rerun', 'datafactory_trigger_run_rerun') + "update", + custom_func_name="datafactory_trigger_update", + setter_arg_name="trigger", + ) + g.custom_command("delete", "datafactory_trigger_delete", confirmation=True) + g.custom_command( + "get-event-subscription-status", + "datafactory_trigger_get_event_subscription_status", + ) + g.custom_command("query-by-factory", "datafactory_trigger_query_by_factory") + g.custom_command("start", "datafactory_trigger_start", supports_no_wait=True) + g.custom_command("stop", "datafactory_trigger_stop", supports_no_wait=True) + g.custom_command( + "subscribe-to-event", + "datafactory_trigger_subscribe_to_event", + supports_no_wait=True, + ) + g.custom_command( + "unsubscribe-from-event", + "datafactory_trigger_unsubscribe_from_event", + supports_no_wait=True, + ) + g.custom_wait_command("wait", "datafactory_trigger_show") + + with self.command_group( + "datafactory trigger-run", + datafactory_trigger_run, + client_factory=cf_trigger_run, + ) as g: + g.custom_command("cancel", "datafactory_trigger_run_cancel") + g.custom_command("query-by-factory", "datafactory_trigger_run_query_by_factory") + g.custom_command("rerun", "datafactory_trigger_run_rerun") diff --git a/src/datafactory/azext_datafactory/generated/custom.py b/src/datafactory/azext_datafactory/generated/custom.py index d9b21280a67..b77c351e915 100644 --- a/src/datafactory/azext_datafactory/generated/custom.py +++ b/src/datafactory/azext_datafactory/generated/custom.py @@ -15,454 +15,518 @@ from azure.cli.core.util import sdk_no_wait -def datafactory_list(client, - resource_group_name=None): +def datafactory_list(client, resource_group_name=None): if resource_group_name: return client.list_by_resource_group(resource_group_name=resource_group_name) return client.list() -def datafactory_show(client, - resource_group_name, - factory_name, - if_none_match=None): - return client.get(resource_group_name=resource_group_name, - factory_name=factory_name, - if_none_match=if_none_match) - - -def datafactory_create(client, - resource_group_name, - factory_name, - if_match=None, - location=None, - tags=None, - factory_vsts_configuration=None, - factory_git_hub_configuration=None, - global_parameters=None): +def datafactory_show(client, resource_group_name, factory_name, if_none_match=None): + return client.get( + resource_group_name=resource_group_name, + factory_name=factory_name, + if_none_match=if_none_match, + ) + + +def datafactory_create( + client, + resource_group_name, + factory_name, + if_match=None, + location=None, + tags=None, + factory_vsts_configuration=None, + factory_git_hub_configuration=None, + global_parameters=None, +): all_repo_configuration = [] if factory_vsts_configuration is not None: all_repo_configuration.append(factory_vsts_configuration) if factory_git_hub_configuration is not None: 
all_repo_configuration.append(factory_git_hub_configuration) if len(all_repo_configuration) > 1: - raise CLIError('at most one of factory_vsts_configuration, factory_git_hub_configuration is needed for ' - 'repo_configuration!') - repo_configuration = all_repo_configuration[0] if len(all_repo_configuration) == 1 else None + raise CLIError( + "at most one of factory_vsts_configuration, factory_git_hub_configuration is needed for " + "repo_configuration!" + ) + repo_configuration = ( + all_repo_configuration[0] if len(all_repo_configuration) == 1 else None + ) factory = {} if location is not None: - factory['location'] = location + factory["location"] = location if tags is not None: - factory['tags'] = tags + factory["tags"] = tags if repo_configuration is not None: - factory['repo_configuration'] = repo_configuration + factory["repo_configuration"] = repo_configuration if global_parameters is not None: - factory['global_parameters'] = global_parameters - factory['encryption'] = {} - if len(factory['encryption']) == 0: - del factory['encryption'] - factory['identity'] = {} - if len(factory['identity']) == 0: - del factory['identity'] - return client.create_or_update(resource_group_name=resource_group_name, - factory_name=factory_name, - if_match=if_match, - factory=factory) - - -def datafactory_update(client, - resource_group_name, - factory_name, - tags=None): + factory["global_parameters"] = global_parameters + factory["encryption"] = {} + if len(factory["encryption"]) == 0: + del factory["encryption"] + factory["identity"] = {} + if len(factory["identity"]) == 0: + del factory["identity"] + return client.create_or_update( + resource_group_name=resource_group_name, + factory_name=factory_name, + if_match=if_match, + factory=factory, + ) + + +def datafactory_update(client, resource_group_name, factory_name, tags=None): factory_update_parameters = {} if tags is not None: - factory_update_parameters['tags'] = tags - factory_update_parameters['identity'] = {} - if len(factory_update_parameters['identity']) == 0: - del factory_update_parameters['identity'] - return client.update(resource_group_name=resource_group_name, - factory_name=factory_name, - factory_update_parameters=factory_update_parameters) - - -def datafactory_delete(client, - resource_group_name, - factory_name): - return client.delete(resource_group_name=resource_group_name, - factory_name=factory_name) - - -def datafactory_configure_factory_repo(client, - location, - factory_resource_id=None, - factory_vsts_configuration=None, - factory_git_hub_configuration=None): + factory_update_parameters["tags"] = tags + factory_update_parameters["identity"] = {} + if len(factory_update_parameters["identity"]) == 0: + del factory_update_parameters["identity"] + return client.update( + resource_group_name=resource_group_name, + factory_name=factory_name, + factory_update_parameters=factory_update_parameters, + ) + + +def datafactory_delete(client, resource_group_name, factory_name): + return client.delete( + resource_group_name=resource_group_name, factory_name=factory_name + ) + + +def datafactory_configure_factory_repo( + client, + location, + factory_resource_id=None, + factory_vsts_configuration=None, + factory_git_hub_configuration=None, +): all_repo_configuration = [] if factory_vsts_configuration is not None: all_repo_configuration.append(factory_vsts_configuration) if factory_git_hub_configuration is not None: all_repo_configuration.append(factory_git_hub_configuration) if len(all_repo_configuration) > 1: - raise CLIError('at most one 
of factory_vsts_configuration, factory_git_hub_configuration is needed for ' - 'repo_configuration!') - repo_configuration = all_repo_configuration[0] if len(all_repo_configuration) == 1 else None + raise CLIError( + "at most one of factory_vsts_configuration, factory_git_hub_configuration is needed for " + "repo_configuration!" + ) + repo_configuration = ( + all_repo_configuration[0] if len(all_repo_configuration) == 1 else None + ) factory_repo_update = {} if factory_resource_id is not None: - factory_repo_update['factory_resource_id'] = factory_resource_id + factory_repo_update["factory_resource_id"] = factory_resource_id if repo_configuration is not None: - factory_repo_update['repo_configuration'] = repo_configuration - return client.configure_factory_repo(location_id=location, - factory_repo_update=factory_repo_update) - - -def datafactory_get_data_plane_access(client, - resource_group_name, - factory_name, - permissions=None, - access_resource_path=None, - profile_name=None, - start_time=None, - expire_time=None): + factory_repo_update["repo_configuration"] = repo_configuration + return client.configure_factory_repo( + location_id=location, factory_repo_update=factory_repo_update + ) + + +def datafactory_get_data_plane_access( + client, + resource_group_name, + factory_name, + permissions=None, + access_resource_path=None, + profile_name=None, + start_time=None, + expire_time=None, +): policy = {} if permissions is not None: - policy['permissions'] = permissions + policy["permissions"] = permissions if access_resource_path is not None: - policy['access_resource_path'] = access_resource_path + policy["access_resource_path"] = access_resource_path if profile_name is not None: - policy['profile_name'] = profile_name + policy["profile_name"] = profile_name if start_time is not None: - policy['start_time'] = start_time + policy["start_time"] = start_time if expire_time is not None: - policy['expire_time'] = expire_time - return client.get_data_plane_access(resource_group_name=resource_group_name, - factory_name=factory_name, - policy=policy) - - -def datafactory_get_git_hub_access_token(client, - resource_group_name, - factory_name, - git_hub_access_code, - git_hub_access_token_base_url, - git_hub_client_id=None): + policy["expire_time"] = expire_time + return client.get_data_plane_access( + resource_group_name=resource_group_name, + factory_name=factory_name, + policy=policy, + ) + + +def datafactory_get_git_hub_access_token( + client, + resource_group_name, + factory_name, + git_hub_access_code, + git_hub_access_token_base_url, + git_hub_client_id=None, +): git_hub_access_token_request = {} - git_hub_access_token_request['git_hub_access_code'] = git_hub_access_code + git_hub_access_token_request["git_hub_access_code"] = git_hub_access_code if git_hub_client_id is not None: - git_hub_access_token_request['git_hub_client_id'] = git_hub_client_id - git_hub_access_token_request['git_hub_access_token_base_url'] = git_hub_access_token_base_url - return client.get_git_hub_access_token(resource_group_name=resource_group_name, - factory_name=factory_name, - git_hub_access_token_request=git_hub_access_token_request) - - -def datafactory_integration_runtime_list(client, - resource_group_name, - factory_name): - return client.list_by_factory(resource_group_name=resource_group_name, - factory_name=factory_name) - - -def datafactory_integration_runtime_show(client, - resource_group_name, - factory_name, - integration_runtime_name, - if_none_match=None): - return 
client.get(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - if_none_match=if_none_match) - - -def datafactory_integration_runtime_linked_integration_runtime_create(client, - resource_group_name, - factory_name, - integration_runtime_name, - name=None, - subscription_id=None, - data_factory_name=None, - location=None): + git_hub_access_token_request["git_hub_client_id"] = git_hub_client_id + git_hub_access_token_request[ + "git_hub_access_token_base_url" + ] = git_hub_access_token_base_url + return client.get_git_hub_access_token( + resource_group_name=resource_group_name, + factory_name=factory_name, + git_hub_access_token_request=git_hub_access_token_request, + ) + + +def datafactory_integration_runtime_list(client, resource_group_name, factory_name): + return client.list_by_factory( + resource_group_name=resource_group_name, factory_name=factory_name + ) + + +def datafactory_integration_runtime_show( + client, + resource_group_name, + factory_name, + integration_runtime_name, + if_none_match=None, +): + return client.get( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + if_none_match=if_none_match, + ) + + +def datafactory_integration_runtime_linked_integration_runtime_create( + client, + resource_group_name, + factory_name, + integration_runtime_name, + name=None, + subscription_id=None, + data_factory_name=None, + location=None, +): create_linked_integration_runtime_request = {} if name is not None: - create_linked_integration_runtime_request['name'] = name + create_linked_integration_runtime_request["name"] = name if subscription_id is not None: - create_linked_integration_runtime_request['subscription_id'] = subscription_id + create_linked_integration_runtime_request["subscription_id"] = subscription_id if data_factory_name is not None: - create_linked_integration_runtime_request['data_factory_name'] = data_factory_name + create_linked_integration_runtime_request[ + "data_factory_name" + ] = data_factory_name if location is not None: - create_linked_integration_runtime_request['data_factory_location'] = location - return client.create_linked_integration_runtime(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - create_linked_integration_runtime_request=create_linked_integration_runtime_request) - - -def datafactory_integration_runtime_managed_create(client, - resource_group_name, - factory_name, - integration_runtime_name, - if_match=None, - description=None, - compute_properties=None, - ssis_properties=None): + create_linked_integration_runtime_request["data_factory_location"] = location + return client.create_linked_integration_runtime( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + create_linked_integration_runtime_request=create_linked_integration_runtime_request, + ) + + +def datafactory_integration_runtime_managed_create( + client, + resource_group_name, + factory_name, + integration_runtime_name, + if_match=None, + description=None, + compute_properties=None, + ssis_properties=None, +): integration_runtime = {} - integration_runtime['properties'] = {} - integration_runtime['properties']['type'] = 'Managed' + integration_runtime["properties"] = {} + integration_runtime["properties"]["type"] = "Managed" if description is not None: - 
integration_runtime['properties']['description'] = description + integration_runtime["properties"]["description"] = description if compute_properties is not None: - integration_runtime['properties']['compute_properties'] = compute_properties + integration_runtime["properties"]["compute_properties"] = compute_properties if ssis_properties is not None: - integration_runtime['properties']['ssis_properties'] = ssis_properties - return client.create_or_update(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - if_match=if_match, - integration_runtime=integration_runtime) - - -def datafactory_integration_runtime_self_hosted_create(client, - resource_group_name, - factory_name, - integration_runtime_name, - if_match=None, - description=None, - linked_info=None): + integration_runtime["properties"]["ssis_properties"] = ssis_properties + return client.create_or_update( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + if_match=if_match, + integration_runtime=integration_runtime, + ) + + +def datafactory_integration_runtime_self_hosted_create( + client, + resource_group_name, + factory_name, + integration_runtime_name, + if_match=None, + description=None, + linked_info=None, +): integration_runtime = {} - integration_runtime['properties'] = {} - integration_runtime['properties']['type'] = 'SelfHosted' + integration_runtime["properties"] = {} + integration_runtime["properties"]["type"] = "SelfHosted" if description is not None: - integration_runtime['properties']['description'] = description + integration_runtime["properties"]["description"] = description if linked_info is not None: - integration_runtime['properties']['linked_info'] = linked_info - return client.create_or_update(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - if_match=if_match, - integration_runtime=integration_runtime) - - -def datafactory_integration_runtime_update(client, - resource_group_name, - factory_name, - integration_runtime_name, - auto_update=None, - update_delay_offset=None): + integration_runtime["properties"]["linked_info"] = linked_info + return client.create_or_update( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + if_match=if_match, + integration_runtime=integration_runtime, + ) + + +def datafactory_integration_runtime_update( + client, + resource_group_name, + factory_name, + integration_runtime_name, + auto_update=None, + update_delay_offset=None, +): update_integration_runtime_request = {} if auto_update is not None: - update_integration_runtime_request['auto_update'] = auto_update + update_integration_runtime_request["auto_update"] = auto_update if update_delay_offset is not None: - update_integration_runtime_request['update_delay_offset'] = update_delay_offset - return client.update(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - update_integration_runtime_request=update_integration_runtime_request) - - -def datafactory_integration_runtime_delete(client, - resource_group_name, - factory_name, - integration_runtime_name): - return client.delete(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name) - - -def datafactory_integration_runtime_get_connection_info(client, - 
resource_group_name, - factory_name, - integration_runtime_name): - return client.get_connection_info(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name) - - -def datafactory_integration_runtime_get_monitoring_data(client, - resource_group_name, - factory_name, - integration_runtime_name): - return client.get_monitoring_data(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name) - - -def datafactory_integration_runtime_get_status(client, - resource_group_name, - factory_name, - integration_runtime_name): - return client.get_status(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name) - - -def datafactory_integration_runtime_list_auth_key(client, - resource_group_name, - factory_name, - integration_runtime_name): - return client.list_auth_keys(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name) - - -def datafactory_integration_runtime_regenerate_auth_key(client, - resource_group_name, - factory_name, - integration_runtime_name, - key_name=None): + update_integration_runtime_request["update_delay_offset"] = update_delay_offset + return client.update( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + update_integration_runtime_request=update_integration_runtime_request, + ) + + +def datafactory_integration_runtime_delete( + client, resource_group_name, factory_name, integration_runtime_name +): + return client.delete( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + ) + + +def datafactory_integration_runtime_get_connection_info( + client, resource_group_name, factory_name, integration_runtime_name +): + return client.get_connection_info( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + ) + + +def datafactory_integration_runtime_get_monitoring_data( + client, resource_group_name, factory_name, integration_runtime_name +): + return client.get_monitoring_data( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + ) + + +def datafactory_integration_runtime_get_status( + client, resource_group_name, factory_name, integration_runtime_name +): + return client.get_status( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + ) + + +def datafactory_integration_runtime_list_auth_key( + client, resource_group_name, factory_name, integration_runtime_name +): + return client.list_auth_keys( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + ) + + +def datafactory_integration_runtime_regenerate_auth_key( + client, resource_group_name, factory_name, integration_runtime_name, key_name=None +): regenerate_key_parameters = {} if key_name is not None: - regenerate_key_parameters['key_name'] = key_name - return client.regenerate_auth_key(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - regenerate_key_parameters=regenerate_key_parameters) - - -def datafactory_integration_runtime_remove_link(client, - 
resource_group_name, - factory_name, - integration_runtime_name, - linked_factory_name): + regenerate_key_parameters["key_name"] = key_name + return client.regenerate_auth_key( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + regenerate_key_parameters=regenerate_key_parameters, + ) + + +def datafactory_integration_runtime_remove_link( + client, + resource_group_name, + factory_name, + integration_runtime_name, + linked_factory_name, +): linked_integration_runtime_request = {} - linked_integration_runtime_request['linked_factory_name'] = linked_factory_name - return client.remove_links(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - linked_integration_runtime_request=linked_integration_runtime_request) - - -def datafactory_integration_runtime_start(client, - resource_group_name, - factory_name, - integration_runtime_name, - no_wait=False): - return sdk_no_wait(no_wait, - client.begin_start, - resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name) - - -def datafactory_integration_runtime_stop(client, - resource_group_name, - factory_name, - integration_runtime_name, - no_wait=False): - return sdk_no_wait(no_wait, - client.begin_stop, - resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name) - - -def datafactory_integration_runtime_sync_credentials(client, - resource_group_name, - factory_name, - integration_runtime_name): - return client.sync_credentials(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name) - - -def datafactory_integration_runtime_upgrade(client, - resource_group_name, - factory_name, - integration_runtime_name): - return client.upgrade(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name) - - -def datafactory_integration_runtime_node_show(client, - resource_group_name, - factory_name, - integration_runtime_name, - node_name): - return client.get(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - node_name=node_name) - - -def datafactory_integration_runtime_node_update(client, - resource_group_name, - factory_name, - integration_runtime_name, - node_name, - concurrent_jobs_limit=None): + linked_integration_runtime_request["linked_factory_name"] = linked_factory_name + return client.remove_links( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + linked_integration_runtime_request=linked_integration_runtime_request, + ) + + +def datafactory_integration_runtime_start( + client, resource_group_name, factory_name, integration_runtime_name, no_wait=False +): + return sdk_no_wait( + no_wait, + client.begin_start, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + ) + + +def datafactory_integration_runtime_stop( + client, resource_group_name, factory_name, integration_runtime_name, no_wait=False +): + return sdk_no_wait( + no_wait, + client.begin_stop, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + ) + + +def datafactory_integration_runtime_sync_credentials( + 
client, resource_group_name, factory_name, integration_runtime_name +): + return client.sync_credentials( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + ) + + +def datafactory_integration_runtime_upgrade( + client, resource_group_name, factory_name, integration_runtime_name +): + return client.upgrade( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + ) + + +def datafactory_integration_runtime_node_show( + client, resource_group_name, factory_name, integration_runtime_name, node_name +): + return client.get( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + node_name=node_name, + ) + + +def datafactory_integration_runtime_node_update( + client, + resource_group_name, + factory_name, + integration_runtime_name, + node_name, + concurrent_jobs_limit=None, +): update_integration_runtime_node_request = {} if concurrent_jobs_limit is not None: - update_integration_runtime_node_request['concurrent_jobs_limit'] = concurrent_jobs_limit - return client.update(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - node_name=node_name, - update_integration_runtime_node_request=update_integration_runtime_node_request) - - -def datafactory_integration_runtime_node_delete(client, - resource_group_name, - factory_name, - integration_runtime_name, - node_name): - return client.delete(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - node_name=node_name) - - -def datafactory_integration_runtime_node_get_ip_address(client, - resource_group_name, - factory_name, - integration_runtime_name, - node_name): - return client.get_ip_address(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - node_name=node_name) - - -def datafactory_linked_service_list(client, - resource_group_name, - factory_name): - return client.list_by_factory(resource_group_name=resource_group_name, - factory_name=factory_name) - - -def datafactory_linked_service_show(client, - resource_group_name, - factory_name, - linked_service_name, - if_none_match=None): - return client.get(resource_group_name=resource_group_name, - factory_name=factory_name, - linked_service_name=linked_service_name, - if_none_match=if_none_match) - - -def datafactory_linked_service_create(client, - resource_group_name, - factory_name, - linked_service_name, - properties, - if_match=None): + update_integration_runtime_node_request[ + "concurrent_jobs_limit" + ] = concurrent_jobs_limit + return client.update( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + node_name=node_name, + update_integration_runtime_node_request=update_integration_runtime_node_request, + ) + + +def datafactory_integration_runtime_node_delete( + client, resource_group_name, factory_name, integration_runtime_name, node_name +): + return client.delete( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + node_name=node_name, + ) + + +def datafactory_integration_runtime_node_get_ip_address( + client, resource_group_name, factory_name, integration_runtime_name, node_name +): + return client.get_ip_address( + 
resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + node_name=node_name, + ) + + +def datafactory_linked_service_list(client, resource_group_name, factory_name): + return client.list_by_factory( + resource_group_name=resource_group_name, factory_name=factory_name + ) + + +def datafactory_linked_service_show( + client, resource_group_name, factory_name, linked_service_name, if_none_match=None +): + return client.get( + resource_group_name=resource_group_name, + factory_name=factory_name, + linked_service_name=linked_service_name, + if_none_match=if_none_match, + ) + + +def datafactory_linked_service_create( + client, + resource_group_name, + factory_name, + linked_service_name, + properties, + if_match=None, +): linked_service = {} - linked_service['properties'] = properties - return client.create_or_update(resource_group_name=resource_group_name, - factory_name=factory_name, - linked_service_name=linked_service_name, - if_match=if_match, - linked_service=linked_service) - - -def datafactory_linked_service_update(instance, - resource_group_name, - factory_name, - linked_service_name, - if_match=None, - connect_via=None, - description=None, - parameters=None, - annotations=None): + linked_service["properties"] = properties + return client.create_or_update( + resource_group_name=resource_group_name, + factory_name=factory_name, + linked_service_name=linked_service_name, + if_match=if_match, + linked_service=linked_service, + ) + + +def datafactory_linked_service_update( + instance, + resource_group_name, + factory_name, + linked_service_name, + if_match=None, + connect_via=None, + description=None, + parameters=None, + annotations=None, +): if connect_via is not None: instance.properties.connect_via = connect_via if description is not None: @@ -474,60 +538,61 @@ def datafactory_linked_service_update(instance, return instance -def datafactory_linked_service_delete(client, - resource_group_name, - factory_name, - linked_service_name): - return client.delete(resource_group_name=resource_group_name, - factory_name=factory_name, - linked_service_name=linked_service_name) +def datafactory_linked_service_delete( + client, resource_group_name, factory_name, linked_service_name +): + return client.delete( + resource_group_name=resource_group_name, + factory_name=factory_name, + linked_service_name=linked_service_name, + ) -def datafactory_dataset_list(client, - resource_group_name, - factory_name): - return client.list_by_factory(resource_group_name=resource_group_name, - factory_name=factory_name) +def datafactory_dataset_list(client, resource_group_name, factory_name): + return client.list_by_factory( + resource_group_name=resource_group_name, factory_name=factory_name + ) -def datafactory_dataset_show(client, - resource_group_name, - factory_name, - dataset_name, - if_none_match=None): - return client.get(resource_group_name=resource_group_name, - factory_name=factory_name, - dataset_name=dataset_name, - if_none_match=if_none_match) +def datafactory_dataset_show( + client, resource_group_name, factory_name, dataset_name, if_none_match=None +): + return client.get( + resource_group_name=resource_group_name, + factory_name=factory_name, + dataset_name=dataset_name, + if_none_match=if_none_match, + ) -def datafactory_dataset_create(client, - resource_group_name, - factory_name, - dataset_name, - properties, - if_match=None): +def datafactory_dataset_create( + client, resource_group_name, factory_name, dataset_name, properties, 
if_match=None +): dataset = {} - dataset['properties'] = properties - return client.create_or_update(resource_group_name=resource_group_name, - factory_name=factory_name, - dataset_name=dataset_name, - if_match=if_match, - dataset=dataset) - - -def datafactory_dataset_update(instance, - resource_group_name, - factory_name, - dataset_name, - linked_service_name, - if_match=None, - description=None, - structure=None, - schema=None, - parameters=None, - annotations=None, - folder=None): + dataset["properties"] = properties + return client.create_or_update( + resource_group_name=resource_group_name, + factory_name=factory_name, + dataset_name=dataset_name, + if_match=if_match, + dataset=dataset, + ) + + +def datafactory_dataset_update( + instance, + resource_group_name, + factory_name, + dataset_name, + linked_service_name, + if_match=None, + description=None, + structure=None, + schema=None, + parameters=None, + annotations=None, + folder=None, +): if description is not None: instance.properties.description = description if structure is not None: @@ -544,60 +609,70 @@ def datafactory_dataset_update(instance, return instance -def datafactory_dataset_delete(client, - resource_group_name, - factory_name, - dataset_name): - return client.delete(resource_group_name=resource_group_name, - factory_name=factory_name, - dataset_name=dataset_name) - - -def datafactory_pipeline_list(client, - resource_group_name, - factory_name): - return client.list_by_factory(resource_group_name=resource_group_name, - factory_name=factory_name) - - -def datafactory_pipeline_show(client, - resource_group_name, - factory_name, - pipeline_name, - if_none_match=None): - return client.get(resource_group_name=resource_group_name, - factory_name=factory_name, - pipeline_name=pipeline_name, - if_none_match=if_none_match) - - -def datafactory_pipeline_create(client, - resource_group_name, - factory_name, - pipeline_name, - pipeline, - if_match=None): - return client.create_or_update(resource_group_name=resource_group_name, - factory_name=factory_name, - pipeline_name=pipeline_name, - if_match=if_match, - pipeline=pipeline) - - -def datafactory_pipeline_update(instance, - resource_group_name, - factory_name, - pipeline_name, - if_match=None, - description=None, - activities=None, - parameters=None, - variables=None, - concurrency=None, - annotations=None, - run_dimensions=None, - duration=None, - folder_name=None): +def datafactory_dataset_delete(client, resource_group_name, factory_name, dataset_name): + return client.delete( + resource_group_name=resource_group_name, + factory_name=factory_name, + dataset_name=dataset_name, + ) + + +def datafactory_data_flow_show( + client, resource_group_name, factory_name, data_flow_name, if_none_match=None +): + return client.get( + resource_group_name=resource_group_name, + factory_name=factory_name, + data_flow_name=data_flow_name, + if_none_match=if_none_match, + ) + + +def datafactory_pipeline_list(client, resource_group_name, factory_name): + return client.list_by_factory( + resource_group_name=resource_group_name, factory_name=factory_name + ) + + +def datafactory_pipeline_show( + client, resource_group_name, factory_name, pipeline_name, if_none_match=None +): + return client.get( + resource_group_name=resource_group_name, + factory_name=factory_name, + pipeline_name=pipeline_name, + if_none_match=if_none_match, + ) + + +def datafactory_pipeline_create( + client, resource_group_name, factory_name, pipeline_name, pipeline, if_match=None +): + return client.create_or_update( + 
resource_group_name=resource_group_name, + factory_name=factory_name, + pipeline_name=pipeline_name, + if_match=if_match, + pipeline=pipeline, + ) + + +def datafactory_pipeline_update( + instance, + resource_group_name, + factory_name, + pipeline_name, + if_match=None, + description=None, + activities=None, + parameters=None, + variables=None, + concurrency=None, + annotations=None, + run_dimensions=None, + duration=None, + folder_name=None, +): if description is not None: instance.description = description if activities is not None: @@ -619,140 +694,152 @@ def datafactory_pipeline_update(instance, return instance -def datafactory_pipeline_delete(client, - resource_group_name, - factory_name, - pipeline_name): - return client.delete(resource_group_name=resource_group_name, - factory_name=factory_name, - pipeline_name=pipeline_name) - - -def datafactory_pipeline_create_run(client, - resource_group_name, - factory_name, - pipeline_name, - reference_pipeline_run_id=None, - is_recovery=None, - start_activity_name=None, - start_from_failure=None, - parameters=None): - return client.create_run(resource_group_name=resource_group_name, - factory_name=factory_name, - pipeline_name=pipeline_name, - reference_pipeline_run_id=reference_pipeline_run_id, - is_recovery=is_recovery, - start_activity_name=start_activity_name, - start_from_failure=start_from_failure, - parameters=parameters) - - -def datafactory_pipeline_run_show(client, - resource_group_name, - factory_name, - run_id): - return client.get(resource_group_name=resource_group_name, - factory_name=factory_name, - run_id=run_id) - - -def datafactory_pipeline_run_cancel(client, - resource_group_name, - factory_name, - run_id, - is_recursive=None): - return client.cancel(resource_group_name=resource_group_name, - factory_name=factory_name, - run_id=run_id, - is_recursive=is_recursive) - - -def datafactory_pipeline_run_query_by_factory(client, - resource_group_name, - factory_name, - last_updated_after, - last_updated_before, - continuation_token=None, - filters=None, - order_by=None): +def datafactory_pipeline_delete( + client, resource_group_name, factory_name, pipeline_name +): + return client.delete( + resource_group_name=resource_group_name, + factory_name=factory_name, + pipeline_name=pipeline_name, + ) + + +def datafactory_pipeline_create_run( + client, + resource_group_name, + factory_name, + pipeline_name, + reference_pipeline_run_id=None, + is_recovery=None, + start_activity_name=None, + start_from_failure=None, + parameters=None, +): + return client.create_run( + resource_group_name=resource_group_name, + factory_name=factory_name, + pipeline_name=pipeline_name, + reference_pipeline_run_id=reference_pipeline_run_id, + is_recovery=is_recovery, + start_activity_name=start_activity_name, + start_from_failure=start_from_failure, + parameters=parameters, + ) + + +def datafactory_pipeline_run_show(client, resource_group_name, factory_name, run_id): + return client.get( + resource_group_name=resource_group_name, + factory_name=factory_name, + run_id=run_id, + ) + + +def datafactory_pipeline_run_cancel( + client, resource_group_name, factory_name, run_id, is_recursive=None +): + return client.cancel( + resource_group_name=resource_group_name, + factory_name=factory_name, + run_id=run_id, + is_recursive=is_recursive, + ) + + +def datafactory_pipeline_run_query_by_factory( + client, + resource_group_name, + factory_name, + last_updated_after, + last_updated_before, + continuation_token=None, + filters=None, + order_by=None, +): filter_parameters 
= {} if continuation_token is not None: - filter_parameters['continuation_token'] = continuation_token - filter_parameters['last_updated_after'] = last_updated_after - filter_parameters['last_updated_before'] = last_updated_before + filter_parameters["continuation_token"] = continuation_token + filter_parameters["last_updated_after"] = last_updated_after + filter_parameters["last_updated_before"] = last_updated_before if filters is not None: - filter_parameters['filters'] = filters + filter_parameters["filters"] = filters if order_by is not None: - filter_parameters['order_by'] = order_by - return client.query_by_factory(resource_group_name=resource_group_name, - factory_name=factory_name, - filter_parameters=filter_parameters) - - -def datafactory_activity_run_query_by_pipeline_run(client, - resource_group_name, - factory_name, - run_id, - last_updated_after, - last_updated_before, - continuation_token=None, - filters=None, - order_by=None): + filter_parameters["order_by"] = order_by + return client.query_by_factory( + resource_group_name=resource_group_name, + factory_name=factory_name, + filter_parameters=filter_parameters, + ) + + +def datafactory_activity_run_query_by_pipeline_run( + client, + resource_group_name, + factory_name, + run_id, + last_updated_after, + last_updated_before, + continuation_token=None, + filters=None, + order_by=None, +): filter_parameters = {} if continuation_token is not None: - filter_parameters['continuation_token'] = continuation_token - filter_parameters['last_updated_after'] = last_updated_after - filter_parameters['last_updated_before'] = last_updated_before + filter_parameters["continuation_token"] = continuation_token + filter_parameters["last_updated_after"] = last_updated_after + filter_parameters["last_updated_before"] = last_updated_before if filters is not None: - filter_parameters['filters'] = filters + filter_parameters["filters"] = filters if order_by is not None: - filter_parameters['order_by'] = order_by - return client.query_by_pipeline_run(resource_group_name=resource_group_name, - factory_name=factory_name, - run_id=run_id, - filter_parameters=filter_parameters) - - -def datafactory_trigger_list(client, - resource_group_name, - factory_name): - return client.list_by_factory(resource_group_name=resource_group_name, - factory_name=factory_name) - - -def datafactory_trigger_show(client, - resource_group_name, - factory_name, - trigger_name, - if_none_match=None): - return client.get(resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - if_none_match=if_none_match) - - -def datafactory_trigger_create(client, - resource_group_name, - factory_name, - trigger_name, - properties, - if_match=None): + filter_parameters["order_by"] = order_by + return client.query_by_pipeline_run( + resource_group_name=resource_group_name, + factory_name=factory_name, + run_id=run_id, + filter_parameters=filter_parameters, + ) + + +def datafactory_trigger_list(client, resource_group_name, factory_name): + return client.list_by_factory( + resource_group_name=resource_group_name, factory_name=factory_name + ) + + +def datafactory_trigger_show( + client, resource_group_name, factory_name, trigger_name, if_none_match=None +): + return client.get( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + if_none_match=if_none_match, + ) + + +def datafactory_trigger_create( + client, resource_group_name, factory_name, trigger_name, properties, if_match=None +): trigger = {} - 
trigger['properties'] = properties - return client.create_or_update(resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - if_match=if_match, - trigger=trigger) - - -def datafactory_trigger_update(instance, - resource_group_name, - factory_name, - trigger_name, - if_match=None, - description=None, - annotations=None): + trigger["properties"] = properties + return client.create_or_update( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + if_match=if_match, + trigger=trigger, + ) + + +def datafactory_trigger_update( + instance, + resource_group_name, + factory_name, + trigger_name, + if_match=None, + description=None, + annotations=None, +): if description is not None: instance.properties.description = description if annotations is not None: @@ -760,229 +847,259 @@ def datafactory_trigger_update(instance, return instance -def datafactory_trigger_delete(client, - resource_group_name, - factory_name, - trigger_name): - return client.delete(resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name) +def datafactory_trigger_delete(client, resource_group_name, factory_name, trigger_name): + return client.delete( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + ) -def datafactory_trigger_get_event_subscription_status(client, - resource_group_name, - factory_name, - trigger_name): - return client.get_event_subscription_status(resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name) +def datafactory_trigger_get_event_subscription_status( + client, resource_group_name, factory_name, trigger_name +): + return client.get_event_subscription_status( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + ) -def datafactory_trigger_query_by_factory(client, - resource_group_name, - factory_name, - continuation_token=None, - parent_trigger_name=None): +def datafactory_trigger_query_by_factory( + client, + resource_group_name, + factory_name, + continuation_token=None, + parent_trigger_name=None, +): filter_parameters = {} if continuation_token is not None: - filter_parameters['continuation_token'] = continuation_token + filter_parameters["continuation_token"] = continuation_token if parent_trigger_name is not None: - filter_parameters['parent_trigger_name'] = parent_trigger_name - return client.query_by_factory(resource_group_name=resource_group_name, - factory_name=factory_name, - filter_parameters=filter_parameters) - - -def datafactory_trigger_start(client, - resource_group_name, - factory_name, - trigger_name, - no_wait=False): - return sdk_no_wait(no_wait, - client.begin_start, - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name) - - -def datafactory_trigger_stop(client, - resource_group_name, - factory_name, - trigger_name, - no_wait=False): - return sdk_no_wait(no_wait, - client.begin_stop, - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name) - - -def datafactory_trigger_subscribe_to_event(client, - resource_group_name, - factory_name, - trigger_name, - no_wait=False): - return sdk_no_wait(no_wait, - client.begin_subscribe_to_events, - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name) - - -def datafactory_trigger_unsubscribe_from_event(client, - resource_group_name, - factory_name, 
- trigger_name, - no_wait=False): - return sdk_no_wait(no_wait, - client.begin_unsubscribe_from_events, - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name) - - -def datafactory_trigger_run_cancel(client, - resource_group_name, - factory_name, - trigger_name, - run_id): - return client.cancel(resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - run_id=run_id) - - -def datafactory_trigger_run_query_by_factory(client, - resource_group_name, - factory_name, - last_updated_after, - last_updated_before, - continuation_token=None, - filters=None, - order_by=None): + filter_parameters["parent_trigger_name"] = parent_trigger_name + return client.query_by_factory( + resource_group_name=resource_group_name, + factory_name=factory_name, + filter_parameters=filter_parameters, + ) + + +def datafactory_trigger_start( + client, resource_group_name, factory_name, trigger_name, no_wait=False +): + return sdk_no_wait( + no_wait, + client.begin_start, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + ) + + +def datafactory_trigger_stop( + client, resource_group_name, factory_name, trigger_name, no_wait=False +): + return sdk_no_wait( + no_wait, + client.begin_stop, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + ) + + +def datafactory_trigger_subscribe_to_event( + client, resource_group_name, factory_name, trigger_name, no_wait=False +): + return sdk_no_wait( + no_wait, + client.begin_subscribe_to_events, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + ) + + +def datafactory_trigger_unsubscribe_from_event( + client, resource_group_name, factory_name, trigger_name, no_wait=False +): + return sdk_no_wait( + no_wait, + client.begin_unsubscribe_from_events, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + ) + + +def datafactory_trigger_run_cancel( + client, resource_group_name, factory_name, trigger_name, run_id +): + return client.cancel( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + run_id=run_id, + ) + + +def datafactory_trigger_run_query_by_factory( + client, + resource_group_name, + factory_name, + last_updated_after, + last_updated_before, + continuation_token=None, + filters=None, + order_by=None, +): filter_parameters = {} if continuation_token is not None: - filter_parameters['continuation_token'] = continuation_token - filter_parameters['last_updated_after'] = last_updated_after - filter_parameters['last_updated_before'] = last_updated_before + filter_parameters["continuation_token"] = continuation_token + filter_parameters["last_updated_after"] = last_updated_after + filter_parameters["last_updated_before"] = last_updated_before if filters is not None: - filter_parameters['filters'] = filters + filter_parameters["filters"] = filters if order_by is not None: - filter_parameters['order_by'] = order_by - return client.query_by_factory(resource_group_name=resource_group_name, - factory_name=factory_name, - filter_parameters=filter_parameters) - - -def datafactory_trigger_run_rerun(client, - resource_group_name, - factory_name, - trigger_name, - run_id): - return client.rerun(resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - run_id=run_id) - - -def 
datafactory_managed_virtual_network_list(client, - resource_group_name, - factory_name): - return client.list_by_factory(resource_group_name=resource_group_name, - factory_name=factory_name) - - -def datafactory_managed_virtual_network_show(client, - resource_group_name, - factory_name, - managed_virtual_network_name, - if_none_match=None): - return client.get(resource_group_name=resource_group_name, - factory_name=factory_name, - managed_virtual_network_name=managed_virtual_network_name, - if_none_match=if_none_match) - - -def datafactory_managed_virtual_network_create(client, - resource_group_name, - factory_name, - managed_virtual_network_name, - if_match=None): + filter_parameters["order_by"] = order_by + return client.query_by_factory( + resource_group_name=resource_group_name, + factory_name=factory_name, + filter_parameters=filter_parameters, + ) + + +def datafactory_trigger_run_rerun( + client, resource_group_name, factory_name, trigger_name, run_id +): + return client.rerun( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + run_id=run_id, + ) + + +def datafactory_managed_virtual_network_list(client, resource_group_name, factory_name): + return client.list_by_factory( + resource_group_name=resource_group_name, factory_name=factory_name + ) + + +def datafactory_managed_virtual_network_show( + client, + resource_group_name, + factory_name, + managed_virtual_network_name, + if_none_match=None, +): + return client.get( + resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + if_none_match=if_none_match, + ) + + +def datafactory_managed_virtual_network_create( + client, + resource_group_name, + factory_name, + managed_virtual_network_name, + if_match=None, +): managed_virtual_network = {} - managed_virtual_network['properties'] = {} - return client.create_or_update(resource_group_name=resource_group_name, - factory_name=factory_name, - managed_virtual_network_name=managed_virtual_network_name, - if_match=if_match, - managed_virtual_network=managed_virtual_network) - - -def datafactory_managed_virtual_network_update(instance, - resource_group_name, - factory_name, - managed_virtual_network_name, - if_match=None): + managed_virtual_network["properties"] = {} + return client.create_or_update( + resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + if_match=if_match, + managed_virtual_network=managed_virtual_network, + ) + + +def datafactory_managed_virtual_network_update( + instance, + resource_group_name, + factory_name, + managed_virtual_network_name, + if_match=None, +): return instance -def datafactory_managed_private_endpoint_list(client, - resource_group_name, - factory_name, - managed_virtual_network_name): - return client.list_by_factory(resource_group_name=resource_group_name, - factory_name=factory_name, - managed_virtual_network_name=managed_virtual_network_name) - - -def datafactory_managed_private_endpoint_show(client, - resource_group_name, - factory_name, - managed_virtual_network_name, - managed_private_endpoint_name, - if_none_match=None): - return client.get(resource_group_name=resource_group_name, - factory_name=factory_name, - managed_virtual_network_name=managed_virtual_network_name, - managed_private_endpoint_name=managed_private_endpoint_name, - if_none_match=if_none_match) - - -def datafactory_managed_private_endpoint_create(client, - resource_group_name, - 
factory_name, - managed_virtual_network_name, - managed_private_endpoint_name, - if_match=None, - fqdns=None, - group_id=None, - private_link_resource_id=None): +def datafactory_managed_private_endpoint_list( + client, resource_group_name, factory_name, managed_virtual_network_name +): + return client.list_by_factory( + resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + ) + + +def datafactory_managed_private_endpoint_show( + client, + resource_group_name, + factory_name, + managed_virtual_network_name, + managed_private_endpoint_name, + if_none_match=None, +): + return client.get( + resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + managed_private_endpoint_name=managed_private_endpoint_name, + if_none_match=if_none_match, + ) + + +def datafactory_managed_private_endpoint_create( + client, + resource_group_name, + factory_name, + managed_virtual_network_name, + managed_private_endpoint_name, + if_match=None, + fqdns=None, + group_id=None, + private_link_resource_id=None, +): managed_private_endpoint = {} - managed_private_endpoint['properties'] = {} + managed_private_endpoint["properties"] = {} if fqdns is not None: - managed_private_endpoint['properties']['fqdns'] = fqdns + managed_private_endpoint["properties"]["fqdns"] = fqdns if group_id is not None: - managed_private_endpoint['properties']['group_id'] = group_id + managed_private_endpoint["properties"]["group_id"] = group_id if private_link_resource_id is not None: - managed_private_endpoint['properties']['private_link_resource_id'] = private_link_resource_id - if len(managed_private_endpoint['properties']) == 0: - del managed_private_endpoint['properties'] - return client.create_or_update(resource_group_name=resource_group_name, - factory_name=factory_name, - managed_virtual_network_name=managed_virtual_network_name, - managed_private_endpoint_name=managed_private_endpoint_name, - if_match=if_match, - managed_private_endpoint=managed_private_endpoint) - - -def datafactory_managed_private_endpoint_update(instance, - resource_group_name, - factory_name, - managed_virtual_network_name, - managed_private_endpoint_name, - if_match=None, - fqdns=None, - group_id=None, - private_link_resource_id=None): + managed_private_endpoint["properties"][ + "private_link_resource_id" + ] = private_link_resource_id + if len(managed_private_endpoint["properties"]) == 0: + del managed_private_endpoint["properties"] + return client.create_or_update( + resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + managed_private_endpoint_name=managed_private_endpoint_name, + if_match=if_match, + managed_private_endpoint=managed_private_endpoint, + ) + + +def datafactory_managed_private_endpoint_update( + instance, + resource_group_name, + factory_name, + managed_virtual_network_name, + managed_private_endpoint_name, + if_match=None, + fqdns=None, + group_id=None, + private_link_resource_id=None, +): if fqdns is not None: instance.properties.fqdns = fqdns if group_id is not None: @@ -992,12 +1109,16 @@ def datafactory_managed_private_endpoint_update(instance, return instance -def datafactory_managed_private_endpoint_delete(client, - resource_group_name, - factory_name, - managed_virtual_network_name, - managed_private_endpoint_name): - return client.delete(resource_group_name=resource_group_name, - factory_name=factory_name, - 
managed_virtual_network_name=managed_virtual_network_name, - managed_private_endpoint_name=managed_private_endpoint_name) +def datafactory_managed_private_endpoint_delete( + client, + resource_group_name, + factory_name, + managed_virtual_network_name, + managed_private_endpoint_name, +): + return client.delete( + resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + managed_private_endpoint_name=managed_private_endpoint_name, + ) diff --git a/src/datafactory/azext_datafactory/manual/__init__.py b/src/datafactory/azext_datafactory/manual/__init__.py index c9cfdc73e77..93202b7013d 100644 --- a/src/datafactory/azext_datafactory/manual/__init__.py +++ b/src/datafactory/azext_datafactory/manual/__init__.py @@ -9,4 +9,4 @@ # regenerated. # -------------------------------------------------------------------------- -__path__ = __import__('pkgutil').extend_path(__path__, __name__) +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/src/datafactory/azext_datafactory/manual/_client_factory.py b/src/datafactory/azext_datafactory/manual/_client_factory.py new file mode 100644 index 00000000000..1b990d34d4a --- /dev/null +++ b/src/datafactory/azext_datafactory/manual/_client_factory.py @@ -0,0 +1,20 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + + +def cf_datafactory_cl(cli_ctx, *_): + from azure.cli.core.commands.client_factory import get_mgmt_service_client + from azext_datafactory.vendored_sdks.datafactory import DataFactoryManagementClient + + return get_mgmt_service_client(cli_ctx, DataFactoryManagementClient) + + +def cf_data_flow(cli_ctx, *_): + return cf_datafactory_cl(cli_ctx).data_flows diff --git a/src/datafactory/azext_datafactory/manual/_help.py b/src/datafactory/azext_datafactory/manual/_help.py index cbc8eb2f3e5..525630369a4 100644 --- a/src/datafactory/azext_datafactory/manual/_help.py +++ b/src/datafactory/azext_datafactory/manual/_help.py @@ -12,7 +12,9 @@ from knack.help_files import helps -helps['datafactory create'] = """ +helps[ + "datafactory create" +] = """ type: command short-summary: "Create a factory." parameters: @@ -50,7 +52,9 @@ "exampleResourceGroup" """ -helps['datafactory configure-factory-repo'] = """ +helps[ + "datafactory configure-factory-repo" +] = """ type: command short-summary: "Updates a factory's repo information." parameters: @@ -89,3 +93,105 @@ --factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" project-name="project" \ repository-name="repo" root-folder="/" tenant-id="" --location "East US" """ + +helps[ + "datafactory data-flow" +] = """ + type: group + short-summary: Managing and configuring Data Flows in Azure Data Factory +""" + +helps[ + "datafactory data-flow create" +] = """ + type: command + short-summary: "Creates a data flow within a factory" + parameters: + - name: --properties + short-summary: "Specified properties for the dataflow. Expected value: \ + json-string/json-file/@json-file." 
+ long-summary: | + Usage: (if inline JSON) + --properties \"{\\\"name\\\": \\\"hello\\\", \\\"description\\\": \\\"Example \ +description\\\", \\\"folder\\\": \\\"sub-folder\\\", \\\"annotations\\\": \ +\\\" , typeProperties:{...} }\" + + - name: Name of the data flow. (Avoid hyphens.) + - description: (optional) Description for the data flow. + - folder: (optional) Folder the data flow will be organized under. + - annotations: (optional) Annotations for the data flow. + - typeProperties: (optional) If none provided, a generic and empty definition \ +is created. Including the definitions is best done by copying the JSON from within \ +the Data Factory Studio in the Azure Portal. + examples: + - name: Factories_DataFlowCreate + text: |- + az datafactory data-flow create -g example-resource-group \ +-f example-data-factory -n example-data-flow \ +-t "Flowlet" --properties example-properties.json +""" + +helps[ + "datafactory data-flow update" +] = """ + type: command + short-summary: "Updates a specified data flow within a factory" + parameters: + - name: --properties + short-summary: "Specified properties for the dataflow. Expected value: \ + json-string/json-file/@json-file." + long-summary: | + Usage: (if inline JSON) + --properties \"{ \\\"description\\\": \\\"Example \ +description\\\", \\\"folder\\\": \\\"sub-folder\\\", \\\"annotations\\\": \ +\\\" , typeProperties:{...} }\" + + - description: (optional) Description for the data flow. + - folder: (optional) Folder the data flow will be organized under. + - annotations: (optional) Annotations for the data flow. + - typeProperties: (optional) If none provided, a generic and empty definition \ +is created. Including the definitions is best done by copying the JSON from within \ +the Data Factory Studio in the Azure Portal. + examples: + - name: Factories_DataFlowUpdate + text: |- + az datafactory data-flow update -g example-resource-group \ +-f example-data-factory -n example-data-flow \ +--properties example-properties.json +""" + +helps[ + "datafactory data-flow show" +] = """ + type: command + short-summary: "Show information about the specified data flow" + examples: + - name: Factories_DataFlowShow + text: |- + az datafactory data-flow show -g example-resource-group \ +-f example-data-factory -n example-data-flow +""" + +helps[ + "datafactory data-flow list" +] = """ + type: command + short-summary: "List data flows within a provided factory" + examples: + - name: Factories_DataFlowList + text: |- + az datafactory data-flow list -g example-resource-group \ +-f example-data-factory +""" + +helps[ + "datafactory data-flow delete" +] = """ + type: command + short-summary: "Delete a specific data flow in a given factory" + examples: + - name: Factories_DataFlowDelete + text: |- + az datafactory data-flow delete -g example-resource-group \ +-f example-data-factory -n example-data-flow +""" diff --git a/src/datafactory/azext_datafactory/manual/_params.py b/src/datafactory/azext_datafactory/manual/_params.py index 2fac10a33d2..e5ebe35c5e3 100644 --- a/src/datafactory/azext_datafactory/manual/_params.py +++ b/src/datafactory/azext_datafactory/manual/_params.py @@ -7,29 +7,161 @@ # Changes may cause incorrect behavior and will be lost if the code is # regenerated. 
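A concrete shape for the --properties payload documented in the data-flow help entries above makes the key names easier to follow. Below is a minimal Python sketch of assembling such a payload and writing it to the example-properties.json file referenced in the help examples; the values are placeholders, only the keys named in the long-summary (description, folder, annotations, typeProperties) are assumed, and note that the create implementation later in this patch reads folder as an object with a name field.

import json

# Placeholder payload mirroring the keys documented above; typeProperties is
# usually copied from Data Factory Studio rather than written by hand.
properties = {
    "description": "Example description",
    "folder": {"name": "sub-folder"},  # the create path reads folder["name"]
    "annotations": [],
    "typeProperties": {
        "sources": [],
        "sinks": [],
        "transformations": [],
        "scriptLines": [],
    },
}

with open("example-properties.json", "w") as handle:
    json.dump(properties, handle, indent=2)

# The file can then be passed to the command shown in the help example:
#   az datafactory data-flow create -g example-resource-group \
#       -f example-data-factory -n example-data-flow \
#       -t MappingDataFlow --properties example-properties.json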
# -------------------------------------------------------------------------- +# pylint: disable=too-many-lines +# pylint: disable=too-many-statements + +from azure.cli.core.commands.parameters import ( + resource_group_name_type, +) +from azure.cli.core.commands.validators import ( + validate_file_or_dict, +) from azext_datafactory.action import ( AddFactoryVstsConfiguration, - AddFactoryGitHubConfiguration + AddFactoryGitHubConfiguration, ) def load_arguments(self, _): + # DataFactory + with self.argument_context("datafactory create") as c: + c.argument( + "factory_vsts_configuration", + options_list=["--vsts-config", "--factory-vsts-configuration"], + action=AddFactoryVstsConfiguration, + nargs="+", + help="Factory's VSTS repo information.", + arg_group="RepoConfiguration", + ) + c.argument( + "factory_git_hub_configuration", + options_list=["--github-config", "--factory-git-hub-configuration"], + action=AddFactoryGitHubConfiguration, + nargs="+", + help="Factory's GitHub repo information.", + arg_group="RepoConfiguration", + ) + + with self.argument_context("datafactory configure-factory-repo") as c: + c.argument( + "factory_vsts_configuration", + options_list=["--vsts-config", "--factory-vsts-configuration"], + action=AddFactoryVstsConfiguration, + nargs="+", + help="Factory's VSTS repo information.", + arg_group="RepoConfiguration", + ) + c.argument( + "factory_git_hub_configuration", + options_list=["--github-config", "--factory-git-hub-configuration"], + action=AddFactoryGitHubConfiguration, + nargs="+", + help="Factory's GitHub repo information.", + arg_group="RepoConfiguration", + ) + + # Data Flows + with self.argument_context("datafactory data-flow show") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument( + "factory_name", + options_list=["--factory-name", "-f"], + type=str, + help="The factory name.", + id_part="name", + ) + c.argument( + "data_flow_name", + options_list=["--name", "-n", "--data-flow-name"], + type=str, + help="The data flow name.", + id_part="child_name_1", + ) + c.argument( + "if_none_match", + type=str, + help="ETag of the pipeline entity. Should only be specified for get. 
If " + "the ETag matches the existing entity tag, or if * was provided, then no content will be returned.", + ) + + with self.argument_context("datafactory data-flow list") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument( + "factory_name", + options_list=["--factory-name", "-f"], + type=str, + help="The factory name.", + id_part=None, + ) + + with self.argument_context("datafactory data-flow delete") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument( + "factory_name", + options_list=["--factory-name", "-f"], + type=str, + help="The factory name.", + id_part="name", + ) + c.argument( + "data_flow_name", + options_list=["--name", "-n", "--data-flow-name"], + type=str, + help="The data flow name.", + id_part="child_name_1", + ) + + with self.argument_context("datafactory data-flow create") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument( + "factory_name", + options_list=["--factory-name", "-f"], + type=str, + help="The factory name.", + id_part="name", + ) + c.argument( + "data_flow_name", + options_list=["--name", "-n", "--data-flow-name"], + type=str, + help="The data flow name.", + id_part="child_name_1", + ) + c.argument("properties", type=validate_file_or_dict) + c.argument( + "flow_type", + options_list=["--flow-type", "-t"], + type=str, + help="The data flow type. Valid choices: MappingDataFlow, Flowlet", + ) + c.argument( + "if_match", + type=str, + help="ETag of the data flow entity. Should only be specified for update, for " + "which it should match existing entity or can be * for unconditional update.", + ) - with self.argument_context('datafactory create') as c: - c.argument('factory_vsts_configuration', options_list=['--vsts-config', '--factory-vsts-configuration'], - action=AddFactoryVstsConfiguration, nargs='+', help='Factory\'s VSTS repo information.', - arg_group='RepoConfiguration') - c.argument('factory_git_hub_configuration', - options_list=['--github-config', '--factory-git-hub-configuration'], - action=AddFactoryGitHubConfiguration, nargs='+', help='Factory\'s GitHub repo information.', - arg_group='RepoConfiguration') - - with self.argument_context('datafactory configure-factory-repo') as c: - c.argument('factory_vsts_configuration', options_list=['--vsts-config', '--factory-vsts-configuration'], - action=AddFactoryVstsConfiguration, nargs='+', help='Factory\'s VSTS repo information.', - arg_group='RepoConfiguration') - c.argument('factory_git_hub_configuration', - options_list=['--github-config', '--factory-git-hub-configuration'], - action=AddFactoryGitHubConfiguration, nargs='+', help='Factory\'s GitHub repo information.', - arg_group='RepoConfiguration') + with self.argument_context("datafactory data-flow update") as c: + c.argument("resource_group_name", resource_group_name_type) + c.argument( + "factory_name", + options_list=["--factory-name", "-f"], + type=str, + help="The factory name.", + id_part="name", + ) + c.argument( + "data_flow_name", + options_list=["--name", "-n", "--data-flow-name"], + type=str, + help="The data flow name.", + id_part="child_name_1", + ) + c.argument("properties", type=validate_file_or_dict) + c.argument( + "if_match", + type=str, + help="ETag of the data flow entity. 
Should only be specified for update, for " + "which it should match existing entity or can be * for unconditional update.", + ) diff --git a/src/datafactory/azext_datafactory/manual/commands.py b/src/datafactory/azext_datafactory/manual/commands.py new file mode 100644 index 00000000000..e540d341812 --- /dev/null +++ b/src/datafactory/azext_datafactory/manual/commands.py @@ -0,0 +1,33 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=too-many-statements +# pylint: disable=too-many-locals +# pylint: disable=line-too-long + +from azure.cli.core.commands import CliCommandType +from azext_datafactory.generated._client_factory import ( + cf_data_flow, +) + +datafactory_data_flow = CliCommandType( + operations_tmpl="azext_datafactory.vendored_sdks.datafactory.operations._data_flows_operations#DataFlowsOperations.{}", + client_factory=cf_data_flow, +) + + +def load_command_table(self, _): + with self.command_group( + "datafactory data-flow", datafactory_data_flow, client_factory=cf_data_flow + ) as g: + g.custom_show_command("show", "datafactory_data_flow_show") + g.custom_command("list", "datafactory_data_flow_list") + g.custom_command("delete", "datafactory_data_flow_delete") + g.custom_command("create", "datafactory_data_flow_create") + g.custom_command("update", "datafactory_data_flow_update") diff --git a/src/datafactory/azext_datafactory/manual/custom.py b/src/datafactory/azext_datafactory/manual/custom.py index 7a96c61335b..0d5cb1c0eef 100644 --- a/src/datafactory/azext_datafactory/manual/custom.py +++ b/src/datafactory/azext_datafactory/manual/custom.py @@ -9,47 +9,214 @@ # -------------------------------------------------------------------------- from knack.util import CLIError +from knack.log import get_logger +DATA_FLOW_SUBTYPES = ["Flowlet", "MappingDataFlow", "WranglingDataFlow"] -def datafactory_create(client, - resource_group_name, - factory_name, - if_match=None, - location=None, - tags=None, - factory_vsts_configuration=None, - factory_git_hub_configuration=None, - global_parameters=None): + +def datafactory_create( + client, + resource_group_name, + factory_name, + if_match=None, + location=None, + tags=None, + factory_vsts_configuration=None, + factory_git_hub_configuration=None, + global_parameters=None, +): from azext_datafactory.vendored_sdks.datafactory.models import FactoryIdentity from azext_datafactory.vendored_sdks.datafactory.models import FactoryIdentityType + all_repo_configuration = [] if factory_vsts_configuration is not None: all_repo_configuration.append(factory_vsts_configuration) if factory_git_hub_configuration is not None: all_repo_configuration.append(factory_git_hub_configuration) if len(all_repo_configuration) > 1: - raise CLIError('At most one of factory_vsts_configuration, factory_git_hub_configuration is needed for ' - 'repo_configuration!') - repo_configuration = all_repo_configuration[0] if len(all_repo_configuration) == 1 else None + raise CLIError( + "At most one of factory_vsts_configuration, factory_git_hub_configuration is needed for " + "repo_configuration!" 
+ ) + repo_configuration = ( + all_repo_configuration[0] if len(all_repo_configuration) == 1 else None + ) factory = {} - factory['location'] = location - factory['tags'] = tags - factory['repo_configuration'] = repo_configuration - factory['global_parameters'] = global_parameters - factory['encryption'] = {} - factory['identity'] = FactoryIdentity(type=FactoryIdentityType.SYSTEM_ASSIGNED) - return client.create_or_update(resource_group_name=resource_group_name, - factory_name=factory_name, - if_match=if_match, - factory=factory) - - -def datafactory_update(client, - resource_group_name, - factory_name, - tags=None): + factory["location"] = location + factory["tags"] = tags + factory["repo_configuration"] = repo_configuration + factory["global_parameters"] = global_parameters + factory["encryption"] = {} + factory["identity"] = FactoryIdentity(type=FactoryIdentityType.SYSTEM_ASSIGNED) + return client.create_or_update( + resource_group_name=resource_group_name, + factory_name=factory_name, + if_match=if_match, + factory=factory, + ) + + +def datafactory_update(client, resource_group_name, factory_name, tags=None): factory_update_parameters = {} - factory_update_parameters['tags'] = tags - return client.update(resource_group_name=resource_group_name, - factory_name=factory_name, - factory_update_parameters=factory_update_parameters) + factory_update_parameters["tags"] = tags + return client.update( + resource_group_name=resource_group_name, + factory_name=factory_name, + factory_update_parameters=factory_update_parameters, + ) + + +# Data Flows +def datafactory_data_flow_list(client, resource_group_name, factory_name): + return client.list_by_factory( + resource_group_name=resource_group_name, factory_name=factory_name + ) + + +def datafactory_data_flow_delete( + client, resource_group_name, factory_name, data_flow_name +): + return client.delete( + resource_group_name=resource_group_name, + factory_name=factory_name, + data_flow_name=data_flow_name, + ) + + +def datafactory_data_flow_create( + client, + resource_group_name, + factory_name, + data_flow_name, + properties, + flow_type, + if_match=None, +): + logger = get_logger() + data_flow = {} + data_flow["properties"] = {} + if flow_type not in DATA_FLOW_SUBTYPES: + raise CLIError( + f"Not a valid type of dataflow. Valid choices: {DATA_FLOW_SUBTYPES}" + ) + + data_flow["properties"]["type"] = flow_type + data_flow["properties"]["description"] = ( + properties["description"] if "description" in properties.keys() else "" + ) + data_flow["properties"]["folder"] = ( + {"name": properties["folder"]["name"]} + if "folder" in properties.keys() + else None + ) + data_flow["properties"]["annotations"] = ( + properties["annotations"] if "annotations" in properties.keys() else [] + ) + data_flow["properties"]["typeProperties"] = {} + if "typeProperties" not in properties.keys(): + data_flow["properties"]["typeProperties"] = { + "sources": [], + "sinks": [], + "transformations": [], + "scriptLines": [], + } + else: + logger.warning( + 'Any malformed "typeProperty" sub-item will result in an incomplete definition ' + "once viewed on ADF." 
+ ) + if ( + "sources" in properties["typeProperties"].keys() + or "sinks" in properties["typeProperties"].keys() + ) and ("scriptLines" not in properties["typeProperties"].keys()): + + logger.warning( + "Not including a scriptLines in this case may result " + "in a malformed data flow" + ) + data_flow["properties"]["typeProperties"]["sources"] = ( + properties["typeProperties"]["sources"] + if "sources" in properties["typeProperties"].keys() + else [] + ) + data_flow["properties"]["typeProperties"]["sinks"] = ( + properties["typeProperties"]["sinks"] + if "sinks" in properties["typeProperties"].keys() + else [] + ) + data_flow["properties"]["typeProperties"]["transformations"] = ( + properties["typeProperties"]["transformations"] + if "transformations" in properties["typeProperties"].keys() + else [] + ) + data_flow["properties"]["typeProperties"]["scriptLines"] = ( + properties["typeProperties"]["scriptLines"] + if "scriptLines" in properties["typeProperties"].keys() + else [] + ) + return client.create_or_update( + resource_group_name=resource_group_name, + factory_name=factory_name, + data_flow_name=data_flow_name, + if_match=if_match, + data_flow=data_flow, + ) + + +def datafactory_data_flow_update( + client, + resource_group_name, + factory_name, + data_flow_name, + properties, +): + logger = get_logger() + data_flow_data = {} + data_flow_data["properties"] = {} + + # Avoid creating in the update command + try: + client.get(resource_group_name, factory_name, data_flow_name) + except Exception as e: + raise CLIError( + f"No data flow with this name `{data_flow_name}` exists - no update performed" + ) from e + + if "name" in properties.keys(): + raise CLIError( + "Do not update the name of the data flow via CLI - chance of naming collision" + ) + + if "annotations" in properties.keys(): + data_flow_data["properties"]["annotations"] = properties["annotations"] + if "description" in properties.keys(): + data_flow_data["properties"]["description"] = properties["description"] + if "typeProperties" in properties.keys(): + logger.warning( + "If the definition is not correct here, the resource in ADF may be malformed" + ) + + if "sinks" in properties["typeProperties"].keys(): + data_flow_data["properties"]["typeProperties"]["sinks"] = properties[ + "typeProperties" + ]["sinks"] + if "sources" in properties["typeProperties"].keys(): + data_flow_data["properties"]["typeProperties"]["sources"] = properties[ + "typeProperties" + ]["sources"] + if "transformations" in properties["typeProperties"].keys(): + data_flow_data["properties"]["typeProperties"][ + "transformations" + ] = properties["typeProperties"]["transformations"] + if "scriptLines" in properties["typeProperties"].keys(): + data_flow_data["properties"]["typeProperties"]["scriptLines"] = properties[ + "typeProperties" + ]["scriptLines"] + + return client.create_or_update( + resource_group_name=resource_group_name, + factory_name=factory_name, + data_flow_name=data_flow_name, + if_match=None, + data_flow=data_flow_data, + ) diff --git a/src/datafactory/azext_datafactory/manual/tests/__init__.py b/src/datafactory/azext_datafactory/manual/tests/__init__.py index c9cfdc73e77..93202b7013d 100644 --- a/src/datafactory/azext_datafactory/manual/tests/__init__.py +++ b/src/datafactory/azext_datafactory/manual/tests/__init__.py @@ -9,4 +9,4 @@ # regenerated. 
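To make the defaulting behaviour of datafactory_data_flow_create above concrete, here is a minimal sketch that drives it with a stub client; the stub class is a hypothetical stand-in for the SDK's DataFlowsOperations, and the import assumes the extension source tree (and its knack dependency) is on the Python path. The update path differs: it requires the data flow to already exist and copies over only the description, annotations, and typeProperties sub-keys that are present in --properties.

from azext_datafactory.manual.custom import datafactory_data_flow_create


class StubDataFlowsClient:
    """Hypothetical stand-in for DataFlowsOperations that just echoes the request."""

    def create_or_update(self, **kwargs):
        return kwargs


request = datafactory_data_flow_create(
    StubDataFlowsClient(),
    resource_group_name="example-resource-group",
    factory_name="example-data-factory",
    data_flow_name="example-data-flow",
    properties={"description": "Example Text"},  # no folder/annotations/typeProperties
    flow_type="MappingDataFlow",
)

# Omitted keys are filled in: annotations becomes [], folder stays None, and
# typeProperties gets an empty sources/sinks/transformations/scriptLines skeleton.
print(request["data_flow"]["properties"]["typeProperties"])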
# -------------------------------------------------------------------------- -__path__ = __import__('pkgutil').extend_path(__path__, __name__) +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/src/datafactory/azext_datafactory/manual/tests/latest/__init__.py b/src/datafactory/azext_datafactory/manual/tests/latest/__init__.py index c9cfdc73e77..93202b7013d 100644 --- a/src/datafactory/azext_datafactory/manual/tests/latest/__init__.py +++ b/src/datafactory/azext_datafactory/manual/tests/latest/__init__.py @@ -9,4 +9,4 @@ # regenerated. # -------------------------------------------------------------------------- -__path__ = __import__('pkgutil').extend_path(__path__, __name__) +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/src/datafactory/azext_datafactory/manual/tests/latest/test_datafactory_scenario.py b/src/datafactory/azext_datafactory/manual/tests/latest/test_datafactory_scenario.py index 28519f30473..e30d6bc4b36 100644 --- a/src/datafactory/azext_datafactory/manual/tests/latest/test_datafactory_scenario.py +++ b/src/datafactory/azext_datafactory/manual/tests/latest/test_datafactory_scenario.py @@ -8,172 +8,312 @@ # regenerated. # -------------------------------------------------------------------------- +from azure.cli.testsdk import ScenarioTest +from azure.cli.testsdk import ResourceGroupPreparer + +# from knack.util import CLIError +from azure.cli.core.azclierror import AzCLIError, CLIInternalError, CLIError +import unittest + +### Steps + def step_dataset_update(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory dataset update ' - '--description "Example description" ' - '--linked-service-name "{{\\"type\\":\\"LinkedServiceReference\\",\\"referenceName\\":\\"{myLinkedService}' - '\\"}}" ' - '--parameters "{{\\"MyFileName\\":{{\\"type\\":\\"String\\"}},\\"MyFolderPath\\":{{\\"type\\":\\"String\\"' - '}}}}" ' - '--name "{myDataset}" ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory dataset update " + '--description "Example description" ' + '--linked-service-name "{{\\"type\\":\\"LinkedServiceReference\\",\\"referenceName\\":\\"{myLinkedService}' + '\\"}}" ' + '--parameters "{{\\"MyFileName\\":{{\\"type\\":\\"String\\"}},\\"MyFolderPath\\":{{\\"type\\":\\"String\\"' + '}}}}" ' + '--name "{myDataset}" ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks, + ) def step_linked_service_update(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory linked-service update ' - '--factory-name "{myFactory}" ' - '--description "Example description" ' - '--name "{myLinkedService}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory linked-service update " + '--factory-name "{myFactory}" ' + '--description "Example description" ' + '--name "{myLinkedService}" ' + '--resource-group "{rg}"', + checks=checks, + ) def step_trigger_update(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory trigger update ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--description "Example description" ' - '--name "{myTrigger}"', - checks=checks) + test.cmd( + "az datafactory trigger update " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--description "Example description" ' + '--name "{myTrigger}"', + checks=checks, + ) # EXAMPLE: IntegrationRuntimes_Create def step_integration_runtime_create(test): - test.cmd('az datafactory 
integration-runtime self-hosted create ' - '--factory-name "{myFactory}" ' - '--description "A selfhosted integration runtime" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=[ - test.check('name', "{myIntegrationRuntime}"), - test.check('properties.type', 'SelfHosted') - ]) + test.cmd( + "az datafactory integration-runtime self-hosted create " + '--factory-name "{myFactory}" ' + '--description "A selfhosted integration runtime" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=[ + test.check("name", "{myIntegrationRuntime}"), + test.check("properties.type", "SelfHosted"), + ], + ) def step_trigger_run_rerun(test): - test.cmd('az datafactory trigger-run rerun ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--trigger-name "{myTrigger}" ' - '--run-id "{myRunId}"', - checks=[]) + test.cmd( + "az datafactory trigger-run rerun " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--trigger-name "{myTrigger}" ' + '--run-id "{myRunId}"', + checks=[], + ) def step_pipeline_create_run(test): - output = test.cmd('az datafactory pipeline create-run ' - '--factory-name "{myFactory}" ' - '--parameters "{{\\"OutputBlobNameList\\":[\\"exampleoutput.csv\\"]}}" ' - '--name "{myPipeline}" ' - '--resource-group "{rg}"', - checks=[]).get_output_in_json() + output = test.cmd( + "az datafactory pipeline create-run " + '--factory-name "{myFactory}" ' + '--parameters "{{\\"OutputBlobNameList\\":[\\"exampleoutput.csv\\"]}}" ' + '--name "{myPipeline}" ' + '--resource-group "{rg}"', + checks=[], + ).get_output_in_json() return output def step_pipeline_run_cancel(test): - test.cmd('az datafactory pipeline-run cancel ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--run-id "{myRunId}"', - checks=[]) + test.cmd( + "az datafactory pipeline-run cancel " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--run-id "{myRunId}"', + checks=[], + ) def step_pipeline_run_show(test): - test.cmd('az datafactory pipeline-run show ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--run-id "{myRunId}"', - checks=[]) + test.cmd( + "az datafactory pipeline-run show " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--run-id "{myRunId}"', + checks=[], + ) def step_pipeline_update(test): - test.cmd('az datafactory pipeline update ' - '--factory-name "{myFactory}" ' - '--description "Test Update description" ' - '--name "{myPipeline}" ' - '--resource-group "{rg}"', - checks=[]) + test.cmd( + "az datafactory pipeline update " + '--factory-name "{myFactory}" ' + '--description "Test Update description" ' + '--name "{myPipeline}" ' + '--resource-group "{rg}"', + checks=[], + ) def step_trigger_run_query_by_factory(test): - output = test.cmd('az datafactory trigger-run query-by-factory ' - '--factory-name "{myFactory}" ' - '--last-updated-after "{myStartTime}" ' - '--last-updated-before "{myEndTime}" ' - '--resource-group "{rg}"', - checks=[]).get_output_in_json() + output = test.cmd( + "az datafactory trigger-run query-by-factory " + '--factory-name "{myFactory}" ' + '--last-updated-after "{myStartTime}" ' + '--last-updated-before "{myEndTime}" ' + '--resource-group "{rg}"', + checks=[], + ).get_output_in_json() return output def step_integration_runtime_managed_create(test): - test.cmd('az datafactory integration-runtime managed create ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}" ' - '--description "Managed Integration Runtime" 
' - '--compute-properties "{{\\"location\\":' - '\\"East US 2\\",\\"nodeSize\\":\\"Standard_D2_v3\\",' - '\\"numberOfNodes\\":1,\\"maxParallelExecutionsPerNode\\":2}}" ' - '--ssis-properties "{{\\"edition\\":\\"Standard' - '\\",\\"licenseType\\":\\"LicenseIncluded\\"}}" ', - checks=[ - test.check('name', "{myIntegrationRuntime}"), - test.check('properties.type', "Managed") - ]) + test.cmd( + "az datafactory integration-runtime managed create " + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}" ' + '--description "Managed Integration Runtime" ' + '--compute-properties "{{\\"location\\":' + '\\"East US 2\\",\\"nodeSize\\":\\"Standard_D2_v3\\",' + '\\"numberOfNodes\\":1,\\"maxParallelExecutionsPerNode\\":2}}" ' + '--ssis-properties "{{\\"edition\\":\\"Standard' + '\\",\\"licenseType\\":\\"LicenseIncluded\\"}}" ', + checks=[ + test.check("name", "{myIntegrationRuntime}"), + test.check("properties.type", "Managed"), + ], + ) def step_pipeline_wait_create(test): - test.cmd('az datafactory pipeline create ' - '--factory-name "{myFactory}" ' - '--pipeline "{{\\"activities\\":[{{\\"name\\":\\"Wait1\\",' - '\\"type\\":\\"Wait\\",\\"dependsOn\\":[],\\"userProperties' - '\\":[],\\"typeProperties\\":{{\\"waitTimeInSeconds\\":5' - '}}}}],\\"annotations\\":[]}}" ' - '--name "{myPipeline}" ' - '--resource-group "{rg}" ', - checks=[ - test.check('name', "{myPipeline}"), - test.check('activities[0].type', "Wait") - ]) + test.cmd( + "az datafactory pipeline create " + '--factory-name "{myFactory}" ' + '--pipeline "{{\\"activities\\":[{{\\"name\\":\\"Wait1\\",' + '\\"type\\":\\"Wait\\",\\"dependsOn\\":[],\\"userProperties' + '\\":[],\\"typeProperties\\":{{\\"waitTimeInSeconds\\":5' + '}}}}],\\"annotations\\":[]}}" ' + '--name "{myPipeline}" ' + '--resource-group "{rg}" ', + checks=[ + test.check("name", "{myPipeline}"), + test.check("activities[0].type", "Wait"), + ], + ) def step_trigger_tumble_create(test): - test.cmd('az datafactory trigger create ' - '--resource-group "{rg}" ' - '--properties "{{\\"description\\":\\"trumblingwindowtrigger' - '\\",\\"annotations\\":[],\\"pipeline\\":{{\\"pipelineReference' - '\\":{{\\"referenceName\\":\\"{myPipeline}\\",\\"type\\":' - '\\"PipelineReference\\"}}}},\\"type\\":\\"TumblingWindowTrigger' - '\\",\\"typeProperties\\":{{\\"frequency\\":\\"Minute\\",' - '\\"interval\\":5,\\"startTime\\":\\"{myStartTime}\\",' - '\\"endTime\\":\\"{myEndTime}\\",\\"delay\\":\\"00:00:00\\",' - '\\"maxConcurrency\\":50,\\"retryPolicy\\":{{\\"intervalInSeconds' - '\\":30}},\\"dependsOn\\":[]}}}}" ' - '--factory-name "{myFactory}" ' - '--name "{myTrigger}"', - checks=[ - test.check('name', "{myTrigger}"), - test.check('properties.type', "TumblingWindowTrigger"), - test.check('properties.pipeline.pipelineReference.referenceName', - "{myPipeline}") - ]) + test.cmd( + "az datafactory trigger create " + '--resource-group "{rg}" ' + '--properties "{{\\"description\\":\\"trumblingwindowtrigger' + '\\",\\"annotations\\":[],\\"pipeline\\":{{\\"pipelineReference' + '\\":{{\\"referenceName\\":\\"{myPipeline}\\",\\"type\\":' + '\\"PipelineReference\\"}}}},\\"type\\":\\"TumblingWindowTrigger' + '\\",\\"typeProperties\\":{{\\"frequency\\":\\"Minute\\",' + '\\"interval\\":5,\\"startTime\\":\\"{myStartTime}\\",' + '\\"endTime\\":\\"{myEndTime}\\",\\"delay\\":\\"00:00:00\\",' + '\\"maxConcurrency\\":50,\\"retryPolicy\\":{{\\"intervalInSeconds' + '\\":30}},\\"dependsOn\\":[]}}}}" ' + '--factory-name "{myFactory}" ' + '--name "{myTrigger}"', + checks=[ + 
test.check("name", "{myTrigger}"), + test.check("properties.type", "TumblingWindowTrigger"), + test.check( + "properties.pipeline.pipelineReference.referenceName", "{myPipeline}" + ), + ], + ) + + +def step_data_flow_create_mapping_data_flow(self): + + self.kwargs.update({"data_flow_type": "MappingDataFlow"}) + checks = [ + self.check("name", "{myMappingDataFlow}"), + self.check("properties.type", "MappingDataFlow"), + self.check("properties.description", "Example Text"), + ] + # Build command + self.cmd( + "az datafactory data-flow create " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myMappingDataFlow}" ' + '--flow-type "{data_flow_type}" ' + '--properties "{{\\"description\\": \\"Example Text\\"}}"', + checks=checks, + ) + + +def step_data_flow_create_flowlet(self): + self.kwargs.update({"data_flow_type": "Flowlet"}) + checks = [ + self.check("name", "{myFlowletDataFlow}"), + self.check("properties.type", "Flowlet"), + self.check("properties.description", "Example Text"), + ] + # Build command + self.cmd( + "az datafactory data-flow create " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myFlowletDataFlow}" ' + '--flow-type "{data_flow_type}" ' + '--properties "{{\\"description\\": \\"Example Text\\"}}"', + checks=checks, + ) + + +def step_data_flow_delete(self): + checks = [] + self.cmd( + "az datafactory data-flow delete " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myMappingDataFlow}"', + checks=checks, + ) + + +def step_data_flow_update(self): + checks = [ + self.check("name", "{myMappingDataFlow}"), + self.check("description", "A new example description"), + ] + self.cmd( + "az datafactory data-flow update " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myMappingDataFlow}" ' + '--properties "{{\\"description\\": \\"A new example description\\"}}"', + checks=checks, + ) + + +def step_data_flow_show(self): + checks = [ + self.check("name", "{myMappingDataFlow}"), + self.check("properties.type", "MappingDataFlow"), + self.check("properties.description", "Example Text"), + self.check("properties.annotations", []), + self.check("properties.scriptLines", []), + self.check("properties.sinks", []), + self.check("properties.sources", []), + self.check("properties.transformations", []), + self.check("properties.folder", None), + self.check("properties.script", None), + ] + self.cmd( + "az datafactory data-flow show " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myMappingDataFlow}"', + checks=checks, + ) + + +def step_data_flow_list(self): + data_flow_list = self.cmd( + "az datafactory data-flow list " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"' + ).get_output_in_json() + self.assertTrue(len(data_flow_list) > 0) + # Assume that at this point, two successful create commands + self.assertTrue(len(data_flow_list) == 2) + + +### Scenarios def call_managed_integrationruntime_scenario(test): from ....tests.latest import test_datafactory_scenario as g + g.setup_main(test) g.step_create(test) step_integration_runtime_managed_create(test) g.step_integration_runtime_show(test) - test.kwargs.update({'myIntegrationRuntime2': test.kwargs.get('myIntegrationRuntime')}) + test.kwargs.update( + {"myIntegrationRuntime2": test.kwargs.get("myIntegrationRuntime")} + ) g.step_integration_runtime_start(test) g.step_integration_runtime_stop(test) g.step_integration_runtime_delete(test) @@ -181,19 +321,34 @@ def 
call_managed_integrationruntime_scenario(test): g.cleanup_main(test) +def call_data_flow_scenario(test): + from ....tests.latest import test_datafactory_scenario as g + + g.setup_main(test) + g.step_create(test) + step_data_flow_create_mapping_data_flow(test) + step_data_flow_show(test) + step_data_flow_create_flowlet(test) + step_data_flow_list(test) + step_data_flow_delete(test) + g.step_delete(test) + g.cleanup_main(test) + + def call_triggerrun_scenario(test): from ....tests.latest import test_datafactory_scenario as g import time + g.setup_main(test) g.step_create(test) step_pipeline_wait_create(test) createrun_res = step_pipeline_create_run(test) time.sleep(5) - test.kwargs.update({'myRunId': createrun_res.get('runId')}) + test.kwargs.update({"myRunId": createrun_res.get("runId")}) step_pipeline_run_show(test) g.step_activity_run_query_by_pipeline_run(test) createrun_res = step_pipeline_create_run(test) - test.kwargs.update({'myRunId': createrun_res.get('runId')}) + test.kwargs.update({"myRunId": createrun_res.get("runId")}) step_pipeline_run_cancel(test) step_trigger_tumble_create(test) g.step_trigger_start(test) @@ -201,8 +356,11 @@ def call_triggerrun_scenario(test): maxRound = 2 while True: triggerrun_res = step_trigger_run_query_by_factory(test) - if len(triggerrun_res['value']) > 0 and triggerrun_res['value'][0]['status'] == 'Succeeded': - test.kwargs.update({'myRunId': triggerrun_res['value'][0]['triggerRunId']}) + if ( + len(triggerrun_res["value"]) > 0 + and triggerrun_res["value"][0]["status"] == "Succeeded" + ): + test.kwargs.update({"myRunId": triggerrun_res["value"][0]["triggerRunId"]}) break else: if maxRound > 0: @@ -223,6 +381,7 @@ def call_triggerrun_scenario(test): def call_main_scenario(test): from ....tests.latest import test_datafactory_scenario as g + g.setup_main(test) g.step_create(test) g.step_update(test) @@ -284,14 +443,13 @@ def call_main_scenario(test): def call_main(test): from datetime import datetime, timedelta + now = datetime.utcnow() startTime = now.strftime("%Y-%m-%dT%H:%M:%SZ") an_hour_later = now + timedelta(hours=1) endTime = an_hour_later.strftime("%Y-%m-%dT%H:%M:%SZ") - test.kwargs.update({ - 'myStartTime': startTime, - 'myEndTime': endTime - }) + test.kwargs.update({"myStartTime": startTime, "myEndTime": endTime}) call_main_scenario(test) + call_data_flow_scenario(test) call_managed_integrationruntime_scenario(test) call_triggerrun_scenario(test) diff --git a/src/datafactory/azext_datafactory/manual/version.py b/src/datafactory/azext_datafactory/manual/version.py index 64ffc743da7..be786b0bf33 100644 --- a/src/datafactory/azext_datafactory/manual/version.py +++ b/src/datafactory/azext_datafactory/manual/version.py @@ -8,4 +8,4 @@ # regenerated. 
# -------------------------------------------------------------------------- -VERSION = "0.6.0" +VERSION = "0.7.0" diff --git a/src/datafactory/azext_datafactory/tests/__init__.py b/src/datafactory/azext_datafactory/tests/__init__.py index 70488e93851..f4a2b4ecdca 100644 --- a/src/datafactory/azext_datafactory/tests/__init__.py +++ b/src/datafactory/azext_datafactory/tests/__init__.py @@ -16,12 +16,16 @@ import datetime as dt from azure.core.exceptions import AzureError -from azure.cli.testsdk.exceptions import CliTestError, CliExecutionError, JMESPathCheckAssertionError +from azure.cli.testsdk.exceptions import ( + CliTestError, + CliExecutionError, + JMESPathCheckAssertionError, +) -logger = logging.getLogger('azure.cli.testsdk') +logger = logging.getLogger("azure.cli.testsdk") logger.addHandler(logging.StreamHandler()) -__path__ = __import__('pkgutil').extend_path(__path__, __name__) +__path__ = __import__("pkgutil").extend_path(__path__, __name__) exceptions = [] test_map = dict() SUCCESSED = "successed" @@ -31,17 +35,23 @@ def try_manual(func): def import_manual_function(origin_func): from importlib import import_module + decorated_path = inspect.getfile(origin_func).lower() module_path = __path__[0].lower() if not decorated_path.startswith(module_path): raise Exception("Decorator can only be used in submodules!") - manual_path = os.path.join( - decorated_path[module_path.rfind(os.path.sep) + 1:]) + manual_path = os.path.join(decorated_path[module_path.rfind(os.path.sep) + 1 :]) manual_file_path, manual_file_name = os.path.split(manual_path) module_name, _ = os.path.splitext(manual_file_name) - manual_module = "..manual." + \ - ".".join(manual_file_path.split(os.path.sep) + [module_name, ]) - return getattr(import_module(manual_module, package=__name__), origin_func.__name__) + manual_module = "..manual." 
+ ".".join( + manual_file_path.split(os.path.sep) + + [ + module_name, + ] + ) + return getattr( + import_module(manual_module, package=__name__), origin_func.__name__ + ) def get_func_to_call(): func_to_call = func @@ -63,16 +73,25 @@ def wrapper(*args, **kwargs): test_map[func.__name__]["error_normalized"] = "" test_map[func.__name__]["start_dt"] = dt.datetime.utcnow() ret = func_to_call(*args, **kwargs) - except (AssertionError, AzureError, CliTestError, CliExecutionError, SystemExit, - JMESPathCheckAssertionError) as e: + except ( + AssertionError, + AzureError, + CliTestError, + CliExecutionError, + SystemExit, + JMESPathCheckAssertionError, + ) as e: use_exception_cache = os.getenv("TEST_EXCEPTION_CACHE") if use_exception_cache is None or use_exception_cache.lower() != "true": raise test_map[func.__name__]["end_dt"] = dt.datetime.utcnow() test_map[func.__name__]["result"] = FAILED - test_map[func.__name__]["error_message"] = str(e).replace("\r\n", " ").replace("\n", " ")[:500] - test_map[func.__name__]["error_stack"] = traceback.format_exc().replace( - "\r\n", " ").replace("\n", " ")[:500] + test_map[func.__name__]["error_message"] = ( + str(e).replace("\r\n", " ").replace("\n", " ")[:500] + ) + test_map[func.__name__]["error_stack"] = ( + traceback.format_exc().replace("\r\n", " ").replace("\n", " ")[:500] + ) logger.info("--------------------------------------") logger.info("step exception: %s", e) logger.error("--------------------------------------") @@ -92,7 +111,9 @@ def calc_coverage(filename): filename = filename.split(".")[0] coverage_name = filename + "_coverage.md" with open(coverage_name, "w") as f: - f.write("|Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt|\n") + f.write( + "|Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt|\n" + ) total = len(test_map) covered = 0 for k, v in test_map.items(): @@ -101,8 +122,10 @@ def calc_coverage(filename): continue if v["result"] == SUCCESSED: covered += 1 - f.write("|{step_name}|{result}|{error_message}|{error_stack}|{error_normalized}|{start_dt}|" - "{end_dt}|\n".format(step_name=k, **v)) + f.write( + "|{step_name}|{result}|{error_message}|{error_stack}|{error_normalized}|{start_dt}|" + "{end_dt}|\n".format(step_name=k, **v) + ) f.write("Coverage: {}/{}\n".format(covered, total)) print("Create coverage\n", file=sys.stderr) @@ -111,6 +134,8 @@ def raise_if(): if exceptions: if len(exceptions) <= 1: raise exceptions[0][1][1] - message = "{}\nFollowed with exceptions in other steps:\n".format(str(exceptions[0][1][1])) + message = "{}\nFollowed with exceptions in other steps:\n".format( + str(exceptions[0][1][1]) + ) message += "\n".join(["{}: {}".format(h[0], h[1][1]) for h in exceptions[1:]]) raise exceptions[0][1][0](message).with_traceback(exceptions[0][1][2]) diff --git a/src/datafactory/azext_datafactory/tests/latest/__init__.py b/src/datafactory/azext_datafactory/tests/latest/__init__.py index c9cfdc73e77..93202b7013d 100644 --- a/src/datafactory/azext_datafactory/tests/latest/__init__.py +++ b/src/datafactory/azext_datafactory/tests/latest/__init__.py @@ -9,4 +9,4 @@ # regenerated. 
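The try_manual decorator above is what lets the hand-written scenarios under manual/tests shadow their generated counterparts. A minimal sketch of how a generated step would typically be wrapped follows, assuming it lives in a submodule of this tests package (the decorator refuses to run from anywhere else) and that the relative import matches the generated layout; the step body is illustrative.

from .. import try_manual  # resolves to azext_datafactory.tests


@try_manual
def step_show(test, checks=None):
    # If manual/tests/latest defines a step_show of its own, that function is
    # called instead; either way the wrapper records result, timings and any
    # error details in test_map for the coverage report.
    test.cmd(
        "az datafactory show " '--name "{myFactory}" ' '--resource-group "{rg}"',
        checks=checks or [],
    )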
# -------------------------------------------------------------------------- -__path__ = __import__('pkgutil').extend_path(__path__, __name__) +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/src/datafactory/azext_datafactory/tests/latest/example_steps.py b/src/datafactory/azext_datafactory/tests/latest/example_steps.py index 0704716920c..f44c4955610 100644 --- a/src/datafactory/azext_datafactory/tests/latest/example_steps.py +++ b/src/datafactory/azext_datafactory/tests/latest/example_steps.py @@ -17,11 +17,13 @@ def step_create(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory create ' - '--location "East US" ' - '--name "{myFactory}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory create " + '--location "East US" ' + '--name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /Factories/get/Factories_Get @@ -29,10 +31,10 @@ def step_create(test, checks=None): def step_show(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory show ' - '--name "{myFactory}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory show " '--name "{myFactory}" ' '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /Factories/get/Factories_List @@ -40,9 +42,7 @@ def step_show(test, checks=None): def step_list(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory list ' - '-g ""', - checks=checks) + test.cmd("az datafactory list " '-g ""', checks=checks) # EXAMPLE: /Factories/get/Factories_ListByResourceGroup @@ -50,9 +50,7 @@ def step_list(test, checks=None): def step_list2(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory list ' - '--resource-group "{rg}"', - checks=checks) + test.cmd("az datafactory list " '--resource-group "{rg}"', checks=checks) # EXAMPLE: /Factories/patch/Factories_Update @@ -60,11 +58,13 @@ def step_list2(test, checks=None): def step_update(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory update ' - '--name "{myFactory}" ' - '--tags exampleTag="exampleValue" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory update " + '--name "{myFactory}" ' + '--tags exampleTag="exampleValue" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /Factories/post/Factories_ConfigureFactoryRepo @@ -72,13 +72,15 @@ def step_update(test, checks=None): def step_configure_factory_repo(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory configure-factory-repo ' - '--factory-resource-id "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.DataFacto' - 'ry/factories/{myFactory}" ' - '--factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" ' - 'project-name="project" repository-name="repo" root-folder="/" tenant-id="" ' - '--location "East US"', - checks=checks) + test.cmd( + "az datafactory configure-factory-repo " + '--factory-resource-id "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.DataFacto' + 'ry/factories/{myFactory}" ' + '--factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" ' + 'project-name="project" repository-name="repo" root-folder="/" tenant-id="" ' + '--location "East US"', + checks=checks, + ) # EXAMPLE: /Factories/post/Factories_GetDataPlaneAccess @@ -86,15 +88,17 @@ def step_configure_factory_repo(test, checks=None): def step_get_data_plane_access(test, 
checks=None): if checks is None: checks = [] - test.cmd('az datafactory get-data-plane-access ' - '--name "{myFactory}" ' - '--access-resource-path "" ' - '--expire-time "2018-11-10T09:46:20.2659347Z" ' - '--permissions "r" ' - '--profile-name "DefaultProfile" ' - '--start-time "2018-11-10T02:46:20.2659347Z" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory get-data-plane-access " + '--name "{myFactory}" ' + '--access-resource-path "" ' + '--expire-time "2018-11-10T09:46:20.2659347Z" ' + '--permissions "r" ' + '--profile-name "DefaultProfile" ' + '--start-time "2018-11-10T02:46:20.2659347Z" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /Factories/post/Factories_GetGitHubAccessToken @@ -102,13 +106,15 @@ def step_get_data_plane_access(test, checks=None): def step_get_git_hub_access_token(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory get-git-hub-access-token ' - '--name "{myFactory}" ' - '--git-hub-access-code "some" ' - '--git-hub-access-token-base-url "some" ' - '--git-hub-client-id "some" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory get-git-hub-access-token " + '--name "{myFactory}" ' + '--git-hub-access-code "some" ' + '--git-hub-access-token-base-url "some" ' + '--git-hub-client-id "some" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /ActivityRuns/post/ActivityRuns_QueryByPipelineRun @@ -116,13 +122,15 @@ def step_get_git_hub_access_token(test, checks=None): def step_activity_run_query_by_pipeline_run(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory activity-run query-by-pipeline-run ' - '--factory-name "{myFactory}" ' - '--last-updated-after "2018-06-16T00:36:44.3345758Z" ' - '--last-updated-before "2018-06-16T00:49:48.3686473Z" ' - '--resource-group "{rg}" ' - '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"', - checks=checks) + test.cmd( + "az datafactory activity-run query-by-pipeline-run " + '--factory-name "{myFactory}" ' + '--last-updated-after "2018-06-16T00:36:44.3345758Z" ' + '--last-updated-before "2018-06-16T00:49:48.3686473Z" ' + '--resource-group "{rg}" ' + '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"', + checks=checks, + ) # EXAMPLE: /Datasets/put/Datasets_Create @@ -130,16 +138,18 @@ def step_activity_run_query_by_pipeline_run(test, checks=None): def step_dataset_create(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory dataset create ' - '--properties "{{\\"type\\":\\"AzureBlob\\",\\"linkedServiceName\\":{{\\"type\\":\\"LinkedServiceReference' - '\\",\\"referenceName\\":\\"{myLinkedService}\\"}},\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Str' - 'ing\\"}},\\"MyFolderPath\\":{{\\"type\\":\\"String\\"}}}},\\"typeProperties\\":{{\\"format\\":{{\\"type\\' - '":\\"TextFormat\\"}},\\"fileName\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFileName\\"' - '}},\\"folderPath\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFolderPath\\"}}}}}}" ' - '--name "{myDataset}" ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory dataset create " + '--properties "{{\\"type\\":\\"AzureBlob\\",\\"linkedServiceName\\":{{\\"type\\":\\"LinkedServiceReference' + '\\",\\"referenceName\\":\\"{myLinkedService}\\"}},\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Str' + 'ing\\"}},\\"MyFolderPath\\":{{\\"type\\":\\"String\\"}}}},\\"typeProperties\\":{{\\"format\\":{{\\"type\\' + 
'":\\"TextFormat\\"}},\\"fileName\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFileName\\"' + '}},\\"folderPath\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFolderPath\\"}}}}}}" ' + '--name "{myDataset}" ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /Datasets/get/Datasets_Get @@ -147,11 +157,13 @@ def step_dataset_create(test, checks=None): def step_dataset_show(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory dataset show ' - '--name "{myDataset}" ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory dataset show " + '--name "{myDataset}" ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /Datasets/get/Datasets_ListByFactory @@ -159,10 +171,12 @@ def step_dataset_show(test, checks=None): def step_dataset_list(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory dataset list ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory dataset list " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /Datasets/delete/Datasets_Delete @@ -170,11 +184,13 @@ def step_dataset_list(test, checks=None): def step_dataset_delete(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory dataset delete -y ' - '--name "{myDataset}" ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory dataset delete -y " + '--name "{myDataset}" ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /IntegrationRuntimes/put/IntegrationRuntimes_Create @@ -182,12 +198,14 @@ def step_dataset_delete(test, checks=None): def step_integration_runtime_self_hosted_create(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory integration-runtime self-hosted create ' - '--factory-name "{myFactory}" ' - '--description "A selfhosted integration runtime" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory integration-runtime self-hosted create " + '--factory-name "{myFactory}" ' + '--description "A selfhosted integration runtime" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /IntegrationRuntimes/get/IntegrationRuntimes_Get @@ -195,11 +213,13 @@ def step_integration_runtime_self_hosted_create(test, checks=None): def step_integration_runtime_show(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory integration-runtime show ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory integration-runtime show " + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /IntegrationRuntimes/get/IntegrationRuntimes_ListByFactory @@ -207,10 +227,12 @@ def step_integration_runtime_show(test, checks=None): def step_integration_runtime_list(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory integration-runtime list ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory integration-runtime list " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: 
/IntegrationRuntimes/patch/IntegrationRuntimes_Update @@ -218,13 +240,15 @@ def step_integration_runtime_list(test, checks=None): def step_integration_runtime_update(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory integration-runtime update ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}" ' - '--auto-update "Off" ' - '--update-delay-offset "\\"PT3H\\""', - checks=checks) + test.cmd( + "az datafactory integration-runtime update " + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}" ' + '--auto-update "Off" ' + '--update-delay-offset "\\"PT3H\\""', + checks=checks, + ) # EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_CreateLinkedIntegrationRuntime @@ -232,15 +256,17 @@ def step_integration_runtime_update(test, checks=None): def step_integration_runtime_linked(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory integration-runtime linked-integration-runtime create ' - '--name "bfa92911-9fb6-4fbe-8f23-beae87bc1c83" ' - '--location "West US" ' - '--data-factory-name "e9955d6d-56ea-4be3-841c-52a12c1a9981" ' - '--subscription-id "061774c7-4b5a-4159-a55b-365581830283" ' - '--factory-name "{myFactory}" ' - '--integration-runtime-name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory integration-runtime linked-integration-runtime create " + '--name "bfa92911-9fb6-4fbe-8f23-beae87bc1c83" ' + '--location "West US" ' + '--data-factory-name "e9955d6d-56ea-4be3-841c-52a12c1a9981" ' + '--subscription-id "061774c7-4b5a-4159-a55b-365581830283" ' + '--factory-name "{myFactory}" ' + '--integration-runtime-name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_GetConnectionInfo @@ -248,11 +274,13 @@ def step_integration_runtime_linked(test, checks=None): def step_integration_runtime_get_connection_info(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory integration-runtime get-connection-info ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory integration-runtime get-connection-info " + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_GetMonitoringData @@ -260,11 +288,13 @@ def step_integration_runtime_get_connection_info(test, checks=None): def step_integration_runtime_get_monitoring_data(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory integration-runtime get-monitoring-data ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory integration-runtime get-monitoring-data " + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_GetStatus @@ -272,11 +302,13 @@ def step_integration_runtime_get_monitoring_data(test, checks=None): def step_integration_runtime_get_status(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory integration-runtime get-status ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory 
integration-runtime get-status " + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_ListAuthKeys @@ -284,11 +316,13 @@ def step_integration_runtime_get_status(test, checks=None): def step_integration_runtime_list_auth_key(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory integration-runtime list-auth-key ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory integration-runtime list-auth-key " + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_RegenerateAuthKey @@ -296,12 +330,14 @@ def step_integration_runtime_list_auth_key(test, checks=None): def step_integration_runtime_regenerate_auth_key(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory integration-runtime regenerate-auth-key ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--key-name "authKey2" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory integration-runtime regenerate-auth-key " + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--key-name "authKey2" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_Start @@ -309,11 +345,13 @@ def step_integration_runtime_regenerate_auth_key(test, checks=None): def step_integration_runtime_start(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory integration-runtime start ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime2}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory integration-runtime start " + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime2}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_Stop @@ -321,11 +359,13 @@ def step_integration_runtime_start(test, checks=None): def step_integration_runtime_stop(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory integration-runtime stop ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime2}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory integration-runtime stop " + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime2}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_SyncCredentials @@ -333,11 +373,13 @@ def step_integration_runtime_stop(test, checks=None): def step_integration_runtime_sync_credentials(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory integration-runtime sync-credentials ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory integration-runtime sync-credentials " + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_Upgrade @@ -345,12 +387,14 @@ def step_integration_runtime_sync_credentials(test, checks=None): def step_integration_runtime_remove_link(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory integration-runtime remove-link ' - 
'--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--linked-factory-name "exampleFactoryName-linked" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory integration-runtime remove-link " + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--linked-factory-name "exampleFactoryName-linked" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /IntegrationRuntimeNodes/get/IntegrationRuntimeNodes_Get @@ -358,12 +402,14 @@ def step_integration_runtime_remove_link(test, checks=None): def step_integration_runtime_node_show(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory integration-runtime-node show ' - '--factory-name "{myFactory}" ' - '--integration-runtime-name "{myIntegrationRuntime}" ' - '--node-name "Node_1" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory integration-runtime-node show " + '--factory-name "{myFactory}" ' + '--integration-runtime-name "{myIntegrationRuntime}" ' + '--node-name "Node_1" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /IntegrationRuntimeNodes/patch/IntegrationRuntimeNodes_Update @@ -371,13 +417,15 @@ def step_integration_runtime_node_show(test, checks=None): def step_integration_runtime_node_update(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory integration-runtime-node update ' - '--factory-name "{myFactory}" ' - '--integration-runtime-name "{myIntegrationRuntime}" ' - '--node-name "Node_1" ' - '--resource-group "{rg}" ' - '--concurrent-jobs-limit 2', - checks=checks) + test.cmd( + "az datafactory integration-runtime-node update " + '--factory-name "{myFactory}" ' + '--integration-runtime-name "{myIntegrationRuntime}" ' + '--node-name "Node_1" ' + '--resource-group "{rg}" ' + "--concurrent-jobs-limit 2", + checks=checks, + ) # EXAMPLE: /IntegrationRuntimeNodes/post/IntegrationRuntimeNodes_GetIpAddress @@ -385,12 +433,14 @@ def step_integration_runtime_node_update(test, checks=None): def step_integration_runtime_node_get_ip_address(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory integration-runtime-node get-ip-address ' - '--factory-name "{myFactory}" ' - '--integration-runtime-name "{myIntegrationRuntime}" ' - '--node-name "Node_1" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory integration-runtime-node get-ip-address " + '--factory-name "{myFactory}" ' + '--integration-runtime-name "{myIntegrationRuntime}" ' + '--node-name "Node_1" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /IntegrationRuntimeNodes/delete/IntegrationRuntimesNodes_Delete @@ -398,12 +448,14 @@ def step_integration_runtime_node_get_ip_address(test, checks=None): def step_integration_runtime_node_delete(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory integration-runtime-node delete -y ' - '--factory-name "{myFactory}" ' - '--integration-runtime-name "{myIntegrationRuntime}" ' - '--node-name "Node_1" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory integration-runtime-node delete -y " + '--factory-name "{myFactory}" ' + '--integration-runtime-name "{myIntegrationRuntime}" ' + '--node-name "Node_1" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /IntegrationRuntimes/delete/IntegrationRuntimes_Delete @@ -411,11 +463,13 @@ def step_integration_runtime_node_delete(test, checks=None): def step_integration_runtime_delete(test, checks=None): if checks is None: checks = [] - 
test.cmd('az datafactory integration-runtime delete -y ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory integration-runtime delete -y " + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /LinkedServices/put/LinkedServices_Create @@ -423,14 +477,16 @@ def step_integration_runtime_delete(test, checks=None): def step_linked_service_create(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory linked-service create ' - '--factory-name "{myFactory}" ' - '--properties "{{\\"type\\":\\"AzureStorage\\",\\"typeProperties\\":{{\\"connectionString\\":{{\\"type\\":' - '\\"SecureString\\",\\"value\\":\\"DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;Accoun' - 'tKey=\\"}}}}}}" ' - '--name "{myLinkedService}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory linked-service create " + '--factory-name "{myFactory}" ' + '--properties "{{\\"type\\":\\"AzureStorage\\",\\"typeProperties\\":{{\\"connectionString\\":{{\\"type\\":' + '\\"SecureString\\",\\"value\\":\\"DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;Accoun' + 'tKey=\\"}}}}}}" ' + '--name "{myLinkedService}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /LinkedServices/get/LinkedServices_Get @@ -438,11 +494,13 @@ def step_linked_service_create(test, checks=None): def step_linked_service_show(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory linked-service show ' - '--factory-name "{myFactory}" ' - '--name "{myLinkedService}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory linked-service show " + '--factory-name "{myFactory}" ' + '--name "{myLinkedService}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /LinkedServices/get/LinkedServices_ListByFactory @@ -450,10 +508,12 @@ def step_linked_service_show(test, checks=None): def step_linked_service_list(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory linked-service list ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory linked-service list " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /LinkedServices/delete/LinkedServices_Delete @@ -461,11 +521,13 @@ def step_linked_service_list(test, checks=None): def step_linked_service_delete(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory linked-service delete -y ' - '--factory-name "{myFactory}" ' - '--name "{myLinkedService}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory linked-service delete -y " + '--factory-name "{myFactory}" ' + '--name "{myLinkedService}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /ManagedVirtualNetworks/put/ManagedVirtualNetworks_Create @@ -473,11 +535,13 @@ def step_linked_service_delete(test, checks=None): def step_managed_virtual_network_create(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory managed-virtual-network create ' - '--factory-name "{myFactory}" ' - '--name "{myManagedVirtualNetwork}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory managed-virtual-network create " + '--factory-name "{myFactory}" ' + '--name "{myManagedVirtualNetwork}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: 
/ManagedVirtualNetworks/get/ManagedVirtualNetworks_Get @@ -485,11 +549,13 @@ def step_managed_virtual_network_create(test, checks=None): def step_managed_virtual_network_show(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory managed-virtual-network show ' - '--factory-name "{myFactory}" ' - '--name "{myManagedVirtualNetwork}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory managed-virtual-network show " + '--factory-name "{myFactory}" ' + '--name "{myManagedVirtualNetwork}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /ManagedVirtualNetworks/get/ManagedVirtualNetworks_ListByFactory @@ -497,10 +563,12 @@ def step_managed_virtual_network_show(test, checks=None): def step_managed_virtual_network_list(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory managed-virtual-network list ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory managed-virtual-network list " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /ManagedPrivateEndpoints/put/ManagedPrivateEndpoints_Create @@ -508,15 +576,17 @@ def step_managed_virtual_network_list(test, checks=None): def step_managed_private_endpoint_create(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory managed-private-endpoint create ' - '--factory-name "{myFactory}" ' - '--group-id "blob" ' - '--private-link-resource-id "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.Stor' - 'age/storageAccounts/{sa}" ' - '--name "{myManagedPrivateEndpoint}" ' - '--managed-virtual-network-name "{myManagedVirtualNetwork}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory managed-private-endpoint create " + '--factory-name "{myFactory}" ' + '--group-id "blob" ' + '--private-link-resource-id "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.Stor' + 'age/storageAccounts/{sa}" ' + '--name "{myManagedPrivateEndpoint}" ' + '--managed-virtual-network-name "{myManagedVirtualNetwork}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /ManagedPrivateEndpoints/get/ManagedPrivateEndpoints_Get @@ -524,12 +594,14 @@ def step_managed_private_endpoint_create(test, checks=None): def step_managed_private_endpoint_show(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory managed-private-endpoint show ' - '--factory-name "{myFactory}" ' - '--name "{myManagedPrivateEndpoint}" ' - '--managed-virtual-network-name "{myManagedVirtualNetwork}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory managed-private-endpoint show " + '--factory-name "{myFactory}" ' + '--name "{myManagedPrivateEndpoint}" ' + '--managed-virtual-network-name "{myManagedVirtualNetwork}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /ManagedPrivateEndpoints/get/ManagedPrivateEndpoints_ListByFactory @@ -537,11 +609,13 @@ def step_managed_private_endpoint_show(test, checks=None): def step_managed_private_endpoint_list(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory managed-private-endpoint list ' - '--factory-name "{myFactory}" ' - '--managed-virtual-network-name "{myManagedVirtualNetwork}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory managed-private-endpoint list " + '--factory-name "{myFactory}" ' + '--managed-virtual-network-name "{myManagedVirtualNetwork}" ' + '--resource-group 
"{rg}"', + checks=checks, + ) # EXAMPLE: /ManagedPrivateEndpoints/delete/ManagedPrivateEndpoints_Delete @@ -549,12 +623,14 @@ def step_managed_private_endpoint_list(test, checks=None): def step_managed_private_endpoint_delete(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory managed-private-endpoint delete -y ' - '--factory-name "{myFactory}" ' - '--name "{myManagedPrivateEndpoint}" ' - '--managed-virtual-network-name "{myManagedVirtualNetwork}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory managed-private-endpoint delete -y " + '--factory-name "{myFactory}" ' + '--name "{myManagedPrivateEndpoint}" ' + '--managed-virtual-network-name "{myManagedVirtualNetwork}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /PipelineRuns/get/PipelineRuns_Get @@ -562,11 +638,13 @@ def step_managed_private_endpoint_delete(test, checks=None): def step_pipeline_run_show(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory pipeline-run show ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"', - checks=checks) + test.cmd( + "az datafactory pipeline-run show " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"', + checks=checks, + ) # EXAMPLE: /PipelineRuns/post/PipelineRuns_Cancel @@ -574,11 +652,13 @@ def step_pipeline_run_show(test, checks=None): def step_pipeline_run_cancel(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory pipeline-run cancel ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--run-id "16ac5348-ff82-4f95-a80d-638c1d47b721"', - checks=checks) + test.cmd( + "az datafactory pipeline-run cancel " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--run-id "16ac5348-ff82-4f95-a80d-638c1d47b721"', + checks=checks, + ) # EXAMPLE: /PipelineRuns/post/PipelineRuns_QueryByFactory @@ -586,13 +666,15 @@ def step_pipeline_run_cancel(test, checks=None): def step_pipeline_run_query_by_factory(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory pipeline-run query-by-factory ' - '--factory-name "{myFactory}" ' - '--filters operand="PipelineName" operator="Equals" values="{myPipeline}" ' - '--last-updated-after "2018-06-16T00:36:44.3345758Z" ' - '--last-updated-before "2018-06-16T00:49:48.3686473Z" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory pipeline-run query-by-factory " + '--factory-name "{myFactory}" ' + '--filters operand="PipelineName" operator="Equals" values="{myPipeline}" ' + '--last-updated-after "2018-06-16T00:36:44.3345758Z" ' + '--last-updated-before "2018-06-16T00:49:48.3686473Z" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /Pipelines/put/Pipelines_Create @@ -600,23 +682,25 @@ def step_pipeline_run_query_by_factory(test, checks=None): def step_pipeline_create(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory pipeline create ' - '--factory-name "{myFactory}" ' - '--pipeline "{{\\"activities\\":[{{\\"name\\":\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typ' - 'eProperties\\":{{\\"activities\\":[{{\\"name\\":\\"ExampleCopyActivity\\",\\"type\\":\\"Copy\\",\\"inputs' - '\\":[{{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":\\"examplecontainer.csv\\",' - '\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],\\"outputs\\":[{{\\"' - 
'type\\":\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Expression\\",\\"value' - '\\":\\"@item()\\"}},\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],' - '\\"typeProperties\\":{{\\"dataIntegrationUnits\\":32,\\"sink\\":{{\\"type\\":\\"BlobSink\\"}},\\"source\\' - '":{{\\"type\\":\\"BlobSource\\"}}}}}}],\\"isSequential\\":true,\\"items\\":{{\\"type\\":\\"Expression\\",' - '\\"value\\":\\"@pipeline().parameters.OutputBlobNameList\\"}}}}}}],\\"parameters\\":{{\\"JobId\\":{{\\"ty' - 'pe\\":\\"String\\"}},\\"OutputBlobNameList\\":{{\\"type\\":\\"Array\\"}}}},\\"variables\\":{{\\"TestVaria' - 'bleArray\\":{{\\"type\\":\\"Array\\"}}}},\\"runDimensions\\":{{\\"JobId\\":{{\\"type\\":\\"Expression\\",' - '\\"value\\":\\"@pipeline().parameters.JobId\\"}}}},\\"duration\\":\\"0.00:10:00\\"}}" ' - '--name "{myPipeline}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory pipeline create " + '--factory-name "{myFactory}" ' + '--pipeline "{{\\"activities\\":[{{\\"name\\":\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typ' + 'eProperties\\":{{\\"activities\\":[{{\\"name\\":\\"ExampleCopyActivity\\",\\"type\\":\\"Copy\\",\\"inputs' + '\\":[{{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":\\"examplecontainer.csv\\",' + '\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],\\"outputs\\":[{{\\"' + 'type\\":\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Expression\\",\\"value' + '\\":\\"@item()\\"}},\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],' + '\\"typeProperties\\":{{\\"dataIntegrationUnits\\":32,\\"sink\\":{{\\"type\\":\\"BlobSink\\"}},\\"source\\' + '":{{\\"type\\":\\"BlobSource\\"}}}}}}],\\"isSequential\\":true,\\"items\\":{{\\"type\\":\\"Expression\\",' + '\\"value\\":\\"@pipeline().parameters.OutputBlobNameList\\"}}}}}}],\\"parameters\\":{{\\"JobId\\":{{\\"ty' + 'pe\\":\\"String\\"}},\\"OutputBlobNameList\\":{{\\"type\\":\\"Array\\"}}}},\\"variables\\":{{\\"TestVaria' + 'bleArray\\":{{\\"type\\":\\"Array\\"}}}},\\"runDimensions\\":{{\\"JobId\\":{{\\"type\\":\\"Expression\\",' + '\\"value\\":\\"@pipeline().parameters.JobId\\"}}}},\\"duration\\":\\"0.00:10:00\\"}}" ' + '--name "{myPipeline}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /Pipelines/put/Pipelines_Update @@ -624,23 +708,25 @@ def step_pipeline_create(test, checks=None): def step_pipeline_update(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory pipeline update ' - '--factory-name "{myFactory}" ' - '--description "Example description" ' - '--activities "[{{\\"name\\":\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typeProperties\\":{{' - '\\"activities\\":[{{\\"name\\":\\"ExampleCopyActivity\\",\\"type\\":\\"Copy\\",\\"inputs\\":[{{\\"type\\"' - ':\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":\\"examplecontainer.csv\\",\\"MyFolderPath\\"' - ':\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],\\"outputs\\":[{{\\"type\\":\\"Dataset' - 'Reference\\",\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@item()\\"}' - '},\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],\\"typeProperties' - '\\":{{\\"dataIntegrationUnits\\":32,\\"sink\\":{{\\"type\\":\\"BlobSink\\"}},\\"source\\":{{\\"type\\":\\' - 
'"BlobSource\\"}}}}}}],\\"isSequential\\":true,\\"items\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@p' - 'ipeline().parameters.OutputBlobNameList\\"}}}}}}]" ' - '--parameters "{{\\"OutputBlobNameList\\":{{\\"type\\":\\"Array\\"}}}}" ' - '--duration "0.00:10:00" ' - '--name "{myPipeline}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory pipeline update " + '--factory-name "{myFactory}" ' + '--description "Example description" ' + '--activities "[{{\\"name\\":\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typeProperties\\":{{' + '\\"activities\\":[{{\\"name\\":\\"ExampleCopyActivity\\",\\"type\\":\\"Copy\\",\\"inputs\\":[{{\\"type\\"' + ':\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":\\"examplecontainer.csv\\",\\"MyFolderPath\\"' + ':\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],\\"outputs\\":[{{\\"type\\":\\"Dataset' + 'Reference\\",\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@item()\\"}' + '},\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],\\"typeProperties' + '\\":{{\\"dataIntegrationUnits\\":32,\\"sink\\":{{\\"type\\":\\"BlobSink\\"}},\\"source\\":{{\\"type\\":\\' + '"BlobSource\\"}}}}}}],\\"isSequential\\":true,\\"items\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@p' + 'ipeline().parameters.OutputBlobNameList\\"}}}}}}]" ' + '--parameters "{{\\"OutputBlobNameList\\":{{\\"type\\":\\"Array\\"}}}}" ' + '--duration "0.00:10:00" ' + '--name "{myPipeline}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /Pipelines/get/Pipelines_Get @@ -648,11 +734,13 @@ def step_pipeline_update(test, checks=None): def step_pipeline_show(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory pipeline show ' - '--factory-name "{myFactory}" ' - '--name "{myPipeline}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory pipeline show " + '--factory-name "{myFactory}" ' + '--name "{myPipeline}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /Pipelines/get/Pipelines_ListByFactory @@ -660,10 +748,12 @@ def step_pipeline_show(test, checks=None): def step_pipeline_list(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory pipeline list ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory pipeline list " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /Pipelines/post/Pipelines_CreateRun @@ -671,12 +761,14 @@ def step_pipeline_list(test, checks=None): def step_pipeline_create_run(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory pipeline create-run ' - '--factory-name "{myFactory}" ' - '--parameters "{{\\"OutputBlobNameList\\":[\\"exampleoutput.csv\\"]}}" ' - '--name "{myPipeline}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory pipeline create-run " + '--factory-name "{myFactory}" ' + '--parameters "{{\\"OutputBlobNameList\\":[\\"exampleoutput.csv\\"]}}" ' + '--name "{myPipeline}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /Pipelines/delete/Pipelines_Delete @@ -684,11 +776,13 @@ def step_pipeline_create_run(test, checks=None): def step_pipeline_delete(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory pipeline delete -y ' - '--factory-name "{myFactory}" ' - '--name "{myPipeline}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory 
pipeline delete -y " + '--factory-name "{myFactory}" ' + '--name "{myPipeline}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /Triggers/put/Triggers_Create @@ -696,16 +790,18 @@ def step_pipeline_delete(test, checks=None): def step_trigger_create(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory trigger create ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--properties "{{\\"type\\":\\"ScheduleTrigger\\",\\"pipelines\\":[{{\\"parameters\\":{{\\"OutputBlobNameL' - 'ist\\":[\\"exampleoutput.csv\\"]}},\\"pipelineReference\\":{{\\"type\\":\\"PipelineReference\\",\\"refere' - 'nceName\\":\\"{myPipeline}\\"}}}}],\\"typeProperties\\":{{\\"recurrence\\":{{\\"endTime\\":\\"2018-06-16T' - '00:55:13.8441801Z\\",\\"frequency\\":\\"Minute\\",\\"interval\\":4,\\"startTime\\":\\"2018-06-16T00:39:13' - '.8441801Z\\",\\"timeZone\\":\\"UTC\\"}}}}}}" ' - '--name "{myTrigger}"', - checks=checks) + test.cmd( + "az datafactory trigger create " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--properties "{{\\"type\\":\\"ScheduleTrigger\\",\\"pipelines\\":[{{\\"parameters\\":{{\\"OutputBlobNameL' + 'ist\\":[\\"exampleoutput.csv\\"]}},\\"pipelineReference\\":{{\\"type\\":\\"PipelineReference\\",\\"refere' + 'nceName\\":\\"{myPipeline}\\"}}}}],\\"typeProperties\\":{{\\"recurrence\\":{{\\"endTime\\":\\"2018-06-16T' + '00:55:13.8441801Z\\",\\"frequency\\":\\"Minute\\",\\"interval\\":4,\\"startTime\\":\\"2018-06-16T00:39:13' + '.8441801Z\\",\\"timeZone\\":\\"UTC\\"}}}}}}" ' + '--name "{myTrigger}"', + checks=checks, + ) # EXAMPLE: /Triggers/get/Triggers_Get @@ -713,11 +809,13 @@ def step_trigger_create(test, checks=None): def step_trigger_show(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory trigger show ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--name "{myTrigger}"', - checks=checks) + test.cmd( + "az datafactory trigger show " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myTrigger}"', + checks=checks, + ) # EXAMPLE: /Triggers/get/Triggers_ListByFactory @@ -725,10 +823,12 @@ def step_trigger_show(test, checks=None): def step_trigger_list(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory trigger list ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory trigger list " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /Triggers/post/Triggers_GetEventSubscriptionStatus @@ -736,11 +836,13 @@ def step_trigger_list(test, checks=None): def step_trigger_get_event_subscription_status(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory trigger get-event-subscription-status ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--name "{myTrigger}"', - checks=checks) + test.cmd( + "az datafactory trigger get-event-subscription-status " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myTrigger}"', + checks=checks, + ) # EXAMPLE: /Triggers/post/Triggers_QueryByFactory @@ -748,11 +850,13 @@ def step_trigger_get_event_subscription_status(test, checks=None): def step_trigger_query_by_factory(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory trigger query-by-factory ' - '--factory-name "{myFactory}" ' - '--parent-trigger-name "{myTrigger}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory trigger query-by-factory " + 
'--factory-name "{myFactory}" ' + '--parent-trigger-name "{myTrigger}" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /Triggers/post/Triggers_Start @@ -760,11 +864,13 @@ def step_trigger_query_by_factory(test, checks=None): def step_trigger_start(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory trigger start ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--name "{myTrigger}"', - checks=checks) + test.cmd( + "az datafactory trigger start " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myTrigger}"', + checks=checks, + ) # EXAMPLE: /Triggers/post/Triggers_Stop @@ -772,11 +878,13 @@ def step_trigger_start(test, checks=None): def step_trigger_stop(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory trigger stop ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--name "{myTrigger}"', - checks=checks) + test.cmd( + "az datafactory trigger stop " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myTrigger}"', + checks=checks, + ) # EXAMPLE: /Triggers/post/Triggers_SubscribeToEvents @@ -784,11 +892,13 @@ def step_trigger_stop(test, checks=None): def step_trigger_subscribe_to_event(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory trigger subscribe-to-event ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--name "{myTrigger}"', - checks=checks) + test.cmd( + "az datafactory trigger subscribe-to-event " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myTrigger}"', + checks=checks, + ) # EXAMPLE: /Triggers/post/Triggers_UnsubscribeFromEvents @@ -796,11 +906,13 @@ def step_trigger_subscribe_to_event(test, checks=None): def step_trigger_unsubscribe_from_event(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory trigger unsubscribe-from-event ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--name "{myTrigger}"', - checks=checks) + test.cmd( + "az datafactory trigger unsubscribe-from-event " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myTrigger}"', + checks=checks, + ) # EXAMPLE: /TriggerRuns/post/TriggerRuns_QueryByFactory @@ -808,13 +920,15 @@ def step_trigger_unsubscribe_from_event(test, checks=None): def step_trigger_run_query_by_factory(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory trigger-run query-by-factory ' - '--factory-name "{myFactory}" ' - '--filters operand="TriggerName" operator="Equals" values="{myTrigger}" ' - '--last-updated-after "2018-06-16T00:36:44.3345758Z" ' - '--last-updated-before "2018-06-16T00:49:48.3686473Z" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory trigger-run query-by-factory " + '--factory-name "{myFactory}" ' + '--filters operand="TriggerName" operator="Equals" values="{myTrigger}" ' + '--last-updated-after "2018-06-16T00:36:44.3345758Z" ' + '--last-updated-before "2018-06-16T00:49:48.3686473Z" ' + '--resource-group "{rg}"', + checks=checks, + ) # EXAMPLE: /TriggerRuns/post/Triggers_Cancel @@ -822,12 +936,14 @@ def step_trigger_run_query_by_factory(test, checks=None): def step_trigger_run_cancel(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory trigger-run cancel ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" ' - '--trigger-name "{myTrigger}"', - checks=checks) + test.cmd( + "az datafactory trigger-run 
cancel " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" ' + '--trigger-name "{myTrigger}"', + checks=checks, + ) # EXAMPLE: /TriggerRuns/post/Triggers_Rerun @@ -835,12 +951,14 @@ def step_trigger_run_cancel(test, checks=None): def step_trigger_run_rerun(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory trigger-run rerun ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" ' - '--trigger-name "{myTrigger}"', - checks=checks) + test.cmd( + "az datafactory trigger-run rerun " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" ' + '--trigger-name "{myTrigger}"', + checks=checks, + ) # EXAMPLE: /Triggers/delete/Triggers_Delete @@ -848,11 +966,13 @@ def step_trigger_run_rerun(test, checks=None): def step_trigger_delete(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory trigger delete -y ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--name "{myTrigger}"', - checks=checks) + test.cmd( + "az datafactory trigger delete -y " + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myTrigger}"', + checks=checks, + ) # EXAMPLE: /Factories/delete/Factories_Delete @@ -860,7 +980,7 @@ def step_trigger_delete(test, checks=None): def step_delete(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory delete -y ' - '--name "{myFactory}" ' - '--resource-group "{rg}"', - checks=checks) + test.cmd( + "az datafactory delete -y " '--name "{myFactory}" ' '--resource-group "{rg}"', + checks=checks, + ) diff --git a/src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory.yaml b/src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory.yaml deleted file mode 100644 index bc4b967e683..00000000000 --- a/src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory.yaml +++ /dev/null @@ -1,5130 +0,0 @@ -interactions: -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - account list - Connection: - - keep-alive - ParameterSetName: - - --query -o - User-Agent: - - python/3.8.0 (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) msrest/0.6.18 - msrest_azure/0.6.3 azure-mgmt-resource/10.2.0 Azure-SDK-For-Python AZURECLI/2.11.1 - accept-language: - - en-US - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/locations?api-version=2019-11-01 - response: - body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus","name":"eastus","displayName":"East - US","regionalDisplayName":"(US) East US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-79.8164","latitude":"37.3719","physicalLocation":"Virginia","pairedRegion":[{"name":"westus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2","name":"eastus2","displayName":"East - US 2","regionalDisplayName":"(US) East US 
2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","physicalLocation":"Virginia","pairedRegion":[{"name":"centralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus","name":"southcentralus","displayName":"South - Central US","regionalDisplayName":"(US) South Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-98.5","latitude":"29.4167","physicalLocation":"Texas","pairedRegion":[{"name":"northcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2","name":"westus2","displayName":"West - US 2","regionalDisplayName":"(US) West US 2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-119.852","latitude":"47.233","physicalLocation":"Washington","pairedRegion":[{"name":"westcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast","name":"australiaeast","displayName":"Australia - East","regionalDisplayName":"(Asia Pacific) Australia East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"151.2094","latitude":"-33.86","physicalLocation":"New - South Wales","pairedRegion":[{"name":"australiasoutheast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia","name":"southeastasia","displayName":"Southeast - Asia","regionalDisplayName":"(Asia Pacific) Southeast Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"103.833","latitude":"1.283","physicalLocation":"Singapore","pairedRegion":[{"name":"eastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope","name":"northeurope","displayName":"North - Europe","regionalDisplayName":"(Europe) North Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-6.2597","latitude":"53.3478","physicalLocation":"Ireland","pairedRegion":[{"name":"westeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth","name":"uksouth","displayName":"UK - South","regionalDisplayName":"(Europe) UK South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-0.799","latitude":"50.941","physicalLocation":"London","pairedRegion":[{"name":"ukwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope","name":"westeurope","displayName":"West - Europe","regionalDisplayName":"(Europe) West 
Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"4.9","latitude":"52.3667","physicalLocation":"Netherlands","pairedRegion":[{"name":"northeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus","name":"centralus","displayName":"Central - US","regionalDisplayName":"(US) Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","physicalLocation":"Iowa","pairedRegion":[{"name":"eastus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus","name":"northcentralus","displayName":"North - Central US","regionalDisplayName":"(US) North Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-87.6278","latitude":"41.8819","physicalLocation":"Illinois","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus","name":"westus","displayName":"West - US","regionalDisplayName":"(US) West US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-122.417","latitude":"37.783","physicalLocation":"California","pairedRegion":[{"name":"eastus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth","name":"southafricanorth","displayName":"South - Africa North","regionalDisplayName":"(Africa) South Africa North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Africa","longitude":"28.218370","latitude":"-25.731340","physicalLocation":"Johannesburg","pairedRegion":[{"name":"southafricawest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia","name":"centralindia","displayName":"Central - India","regionalDisplayName":"(Asia Pacific) Central India","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"73.9197","latitude":"18.5822","physicalLocation":"Pune","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia","name":"eastasia","displayName":"East - Asia","regionalDisplayName":"(Asia Pacific) East Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"114.188","latitude":"22.267","physicalLocation":"Hong - Kong","pairedRegion":[{"name":"southeastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast","name":"japaneast","displayName":"Japan - East","regionalDisplayName":"(Asia Pacific) Japan East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"139.77","latitude":"35.68","physicalLocation":"Tokyo, - 
Saitama","pairedRegion":[{"name":"japanwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral","name":"koreacentral","displayName":"Korea - Central","regionalDisplayName":"(Asia Pacific) Korea Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"126.9780","latitude":"37.5665","physicalLocation":"Seoul","pairedRegion":[{"name":"koreasouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral","name":"canadacentral","displayName":"Canada - Central","regionalDisplayName":"(Canada) Canada Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Canada","longitude":"-79.383","latitude":"43.653","physicalLocation":"Toronto","pairedRegion":[{"name":"canadaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral","name":"francecentral","displayName":"France - Central","regionalDisplayName":"(Europe) France Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"2.3730","latitude":"46.3772","physicalLocation":"Paris","pairedRegion":[{"name":"francesouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral","name":"germanywestcentral","displayName":"Germany - West Central","regionalDisplayName":"(Europe) Germany West Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.682127","latitude":"50.110924","physicalLocation":"Frankfurt","pairedRegion":[{"name":"germanynorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast","name":"norwayeast","displayName":"Norway - East","regionalDisplayName":"(Europe) Norway East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"10.752245","latitude":"59.913868","physicalLocation":"Norway","pairedRegion":[{"name":"norwaywest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth","name":"switzerlandnorth","displayName":"Switzerland - North","regionalDisplayName":"(Europe) Switzerland North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.564572","latitude":"47.451542","physicalLocation":"Zurich","pairedRegion":[{"name":"switzerlandwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth","name":"uaenorth","displayName":"UAE - North","regionalDisplayName":"(Middle East) UAE North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Middle - 
East","longitude":"55.316666","latitude":"25.266666","physicalLocation":"Dubai","pairedRegion":[{"name":"uaecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth","name":"brazilsouth","displayName":"Brazil - South","regionalDisplayName":"(South America) Brazil South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"South - America","longitude":"-46.633","latitude":"-23.55","physicalLocation":"Sao - Paulo State","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage","name":"centralusstage","displayName":"Central - US (Stage)","regionalDisplayName":"(US) Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstage","name":"eastusstage","displayName":"East - US (Stage)","regionalDisplayName":"(US) East US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2stage","name":"eastus2stage","displayName":"East - US 2 (Stage)","regionalDisplayName":"(US) East US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralusstage","name":"northcentralusstage","displayName":"North - Central US (Stage)","regionalDisplayName":"(US) North Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstage","name":"southcentralusstage","displayName":"South - Central US (Stage)","regionalDisplayName":"(US) South Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westusstage","name":"westusstage","displayName":"West - US (Stage)","regionalDisplayName":"(US) West US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2stage","name":"westus2stage","displayName":"West - US 2 (Stage)","regionalDisplayName":"(US) West US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asia","name":"asia","displayName":"Asia","regionalDisplayName":"Asia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asiapacific","name":"asiapacific","displayName":"Asia - Pacific","regionalDisplayName":"Asia 
Pacific","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia","name":"australia","displayName":"Australia","regionalDisplayName":"Australia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil","name":"brazil","displayName":"Brazil","regionalDisplayName":"Brazil","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada","name":"canada","displayName":"Canada","regionalDisplayName":"Canada","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe","name":"europe","displayName":"Europe","regionalDisplayName":"Europe","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global","name":"global","displayName":"Global","regionalDisplayName":"Global","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india","name":"india","displayName":"India","regionalDisplayName":"India","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan","name":"japan","displayName":"Japan","regionalDisplayName":"Japan","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk","name":"uk","displayName":"United - Kingdom","regionalDisplayName":"United Kingdom","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstates","name":"unitedstates","displayName":"United - States","regionalDisplayName":"United States","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage","name":"eastasiastage","displayName":"East - Asia (Stage)","regionalDisplayName":"(Asia Pacific) East Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia - Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasiastage","name":"southeastasiastage","displayName":"Southeast - Asia (Stage)","regionalDisplayName":"(Asia Pacific) Southeast Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia - Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap","name":"centraluseuap","displayName":"Central - US EUAP","regionalDisplayName":"(US) Central US EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","pairedRegion":[{"name":"eastus2euap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap","name":"eastus2euap","displayName":"East - US 2 EUAP","regionalDisplayName":"(US) East US 2 
EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","pairedRegion":[{"name":"centraluseuap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus","name":"westcentralus","displayName":"West - Central US","regionalDisplayName":"(US) West Central US","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-110.234","latitude":"40.890","physicalLocation":"Wyoming","pairedRegion":[{"name":"westus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest","name":"southafricawest","displayName":"South - Africa West","regionalDisplayName":"(Africa) South Africa West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Africa","longitude":"18.843266","latitude":"-34.075691","physicalLocation":"Cape - Town","pairedRegion":[{"name":"southafricanorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral","name":"australiacentral","displayName":"Australia - Central","regionalDisplayName":"(Asia Pacific) Australia Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2","name":"australiacentral2","displayName":"Australia - Central 2","regionalDisplayName":"(Asia Pacific) Australia Central 2","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast","name":"australiasoutheast","displayName":"Australia - Southeast","regionalDisplayName":"(Asia Pacific) Australia Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"144.9631","latitude":"-37.8136","physicalLocation":"Victoria","pairedRegion":[{"name":"australiaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest","name":"japanwest","displayName":"Japan - West","regionalDisplayName":"(Asia Pacific) Japan West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"135.5022","latitude":"34.6939","physicalLocation":"Osaka","pairedRegion":[{"name":"japaneast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth","name":"koreasouth","displayName":"Korea - South","regionalDisplayName":"(Asia Pacific) Korea South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - 
Pacific","longitude":"129.0756","latitude":"35.1796","physicalLocation":"Busan","pairedRegion":[{"name":"koreacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia","name":"southindia","displayName":"South - India","regionalDisplayName":"(Asia Pacific) South India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"80.1636","latitude":"12.9822","physicalLocation":"Chennai","pairedRegion":[{"name":"centralindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westindia","name":"westindia","displayName":"West - India","regionalDisplayName":"(Asia Pacific) West India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"72.868","latitude":"19.088","physicalLocation":"Mumbai","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast","name":"canadaeast","displayName":"Canada - East","regionalDisplayName":"(Canada) Canada East","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Canada","longitude":"-71.217","latitude":"46.817","physicalLocation":"Quebec","pairedRegion":[{"name":"canadacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth","name":"francesouth","displayName":"France - South","regionalDisplayName":"(Europe) France South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"2.1972","latitude":"43.8345","physicalLocation":"Marseille","pairedRegion":[{"name":"francecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth","name":"germanynorth","displayName":"Germany - North","regionalDisplayName":"(Europe) Germany North","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"8.806422","latitude":"53.073635","physicalLocation":"Berlin","pairedRegion":[{"name":"germanywestcentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest","name":"norwaywest","displayName":"Norway - West","regionalDisplayName":"(Europe) Norway West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"5.733107","latitude":"58.969975","physicalLocation":"Norway","pairedRegion":[{"name":"norwayeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest","name":"switzerlandwest","displayName":"Switzerland - West","regionalDisplayName":"(Europe) Switzerland 
West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"6.143158","latitude":"46.204391","physicalLocation":"Geneva","pairedRegion":[{"name":"switzerlandnorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest","name":"ukwest","displayName":"UK - West","regionalDisplayName":"(Europe) UK West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"-3.084","latitude":"53.427","physicalLocation":"Cardiff","pairedRegion":[{"name":"uksouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral","name":"uaecentral","displayName":"UAE - Central","regionalDisplayName":"(Middle East) UAE Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Middle - East","longitude":"54.366669","latitude":"24.466667","physicalLocation":"Abu - Dhabi","pairedRegion":[{"name":"uaenorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsoutheast","name":"brazilsoutheast","displayName":"Brazil - Southeast","regionalDisplayName":"(South America) Brazil Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"South - America","longitude":"-43.2075","latitude":"-22.90278","physicalLocation":"Rio","pairedRegion":[{"name":"brazilsouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth"}]}}]}' - headers: - cache-control: - - no-cache - content-length: - - '25098' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:01 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: '{"location": "eastus"}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory factory create - Connection: - - keep-alive - Content-Length: - - '22' - Content-Type: - - application/json - ParameterSetName: - - --location --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002?api-version=2018-06-01 - response: - body: - string: '{"name":"exampleFa000002","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/examplefafwbxctazg","type":"Microsoft.DataFactory/factories","properties":{"provisioningState":"Succeeded","createTime":"2020-09-09T03:13:06.9017517Z","version":"2018-06-01"},"eTag":"\"99009f16-0000-0100-0000-5f5848430000\"","location":"eastus","tags":{}}' - headers: - cache-control: - - no-cache - content-length: - - '475' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:08 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - 
vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1195' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"tags": {"exampleTag": "exampleValue"}, "identity": {"type": "SystemAssigned"}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory factory update - Connection: - - keep-alive - Content-Length: - - '80' - Content-Type: - - application/json - ParameterSetName: - - --name --tags --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: PATCH - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002?api-version=2018-06-01 - response: - body: - string: '{"name":"exampleFa000002","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/examplefafwbxctazg","type":"Microsoft.DataFactory/factories","properties":{"provisioningState":"Succeeded","createTime":"2020-09-09T03:13:06.9017517Z","version":"2018-06-01"},"eTag":"\"9900bb16-0000-0100-0000-5f58484a0000\"","location":"eastus","tags":{"exampleTag":"exampleValue"}}' - headers: - cache-control: - - no-cache - content-length: - - '502' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:16 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1197' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"properties": {"type": "AzureStorage", "typeProperties": {"connectionString": - {"type": "SecureString", "value": "DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;AccountKey="}}}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory linked-service create - Connection: - - keep-alive - Content-Length: - - '208' - Content-Type: - - application/json - ParameterSetName: - - --factory-name --properties --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/linkedservices/exampleLin000004?api-version=2018-06-01 - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/linkedservices/exampleLin000004","name":"exampleLin000004","type":"Microsoft.DataFactory/factories/linkedservices","properties":{"type":"AzureStorage","typeProperties":{"connectionString":{"type":"SecureString","value":"**********"},"encryptedCredential":"ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkVYQU1QTEVGQUZXQlhDVEFaR19lNjYzMTc5NC01ZjY3LTQwZGEtOTUyMi1mNGI5ZDk1YmZjOWEiDQp9"}},"etag":"b000631d-0000-0100-0000-5f58484e0000"}' - headers: - cache-control: - - no-cache - content-length: - - '757' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:18 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1197' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory linked-service update - Connection: - - keep-alive - ParameterSetName: - - --factory-name --description --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/linkedservices/exampleLin000004?api-version=2018-06-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/linkedservices/exampleLin000004","name":"exampleLin000004","type":"Microsoft.DataFactory/factories/linkedservices","properties":{"type":"AzureStorage","typeProperties":{"connectionString":{"type":"SecureString","value":"**********"},"encryptedCredential":"ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkVYQU1QTEVGQUZXQlhDVEFaR19lNjYzMTc5NC01ZjY3LTQwZGEtOTUyMi1mNGI5ZDk1YmZjOWEiDQp9"}},"etag":"b000631d-0000-0100-0000-5f58484e0000"}' - headers: - cache-control: - - no-cache - content-length: - - '757' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:18 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"properties": {"type": "AzureStorage", "description": "Example description", - "typeProperties": {"connectionString": {"type": "SecureString", "value": "**********"}, - "encryptedCredential": "ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkVYQU1QTEVGQUZXQlhDVEFaR19lNjYzMTc5NC01ZjY3LTQwZGEtOTUyMi1mNGI5ZDk1YmZjOWEiDQp9"}}}' - headers: - Accept: - - 
application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory linked-service update - Connection: - - keep-alive - Content-Length: - - '426' - Content-Type: - - application/json - ParameterSetName: - - --factory-name --description --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/linkedservices/exampleLin000004?api-version=2018-06-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/linkedservices/exampleLin000004","name":"exampleLin000004","type":"Microsoft.DataFactory/factories/linkedservices","properties":{"type":"AzureStorage","description":"Example - description","typeProperties":{"connectionString":{"type":"SecureString","value":"**********"},"encryptedCredential":"ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkVYQU1QTEVGQUZXQlhDVEFaR19lNjYzMTc5NC01ZjY3LTQwZGEtOTUyMi1mNGI5ZDk1YmZjOWEiDQp9"}},"etag":"b000681d-0000-0100-0000-5f58484f0000"}' - headers: - cache-control: - - no-cache - content-length: - - '793' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:19 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1197' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"properties": {"type": "AzureBlob", "linkedServiceName": {"type": "LinkedServiceReference", - "referenceName": "exampleLin000004"}, "parameters": {"MyFileName": {"type": - "String"}, "MyFolderPath": {"type": "String"}}, "typeProperties": {"folderPath": - {"type": "Expression", "value": "@dataset().MyFolderPath"}, "fileName": {"type": - "Expression", "value": "@dataset().MyFileName"}, "format": {"type": "TextFormat"}}}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory dataset create - Connection: - - keep-alive - Content-Length: - - '419' - Content-Type: - - application/json - ParameterSetName: - - --properties --name --factory-name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/datasets/example000005?api-version=2018-06-01 - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/datasets/example000005","name":"example000005","type":"Microsoft.DataFactory/factories/datasets","properties":{"type":"AzureBlob","linkedServiceName":{"type":"LinkedServiceReference","referenceName":"exampleLin000004"},"parameters":{"MyFileName":{"type":"String"},"MyFolderPath":{"type":"String"}},"typeProperties":{"folderPath":{"type":"Expression","value":"@dataset().MyFolderPath"},"fileName":{"type":"Expression","value":"@dataset().MyFileName"},"format":{"type":"TextFormat"}}},"etag":"b0006a1d-0000-0100-0000-5f5848500000"}' - headers: - cache-control: - - no-cache - content-length: - - '746' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:20 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1195' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory dataset update - Connection: - - keep-alive - ParameterSetName: - - --description --linked-service-name --parameters --name --factory-name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/datasets/example000005?api-version=2018-06-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/datasets/example000005","name":"example000005","type":"Microsoft.DataFactory/factories/datasets","properties":{"type":"AzureBlob","linkedServiceName":{"type":"LinkedServiceReference","referenceName":"exampleLin000004"},"parameters":{"MyFileName":{"type":"String"},"MyFolderPath":{"type":"String"}},"typeProperties":{"folderPath":{"type":"Expression","value":"@dataset().MyFolderPath"},"fileName":{"type":"Expression","value":"@dataset().MyFileName"},"format":{"type":"TextFormat"}}},"etag":"b0006a1d-0000-0100-0000-5f5848500000"}' - headers: - cache-control: - - no-cache - content-length: - - '746' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:21 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"properties": {"type": "AzureBlob", "description": "Example description", - "linkedServiceName": {"type": "LinkedServiceReference", "referenceName": "exampleLin000004"}, - "parameters": {"MyFileName": {"type": "String"}, "MyFolderPath": {"type": "String"}}, - "typeProperties": {"folderPath": {"type": "Expression", "value": "@dataset().MyFolderPath"}, - "fileName": {"type": "Expression", "value": "@dataset().MyFileName"}, "format": - {"type": "TextFormat"}}}}' 
- headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory dataset update - Connection: - - keep-alive - Content-Length: - - '457' - Content-Type: - - application/json - ParameterSetName: - - --description --linked-service-name --parameters --name --factory-name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/datasets/example000005?api-version=2018-06-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/datasets/example000005","name":"example000005","type":"Microsoft.DataFactory/factories/datasets","properties":{"type":"AzureBlob","description":"Example - description","linkedServiceName":{"type":"LinkedServiceReference","referenceName":"exampleLin000004"},"parameters":{"MyFileName":{"type":"String"},"MyFolderPath":{"type":"String"}},"typeProperties":{"folderPath":{"type":"Expression","value":"@dataset().MyFolderPath"},"fileName":{"type":"Expression","value":"@dataset().MyFileName"},"format":{"type":"TextFormat"}}},"etag":"b0006e1d-0000-0100-0000-5f5848530000"}' - headers: - cache-control: - - no-cache - content-length: - - '782' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:22 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1198' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"properties": {"activities": [{"name": "ExampleForeachActivity", "type": - "ForEach", "typeProperties": {"isSequential": true, "items": {"type": "Expression", - "value": "@pipeline().parameters.OutputBlobNameList"}, "activities": [{"name": - "ExampleCopyActivity", "type": "Copy", "inputs": [{"type": "DatasetReference", - "referenceName": "example000005", "parameters": {"MyFileName": "examplecontainer.csv", - "MyFolderPath": "examplecontainer"}}], "outputs": [{"type": "DatasetReference", - "referenceName": "example000005", "parameters": {"MyFileName": {"type": "Expression", - "value": "@item()"}, "MyFolderPath": "examplecontainer"}}], "typeProperties": - {"source": {"type": "BlobSource"}, "sink": {"type": "BlobSink"}, "dataIntegrationUnits": - 32}}]}}], "parameters": {"JobId": {"type": "String"}, "OutputBlobNameList": - {"type": "Array"}}, "variables": {"TestVariableArray": {"type": "Array"}}, "runDimensions": - {"JobId": {"type": "Expression", "value": "@pipeline().parameters.JobId"}}}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory pipeline create - Connection: - - keep-alive - Content-Length: - - '982' - Content-Type: - - application/json - ParameterSetName: - - --factory-name --pipeline --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: PUT - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/pipelines/example000006?api-version=2018-06-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/pipelines/example000006","name":"example000006","type":"Microsoft.DataFactory/factories/pipelines","properties":{"activities":[{"name":"ExampleForeachActivity","type":"ForEach","typeProperties":{"isSequential":true,"items":{"type":"Expression","value":"@pipeline().parameters.OutputBlobNameList"},"activities":[{"name":"ExampleCopyActivity","type":"Copy","inputs":[{"type":"DatasetReference","referenceName":"example000005","parameters":{"MyFileName":"examplecontainer.csv","MyFolderPath":"examplecontainer"}}],"outputs":[{"type":"DatasetReference","referenceName":"example000005","parameters":{"MyFileName":{"type":"Expression","value":"@item()"},"MyFolderPath":"examplecontainer"}}],"typeProperties":{"source":{"type":"BlobSource"},"sink":{"type":"BlobSink"},"dataIntegrationUnits":32}}]}}],"parameters":{"JobId":{"type":"String"},"OutputBlobNameList":{"type":"Array"}},"variables":{"TestVariableArray":{"type":"Array"}},"runDimensions":{"JobId":{"type":"Expression","value":"@pipeline().parameters.JobId"}}},"etag":"b000701d-0000-0100-0000-5f5848530000"}' - headers: - cache-control: - - no-cache - content-length: - - '1274' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:23 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory pipeline update - Connection: - - keep-alive - ParameterSetName: - - --factory-name --description --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/pipelines/example000006?api-version=2018-06-01 - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/pipelines/example000006","name":"example000006","type":"Microsoft.DataFactory/factories/pipelines","properties":{"activities":[{"name":"ExampleForeachActivity","type":"ForEach","typeProperties":{"isSequential":true,"items":{"type":"Expression","value":"@pipeline().parameters.OutputBlobNameList"},"activities":[{"name":"ExampleCopyActivity","type":"Copy","inputs":[{"type":"DatasetReference","referenceName":"example000005","parameters":{"MyFileName":"examplecontainer.csv","MyFolderPath":"examplecontainer"}}],"outputs":[{"type":"DatasetReference","referenceName":"example000005","parameters":{"MyFileName":{"type":"Expression","value":"@item()"},"MyFolderPath":"examplecontainer"}}],"typeProperties":{"source":{"type":"BlobSource"},"sink":{"type":"BlobSink"},"dataIntegrationUnits":32}}]}}],"parameters":{"JobId":{"type":"String"},"OutputBlobNameList":{"type":"Array"}},"variables":{"TestVariableArray":{"type":"Array"}},"runDimensions":{"JobId":{"type":"Expression","value":"@pipeline().parameters.JobId"}},"lastPublishTime":"2020-09-09T03:13:23Z"},"etag":"b000701d-0000-0100-0000-5f5848530000"}' - headers: - cache-control: - - no-cache - content-length: - - '1315' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:23 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"properties": {"description": "Test Update description", "activities": - [{"name": "ExampleForeachActivity", "type": "ForEach", "typeProperties": {"isSequential": - true, "items": {"type": "Expression", "value": "@pipeline().parameters.OutputBlobNameList"}, - "activities": [{"name": "ExampleCopyActivity", "type": "Copy", "inputs": [{"type": - "DatasetReference", "referenceName": "example000005", "parameters": {"MyFileName": - "examplecontainer.csv", "MyFolderPath": "examplecontainer"}}], "outputs": [{"type": - "DatasetReference", "referenceName": "example000005", "parameters": {"MyFileName": - {"type": "Expression", "value": "@item()"}, "MyFolderPath": "examplecontainer"}}], - "typeProperties": {"source": {"type": "BlobSource"}, "sink": {"type": "BlobSink"}, - "dataIntegrationUnits": 32}}]}}], "parameters": {"JobId": {"type": "String"}, - "OutputBlobNameList": {"type": "Array"}}, "variables": {"TestVariableArray": - {"type": "Array"}}, "runDimensions": {"JobId": {"type": "Expression", "value": - "@pipeline().parameters.JobId"}}}, "folder": {"name": "example000006"}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory pipeline update - Connection: - - keep-alive - Content-Length: - - '1063' - Content-Type: - - application/json - ParameterSetName: - - --factory-name --description --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/pipelines/example000006?api-version=2018-06-01 - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/pipelines/example000006","name":"example000006","type":"Microsoft.DataFactory/factories/pipelines","properties":{"description":"Test - Update description","activities":[{"name":"ExampleForeachActivity","type":"ForEach","typeProperties":{"isSequential":true,"items":{"type":"Expression","value":"@pipeline().parameters.OutputBlobNameList"},"activities":[{"name":"ExampleCopyActivity","type":"Copy","inputs":[{"type":"DatasetReference","referenceName":"example000005","parameters":{"MyFileName":"examplecontainer.csv","MyFolderPath":"examplecontainer"}}],"outputs":[{"type":"DatasetReference","referenceName":"example000005","parameters":{"MyFileName":{"type":"Expression","value":"@item()"},"MyFolderPath":"examplecontainer"}}],"typeProperties":{"source":{"type":"BlobSource"},"sink":{"type":"BlobSink"},"dataIntegrationUnits":32}}]}}],"parameters":{"JobId":{"type":"String"},"OutputBlobNameList":{"type":"Array"}},"variables":{"TestVariableArray":{"type":"Array"}},"runDimensions":{"JobId":{"type":"Expression","value":"@pipeline().parameters.JobId"}}},"etag":"b000731d-0000-0100-0000-5f5848550000"}' - headers: - cache-control: - - no-cache - content-length: - - '1314' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:24 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1198' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"properties": {"type": "ScheduleTrigger", "pipelines": [{"pipelineReference": - {"type": "PipelineReference", "referenceName": "example000006"}, "parameters": - {"OutputBlobNameList": ["exampleoutput.csv"]}}], "typeProperties": {"recurrence": - {"frequency": "Minute", "interval": 4, "startTime": "2018-06-16T00:39:13.84418Z", - "endTime": "2018-06-16T00:55:13.84418Z", "timeZone": "UTC"}}}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory trigger create - Connection: - - keep-alive - Content-Length: - - '386' - Content-Type: - - application/json - ParameterSetName: - - --factory-name --resource-group --properties --name - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007?api-version=2018-06-01 - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007","name":"example000007","type":"Microsoft.DataFactory/factories/triggers","properties":{"type":"ScheduleTrigger","pipelines":[{"pipelineReference":{"type":"PipelineReference","referenceName":"example000006"},"parameters":{"OutputBlobNameList":["exampleoutput.csv"]}}],"typeProperties":{"recurrence":{"frequency":"Minute","interval":4,"startTime":"2018-06-16T00:39:13.84418Z","endTime":"2018-06-16T00:55:13.84418Z","timeZone":"UTC"}},"runtimeState":"Stopped"},"etag":"b000741d-0000-0100-0000-5f5848560000"}' - headers: - cache-control: - - no-cache - content-length: - - '743' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:25 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1197' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory trigger update - Connection: - - keep-alive - ParameterSetName: - - --factory-name --resource-group --description --name - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007?api-version=2018-06-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007","name":"example000007","type":"Microsoft.DataFactory/factories/triggers","properties":{"type":"ScheduleTrigger","pipelines":[{"pipelineReference":{"type":"PipelineReference","referenceName":"example000006"},"parameters":{"OutputBlobNameList":["exampleoutput.csv"]}}],"typeProperties":{"recurrence":{"frequency":"Minute","interval":4,"startTime":"2018-06-16T00:39:13.84418Z","endTime":"2018-06-16T00:55:13.84418Z","timeZone":"UTC"}},"runtimeState":"Stopped"},"etag":"b000741d-0000-0100-0000-5f5848560000"}' - headers: - cache-control: - - no-cache - content-length: - - '743' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:26 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"properties": {"type": "ScheduleTrigger", "description": "Example description", - "pipelines": [{"pipelineReference": {"type": "PipelineReference", "referenceName": - "example000006"}, "parameters": {"OutputBlobNameList": ["exampleoutput.csv"]}}], - "typeProperties": {"recurrence": {"frequency": "Minute", "interval": 4, "startTime": - "2018-06-16T00:39:13.84418Z", "endTime": "2018-06-16T00:55:13.84418Z", "timeZone": - "UTC"}}}}' - headers: - Accept: - - application/json - Accept-Encoding: - - 
gzip, deflate - CommandName: - - datafactory trigger update - Connection: - - keep-alive - Content-Length: - - '424' - Content-Type: - - application/json - ParameterSetName: - - --factory-name --resource-group --description --name - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007?api-version=2018-06-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007","name":"example000007","type":"Microsoft.DataFactory/factories/triggers","properties":{"type":"ScheduleTrigger","description":"Example - description","pipelines":[{"pipelineReference":{"type":"PipelineReference","referenceName":"example000006"},"parameters":{"OutputBlobNameList":["exampleoutput.csv"]}}],"typeProperties":{"recurrence":{"frequency":"Minute","interval":4,"startTime":"2018-06-16T00:39:13.84418Z","endTime":"2018-06-16T00:55:13.84418Z","timeZone":"UTC"}},"runtimeState":"Stopped"},"etag":"b000791d-0000-0100-0000-5f5848580000"}' - headers: - cache-control: - - no-cache - content-length: - - '779' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:28 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1196' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"properties": {"type": "SelfHosted", "description": "A selfhosted integration - runtime"}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime self-hosted create - Connection: - - keep-alive - Content-Length: - - '89' - Content-Type: - - application/json - ParameterSetName: - - --factory-name --description --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003?api-version=2018-06-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationruntimes/exampleInteg000003","name":"exampleInteg000003","type":"Microsoft.DataFactory/factories/integrationruntimes","properties":{"type":"SelfHosted","description":"A - selfhosted integration runtime"},"etag":"b0007a1d-0000-0100-0000-5f5848590000"}' - headers: - cache-control: - - no-cache - content-length: - - '484' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:29 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - 
x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1196' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"autoUpdate": "Off", "updateDelayOffset": "\"PT3H\""}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime update - Connection: - - keep-alive - Content-Length: - - '54' - Content-Type: - - application/json - ParameterSetName: - - --factory-name --name --resource-group --auto-update --update-delay-offset - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: PATCH - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003?api-version=2018-06-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationruntimes/exampleInteg000003","name":"exampleInteg000003","type":"Microsoft.DataFactory/factories/integrationruntimes","properties":{"type":"SelfHosted","description":"A - selfhosted integration runtime"},"etag":"b0007a1d-0000-0100-0000-5f5848590000"}' - headers: - cache-control: - - no-cache - content-length: - - '484' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:29 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1196' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"OutputBlobNameList": ["exampleoutput.csv"]}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory pipeline create-run - Connection: - - keep-alive - Content-Length: - - '45' - Content-Type: - - application/json - ParameterSetName: - - --factory-name --parameters --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/pipelines/example000006/createRun?api-version=2018-06-01 - response: - body: - string: '{"runId":"6fe80a10-f24a-11ea-8dee-00155d4d8222"}' - headers: - cache-control: - - no-cache - content-length: - - '48' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:54 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime show - Connection: - - keep-alive - ParameterSetName: - - --factory-name 
--name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003?api-version=2018-06-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationruntimes/exampleInteg000003","name":"exampleInteg000003","type":"Microsoft.DataFactory/factories/integrationruntimes","properties":{"type":"SelfHosted","description":"A - selfhosted integration runtime"},"etag":"b0007a1d-0000-0100-0000-5f5848590000"}' - headers: - cache-control: - - no-cache - content-length: - - '484' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:55 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory linked-service show - Connection: - - keep-alive - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/linkedservices/exampleLin000004?api-version=2018-06-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/linkedservices/exampleLin000004","name":"exampleLin000004","type":"Microsoft.DataFactory/factories/linkedservices","properties":{"type":"AzureStorage","description":"Example - description","typeProperties":{"connectionString":{"type":"SecureString","value":"**********"},"encryptedCredential":"ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkVYQU1QTEVGQUZXQlhDVEFaR19lNjYzMTc5NC01ZjY3LTQwZGEtOTUyMi1mNGI5ZDk1YmZjOWEiDQp9"}},"etag":"b000681d-0000-0100-0000-5f58484f0000"}' - headers: - cache-control: - - no-cache - content-length: - - '793' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:56 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory pipeline show - Connection: - - keep-alive - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - 
(Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/pipelines/example000006?api-version=2018-06-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/pipelines/example000006","name":"example000006","type":"Microsoft.DataFactory/factories/pipelines","properties":{"description":"Test - Update description","activities":[{"name":"ExampleForeachActivity","type":"ForEach","typeProperties":{"isSequential":true,"items":{"type":"Expression","value":"@pipeline().parameters.OutputBlobNameList"},"activities":[{"name":"ExampleCopyActivity","type":"Copy","inputs":[{"type":"DatasetReference","referenceName":"example000005","parameters":{"MyFileName":"examplecontainer.csv","MyFolderPath":"examplecontainer"}}],"outputs":[{"type":"DatasetReference","referenceName":"example000005","parameters":{"MyFileName":{"type":"Expression","value":"@item()"},"MyFolderPath":"examplecontainer"}}],"typeProperties":{"source":{"type":"BlobSource"},"sink":{"type":"BlobSink"},"dataIntegrationUnits":32}}]}}],"parameters":{"JobId":{"type":"String"},"OutputBlobNameList":{"type":"Array"}},"variables":{"TestVariableArray":{"type":"Array"}},"runDimensions":{"JobId":{"type":"Expression","value":"@pipeline().parameters.JobId"}},"lastPublishTime":"2020-09-09T03:13:25Z"},"etag":"b000731d-0000-0100-0000-5f5848550000"}' - headers: - cache-control: - - no-cache - content-length: - - '1355' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:56 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory dataset show - Connection: - - keep-alive - ParameterSetName: - - --name --factory-name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/datasets/example000005?api-version=2018-06-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/datasets/example000005","name":"example000005","type":"Microsoft.DataFactory/factories/datasets","properties":{"type":"AzureBlob","description":"Example - description","linkedServiceName":{"type":"LinkedServiceReference","referenceName":"exampleLin000004"},"parameters":{"MyFileName":{"type":"String"},"MyFolderPath":{"type":"String"}},"typeProperties":{"folderPath":{"type":"Expression","value":"@dataset().MyFolderPath"},"fileName":{"type":"Expression","value":"@dataset().MyFileName"},"format":{"type":"TextFormat"}}},"etag":"b0006e1d-0000-0100-0000-5f5848530000"}' - headers: - cache-control: - - no-cache - content-length: - - '782' - content-type: - - 
application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:56 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory trigger show - Connection: - - keep-alive - ParameterSetName: - - --factory-name --resource-group --name - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007?api-version=2018-06-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007","name":"example000007","type":"Microsoft.DataFactory/factories/triggers","properties":{"type":"ScheduleTrigger","description":"Example - description","pipelines":[{"pipelineReference":{"type":"PipelineReference","referenceName":"example000006"},"parameters":{"OutputBlobNameList":["exampleoutput.csv"]}}],"typeProperties":{"recurrence":{"frequency":"Minute","interval":4,"startTime":"2018-06-16T00:39:13.84418Z","endTime":"2018-06-16T00:55:13.84418Z","timeZone":"UTC"}},"runtimeState":"Stopped"},"etag":"b000791d-0000-0100-0000-5f5848580000"}' - headers: - cache-control: - - no-cache - content-length: - - '779' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:57 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime list - Connection: - - keep-alive - ParameterSetName: - - --factory-name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes?api-version=2018-06-01 - response: - body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationruntimes/exampleInteg000003","name":"exampleInteg000003","type":"Microsoft.DataFactory/factories/integrationruntimes","properties":{"type":"SelfHosted","description":"A - selfhosted integration runtime"},"etag":"b0007a1d-0000-0100-0000-5f5848590000"}]}' - headers: - cache-control: - - no-cache - content-length: - - '496' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:58 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - 
strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory linked-service list - Connection: - - keep-alive - ParameterSetName: - - --factory-name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/linkedservices?api-version=2018-06-01 - response: - body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/linkedservices/exampleLin000004","name":"exampleLin000004","type":"Microsoft.DataFactory/factories/linkedservices","properties":{"type":"AzureStorage","description":"Example - description","typeProperties":{"connectionString":{"type":"SecureString","value":"**********"},"encryptedCredential":"ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkVYQU1QTEVGQUZXQlhDVEFaR19lNjYzMTc5NC01ZjY3LTQwZGEtOTUyMi1mNGI5ZDk1YmZjOWEiDQp9"}},"etag":"b000681d-0000-0100-0000-5f58484f0000"}]}' - headers: - cache-control: - - no-cache - content-length: - - '805' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:59 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory pipeline list - Connection: - - keep-alive - ParameterSetName: - - --factory-name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/pipelines?api-version=2018-06-01 - response: - body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/pipelines/example000006","name":"example000006","type":"Microsoft.DataFactory/factories/pipelines","properties":{"description":"Test - Update 
description","activities":[{"name":"ExampleForeachActivity","type":"ForEach","typeProperties":{"isSequential":true,"items":{"type":"Expression","value":"@pipeline().parameters.OutputBlobNameList"},"activities":[{"name":"ExampleCopyActivity","type":"Copy","inputs":[{"type":"DatasetReference","referenceName":"example000005","parameters":{"MyFileName":"examplecontainer.csv","MyFolderPath":"examplecontainer"}}],"outputs":[{"type":"DatasetReference","referenceName":"example000005","parameters":{"MyFileName":{"type":"Expression","value":"@item()"},"MyFolderPath":"examplecontainer"}}],"typeProperties":{"source":{"type":"BlobSource"},"sink":{"type":"BlobSink"},"dataIntegrationUnits":32}}]}}],"parameters":{"JobId":{"type":"String"},"OutputBlobNameList":{"type":"Array"}},"variables":{"TestVariableArray":{"type":"Array"}},"runDimensions":{"JobId":{"type":"Expression","value":"@pipeline().parameters.JobId"}},"lastPublishTime":"2020-09-09T03:13:25Z"},"etag":"b000731d-0000-0100-0000-5f5848550000"}]}' - headers: - cache-control: - - no-cache - content-length: - - '1367' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:13:59 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory trigger list - Connection: - - keep-alive - ParameterSetName: - - --factory-name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers?api-version=2018-06-01 - response: - body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007","name":"example000007","type":"Microsoft.DataFactory/factories/triggers","properties":{"type":"ScheduleTrigger","description":"Example - description","pipelines":[{"pipelineReference":{"type":"PipelineReference","referenceName":"example000006"},"parameters":{"OutputBlobNameList":["exampleoutput.csv"]}}],"typeProperties":{"recurrence":{"frequency":"Minute","interval":4,"startTime":"2018-06-16T00:39:13.84418Z","endTime":"2018-06-16T00:55:13.84418Z","timeZone":"UTC"}},"runtimeState":"Stopped"},"etag":"b000791d-0000-0100-0000-5f5848580000"}]}' - headers: - cache-control: - - no-cache - content-length: - - '791' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:14:00 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory dataset list - Connection: - - keep-alive - ParameterSetName: - - --factory-name --resource-group - 
User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/datasets?api-version=2018-06-01 - response: - body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/datasets/example000005","name":"example000005","type":"Microsoft.DataFactory/factories/datasets","properties":{"type":"AzureBlob","description":"Example - description","linkedServiceName":{"type":"LinkedServiceReference","referenceName":"exampleLin000004"},"parameters":{"MyFileName":{"type":"String"},"MyFolderPath":{"type":"String"}},"typeProperties":{"folderPath":{"type":"Expression","value":"@dataset().MyFolderPath"},"fileName":{"type":"Expression","value":"@dataset().MyFileName"},"format":{"type":"TextFormat"}}},"etag":"b0006e1d-0000-0100-0000-5f5848530000"}]}' - headers: - cache-control: - - no-cache - content-length: - - '794' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:14:01 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory factory show - Connection: - - keep-alive - ParameterSetName: - - --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002?api-version=2018-06-01 - response: - body: - string: '{"name":"exampleFa000002","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/examplefafwbxctazg","type":"Microsoft.DataFactory/factories","properties":{"provisioningState":"Succeeded","createTime":"2020-09-09T03:13:06.9017517Z","version":"2018-06-01","factoryStatistics":{"totalResourceCount":0,"maxAllowedResourceCount":0,"factorySizeInGbUnits":0,"maxAllowedFactorySizeInGbUnits":0}},"eTag":"\"9900bb16-0000-0100-0000-5f58484a0000\"","location":"eastus","tags":{"exampleTag":"exampleValue"}}' - headers: - cache-control: - - no-cache - content-length: - - '635' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:14:01 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory factory list - Connection: - - keep-alive - ParameterSetName: - - --resource-group - User-Agent: - - AZURECLI/2.11.1 
azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories?api-version=2018-06-01 - response: - body: - string: '{"value":[{"name":"exampleFa000002","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/examplefafwbxctazg","type":"Microsoft.DataFactory/factories","properties":{"provisioningState":"Succeeded","createTime":"2020-09-09T03:13:06.9017517Z","version":"2018-06-01"},"eTag":"\"9900bb16-0000-0100-0000-5f58484a0000\"","location":"eastus","tags":{"exampleTag":"exampleValue"}}]}' - headers: - cache-control: - - no-cache - content-length: - - '514' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:14:02 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory factory list - Connection: - - keep-alive - ParameterSetName: - - -g - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory/factories?api-version=2018-06-01 - response: - body: - string: "{\r\n \"value\": [\r\n {\r\n \"name\": \"fengFact\",\r\n \ - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/feng-cli-rg/providers/Microsoft.DataFactory/factories/fengfact\"\ - ,\r\n \"type\": \"Microsoft.DataFactory/factories\",\r\n \"properties\"\ - : {\r\n \"provisioningState\": \"Succeeded\",\r\n \"createTime\"\ - : \"2020-08-10T03:16:35.8579311Z\",\r\n \"version\": \"2018-06-01\"\ - ,\r\n \"factoryStatistics\": null\r\n },\r\n \"eTag\": \"\ - \\\"3e005a78-0000-0100-0000-5f30bc140000\\\"\",\r\n \"location\": \"\ - eastus\",\r\n \"identity\": {\r\n \"type\": \"SystemAssigned\"\ - ,\r\n \"principalId\": \"ea810051-02c2-415c-b10c-5b47fc73df69\",\r\n\ - \ \"tenantId\": \"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a\",\r\n \ - \ \"userAssignedIdentities\": null\r\n },\r\n \"tags\": {}\r\n\ - \ },\r\n {\r\n \"name\": \"exampleFa000002\",\r\n \"id\":\ - \ \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/examplefafwbxctazg\"\ - ,\r\n \"type\": \"Microsoft.DataFactory/factories\",\r\n \"properties\"\ - : {\r\n \"provisioningState\": \"Succeeded\",\r\n \"createTime\"\ - : \"2020-09-09T03:13:06.9017517Z\",\r\n \"version\": \"2018-06-01\"\ - ,\r\n \"factoryStatistics\": null\r\n },\r\n \"eTag\": \"\ - \\\"9900bb16-0000-0100-0000-5f58484a0000\\\"\",\r\n \"location\": \"\ - eastus\",\r\n \"identity\": null,\r\n \"tags\": {\r\n \"\ - exampleTag\": \"exampleValue\"\r\n }\r\n }\r\n ],\r\n \"nextLink\"\ - : null\r\n}" - headers: - cache-control: - - no-cache - content-length: - - '1517' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:14:02 GMT - expires: - - '-1' - pragma: - - 
no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"keyName": "authKey2"}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime regenerate-auth-key - Connection: - - keep-alive - Content-Length: - - '23' - Content-Type: - - application/json - ParameterSetName: - - --factory-name --name --key-name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/regenerateAuthKey?api-version=2018-06-01 - response: - body: - string: '{"authKey2":"IR@e92d9622-a626-42d7-9083-88fe835df269@exampleFa000002@ServiceEndpoint=examplefafwbxctazg.eastus.datafactory.azure.net@2OZelPxZcFNTXmEZTI0zZqN8JOIW0kUdsUAAWvIkjzE="}' - headers: - cache-control: - - no-cache - content-length: - - '182' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:14:04 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1198' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime sync-credentials - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/syncCredentials?api-version=2018-06-01 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 09 Sep 2020 03:14:04 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1198' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime get-monitoring-data - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/monitoringData?api-version=2018-06-01 - response: - body: - string: '{"name":"exampleInteg000003"}' - headers: - cache-control: - - no-cache - content-length: - - '36' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:14:05 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime list-auth-key - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/listAuthKeys?api-version=2018-06-01 - response: - body: - string: '{"authKey1":"IR@e92d9622-a626-42d7-9083-88fe835df269@exampleFa000002@ServiceEndpoint=examplefafwbxctazg.eastus.datafactory.azure.net@ScXOH+Jfa1tVD42oKn7lrZj/7rT91/3rUDWP9cxXtgg=","authKey2":"IR@e92d9622-a626-42d7-9083-88fe835df269@exampleFa000002@ServiceEndpoint=examplefafwbxctazg.eastus.datafactory.azure.net@2OZelPxZcFNTXmEZTI0zZqN8JOIW0kUdsUAAWvIkjzE="}' - headers: - cache-control: - - no-cache - content-length: - - '363' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:14:06 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1197' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"factoryName": "exampleFactoryName-linked"}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime remove-link - Connection: - - keep-alive - Content-Length: - - '44' - Content-Type: - - application/json - ParameterSetName: - - --factory-name --name --linked-factory-name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/removeLinks?api-version=2018-06-01 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 09 Sep 2020 03:14:07 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - 
x-ms-ratelimit-remaining-subscription-writes: - - '1196' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime get-status - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/getStatus?api-version=2018-06-01 - response: - body: - string: '{"name":"exampleInteg000003","properties":{"dataFactoryName":"exampleFa000002","state":"NeedRegistration","type":"SelfHosted","typeProperties":{"autoUpdate":"Off","taskQueueId":"e92d9622-a626-42d7-9083-88fe835df269","nodes":[],"updateDelayOffset":"PT3H","serviceUrls":["examplefafwbxctazg.eastus.datafactory.azure.net"],"links":[],"versionStatus":"None","capabilities":{},"latestVersion":"4.12.7538.2","newerVersions":["4.12.7538.2","4.11.7521.1","4.11.7515.1","4.11.7491.1","4.11.7512.1"],"createTime":"2020-09-09T03:13:29.3626423Z"}}}' - headers: - cache-control: - - no-cache - content-length: - - '546' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:14:07 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory trigger get-event-subscription-status - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - --factory-name --resource-group --name - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007/getEventSubscriptionStatus?api-version=2018-06-01 - response: - body: - string: '{"triggerName":"example000007","status":"Enabled"}' - headers: - cache-control: - - no-cache - content-length: - - '51' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:14:07 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1196' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory trigger unsubscribe-from-event - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - --factory-name --resource-group --name - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown 
Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007/unsubscribeFromEvents?api-version=2018-06-01 - response: - body: - string: '{"triggerName":"example000007","status":"Disabled"}' - headers: - cache-control: - - no-cache - content-length: - - '52' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:14:09 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1197' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory trigger subscribe-to-event - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - --factory-name --resource-group --name - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007/subscribeToEvents?api-version=2018-06-01 - response: - body: - string: '{"triggerName":"example000007","status":"Enabled"}' - headers: - cache-control: - - no-cache - content-length: - - '51' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:14:09 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1198' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory trigger start - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - --factory-name --resource-group --name - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007/start?api-version=2018-06-01 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 09 Sep 2020 03:14:13 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1196' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory trigger stop - Connection: - - keep-alive - Content-Length: - - '0' - 
ParameterSetName: - - --factory-name --resource-group --name - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007/stop?api-version=2018-06-01 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 09 Sep 2020 03:14:14 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1197' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"permissions": "r", "accessResourcePath": "", "profileName": "DefaultProfile", - "startTime": "2018-11-10T02:46:20.2659347Z", "expireTime": "2018-11-10T09:46:20.2659347Z"}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory factory get-data-plane-access - Connection: - - keep-alive - Content-Length: - - '170' - Content-Type: - - application/json - ParameterSetName: - - --name --access-resource-path --expire-time --permissions --profile-name --start-time - --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/getDataPlaneAccess?api-version=2018-06-01 - response: - body: - string: '{"policy":{"permissions":"r","accessResourcePath":"","profileName":"DefaultProfile","startTime":"2018-11-10T02:46:20.2659347Z","expireTime":"2018-11-10T09:46:20.2659347Z"},"dataPlaneUrl":"https://examplefafwbxctazg.eastus.datafactory.azure.net/dataplane","accessToken":"EAAAABs2rqPAHvUHZJwLq4FZp5KwAQAAdZTqBGZ3vpgdb3nfDbaEvsgWjm7PXfcmwlRt1Yo/FlHNMWstj7y483zBkqXcVykADNkUMXieX5YA6he6Ur3TUP3i4o0VBBlFdZLDRbdtLWhCdOAbXJlfdIm9YWAwqsqUsqjwkd5CJ5cnaUGN25kT4y+L56ENMxwFf7jBwU581gI6XsMsFhl02bUigm/PlvBUmSld3ef33UBsJMGFYPzpETU8nnW5/f7qCluv7QCcHyvm5HfMeKe8tqQ3jwKjZPp9kltqbeRTs3jB6KibU6ps/yz5+rtnzz2hogtRcr/qsAAGWvT/9Zbx0DY3gdh7UaiDAyauLzm0E+ZsuypGu4w1Kqb7Jd6HC5sjMe0gRTg2VOToZzidTVpK8T93uPPgxnnPtI2KmReIED9CQmfEfahAufX8S4D7o+taJ1jc9tWIoU/RaSmW9z+uWniihn2B0FPd13/UmwK14lObfLRwl64YuhICisOfKOR5CsaoOXcXWVCVIzSJ7JVsJoT6wxBl6Wb19VvhIToS2AK/CONSz4qQOfI4jsCRNvK5SZzukVOx5D7hgx7WDNroQpLvBSz+EoyVIAAAALeA7vXXRKUNyVz+2av4tXU7YkgIm/C2AtxfkrrUYMM/"}' - headers: - cache-control: - - no-cache - content-length: - - '928' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:14:15 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1195' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"lastUpdatedAfter": "2020-09-09T03:13:00.000Z", "lastUpdatedBefore": "2020-09-09T04:13:00.000Z"}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: 
- - datafactory trigger-run query-by-factory - Connection: - - keep-alive - Content-Length: - - '97' - Content-Type: - - application/json - ParameterSetName: - - --factory-name --last-updated-after --last-updated-before --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/queryTriggerRuns?api-version=2018-06-01 - response: - body: - string: '{"value":[]}' - headers: - cache-control: - - no-cache - content-length: - - '12' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:14:16 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory trigger-run query-by-factory - Connection: - - keep-alive - ParameterSetName: - - --factory-name --last-updated-after --last-updated-before --resource-group - User-Agent: - - python/3.8.0 (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) msrest/0.6.18 - msrest_azure/0.6.3 azure-mgmt-resource/10.2.0 Azure-SDK-For-Python AZURECLI/2.11.1 - accept-language: - - en-US - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/locations?api-version=2019-11-01 - response: - body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus","name":"eastus","displayName":"East - US","regionalDisplayName":"(US) East US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-79.8164","latitude":"37.3719","physicalLocation":"Virginia","pairedRegion":[{"name":"westus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2","name":"eastus2","displayName":"East - US 2","regionalDisplayName":"(US) East US 2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","physicalLocation":"Virginia","pairedRegion":[{"name":"centralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus","name":"southcentralus","displayName":"South - Central US","regionalDisplayName":"(US) South Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-98.5","latitude":"29.4167","physicalLocation":"Texas","pairedRegion":[{"name":"northcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2","name":"westus2","displayName":"West - US 2","regionalDisplayName":"(US) West US 
2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-119.852","latitude":"47.233","physicalLocation":"Washington","pairedRegion":[{"name":"westcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast","name":"australiaeast","displayName":"Australia - East","regionalDisplayName":"(Asia Pacific) Australia East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"151.2094","latitude":"-33.86","physicalLocation":"New - South Wales","pairedRegion":[{"name":"australiasoutheast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia","name":"southeastasia","displayName":"Southeast - Asia","regionalDisplayName":"(Asia Pacific) Southeast Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"103.833","latitude":"1.283","physicalLocation":"Singapore","pairedRegion":[{"name":"eastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope","name":"northeurope","displayName":"North - Europe","regionalDisplayName":"(Europe) North Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-6.2597","latitude":"53.3478","physicalLocation":"Ireland","pairedRegion":[{"name":"westeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth","name":"uksouth","displayName":"UK - South","regionalDisplayName":"(Europe) UK South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-0.799","latitude":"50.941","physicalLocation":"London","pairedRegion":[{"name":"ukwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope","name":"westeurope","displayName":"West - Europe","regionalDisplayName":"(Europe) West Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"4.9","latitude":"52.3667","physicalLocation":"Netherlands","pairedRegion":[{"name":"northeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus","name":"centralus","displayName":"Central - US","regionalDisplayName":"(US) Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","physicalLocation":"Iowa","pairedRegion":[{"name":"eastus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus","name":"northcentralus","displayName":"North - Central US","regionalDisplayName":"(US) North Central 
US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-87.6278","latitude":"41.8819","physicalLocation":"Illinois","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus","name":"westus","displayName":"West - US","regionalDisplayName":"(US) West US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-122.417","latitude":"37.783","physicalLocation":"California","pairedRegion":[{"name":"eastus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth","name":"southafricanorth","displayName":"South - Africa North","regionalDisplayName":"(Africa) South Africa North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Africa","longitude":"28.218370","latitude":"-25.731340","physicalLocation":"Johannesburg","pairedRegion":[{"name":"southafricawest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia","name":"centralindia","displayName":"Central - India","regionalDisplayName":"(Asia Pacific) Central India","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"73.9197","latitude":"18.5822","physicalLocation":"Pune","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia","name":"eastasia","displayName":"East - Asia","regionalDisplayName":"(Asia Pacific) East Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"114.188","latitude":"22.267","physicalLocation":"Hong - Kong","pairedRegion":[{"name":"southeastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast","name":"japaneast","displayName":"Japan - East","regionalDisplayName":"(Asia Pacific) Japan East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"139.77","latitude":"35.68","physicalLocation":"Tokyo, - Saitama","pairedRegion":[{"name":"japanwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral","name":"koreacentral","displayName":"Korea - Central","regionalDisplayName":"(Asia Pacific) Korea Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"126.9780","latitude":"37.5665","physicalLocation":"Seoul","pairedRegion":[{"name":"koreasouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral","name":"canadacentral","displayName":"Canada - Central","regionalDisplayName":"(Canada) Canada 
Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Canada","longitude":"-79.383","latitude":"43.653","physicalLocation":"Toronto","pairedRegion":[{"name":"canadaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral","name":"francecentral","displayName":"France - Central","regionalDisplayName":"(Europe) France Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"2.3730","latitude":"46.3772","physicalLocation":"Paris","pairedRegion":[{"name":"francesouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral","name":"germanywestcentral","displayName":"Germany - West Central","regionalDisplayName":"(Europe) Germany West Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.682127","latitude":"50.110924","physicalLocation":"Frankfurt","pairedRegion":[{"name":"germanynorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast","name":"norwayeast","displayName":"Norway - East","regionalDisplayName":"(Europe) Norway East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"10.752245","latitude":"59.913868","physicalLocation":"Norway","pairedRegion":[{"name":"norwaywest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth","name":"switzerlandnorth","displayName":"Switzerland - North","regionalDisplayName":"(Europe) Switzerland North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.564572","latitude":"47.451542","physicalLocation":"Zurich","pairedRegion":[{"name":"switzerlandwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth","name":"uaenorth","displayName":"UAE - North","regionalDisplayName":"(Middle East) UAE North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Middle - East","longitude":"55.316666","latitude":"25.266666","physicalLocation":"Dubai","pairedRegion":[{"name":"uaecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth","name":"brazilsouth","displayName":"Brazil - South","regionalDisplayName":"(South America) Brazil South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"South - America","longitude":"-46.633","latitude":"-23.55","physicalLocation":"Sao - Paulo State","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage","name":"centralusstage","displayName":"Central - US (Stage)","regionalDisplayName":"(US) Central US 
(Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstage","name":"eastusstage","displayName":"East - US (Stage)","regionalDisplayName":"(US) East US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2stage","name":"eastus2stage","displayName":"East - US 2 (Stage)","regionalDisplayName":"(US) East US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralusstage","name":"northcentralusstage","displayName":"North - Central US (Stage)","regionalDisplayName":"(US) North Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstage","name":"southcentralusstage","displayName":"South - Central US (Stage)","regionalDisplayName":"(US) South Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westusstage","name":"westusstage","displayName":"West - US (Stage)","regionalDisplayName":"(US) West US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2stage","name":"westus2stage","displayName":"West - US 2 (Stage)","regionalDisplayName":"(US) West US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asia","name":"asia","displayName":"Asia","regionalDisplayName":"Asia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asiapacific","name":"asiapacific","displayName":"Asia - Pacific","regionalDisplayName":"Asia 
Pacific","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia","name":"australia","displayName":"Australia","regionalDisplayName":"Australia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil","name":"brazil","displayName":"Brazil","regionalDisplayName":"Brazil","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada","name":"canada","displayName":"Canada","regionalDisplayName":"Canada","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe","name":"europe","displayName":"Europe","regionalDisplayName":"Europe","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global","name":"global","displayName":"Global","regionalDisplayName":"Global","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india","name":"india","displayName":"India","regionalDisplayName":"India","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan","name":"japan","displayName":"Japan","regionalDisplayName":"Japan","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk","name":"uk","displayName":"United - Kingdom","regionalDisplayName":"United Kingdom","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstates","name":"unitedstates","displayName":"United - States","regionalDisplayName":"United States","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage","name":"eastasiastage","displayName":"East - Asia (Stage)","regionalDisplayName":"(Asia Pacific) East Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia - Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasiastage","name":"southeastasiastage","displayName":"Southeast - Asia (Stage)","regionalDisplayName":"(Asia Pacific) Southeast Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia - Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap","name":"centraluseuap","displayName":"Central - US EUAP","regionalDisplayName":"(US) Central US EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","pairedRegion":[{"name":"eastus2euap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap","name":"eastus2euap","displayName":"East - US 2 EUAP","regionalDisplayName":"(US) East US 2 
EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","pairedRegion":[{"name":"centraluseuap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus","name":"westcentralus","displayName":"West - Central US","regionalDisplayName":"(US) West Central US","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-110.234","latitude":"40.890","physicalLocation":"Wyoming","pairedRegion":[{"name":"westus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest","name":"southafricawest","displayName":"South - Africa West","regionalDisplayName":"(Africa) South Africa West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Africa","longitude":"18.843266","latitude":"-34.075691","physicalLocation":"Cape - Town","pairedRegion":[{"name":"southafricanorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral","name":"australiacentral","displayName":"Australia - Central","regionalDisplayName":"(Asia Pacific) Australia Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2","name":"australiacentral2","displayName":"Australia - Central 2","regionalDisplayName":"(Asia Pacific) Australia Central 2","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast","name":"australiasoutheast","displayName":"Australia - Southeast","regionalDisplayName":"(Asia Pacific) Australia Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"144.9631","latitude":"-37.8136","physicalLocation":"Victoria","pairedRegion":[{"name":"australiaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest","name":"japanwest","displayName":"Japan - West","regionalDisplayName":"(Asia Pacific) Japan West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"135.5022","latitude":"34.6939","physicalLocation":"Osaka","pairedRegion":[{"name":"japaneast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth","name":"koreasouth","displayName":"Korea - South","regionalDisplayName":"(Asia Pacific) Korea South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - 
Pacific","longitude":"129.0756","latitude":"35.1796","physicalLocation":"Busan","pairedRegion":[{"name":"koreacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia","name":"southindia","displayName":"South - India","regionalDisplayName":"(Asia Pacific) South India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"80.1636","latitude":"12.9822","physicalLocation":"Chennai","pairedRegion":[{"name":"centralindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westindia","name":"westindia","displayName":"West - India","regionalDisplayName":"(Asia Pacific) West India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"72.868","latitude":"19.088","physicalLocation":"Mumbai","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast","name":"canadaeast","displayName":"Canada - East","regionalDisplayName":"(Canada) Canada East","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Canada","longitude":"-71.217","latitude":"46.817","physicalLocation":"Quebec","pairedRegion":[{"name":"canadacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth","name":"francesouth","displayName":"France - South","regionalDisplayName":"(Europe) France South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"2.1972","latitude":"43.8345","physicalLocation":"Marseille","pairedRegion":[{"name":"francecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth","name":"germanynorth","displayName":"Germany - North","regionalDisplayName":"(Europe) Germany North","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"8.806422","latitude":"53.073635","physicalLocation":"Berlin","pairedRegion":[{"name":"germanywestcentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest","name":"norwaywest","displayName":"Norway - West","regionalDisplayName":"(Europe) Norway West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"5.733107","latitude":"58.969975","physicalLocation":"Norway","pairedRegion":[{"name":"norwayeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest","name":"switzerlandwest","displayName":"Switzerland - West","regionalDisplayName":"(Europe) Switzerland 
West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"6.143158","latitude":"46.204391","physicalLocation":"Geneva","pairedRegion":[{"name":"switzerlandnorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest","name":"ukwest","displayName":"UK - West","regionalDisplayName":"(Europe) UK West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"-3.084","latitude":"53.427","physicalLocation":"Cardiff","pairedRegion":[{"name":"uksouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral","name":"uaecentral","displayName":"UAE - Central","regionalDisplayName":"(Middle East) UAE Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Middle - East","longitude":"54.366669","latitude":"24.466667","physicalLocation":"Abu - Dhabi","pairedRegion":[{"name":"uaenorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsoutheast","name":"brazilsoutheast","displayName":"Brazil - Southeast","regionalDisplayName":"(South America) Brazil Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"South - America","longitude":"-43.2075","latitude":"-22.90278","physicalLocation":"Rio","pairedRegion":[{"name":"brazilsouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth"}]}}]}' - headers: - cache-control: - - no-cache - content-length: - - '25098' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:14:17 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: '{"factoryResourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002", - "repoConfiguration": {"type": "FactoryVSTSConfiguration", "accountName": "ADF", - "repositoryName": "repo", "collaborationBranch": "master", "rootFolder": "/", - "lastCommitId": "", "projectName": "project", "tenantId": ""}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory factory configure-factory-repo - Connection: - - keep-alive - Content-Length: - - '449' - Content-Type: - - application/json - ParameterSetName: - - --factory-resource-id --factory-vsts-configuration --location - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory/locations/eastus/configureFactoryRepo?api-version=2018-06-01 - response: - body: - string: "{\r\n \"name\": \"exampleFa000002\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/examplefafwbxctazg\"\ - ,\r\n \"type\": \"Microsoft.DataFactory/factories\",\r\n \"properties\"\ - : {\r\n \"provisioningState\": \"Succeeded\",\r\n \"createTime\": \"\ - 
2020-09-09T03:13:06.9017517Z\",\r\n \"version\": \"2018-06-01\",\r\n \ - \ \"factoryStatistics\": null,\r\n \"repoConfiguration\": {\r\n \"\ - type\": \"FactoryVSTSConfiguration\",\r\n \"accountName\": \"ADF\",\r\ - \n \"repositoryName\": \"repo\",\r\n \"collaborationBranch\": \"\ - master\",\r\n \"rootFolder\": \"/\",\r\n \"lastCommitId\": \"\"\ - ,\r\n \"projectName\": \"project\",\r\n \"tenantId\": \"\"\r\n \ - \ }\r\n },\r\n \"eTag\": \"\\\"9900c917-0000-0100-0000-5f58488a0000\\\"\ - \",\r\n \"location\": \"eastus\",\r\n \"identity\": null,\r\n \"tags\"\ - : {\r\n \"exampleTag\": \"exampleValue\"\r\n }\r\n}" - headers: - cache-control: - - no-cache - content-length: - - '917' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:14:18 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1194' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime delete - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - -y --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003?api-version=2018-06-01 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 09 Sep 2020 03:14:20 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-deletes: - - '14999' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory trigger delete - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - -y --factory-name --resource-group --name - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007?api-version=2018-06-01 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 09 Sep 2020 03:14:23 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-deletes: - - '14999' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory pipeline delete - Connection: - - keep-alive - 
Content-Length: - - '0' - ParameterSetName: - - -y --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/pipelines/example000006?api-version=2018-06-01 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 09 Sep 2020 03:14:26 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-deletes: - - '14996' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory dataset delete - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - -y --name --factory-name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/datasets/example000005?api-version=2018-06-01 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 09 Sep 2020 03:14:27 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-deletes: - - '14997' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory linked-service delete - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - -y --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/linkedservices/exampleLin000004?api-version=2018-06-01 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 09 Sep 2020 03:14:28 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-deletes: - - '14998' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory factory delete - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - -y --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - 
(Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002?api-version=2018-06-01 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 09 Sep 2020 03:14:35 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-deletes: - - '14996' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory factory delete - Connection: - - keep-alive - ParameterSetName: - - -y --name --resource-group - User-Agent: - - python/3.8.0 (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) msrest/0.6.18 - msrest_azure/0.6.3 azure-mgmt-resource/10.2.0 Azure-SDK-For-Python AZURECLI/2.11.1 - accept-language: - - en-US - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/locations?api-version=2019-11-01 - response: - body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus","name":"eastus","displayName":"East - US","regionalDisplayName":"(US) East US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-79.8164","latitude":"37.3719","physicalLocation":"Virginia","pairedRegion":[{"name":"westus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2","name":"eastus2","displayName":"East - US 2","regionalDisplayName":"(US) East US 2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","physicalLocation":"Virginia","pairedRegion":[{"name":"centralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus","name":"southcentralus","displayName":"South - Central US","regionalDisplayName":"(US) South Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-98.5","latitude":"29.4167","physicalLocation":"Texas","pairedRegion":[{"name":"northcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2","name":"westus2","displayName":"West - US 2","regionalDisplayName":"(US) West US 2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-119.852","latitude":"47.233","physicalLocation":"Washington","pairedRegion":[{"name":"westcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast","name":"australiaeast","displayName":"Australia - East","regionalDisplayName":"(Asia Pacific) Australia East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"151.2094","latitude":"-33.86","physicalLocation":"New - South 
Wales","pairedRegion":[{"name":"australiasoutheast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia","name":"southeastasia","displayName":"Southeast - Asia","regionalDisplayName":"(Asia Pacific) Southeast Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"103.833","latitude":"1.283","physicalLocation":"Singapore","pairedRegion":[{"name":"eastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope","name":"northeurope","displayName":"North - Europe","regionalDisplayName":"(Europe) North Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-6.2597","latitude":"53.3478","physicalLocation":"Ireland","pairedRegion":[{"name":"westeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth","name":"uksouth","displayName":"UK - South","regionalDisplayName":"(Europe) UK South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-0.799","latitude":"50.941","physicalLocation":"London","pairedRegion":[{"name":"ukwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope","name":"westeurope","displayName":"West - Europe","regionalDisplayName":"(Europe) West Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"4.9","latitude":"52.3667","physicalLocation":"Netherlands","pairedRegion":[{"name":"northeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus","name":"centralus","displayName":"Central - US","regionalDisplayName":"(US) Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","physicalLocation":"Iowa","pairedRegion":[{"name":"eastus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus","name":"northcentralus","displayName":"North - Central US","regionalDisplayName":"(US) North Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-87.6278","latitude":"41.8819","physicalLocation":"Illinois","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus","name":"westus","displayName":"West - US","regionalDisplayName":"(US) West US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-122.417","latitude":"37.783","physicalLocation":"California","pairedRegion":[{"name":"eastus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth","name":"southafricanorth","displayName":"South - Africa North","regionalDisplayName":"(Africa) South Africa 
North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Africa","longitude":"28.218370","latitude":"-25.731340","physicalLocation":"Johannesburg","pairedRegion":[{"name":"southafricawest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia","name":"centralindia","displayName":"Central - India","regionalDisplayName":"(Asia Pacific) Central India","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"73.9197","latitude":"18.5822","physicalLocation":"Pune","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia","name":"eastasia","displayName":"East - Asia","regionalDisplayName":"(Asia Pacific) East Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"114.188","latitude":"22.267","physicalLocation":"Hong - Kong","pairedRegion":[{"name":"southeastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast","name":"japaneast","displayName":"Japan - East","regionalDisplayName":"(Asia Pacific) Japan East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"139.77","latitude":"35.68","physicalLocation":"Tokyo, - Saitama","pairedRegion":[{"name":"japanwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral","name":"koreacentral","displayName":"Korea - Central","regionalDisplayName":"(Asia Pacific) Korea Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"126.9780","latitude":"37.5665","physicalLocation":"Seoul","pairedRegion":[{"name":"koreasouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral","name":"canadacentral","displayName":"Canada - Central","regionalDisplayName":"(Canada) Canada Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Canada","longitude":"-79.383","latitude":"43.653","physicalLocation":"Toronto","pairedRegion":[{"name":"canadaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral","name":"francecentral","displayName":"France - Central","regionalDisplayName":"(Europe) France Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"2.3730","latitude":"46.3772","physicalLocation":"Paris","pairedRegion":[{"name":"francesouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral","name":"germanywestcentral","displayName":"Germany - West Central","regionalDisplayName":"(Europe) Germany West 
Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.682127","latitude":"50.110924","physicalLocation":"Frankfurt","pairedRegion":[{"name":"germanynorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast","name":"norwayeast","displayName":"Norway - East","regionalDisplayName":"(Europe) Norway East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"10.752245","latitude":"59.913868","physicalLocation":"Norway","pairedRegion":[{"name":"norwaywest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth","name":"switzerlandnorth","displayName":"Switzerland - North","regionalDisplayName":"(Europe) Switzerland North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.564572","latitude":"47.451542","physicalLocation":"Zurich","pairedRegion":[{"name":"switzerlandwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth","name":"uaenorth","displayName":"UAE - North","regionalDisplayName":"(Middle East) UAE North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Middle - East","longitude":"55.316666","latitude":"25.266666","physicalLocation":"Dubai","pairedRegion":[{"name":"uaecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth","name":"brazilsouth","displayName":"Brazil - South","regionalDisplayName":"(South America) Brazil South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"South - America","longitude":"-46.633","latitude":"-23.55","physicalLocation":"Sao - Paulo State","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage","name":"centralusstage","displayName":"Central - US (Stage)","regionalDisplayName":"(US) Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstage","name":"eastusstage","displayName":"East - US (Stage)","regionalDisplayName":"(US) East US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2stage","name":"eastus2stage","displayName":"East - US 2 (Stage)","regionalDisplayName":"(US) East US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralusstage","name":"northcentralusstage","displayName":"North - Central US (Stage)","regionalDisplayName":"(US) North Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstage","name":"southcentralusstage","displayName":"South - Central US (Stage)","regionalDisplayName":"(US) South Central 
US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westusstage","name":"westusstage","displayName":"West - US (Stage)","regionalDisplayName":"(US) West US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2stage","name":"westus2stage","displayName":"West - US 2 (Stage)","regionalDisplayName":"(US) West US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asia","name":"asia","displayName":"Asia","regionalDisplayName":"Asia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asiapacific","name":"asiapacific","displayName":"Asia - Pacific","regionalDisplayName":"Asia Pacific","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia","name":"australia","displayName":"Australia","regionalDisplayName":"Australia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil","name":"brazil","displayName":"Brazil","regionalDisplayName":"Brazil","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada","name":"canada","displayName":"Canada","regionalDisplayName":"Canada","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe","name":"europe","displayName":"Europe","regionalDisplayName":"Europe","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global","name":"global","displayName":"Global","regionalDisplayName":"Global","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india","name":"india","displayName":"India","regionalDisplayName":"India","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan","name":"japan","displayName":"Japan","regionalDisplayName":"Japan","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk","name":"uk","displayName":"United - Kingdom","regionalDisplayName":"United Kingdom","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstates","name":"unitedstates","displayName":"United - States","regionalDisplayName":"United States","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage","name":"eastasiastage","displayName":"East - Asia (Stage)","regionalDisplayName":"(Asia Pacific) East Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia - Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasiastage","name":"southeastasiastage","displayName":"Southeast - Asia (Stage)","regionalDisplayName":"(Asia Pacific) Southeast Asia 
(Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia - Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap","name":"centraluseuap","displayName":"Central - US EUAP","regionalDisplayName":"(US) Central US EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","pairedRegion":[{"name":"eastus2euap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap","name":"eastus2euap","displayName":"East - US 2 EUAP","regionalDisplayName":"(US) East US 2 EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","pairedRegion":[{"name":"centraluseuap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus","name":"westcentralus","displayName":"West - Central US","regionalDisplayName":"(US) West Central US","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-110.234","latitude":"40.890","physicalLocation":"Wyoming","pairedRegion":[{"name":"westus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest","name":"southafricawest","displayName":"South - Africa West","regionalDisplayName":"(Africa) South Africa West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Africa","longitude":"18.843266","latitude":"-34.075691","physicalLocation":"Cape - Town","pairedRegion":[{"name":"southafricanorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral","name":"australiacentral","displayName":"Australia - Central","regionalDisplayName":"(Asia Pacific) Australia Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2","name":"australiacentral2","displayName":"Australia - Central 2","regionalDisplayName":"(Asia Pacific) Australia Central 2","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast","name":"australiasoutheast","displayName":"Australia - Southeast","regionalDisplayName":"(Asia Pacific) Australia Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - 
Pacific","longitude":"144.9631","latitude":"-37.8136","physicalLocation":"Victoria","pairedRegion":[{"name":"australiaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest","name":"japanwest","displayName":"Japan - West","regionalDisplayName":"(Asia Pacific) Japan West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"135.5022","latitude":"34.6939","physicalLocation":"Osaka","pairedRegion":[{"name":"japaneast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth","name":"koreasouth","displayName":"Korea - South","regionalDisplayName":"(Asia Pacific) Korea South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"129.0756","latitude":"35.1796","physicalLocation":"Busan","pairedRegion":[{"name":"koreacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia","name":"southindia","displayName":"South - India","regionalDisplayName":"(Asia Pacific) South India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"80.1636","latitude":"12.9822","physicalLocation":"Chennai","pairedRegion":[{"name":"centralindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westindia","name":"westindia","displayName":"West - India","regionalDisplayName":"(Asia Pacific) West India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"72.868","latitude":"19.088","physicalLocation":"Mumbai","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast","name":"canadaeast","displayName":"Canada - East","regionalDisplayName":"(Canada) Canada East","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Canada","longitude":"-71.217","latitude":"46.817","physicalLocation":"Quebec","pairedRegion":[{"name":"canadacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth","name":"francesouth","displayName":"France - South","regionalDisplayName":"(Europe) France South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"2.1972","latitude":"43.8345","physicalLocation":"Marseille","pairedRegion":[{"name":"francecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth","name":"germanynorth","displayName":"Germany - North","regionalDisplayName":"(Europe) Germany 
North","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"8.806422","latitude":"53.073635","physicalLocation":"Berlin","pairedRegion":[{"name":"germanywestcentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest","name":"norwaywest","displayName":"Norway - West","regionalDisplayName":"(Europe) Norway West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"5.733107","latitude":"58.969975","physicalLocation":"Norway","pairedRegion":[{"name":"norwayeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest","name":"switzerlandwest","displayName":"Switzerland - West","regionalDisplayName":"(Europe) Switzerland West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"6.143158","latitude":"46.204391","physicalLocation":"Geneva","pairedRegion":[{"name":"switzerlandnorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest","name":"ukwest","displayName":"UK - West","regionalDisplayName":"(Europe) UK West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"-3.084","latitude":"53.427","physicalLocation":"Cardiff","pairedRegion":[{"name":"uksouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral","name":"uaecentral","displayName":"UAE - Central","regionalDisplayName":"(Middle East) UAE Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Middle - East","longitude":"54.366669","latitude":"24.466667","physicalLocation":"Abu - Dhabi","pairedRegion":[{"name":"uaenorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsoutheast","name":"brazilsoutheast","displayName":"Brazil - Southeast","regionalDisplayName":"(South America) Brazil Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"South - America","longitude":"-43.2075","latitude":"-22.90278","physicalLocation":"Rio","pairedRegion":[{"name":"brazilsouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth"}]}}]}' - headers: - cache-control: - - no-cache - content-length: - - '25098' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:14:36 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: '{"location": "eastus"}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory factory create - Connection: - - keep-alive - Content-Length: - - '22' - Content-Type: - - application/json - ParameterSetName: - - --location --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: PUT - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002?api-version=2018-06-01 - response: - body: - string: '{"name":"exampleFa000002","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/examplefafwbxctazg","type":"Microsoft.DataFactory/factories","properties":{"provisioningState":"Succeeded","createTime":"2020-09-09T03:14:44.1070602Z","version":"2018-06-01"},"eTag":"\"99002e18-0000-0100-0000-5f5848a40000\"","location":"eastus","tags":{}}' - headers: - cache-control: - - no-cache - content-length: - - '475' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:14:46 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1197' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"properties": {"type": "Managed", "description": "Managed Integration - Runtime", "typeProperties": {"computeProperties": {"location": "East US 2", - "nodeSize": "Standard_D2_v3", "numberOfNodes": 1, "maxParallelExecutionsPerNode": - 2}, "ssisProperties": {"licenseType": "LicenseIncluded", "edition": "Standard"}}}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime managed create - Connection: - - keep-alive - Content-Length: - - '311' - Content-Type: - - application/json - ParameterSetName: - - --factory-name --name --resource-group --description --type-properties-compute-properties - --type-properties-ssis-properties - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003?api-version=2018-06-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationruntimes/exampleInteg000003","name":"exampleInteg000003","type":"Microsoft.DataFactory/factories/integrationruntimes","properties":{"type":"Managed","description":"Managed - Integration Runtime","typeProperties":{"computeProperties":{"location":"East - US 2","nodeSize":"Standard_D2_v3","numberOfNodes":1,"maxParallelExecutionsPerNode":2},"ssisProperties":{"licenseType":"LicenseIncluded","edition":"Standard"}},"state":"Initial"},"etag":"280132cd-0000-0100-0000-5f5848a70000"}' - headers: - cache-control: - - no-cache - content-length: - - '709' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:14:47 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1194' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - 
application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime show - Connection: - - keep-alive - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003?api-version=2018-06-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationruntimes/exampleInteg000003","name":"exampleInteg000003","type":"Microsoft.DataFactory/factories/integrationruntimes","properties":{"type":"Managed","description":"Managed - Integration Runtime","typeProperties":{"computeProperties":{"location":"East - US 2","nodeSize":"Standard_D2_v3","numberOfNodes":1,"maxParallelExecutionsPerNode":2},"ssisProperties":{"licenseType":"LicenseIncluded","edition":"Standard"}},"state":"Initial"},"etag":"280132cd-0000-0100-0000-5f5848a70000"}' - headers: - cache-control: - - no-cache - content-length: - - '709' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:14:48 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime start - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/start?api-version=2018-06-01 - response: - body: - string: '' - headers: - azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/start/operationstatuses/8b90a589bf8a4396a9c2d2ba5282ab23?api-version=2018-06-01 - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 09 Sep 2020 03:14:49 GMT - expires: - - '-1' - location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/start/operationresults/8b90a589bf8a4396a9c2d2ba5282ab23?api-version=2018-06-01 - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1195' - x-powered-by: - - ASP.NET - status: - code: 202 - message: Accepted -- request: - body: null - headers: - Accept: - 
- '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime start - Connection: - - keep-alive - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/start/operationstatuses/8b90a589bf8a4396a9c2d2ba5282ab23?api-version=2018-06-01 - response: - body: - string: '{"status":"InProgress","name":"8b90a589bf8a4396a9c2d2ba5282ab23","properties":null,"error":null}' - headers: - cache-control: - - no-cache - content-length: - - '96' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:15:04 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime start - Connection: - - keep-alive - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/start/operationstatuses/8b90a589bf8a4396a9c2d2ba5282ab23?api-version=2018-06-01 - response: - body: - string: '{"status":"InProgress","name":"8b90a589bf8a4396a9c2d2ba5282ab23","properties":null,"error":null}' - headers: - cache-control: - - no-cache - content-length: - - '96' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:15:34 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime start - Connection: - - keep-alive - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/start/operationstatuses/8b90a589bf8a4396a9c2d2ba5282ab23?api-version=2018-06-01 - response: - body: - string: 
'{"status":"Succeeded","name":"8b90a589bf8a4396a9c2d2ba5282ab23","properties":{"name":"exampleInteg000003","properties":{"dataFactoryName":"exampleFa000002","state":"Started","type":"Managed","dataFactoryLocation":null,"resourceUri":null,"typeProperties":{"nodes":[],"otherErrors":[],"createTime":"2020-09-09T03:14:49.1299318Z"},"dataFactoryTags":null,"managedVirtualNetwork":null}},"error":null}' - headers: - cache-control: - - no-cache - content-length: - - '405' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:16:05 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime start - Connection: - - keep-alive - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/start/operationresults/8b90a589bf8a4396a9c2d2ba5282ab23?api-version=2018-06-01 - response: - body: - string: '{"name":"exampleInteg000003","properties":{"dataFactoryName":"exampleFa000002","state":"Started","type":"Managed","dataFactoryLocation":null,"resourceUri":null,"typeProperties":{"nodes":[],"otherErrors":[],"createTime":"2020-09-09T03:14:49.1299318Z"},"dataFactoryTags":null,"managedVirtualNetwork":null}}' - headers: - cache-control: - - no-cache - content-length: - - '314' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:16:06 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime stop - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/stop?api-version=2018-06-01 - response: - body: - string: '' - headers: - azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/stop/operationstatuses/63c4ccb1e82a40d9a9b8b7a9b609b22b?api-version=2018-06-01 - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 09 Sep 2020 03:16:07 GMT - expires: - - '-1' - location: - - 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/stop/operationresults/63c4ccb1e82a40d9a9b8b7a9b609b22b?api-version=2018-06-01 - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1194' - x-powered-by: - - ASP.NET - status: - code: 202 - message: Accepted -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime stop - Connection: - - keep-alive - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/stop/operationstatuses/63c4ccb1e82a40d9a9b8b7a9b609b22b?api-version=2018-06-01 - response: - body: - string: '{"status":"InProgress","name":"63c4ccb1e82a40d9a9b8b7a9b609b22b","properties":null,"error":null}' - headers: - cache-control: - - no-cache - content-length: - - '96' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:16:22 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime stop - Connection: - - keep-alive - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/stop/operationstatuses/63c4ccb1e82a40d9a9b8b7a9b609b22b?api-version=2018-06-01 - response: - body: - string: '{"status":"InProgress","name":"63c4ccb1e82a40d9a9b8b7a9b609b22b","properties":null,"error":null}' - headers: - cache-control: - - no-cache - content-length: - - '96' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:16:53 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime stop - Connection: - - keep-alive - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - 
(Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/stop/operationstatuses/63c4ccb1e82a40d9a9b8b7a9b609b22b?api-version=2018-06-01 - response: - body: - string: '{"status":"InProgress","name":"63c4ccb1e82a40d9a9b8b7a9b609b22b","properties":null,"error":null}' - headers: - cache-control: - - no-cache - content-length: - - '96' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:17:24 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime stop - Connection: - - keep-alive - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/stop/operationstatuses/63c4ccb1e82a40d9a9b8b7a9b609b22b?api-version=2018-06-01 - response: - body: - string: '{"status":"InProgress","name":"63c4ccb1e82a40d9a9b8b7a9b609b22b","properties":null,"error":null}' - headers: - cache-control: - - no-cache - content-length: - - '96' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:17:54 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime stop - Connection: - - keep-alive - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/stop/operationstatuses/63c4ccb1e82a40d9a9b8b7a9b609b22b?api-version=2018-06-01 - response: - body: - string: '{"status":"InProgress","name":"63c4ccb1e82a40d9a9b8b7a9b609b22b","properties":null,"error":null}' - headers: - cache-control: - - no-cache - content-length: - - '96' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:18:24 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - 
message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime stop - Connection: - - keep-alive - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/stop/operationstatuses/63c4ccb1e82a40d9a9b8b7a9b609b22b?api-version=2018-06-01 - response: - body: - string: '{"status":"InProgress","name":"63c4ccb1e82a40d9a9b8b7a9b609b22b","properties":null,"error":null}' - headers: - cache-control: - - no-cache - content-length: - - '96' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:18:55 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime stop - Connection: - - keep-alive - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/stop/operationstatuses/63c4ccb1e82a40d9a9b8b7a9b609b22b?api-version=2018-06-01 - response: - body: - string: '{"status":"InProgress","name":"63c4ccb1e82a40d9a9b8b7a9b609b22b","properties":null,"error":null}' - headers: - cache-control: - - no-cache - content-length: - - '96' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:19:27 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime stop - Connection: - - keep-alive - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/stop/operationstatuses/63c4ccb1e82a40d9a9b8b7a9b609b22b?api-version=2018-06-01 - response: - body: - string: '{"status":"InProgress","name":"63c4ccb1e82a40d9a9b8b7a9b609b22b","properties":null,"error":null}' - headers: - cache-control: - - no-cache - content-length: - - '96' - content-type: - - application/json; 
charset=utf-8 - date: - - Wed, 09 Sep 2020 03:19:57 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime stop - Connection: - - keep-alive - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/stop/operationstatuses/63c4ccb1e82a40d9a9b8b7a9b609b22b?api-version=2018-06-01 - response: - body: - string: '{"status":"Succeeded","name":"63c4ccb1e82a40d9a9b8b7a9b609b22b","properties":null,"error":null}' - headers: - cache-control: - - no-cache - content-length: - - '95' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:20:28 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime stop - Connection: - - keep-alive - ParameterSetName: - - --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003/stop/operationresults/63c4ccb1e82a40d9a9b8b7a9b609b22b?api-version=2018-06-01 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 09 Sep 2020 03:20:28 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory integration-runtime delete - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - -y --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/integrationRuntimes/exampleInteg000003?api-version=2018-06-01 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 
09 Sep 2020 03:20:30 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-deletes: - - '14996' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory factory delete - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - -y --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002?api-version=2018-06-01 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 09 Sep 2020 03:20:35 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-deletes: - - '14996' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory factory delete - Connection: - - keep-alive - ParameterSetName: - - -y --name --resource-group - User-Agent: - - python/3.8.0 (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) msrest/0.6.18 - msrest_azure/0.6.3 azure-mgmt-resource/10.2.0 Azure-SDK-For-Python AZURECLI/2.11.1 - accept-language: - - en-US - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/locations?api-version=2019-11-01 - response: - body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus","name":"eastus","displayName":"East - US","regionalDisplayName":"(US) East US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-79.8164","latitude":"37.3719","physicalLocation":"Virginia","pairedRegion":[{"name":"westus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2","name":"eastus2","displayName":"East - US 2","regionalDisplayName":"(US) East US 2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","physicalLocation":"Virginia","pairedRegion":[{"name":"centralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus","name":"southcentralus","displayName":"South - Central US","regionalDisplayName":"(US) South Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-98.5","latitude":"29.4167","physicalLocation":"Texas","pairedRegion":[{"name":"northcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2","name":"westus2","displayName":"West - US 2","regionalDisplayName":"(US) West US 
2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-119.852","latitude":"47.233","physicalLocation":"Washington","pairedRegion":[{"name":"westcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast","name":"australiaeast","displayName":"Australia - East","regionalDisplayName":"(Asia Pacific) Australia East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"151.2094","latitude":"-33.86","physicalLocation":"New - South Wales","pairedRegion":[{"name":"australiasoutheast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia","name":"southeastasia","displayName":"Southeast - Asia","regionalDisplayName":"(Asia Pacific) Southeast Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"103.833","latitude":"1.283","physicalLocation":"Singapore","pairedRegion":[{"name":"eastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope","name":"northeurope","displayName":"North - Europe","regionalDisplayName":"(Europe) North Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-6.2597","latitude":"53.3478","physicalLocation":"Ireland","pairedRegion":[{"name":"westeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth","name":"uksouth","displayName":"UK - South","regionalDisplayName":"(Europe) UK South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-0.799","latitude":"50.941","physicalLocation":"London","pairedRegion":[{"name":"ukwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope","name":"westeurope","displayName":"West - Europe","regionalDisplayName":"(Europe) West Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"4.9","latitude":"52.3667","physicalLocation":"Netherlands","pairedRegion":[{"name":"northeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus","name":"centralus","displayName":"Central - US","regionalDisplayName":"(US) Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","physicalLocation":"Iowa","pairedRegion":[{"name":"eastus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus","name":"northcentralus","displayName":"North - Central US","regionalDisplayName":"(US) North Central 
US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-87.6278","latitude":"41.8819","physicalLocation":"Illinois","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus","name":"westus","displayName":"West - US","regionalDisplayName":"(US) West US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-122.417","latitude":"37.783","physicalLocation":"California","pairedRegion":[{"name":"eastus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth","name":"southafricanorth","displayName":"South - Africa North","regionalDisplayName":"(Africa) South Africa North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Africa","longitude":"28.218370","latitude":"-25.731340","physicalLocation":"Johannesburg","pairedRegion":[{"name":"southafricawest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia","name":"centralindia","displayName":"Central - India","regionalDisplayName":"(Asia Pacific) Central India","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"73.9197","latitude":"18.5822","physicalLocation":"Pune","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia","name":"eastasia","displayName":"East - Asia","regionalDisplayName":"(Asia Pacific) East Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"114.188","latitude":"22.267","physicalLocation":"Hong - Kong","pairedRegion":[{"name":"southeastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast","name":"japaneast","displayName":"Japan - East","regionalDisplayName":"(Asia Pacific) Japan East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"139.77","latitude":"35.68","physicalLocation":"Tokyo, - Saitama","pairedRegion":[{"name":"japanwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral","name":"koreacentral","displayName":"Korea - Central","regionalDisplayName":"(Asia Pacific) Korea Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"126.9780","latitude":"37.5665","physicalLocation":"Seoul","pairedRegion":[{"name":"koreasouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral","name":"canadacentral","displayName":"Canada - Central","regionalDisplayName":"(Canada) Canada 
Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Canada","longitude":"-79.383","latitude":"43.653","physicalLocation":"Toronto","pairedRegion":[{"name":"canadaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral","name":"francecentral","displayName":"France - Central","regionalDisplayName":"(Europe) France Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"2.3730","latitude":"46.3772","physicalLocation":"Paris","pairedRegion":[{"name":"francesouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral","name":"germanywestcentral","displayName":"Germany - West Central","regionalDisplayName":"(Europe) Germany West Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.682127","latitude":"50.110924","physicalLocation":"Frankfurt","pairedRegion":[{"name":"germanynorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast","name":"norwayeast","displayName":"Norway - East","regionalDisplayName":"(Europe) Norway East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"10.752245","latitude":"59.913868","physicalLocation":"Norway","pairedRegion":[{"name":"norwaywest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth","name":"switzerlandnorth","displayName":"Switzerland - North","regionalDisplayName":"(Europe) Switzerland North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.564572","latitude":"47.451542","physicalLocation":"Zurich","pairedRegion":[{"name":"switzerlandwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth","name":"uaenorth","displayName":"UAE - North","regionalDisplayName":"(Middle East) UAE North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Middle - East","longitude":"55.316666","latitude":"25.266666","physicalLocation":"Dubai","pairedRegion":[{"name":"uaecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth","name":"brazilsouth","displayName":"Brazil - South","regionalDisplayName":"(South America) Brazil South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"South - America","longitude":"-46.633","latitude":"-23.55","physicalLocation":"Sao - Paulo State","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage","name":"centralusstage","displayName":"Central - US (Stage)","regionalDisplayName":"(US) Central US 
(Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstage","name":"eastusstage","displayName":"East - US (Stage)","regionalDisplayName":"(US) East US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2stage","name":"eastus2stage","displayName":"East - US 2 (Stage)","regionalDisplayName":"(US) East US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralusstage","name":"northcentralusstage","displayName":"North - Central US (Stage)","regionalDisplayName":"(US) North Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstage","name":"southcentralusstage","displayName":"South - Central US (Stage)","regionalDisplayName":"(US) South Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westusstage","name":"westusstage","displayName":"West - US (Stage)","regionalDisplayName":"(US) West US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2stage","name":"westus2stage","displayName":"West - US 2 (Stage)","regionalDisplayName":"(US) West US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asia","name":"asia","displayName":"Asia","regionalDisplayName":"Asia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asiapacific","name":"asiapacific","displayName":"Asia - Pacific","regionalDisplayName":"Asia 
Pacific","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia","name":"australia","displayName":"Australia","regionalDisplayName":"Australia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil","name":"brazil","displayName":"Brazil","regionalDisplayName":"Brazil","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada","name":"canada","displayName":"Canada","regionalDisplayName":"Canada","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe","name":"europe","displayName":"Europe","regionalDisplayName":"Europe","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global","name":"global","displayName":"Global","regionalDisplayName":"Global","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india","name":"india","displayName":"India","regionalDisplayName":"India","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan","name":"japan","displayName":"Japan","regionalDisplayName":"Japan","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk","name":"uk","displayName":"United - Kingdom","regionalDisplayName":"United Kingdom","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstates","name":"unitedstates","displayName":"United - States","regionalDisplayName":"United States","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage","name":"eastasiastage","displayName":"East - Asia (Stage)","regionalDisplayName":"(Asia Pacific) East Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia - Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasiastage","name":"southeastasiastage","displayName":"Southeast - Asia (Stage)","regionalDisplayName":"(Asia Pacific) Southeast Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia - Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap","name":"centraluseuap","displayName":"Central - US EUAP","regionalDisplayName":"(US) Central US EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","pairedRegion":[{"name":"eastus2euap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap","name":"eastus2euap","displayName":"East - US 2 EUAP","regionalDisplayName":"(US) East US 2 
EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","pairedRegion":[{"name":"centraluseuap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus","name":"westcentralus","displayName":"West - Central US","regionalDisplayName":"(US) West Central US","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-110.234","latitude":"40.890","physicalLocation":"Wyoming","pairedRegion":[{"name":"westus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest","name":"southafricawest","displayName":"South - Africa West","regionalDisplayName":"(Africa) South Africa West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Africa","longitude":"18.843266","latitude":"-34.075691","physicalLocation":"Cape - Town","pairedRegion":[{"name":"southafricanorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral","name":"australiacentral","displayName":"Australia - Central","regionalDisplayName":"(Asia Pacific) Australia Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2","name":"australiacentral2","displayName":"Australia - Central 2","regionalDisplayName":"(Asia Pacific) Australia Central 2","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast","name":"australiasoutheast","displayName":"Australia - Southeast","regionalDisplayName":"(Asia Pacific) Australia Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"144.9631","latitude":"-37.8136","physicalLocation":"Victoria","pairedRegion":[{"name":"australiaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest","name":"japanwest","displayName":"Japan - West","regionalDisplayName":"(Asia Pacific) Japan West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"135.5022","latitude":"34.6939","physicalLocation":"Osaka","pairedRegion":[{"name":"japaneast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth","name":"koreasouth","displayName":"Korea - South","regionalDisplayName":"(Asia Pacific) Korea South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - 
Pacific","longitude":"129.0756","latitude":"35.1796","physicalLocation":"Busan","pairedRegion":[{"name":"koreacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia","name":"southindia","displayName":"South - India","regionalDisplayName":"(Asia Pacific) South India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"80.1636","latitude":"12.9822","physicalLocation":"Chennai","pairedRegion":[{"name":"centralindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westindia","name":"westindia","displayName":"West - India","regionalDisplayName":"(Asia Pacific) West India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"72.868","latitude":"19.088","physicalLocation":"Mumbai","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast","name":"canadaeast","displayName":"Canada - East","regionalDisplayName":"(Canada) Canada East","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Canada","longitude":"-71.217","latitude":"46.817","physicalLocation":"Quebec","pairedRegion":[{"name":"canadacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth","name":"francesouth","displayName":"France - South","regionalDisplayName":"(Europe) France South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"2.1972","latitude":"43.8345","physicalLocation":"Marseille","pairedRegion":[{"name":"francecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth","name":"germanynorth","displayName":"Germany - North","regionalDisplayName":"(Europe) Germany North","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"8.806422","latitude":"53.073635","physicalLocation":"Berlin","pairedRegion":[{"name":"germanywestcentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest","name":"norwaywest","displayName":"Norway - West","regionalDisplayName":"(Europe) Norway West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"5.733107","latitude":"58.969975","physicalLocation":"Norway","pairedRegion":[{"name":"norwayeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest","name":"switzerlandwest","displayName":"Switzerland - West","regionalDisplayName":"(Europe) Switzerland 
West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"6.143158","latitude":"46.204391","physicalLocation":"Geneva","pairedRegion":[{"name":"switzerlandnorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest","name":"ukwest","displayName":"UK - West","regionalDisplayName":"(Europe) UK West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"-3.084","latitude":"53.427","physicalLocation":"Cardiff","pairedRegion":[{"name":"uksouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral","name":"uaecentral","displayName":"UAE - Central","regionalDisplayName":"(Middle East) UAE Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Middle - East","longitude":"54.366669","latitude":"24.466667","physicalLocation":"Abu - Dhabi","pairedRegion":[{"name":"uaenorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsoutheast","name":"brazilsoutheast","displayName":"Brazil - Southeast","regionalDisplayName":"(South America) Brazil Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"South - America","longitude":"-43.2075","latitude":"-22.90278","physicalLocation":"Rio","pairedRegion":[{"name":"brazilsouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth"}]}}]}' - headers: - cache-control: - - no-cache - content-length: - - '25098' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:20:36 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: '{"location": "eastus"}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory factory create - Connection: - - keep-alive - Content-Length: - - '22' - Content-Type: - - application/json - ParameterSetName: - - --location --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002?api-version=2018-06-01 - response: - body: - string: '{"name":"exampleFa000002","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/examplefafwbxctazg","type":"Microsoft.DataFactory/factories","properties":{"provisioningState":"Succeeded","createTime":"2020-09-09T03:20:45.0542433Z","version":"2018-06-01"},"eTag":"\"9900fa1d-0000-0100-0000-5f584a0d0000\"","location":"eastus","tags":{}}' - headers: - cache-control: - - no-cache - content-length: - - '475' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:20:46 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - 
vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1195' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"properties": {"activities": [{"name": "Wait1", "type": "Wait", "dependsOn": - [], "userProperties": [], "typeProperties": {"waitTimeInSeconds": 5}}], "annotations": - []}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory pipeline create - Connection: - - keep-alive - Content-Length: - - '169' - Content-Type: - - application/json - ParameterSetName: - - --factory-name --pipeline --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/pipelines/example000006?api-version=2018-06-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/pipelines/example000006","name":"example000006","type":"Microsoft.DataFactory/factories/pipelines","properties":{"activities":[{"name":"Wait1","type":"Wait","dependsOn":[],"userProperties":[],"typeProperties":{"waitTimeInSeconds":5}}],"annotations":[]},"etag":"bd0234fb-0000-0100-0000-5f584a0f0000"}' - headers: - cache-control: - - no-cache - content-length: - - '514' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:20:47 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1195' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"OutputBlobNameList": ["exampleoutput.csv"]}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory pipeline create-run - Connection: - - keep-alive - Content-Length: - - '45' - Content-Type: - - application/json - ParameterSetName: - - --factory-name --parameters --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/pipelines/example000006/createRun?api-version=2018-06-01 - response: - body: - string: '{"runId":"7484afc8-f24b-11ea-8dee-00155d4d8222"}' - headers: - cache-control: - - no-cache - content-length: - - '48' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:20:49 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1198' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, 
deflate - CommandName: - - datafactory pipeline-run show - Connection: - - keep-alive - ParameterSetName: - - --factory-name --resource-group --run-id - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/pipelineruns/7484afc8-f24b-11ea-8dee-00155d4d8222?api-version=2018-06-01 - response: - body: - string: '{"id":"/SUBSCRIPTIONS/00000000-0000-0000-0000-000000000000/RESOURCEGROUPS/CLITESTEXEZCMJPYVZ4N5KXL6X6Q2XDKQFPWYCPXD2MCWV44QPPAG4LGMU6P24R3H2VIMH5VVGI/PROVIDERS/MICROSOFT.DATAFACTORY/FACTORIES/EXAMPLEFAFWBXCTAZG/pipelineruns/7484afc8-f24b-11ea-8dee-00155d4d8222","runId":"7484afc8-f24b-11ea-8dee-00155d4d8222","debugRunId":null,"runGroupId":"7484afc8-f24b-11ea-8dee-00155d4d8222","pipelineName":"example000006","parameters":{},"invokedBy":{"id":"78107deea71547ca8814e1ff013df901","name":"Manual","invokedByType":"Manual"},"runStart":"2020-09-09T03:20:48.3884378Z","runEnd":"2020-09-09T03:20:55.8014945Z","durationInMs":7413,"status":"Succeeded","message":"","lastUpdated":"2020-09-09T03:20:55.8014945Z","annotations":[],"runDimension":{},"isLatest":true}' - headers: - cache-control: - - no-cache - content-length: - - '755' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:20:55 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"lastUpdatedAfter": "2018-06-16T00:36:44.334576Z", "lastUpdatedBefore": - "2018-06-16T00:49:48.368647Z"}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory activity-run query-by-pipeline-run - Connection: - - keep-alive - Content-Length: - - '103' - Content-Type: - - application/json - ParameterSetName: - - --factory-name --last-updated-after --last-updated-before --resource-group - --run-id - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/pipelineruns/2f7fdb90-5df1-4b8e-ac2f-064cfa58202b/queryActivityruns?api-version=2018-06-01 - response: - body: - string: '{"value":[]}' - headers: - cache-control: - - no-cache - content-length: - - '12' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:20:57 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"OutputBlobNameList": ["exampleoutput.csv"]}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory pipeline create-run - Connection: - - keep-alive - Content-Length: - - '45' - Content-Type: - - application/json - 
ParameterSetName: - - --factory-name --parameters --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/pipelines/example000006/createRun?api-version=2018-06-01 - response: - body: - string: '{"runId":"7a82691a-f24b-11ea-8dee-00155d4d8222"}' - headers: - cache-control: - - no-cache - content-length: - - '48' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:20:58 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1194' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory pipeline-run cancel - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - --factory-name --resource-group --run-id - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/pipelineruns/7a82691a-f24b-11ea-8dee-00155d4d8222/cancel?api-version=2018-06-01 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 09 Sep 2020 03:20:59 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1197' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"properties": {"type": "TumblingWindowTrigger", "description": "trumblingwindowtrigger", - "annotations": [], "pipeline": {"pipelineReference": {"type": "PipelineReference", - "referenceName": "example000006"}}, "typeProperties": {"frequency": "Minute", - "interval": 5, "startTime": "2020-09-09T03:13:00.000Z", "endTime": "2020-09-09T04:13:00.000Z", - "delay": "00:00:00", "maxConcurrency": 50, "retryPolicy": {"intervalInSeconds": - 30}, "dependsOn": []}}}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory trigger create - Connection: - - keep-alive - Content-Length: - - '451' - Content-Type: - - application/json - ParameterSetName: - - --resource-group --properties --factory-name --name - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007?api-version=2018-06-01 - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007","name":"example000007","type":"Microsoft.DataFactory/factories/triggers","properties":{"type":"TumblingWindowTrigger","description":"trumblingwindowtrigger","annotations":[],"pipeline":{"pipelineReference":{"type":"PipelineReference","referenceName":"example000006"}},"typeProperties":{"frequency":"Minute","interval":5,"startTime":"2020-09-09T03:13:00Z","endTime":"2020-09-09T04:13:00Z","delay":"00:00:00","maxConcurrency":50,"retryPolicy":{"intervalInSeconds":30},"dependsOn":[]},"runtimeState":"Stopped"},"etag":"bd023dfc-0000-0100-0000-5f584a1e0000"}' - headers: - cache-control: - - no-cache - content-length: - - '793' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:21:01 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1194' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory trigger start - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - --factory-name --resource-group --name - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007/start?api-version=2018-06-01 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 09 Sep 2020 03:21:04 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1195' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory trigger show - Connection: - - keep-alive - ParameterSetName: - - --factory-name --resource-group --name - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007?api-version=2018-06-01 - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007","name":"example000007","type":"Microsoft.DataFactory/factories/triggers","properties":{"type":"TumblingWindowTrigger","description":"trumblingwindowtrigger","annotations":[],"pipeline":{"pipelineReference":{"type":"PipelineReference","referenceName":"example000006"}},"typeProperties":{"frequency":"Minute","interval":5,"startTime":"2020-09-09T03:13:00Z","endTime":"2020-09-09T04:13:00Z","delay":"00:00:00","maxConcurrency":50,"retryPolicy":{"intervalInSeconds":30},"dependsOn":[]},"runtimeState":"Started"},"etag":"bd026bfc-0000-0100-0000-5f584a210000"}' - headers: - cache-control: - - no-cache - content-length: - - '793' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:21:05 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"lastUpdatedAfter": "2020-09-09T03:13:00.000Z", "lastUpdatedBefore": "2020-09-09T04:13:00.000Z"}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory trigger-run query-by-factory - Connection: - - keep-alive - Content-Length: - - '97' - Content-Type: - - application/json - ParameterSetName: - - --factory-name --last-updated-after --last-updated-before --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/queryTriggerRuns?api-version=2018-06-01 - response: - body: - string: '{"value":[{"triggerName":"example000007","triggerRunId":"08586019852210544144274050918CU85","triggerType":"TumblingWindowTrigger","triggerRunTimestamp":"2020-09-09T03:21:04.4286574Z","status":"Waiting","message":null,"properties":{"TriggerTime":"9/9/2020 - 3:21:04 AM","windowStartTime":"9/9/2020 3:13:00 AM","windowEndTime":"9/9/2020 - 3:18:00 AM"},"triggeredPipelines":{},"groupId":"08586019852210544144274050918CU85","dependencyStatus":{}}]}' - headers: - cache-control: - - no-cache - content-length: - - '441' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:21:05 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"lastUpdatedAfter": "2020-09-09T03:13:00.000Z", "lastUpdatedBefore": "2020-09-09T04:13:00.000Z"}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory trigger-run query-by-factory - Connection: - - keep-alive - Content-Length: - - '97' - Content-Type: - - application/json - ParameterSetName: - - --factory-name --last-updated-after --last-updated-before --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - 
(Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/queryTriggerRuns?api-version=2018-06-01 - response: - body: - string: '{"value":[{"triggerName":"example000007","triggerRunId":"08586019852210544144274050918CU85","triggerType":"TumblingWindowTrigger","triggerRunTimestamp":"2020-09-09T03:21:04.4286574Z","status":"Succeeded","message":null,"properties":{"TriggerTime":"9/9/2020 - 3:21:04 AM","windowStartTime":"9/9/2020 3:13:00 AM","windowEndTime":"9/9/2020 - 3:18:00 AM"},"triggeredPipelines":{"example000006":"51a4b128-096e-4968-a6c7-c963d9e73580"},"groupId":"08586019852210544144274050918CU85","dependencyStatus":{}},{"triggerName":"example000007","triggerRunId":"08586019852209858069295488367CU29","triggerType":"TumblingWindowTrigger","triggerRunTimestamp":"2020-09-09T03:23:00.5544557Z","status":"Succeeded","message":null,"properties":{"TriggerTime":"9/9/2020 - 3:23:00 AM","windowStartTime":"9/9/2020 3:18:00 AM","windowEndTime":"9/9/2020 - 3:23:00 AM"},"triggeredPipelines":{"example000006":"25064632-99e9-42e1-87bb-041d44a5573a"},"groupId":"08586019852209858069295488367CU29","dependencyStatus":{}}]}' - headers: - cache-control: - - no-cache - content-length: - - '987' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:26:07 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory trigger-run rerun - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - --factory-name --resource-group --trigger-name --run-id - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007/triggerRuns/08586019852210544144274050918CU85/rerun?api-version=2018-06-01 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 09 Sep 2020 03:26:08 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1193' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: '{"lastUpdatedAfter": "2020-09-09T03:13:00.000Z", "lastUpdatedBefore": "2020-09-09T04:13:00.000Z"}' - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory trigger-run query-by-factory - Connection: - - keep-alive - Content-Length: - - '97' - Content-Type: - - application/json - ParameterSetName: - - --factory-name --last-updated-after --last-updated-before --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) 
- method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/queryTriggerRuns?api-version=2018-06-01 - response: - body: - string: '{"value":[{"triggerName":"example000007","triggerRunId":"08586019852210544144274050918CU85","triggerType":"TumblingWindowTrigger","triggerRunTimestamp":"2020-09-09T03:21:04.4286574Z","status":"Succeeded","message":null,"properties":{"TriggerTime":"9/9/2020 - 3:21:04 AM","windowStartTime":"9/9/2020 3:13:00 AM","windowEndTime":"9/9/2020 - 3:18:00 AM"},"triggeredPipelines":{"example000006":"51a4b128-096e-4968-a6c7-c963d9e73580"},"groupId":"08586019852210544144274050918CU85","dependencyStatus":{}},{"triggerName":"example000007","triggerRunId":"08586019852209858069295488367CU29","triggerType":"TumblingWindowTrigger","triggerRunTimestamp":"2020-09-09T03:23:00.5544557Z","status":"Succeeded","message":null,"properties":{"TriggerTime":"9/9/2020 - 3:23:00 AM","windowStartTime":"9/9/2020 3:18:00 AM","windowEndTime":"9/9/2020 - 3:23:00 AM"},"triggeredPipelines":{"example000006":"25064632-99e9-42e1-87bb-041d44a5573a"},"groupId":"08586019852209858069295488367CU29","dependencyStatus":{}},{"triggerName":"example000007","triggerRunId":"08586019849161884553860654381CU29","triggerType":"TumblingWindowTrigger","triggerRunTimestamp":"2020-09-09T03:26:09.2762733Z","status":"Waiting","message":null,"properties":{"TriggerTime":"9/9/2020 - 3:26:09 AM","windowStartTime":"9/9/2020 3:13:00 AM","windowEndTime":"9/9/2020 - 3:18:00 AM"},"triggeredPipelines":{},"groupId":"08586019852210544144274050918CU85","dependencyStatus":{}}]}' - headers: - cache-control: - - no-cache - content-length: - - '1417' - content-type: - - application/json; charset=utf-8 - date: - - Wed, 09 Sep 2020 03:26:09 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory trigger stop - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - --factory-name --resource-group --name - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007/stop?api-version=2018-06-01 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 09 Sep 2020 03:26:12 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1196' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory trigger delete - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - -y --factory-name --resource-group --name - User-Agent: - - AZURECLI/2.11.1 
azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/triggers/example000007?api-version=2018-06-01 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 09 Sep 2020 03:26:16 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-deletes: - - '14999' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory pipeline delete - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - -y --factory-name --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002/pipelines/example000006?api-version=2018-06-01 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 09 Sep 2020 03:26:21 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-deletes: - - '14995' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory factory delete - Connection: - - keep-alive - Content-Length: - - '0' - ParameterSetName: - - -y --name --resource-group - User-Agent: - - AZURECLI/2.11.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.27) - method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.DataFactory/factories/exampleFa000002?api-version=2018-06-01 - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Wed, 09 Sep 2020 03:26:28 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-ratelimit-remaining-subscription-deletes: - - '14995' - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -version: 1 diff --git a/src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory_main.yaml b/src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory_main.yaml index f3610fa71b6..5dd62cb6db4 100644 --- a/src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory_main.yaml +++ b/src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory_main.yaml @@ -13,165 +13,5039 @@ interactions: ParameterSetName: - --query -o User-Agent: - - python/3.8.0 (Windows-10-10.0.19041-SP0) msrest/0.6.21 msrest_azure/0.6.3 
- azure-mgmt-resource/12.1.0 Azure-SDK-For-Python AZURECLI/2.22.1 - accept-language: - - en-US + - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/locations?api-version=2019-11-01 response: body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus","name":"eastus","displayName":"East - US","regionalDisplayName":"(US) East US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-79.8164","latitude":"37.3719","physicalLocation":"Virginia","pairedRegion":[{"name":"westus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2","name":"eastus2","displayName":"East - US 2","regionalDisplayName":"(US) East US 2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","physicalLocation":"Virginia","pairedRegion":[{"name":"centralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus","name":"southcentralus","displayName":"South - Central US","regionalDisplayName":"(US) South Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-98.5","latitude":"29.4167","physicalLocation":"Texas","pairedRegion":[{"name":"northcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2","name":"westus2","displayName":"West - US 2","regionalDisplayName":"(US) West US 2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-119.852","latitude":"47.233","physicalLocation":"Washington","pairedRegion":[{"name":"westcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast","name":"australiaeast","displayName":"Australia - East","regionalDisplayName":"(Asia Pacific) Australia East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"151.2094","latitude":"-33.86","physicalLocation":"New - South Wales","pairedRegion":[{"name":"australiasoutheast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia","name":"southeastasia","displayName":"Southeast - Asia","regionalDisplayName":"(Asia Pacific) Southeast Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"103.833","latitude":"1.283","physicalLocation":"Singapore","pairedRegion":[{"name":"eastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope","name":"northeurope","displayName":"North - Europe","regionalDisplayName":"(Europe) North 
Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-6.2597","latitude":"53.3478","physicalLocation":"Ireland","pairedRegion":[{"name":"westeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth","name":"uksouth","displayName":"UK - South","regionalDisplayName":"(Europe) UK South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-0.799","latitude":"50.941","physicalLocation":"London","pairedRegion":[{"name":"ukwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope","name":"westeurope","displayName":"West - Europe","regionalDisplayName":"(Europe) West Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"4.9","latitude":"52.3667","physicalLocation":"Netherlands","pairedRegion":[{"name":"northeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus","name":"centralus","displayName":"Central - US","regionalDisplayName":"(US) Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","physicalLocation":"Iowa","pairedRegion":[{"name":"eastus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus","name":"northcentralus","displayName":"North - Central US","regionalDisplayName":"(US) North Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-87.6278","latitude":"41.8819","physicalLocation":"Illinois","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus","name":"westus","displayName":"West - US","regionalDisplayName":"(US) West US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-122.417","latitude":"37.783","physicalLocation":"California","pairedRegion":[{"name":"eastus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth","name":"southafricanorth","displayName":"South - Africa North","regionalDisplayName":"(Africa) South Africa North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Africa","longitude":"28.218370","latitude":"-25.731340","physicalLocation":"Johannesburg","pairedRegion":[{"name":"southafricawest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia","name":"centralindia","displayName":"Central - India","regionalDisplayName":"(Asia Pacific) Central India","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - 
Pacific","longitude":"73.9197","latitude":"18.5822","physicalLocation":"Pune","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia","name":"eastasia","displayName":"East - Asia","regionalDisplayName":"(Asia Pacific) East Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"114.188","latitude":"22.267","physicalLocation":"Hong - Kong","pairedRegion":[{"name":"southeastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast","name":"japaneast","displayName":"Japan - East","regionalDisplayName":"(Asia Pacific) Japan East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"139.77","latitude":"35.68","physicalLocation":"Tokyo, - Saitama","pairedRegion":[{"name":"japanwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest","name":"jioindiawest","displayName":"JIO - India West","regionalDisplayName":"(Asia Pacific) JIO India West","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"70.05773","latitude":"22.470701","physicalLocation":"Jamnagar","pairedRegion":[{"name":"jioindiacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral","name":"koreacentral","displayName":"Korea - Central","regionalDisplayName":"(Asia Pacific) Korea Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"126.9780","latitude":"37.5665","physicalLocation":"Seoul","pairedRegion":[{"name":"koreasouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral","name":"canadacentral","displayName":"Canada - Central","regionalDisplayName":"(Canada) Canada Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Canada","longitude":"-79.383","latitude":"43.653","physicalLocation":"Toronto","pairedRegion":[{"name":"canadaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral","name":"francecentral","displayName":"France - Central","regionalDisplayName":"(Europe) France Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"2.3730","latitude":"46.3772","physicalLocation":"Paris","pairedRegion":[{"name":"francesouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral","name":"germanywestcentral","displayName":"Germany - West Central","regionalDisplayName":"(Europe) Germany West 
Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.682127","latitude":"50.110924","physicalLocation":"Frankfurt","pairedRegion":[{"name":"germanynorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast","name":"norwayeast","displayName":"Norway - East","regionalDisplayName":"(Europe) Norway East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"10.752245","latitude":"59.913868","physicalLocation":"Norway","pairedRegion":[{"name":"norwaywest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth","name":"switzerlandnorth","displayName":"Switzerland - North","regionalDisplayName":"(Europe) Switzerland North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.564572","latitude":"47.451542","physicalLocation":"Zurich","pairedRegion":[{"name":"switzerlandwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth","name":"uaenorth","displayName":"UAE - North","regionalDisplayName":"(Middle East) UAE North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Middle - East","longitude":"55.316666","latitude":"25.266666","physicalLocation":"Dubai","pairedRegion":[{"name":"uaecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth","name":"brazilsouth","displayName":"Brazil - South","regionalDisplayName":"(South America) Brazil South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"South - America","longitude":"-46.633","latitude":"-23.55","physicalLocation":"Sao - Paulo State","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage","name":"centralusstage","displayName":"Central - US (Stage)","regionalDisplayName":"(US) Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstage","name":"eastusstage","displayName":"East - US (Stage)","regionalDisplayName":"(US) East US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2stage","name":"eastus2stage","displayName":"East - US 2 (Stage)","regionalDisplayName":"(US) East US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralusstage","name":"northcentralusstage","displayName":"North - Central US (Stage)","regionalDisplayName":"(US) North Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstage","name":"southcentralusstage","displayName":"South - Central US (Stage)","regionalDisplayName":"(US) South Central 
US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westusstage","name":"westusstage","displayName":"West - US (Stage)","regionalDisplayName":"(US) West US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2stage","name":"westus2stage","displayName":"West - US 2 (Stage)","regionalDisplayName":"(US) West US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asia","name":"asia","displayName":"Asia","regionalDisplayName":"Asia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asiapacific","name":"asiapacific","displayName":"Asia - Pacific","regionalDisplayName":"Asia Pacific","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia","name":"australia","displayName":"Australia","regionalDisplayName":"Australia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil","name":"brazil","displayName":"Brazil","regionalDisplayName":"Brazil","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada","name":"canada","displayName":"Canada","regionalDisplayName":"Canada","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe","name":"europe","displayName":"Europe","regionalDisplayName":"Europe","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global","name":"global","displayName":"Global","regionalDisplayName":"Global","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india","name":"india","displayName":"India","regionalDisplayName":"India","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan","name":"japan","displayName":"Japan","regionalDisplayName":"Japan","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk","name":"uk","displayName":"United - Kingdom","regionalDisplayName":"United Kingdom","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstates","name":"unitedstates","displayName":"United - States","regionalDisplayName":"United States","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage","name":"eastasiastage","displayName":"East - Asia (Stage)","regionalDisplayName":"(Asia Pacific) East Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia - Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasiastage","name":"southeastasiastage","displayName":"Southeast - Asia (Stage)","regionalDisplayName":"(Asia Pacific) Southeast Asia 
(Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia - Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap","name":"centraluseuap","displayName":"Central - US EUAP","regionalDisplayName":"(US) Central US EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","pairedRegion":[{"name":"eastus2euap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap","name":"eastus2euap","displayName":"East - US 2 EUAP","regionalDisplayName":"(US) East US 2 EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","pairedRegion":[{"name":"centraluseuap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus","name":"westcentralus","displayName":"West - Central US","regionalDisplayName":"(US) West Central US","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-110.234","latitude":"40.890","physicalLocation":"Wyoming","pairedRegion":[{"name":"westus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus3","name":"westus3","displayName":"West - US 3","regionalDisplayName":"(US) West US 3","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-112.074036","latitude":"33.448376","physicalLocation":"Phoenix","pairedRegion":[{"name":"eastus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest","name":"southafricawest","displayName":"South - Africa West","regionalDisplayName":"(Africa) South Africa West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Africa","longitude":"18.843266","latitude":"-34.075691","physicalLocation":"Cape - Town","pairedRegion":[{"name":"southafricanorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral","name":"australiacentral","displayName":"Australia - Central","regionalDisplayName":"(Asia Pacific) Australia Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2","name":"australiacentral2","displayName":"Australia - Central 2","regionalDisplayName":"(Asia Pacific) Australia Central 2","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast","name":"australiasoutheast","displayName":"Australia - 
Southeast","regionalDisplayName":"(Asia Pacific) Australia Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"144.9631","latitude":"-37.8136","physicalLocation":"Victoria","pairedRegion":[{"name":"australiaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest","name":"japanwest","displayName":"Japan - West","regionalDisplayName":"(Asia Pacific) Japan West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"135.5022","latitude":"34.6939","physicalLocation":"Osaka","pairedRegion":[{"name":"japaneast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral","name":"jioindiacentral","displayName":"JIO - India Central","regionalDisplayName":"(Asia Pacific) JIO India Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"79.08886","latitude":"21.146633","physicalLocation":"Nagpur","pairedRegion":[{"name":"jioindiawest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth","name":"koreasouth","displayName":"Korea - South","regionalDisplayName":"(Asia Pacific) Korea South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"129.0756","latitude":"35.1796","physicalLocation":"Busan","pairedRegion":[{"name":"koreacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia","name":"southindia","displayName":"South - India","regionalDisplayName":"(Asia Pacific) South India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"80.1636","latitude":"12.9822","physicalLocation":"Chennai","pairedRegion":[{"name":"centralindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westindia","name":"westindia","displayName":"West - India","regionalDisplayName":"(Asia Pacific) West India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"72.868","latitude":"19.088","physicalLocation":"Mumbai","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast","name":"canadaeast","displayName":"Canada - East","regionalDisplayName":"(Canada) Canada East","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Canada","longitude":"-71.217","latitude":"46.817","physicalLocation":"Quebec","pairedRegion":[{"name":"canadacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth","name":"francesouth","displayName":"France - South","regionalDisplayName":"(Europe) France 
South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"2.1972","latitude":"43.8345","physicalLocation":"Marseille","pairedRegion":[{"name":"francecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth","name":"germanynorth","displayName":"Germany - North","regionalDisplayName":"(Europe) Germany North","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"8.806422","latitude":"53.073635","physicalLocation":"Berlin","pairedRegion":[{"name":"germanywestcentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest","name":"norwaywest","displayName":"Norway - West","regionalDisplayName":"(Europe) Norway West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"5.733107","latitude":"58.969975","physicalLocation":"Norway","pairedRegion":[{"name":"norwayeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest","name":"switzerlandwest","displayName":"Switzerland - West","regionalDisplayName":"(Europe) Switzerland West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"6.143158","latitude":"46.204391","physicalLocation":"Geneva","pairedRegion":[{"name":"switzerlandnorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest","name":"ukwest","displayName":"UK - West","regionalDisplayName":"(Europe) UK West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"-3.084","latitude":"53.427","physicalLocation":"Cardiff","pairedRegion":[{"name":"uksouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral","name":"uaecentral","displayName":"UAE - Central","regionalDisplayName":"(Middle East) UAE Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Middle - East","longitude":"54.366669","latitude":"24.466667","physicalLocation":"Abu - Dhabi","pairedRegion":[{"name":"uaenorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsoutheast","name":"brazilsoutheast","displayName":"Brazil - Southeast","regionalDisplayName":"(South America) Brazil Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"South - America","longitude":"-43.2075","latitude":"-22.90278","physicalLocation":"Rio","pairedRegion":[{"name":"brazilsouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv","name":"eastusslv","displayName":"East - US SLV","regionalDisplayName":"(South America) East US SLV","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"South - 
America","longitude":"-43.2075","latitude":"-22.90278","physicalLocation":"Silverstone","pairedRegion":[{"name":"eastusslv","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv"}]}}]}' - headers: - cache-control: - - no-cache - content-length: - - '26993' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 26 Apr 2021 06:13:27 GMT + string: "{\"value\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\",\"name\":\"eastus\",\"displayName\":\"East + US\",\"regionalDisplayName\":\"(US) East US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-79.8164\",\"latitude\":\"37.3719\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\":\"westus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2\",\"name\":\"eastus2\",\"displayName\":\"East + US 2\",\"regionalDisplayName\":\"(US) East US 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-78.3889\",\"latitude\":\"36.6681\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\":\"centralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\",\"name\":\"southcentralus\",\"displayName\":\"South + Central US\",\"regionalDisplayName\":\"(US) South Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-98.5\",\"latitude\":\"29.4167\",\"physicalLocation\":\"Texas\",\"pairedRegion\":[{\"name\":\"northcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2\",\"name\":\"westus2\",\"displayName\":\"West + US 2\",\"regionalDisplayName\":\"(US) West US 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-119.852\",\"latitude\":\"47.233\",\"physicalLocation\":\"Washington\",\"pairedRegion\":[{\"name\":\"westcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus3\",\"name\":\"westus3\",\"displayName\":\"West + US 3\",\"regionalDisplayName\":\"(US) West US 3\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-112.074036\",\"latitude\":\"33.448376\",\"physicalLocation\":\"Phoenix\",\"pairedRegion\":[{\"name\":\"eastus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast\",\"name\":\"australiaeast\",\"displayName\":\"Australia + East\",\"regionalDisplayName\":\"(Asia Pacific) Australia East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"151.2094\",\"latitude\":\"-33.86\",\"physicalLocation\":\"New + South 
Wales\",\"pairedRegion\":[{\"name\":\"australiasoutheast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia\",\"name\":\"southeastasia\",\"displayName\":\"Southeast + Asia\",\"regionalDisplayName\":\"(Asia Pacific) Southeast Asia\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"103.833\",\"latitude\":\"1.283\",\"physicalLocation\":\"Singapore\",\"pairedRegion\":[{\"name\":\"eastasia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope\",\"name\":\"northeurope\",\"displayName\":\"North + Europe\",\"regionalDisplayName\":\"(Europe) North Europe\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"-6.2597\",\"latitude\":\"53.3478\",\"physicalLocation\":\"Ireland\",\"pairedRegion\":[{\"name\":\"westeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedencentral\",\"name\":\"swedencentral\",\"displayName\":\"Sweden + Central\",\"regionalDisplayName\":\"(Europe) Sweden Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"17.14127\",\"latitude\":\"60.67488\",\"physicalLocation\":\"G\xE4vle\",\"pairedRegion\":[{\"name\":\"swedensouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedensouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth\",\"name\":\"uksouth\",\"displayName\":\"UK + South\",\"regionalDisplayName\":\"(Europe) UK South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"-0.799\",\"latitude\":\"50.941\",\"physicalLocation\":\"London\",\"pairedRegion\":[{\"name\":\"ukwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\",\"name\":\"westeurope\",\"displayName\":\"West + Europe\",\"regionalDisplayName\":\"(Europe) West Europe\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"4.9\",\"latitude\":\"52.3667\",\"physicalLocation\":\"Netherlands\",\"pairedRegion\":[{\"name\":\"northeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus\",\"name\":\"centralus\",\"displayName\":\"Central + US\",\"regionalDisplayName\":\"(US) Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-93.6208\",\"latitude\":\"41.5908\",\"physicalLocation\":\"Iowa\",\"pairedRegion\":[{\"name\":\"eastus2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth\",\"name\":\"southafricanorth\",\"displayName\":\"South + Africa North\",\"regionalDisplayName\":\"(Africa) South Africa 
North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Africa\",\"longitude\":\"28.218370\",\"latitude\":\"-25.731340\",\"physicalLocation\":\"Johannesburg\",\"pairedRegion\":[{\"name\":\"southafricawest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia\",\"name\":\"centralindia\",\"displayName\":\"Central + India\",\"regionalDisplayName\":\"(Asia Pacific) Central India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"73.9197\",\"latitude\":\"18.5822\",\"physicalLocation\":\"Pune\",\"pairedRegion\":[{\"name\":\"southindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia\",\"name\":\"eastasia\",\"displayName\":\"East + Asia\",\"regionalDisplayName\":\"(Asia Pacific) East Asia\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"114.188\",\"latitude\":\"22.267\",\"physicalLocation\":\"Hong + Kong\",\"pairedRegion\":[{\"name\":\"southeastasia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast\",\"name\":\"japaneast\",\"displayName\":\"Japan + East\",\"regionalDisplayName\":\"(Asia Pacific) Japan East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"139.77\",\"latitude\":\"35.68\",\"physicalLocation\":\"Tokyo, + Saitama\",\"pairedRegion\":[{\"name\":\"japanwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral\",\"name\":\"koreacentral\",\"displayName\":\"Korea + Central\",\"regionalDisplayName\":\"(Asia Pacific) Korea Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"126.9780\",\"latitude\":\"37.5665\",\"physicalLocation\":\"Seoul\",\"pairedRegion\":[{\"name\":\"koreasouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral\",\"name\":\"canadacentral\",\"displayName\":\"Canada + Central\",\"regionalDisplayName\":\"(Canada) Canada Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Canada\",\"longitude\":\"-79.383\",\"latitude\":\"43.653\",\"physicalLocation\":\"Toronto\",\"pairedRegion\":[{\"name\":\"canadaeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral\",\"name\":\"francecentral\",\"displayName\":\"France + Central\",\"regionalDisplayName\":\"(Europe) France 
Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"2.3730\",\"latitude\":\"46.3772\",\"physicalLocation\":\"Paris\",\"pairedRegion\":[{\"name\":\"francesouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral\",\"name\":\"germanywestcentral\",\"displayName\":\"Germany + West Central\",\"regionalDisplayName\":\"(Europe) Germany West Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.682127\",\"latitude\":\"50.110924\",\"physicalLocation\":\"Frankfurt\",\"pairedRegion\":[{\"name\":\"germanynorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast\",\"name\":\"norwayeast\",\"displayName\":\"Norway + East\",\"regionalDisplayName\":\"(Europe) Norway East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"10.752245\",\"latitude\":\"59.913868\",\"physicalLocation\":\"Norway\",\"pairedRegion\":[{\"name\":\"norwaywest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth\",\"name\":\"switzerlandnorth\",\"displayName\":\"Switzerland + North\",\"regionalDisplayName\":\"(Europe) Switzerland North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.564572\",\"latitude\":\"47.451542\",\"physicalLocation\":\"Zurich\",\"pairedRegion\":[{\"name\":\"switzerlandwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth\",\"name\":\"uaenorth\",\"displayName\":\"UAE + North\",\"regionalDisplayName\":\"(Middle East) UAE North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Middle + East\",\"longitude\":\"55.316666\",\"latitude\":\"25.266666\",\"physicalLocation\":\"Dubai\",\"pairedRegion\":[{\"name\":\"uaecentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth\",\"name\":\"brazilsouth\",\"displayName\":\"Brazil + South\",\"regionalDisplayName\":\"(South America) Brazil South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"South + America\",\"longitude\":\"-46.633\",\"latitude\":\"-23.55\",\"physicalLocation\":\"Sao + Paulo State\",\"pairedRegion\":[{\"name\":\"southcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap\",\"name\":\"eastus2euap\",\"displayName\":\"East + US 2 EUAP\",\"regionalDisplayName\":\"(US) East US 2 
EUAP\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-78.3889\",\"latitude\":\"36.6681\",\"pairedRegion\":[{\"name\":\"centraluseuap\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/qatarcentral\",\"name\":\"qatarcentral\",\"displayName\":\"Qatar + Central\",\"regionalDisplayName\":\"(Middle East) Qatar Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Middle + East\",\"longitude\":\"51.439327\",\"latitude\":\"25.551462\",\"physicalLocation\":\"Doha\",\"pairedRegion\":[{\"name\":\"westeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage\",\"name\":\"centralusstage\",\"displayName\":\"Central + US (Stage)\",\"regionalDisplayName\":\"(US) Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstage\",\"name\":\"eastusstage\",\"displayName\":\"East + US (Stage)\",\"regionalDisplayName\":\"(US) East US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2stage\",\"name\":\"eastus2stage\",\"displayName\":\"East + US 2 (Stage)\",\"regionalDisplayName\":\"(US) East US 2 (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralusstage\",\"name\":\"northcentralusstage\",\"displayName\":\"North + Central US (Stage)\",\"regionalDisplayName\":\"(US) North Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstage\",\"name\":\"southcentralusstage\",\"displayName\":\"South + Central US (Stage)\",\"regionalDisplayName\":\"(US) South Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westusstage\",\"name\":\"westusstage\",\"displayName\":\"West + US (Stage)\",\"regionalDisplayName\":\"(US) West US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2stage\",\"name\":\"westus2stage\",\"displayName\":\"West + US 2 (Stage)\",\"regionalDisplayName\":\"(US) West US 2 (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asia\",\"name\":\"asia\",\"displayName\":\"Asia\",\"regionalDisplayName\":\"Asia\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asiapacific\",\"name\":\"asiapacific\",\"displayName\":\"Asia + Pacific\",\"regionalDisplayName\":\"Asia 
Pacific\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia\",\"name\":\"australia\",\"displayName\":\"Australia\",\"regionalDisplayName\":\"Australia\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil\",\"name\":\"brazil\",\"displayName\":\"Brazil\",\"regionalDisplayName\":\"Brazil\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada\",\"name\":\"canada\",\"displayName\":\"Canada\",\"regionalDisplayName\":\"Canada\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe\",\"name\":\"europe\",\"displayName\":\"Europe\",\"regionalDisplayName\":\"Europe\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/france\",\"name\":\"france\",\"displayName\":\"France\",\"regionalDisplayName\":\"France\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germany\",\"name\":\"germany\",\"displayName\":\"Germany\",\"regionalDisplayName\":\"Germany\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global\",\"name\":\"global\",\"displayName\":\"Global\",\"regionalDisplayName\":\"Global\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india\",\"name\":\"india\",\"displayName\":\"India\",\"regionalDisplayName\":\"India\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan\",\"name\":\"japan\",\"displayName\":\"Japan\",\"regionalDisplayName\":\"Japan\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/korea\",\"name\":\"korea\",\"displayName\":\"Korea\",\"regionalDisplayName\":\"Korea\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norway\",\"name\":\"norway\",\"displayName\":\"Norway\",\"regionalDisplayName\":\"Norway\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/singapore\",\"name\":\"singapore\",\"displayName\":\"Singapore\",\"regionalDisplayName\":\"Singapore\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafrica\",\"name\":\"southafrica\",\"displayName\":\"South + Africa\",\"regionalDisplayName\":\"South Africa\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerland\",\"name\":\"switzerland\",\"displayName\":\"Switzerland\",\"regionalDisplayName\":\"Switzerland\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uae\",\"name\":\"uae\",\"displayName\":\"United + 
Arab Emirates\",\"regionalDisplayName\":\"United Arab Emirates\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk\",\"name\":\"uk\",\"displayName\":\"United + Kingdom\",\"regionalDisplayName\":\"United Kingdom\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstates\",\"name\":\"unitedstates\",\"displayName\":\"United + States\",\"regionalDisplayName\":\"United States\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstateseuap\",\"name\":\"unitedstateseuap\",\"displayName\":\"United + States EUAP\",\"regionalDisplayName\":\"United States EUAP\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage\",\"name\":\"eastasiastage\",\"displayName\":\"East + Asia (Stage)\",\"regionalDisplayName\":\"(Asia Pacific) East Asia (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasiastage\",\"name\":\"southeastasiastage\",\"displayName\":\"Southeast + Asia (Stage)\",\"regionalDisplayName\":\"(Asia Pacific) Southeast Asia (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstg\",\"name\":\"eastusstg\",\"displayName\":\"East + US STG\",\"regionalDisplayName\":\"(US) East US STG\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-79.8164\",\"latitude\":\"37.3719\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\":\"southcentralusstg\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstg\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstg\",\"name\":\"southcentralusstg\",\"displayName\":\"South + Central US STG\",\"regionalDisplayName\":\"(US) South Central US STG\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-98.5\",\"latitude\":\"29.4167\",\"physicalLocation\":\"Texas\",\"pairedRegion\":[{\"name\":\"eastusstg\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstg\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus\",\"name\":\"northcentralus\",\"displayName\":\"North + Central US\",\"regionalDisplayName\":\"(US) North Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-87.6278\",\"latitude\":\"41.8819\",\"physicalLocation\":\"Illinois\",\"pairedRegion\":[{\"name\":\"southcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus\",\"name\":\"westus\",\"displayName\":\"West + US\",\"regionalDisplayName\":\"(US) West 
US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-122.417\",\"latitude\":\"37.783\",\"physicalLocation\":\"California\",\"pairedRegion\":[{\"name\":\"eastus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest\",\"name\":\"jioindiawest\",\"displayName\":\"Jio + India West\",\"regionalDisplayName\":\"(Asia Pacific) Jio India West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"70.05773\",\"latitude\":\"22.470701\",\"physicalLocation\":\"Jamnagar\",\"pairedRegion\":[{\"name\":\"jioindiacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap\",\"name\":\"centraluseuap\",\"displayName\":\"Central + US EUAP\",\"regionalDisplayName\":\"(US) Central US EUAP\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-93.6208\",\"latitude\":\"41.5908\",\"pairedRegion\":[{\"name\":\"eastus2euap\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus\",\"name\":\"westcentralus\",\"displayName\":\"West + Central US\",\"regionalDisplayName\":\"(US) West Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-110.234\",\"latitude\":\"40.890\",\"physicalLocation\":\"Wyoming\",\"pairedRegion\":[{\"name\":\"westus2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest\",\"name\":\"southafricawest\",\"displayName\":\"South + Africa West\",\"regionalDisplayName\":\"(Africa) South Africa West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Africa\",\"longitude\":\"18.843266\",\"latitude\":\"-34.075691\",\"physicalLocation\":\"Cape + Town\",\"pairedRegion\":[{\"name\":\"southafricanorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral\",\"name\":\"australiacentral\",\"displayName\":\"Australia + Central\",\"regionalDisplayName\":\"(Asia Pacific) Australia Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"149.1244\",\"latitude\":\"-35.3075\",\"physicalLocation\":\"Canberra\",\"pairedRegion\":[{\"name\":\"australiacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2\",\"name\":\"australiacentral2\",\"displayName\":\"Australia + Central 2\",\"regionalDisplayName\":\"(Asia Pacific) Australia Central 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + 
Pacific\",\"longitude\":\"149.1244\",\"latitude\":\"-35.3075\",\"physicalLocation\":\"Canberra\",\"pairedRegion\":[{\"name\":\"australiacentral2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast\",\"name\":\"australiasoutheast\",\"displayName\":\"Australia + Southeast\",\"regionalDisplayName\":\"(Asia Pacific) Australia Southeast\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"144.9631\",\"latitude\":\"-37.8136\",\"physicalLocation\":\"Victoria\",\"pairedRegion\":[{\"name\":\"australiaeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest\",\"name\":\"japanwest\",\"displayName\":\"Japan + West\",\"regionalDisplayName\":\"(Asia Pacific) Japan West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"135.5022\",\"latitude\":\"34.6939\",\"physicalLocation\":\"Osaka\",\"pairedRegion\":[{\"name\":\"japaneast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral\",\"name\":\"jioindiacentral\",\"displayName\":\"Jio + India Central\",\"regionalDisplayName\":\"(Asia Pacific) Jio India Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"79.08886\",\"latitude\":\"21.146633\",\"physicalLocation\":\"Nagpur\",\"pairedRegion\":[{\"name\":\"jioindiawest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth\",\"name\":\"koreasouth\",\"displayName\":\"Korea + South\",\"regionalDisplayName\":\"(Asia Pacific) Korea South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"129.0756\",\"latitude\":\"35.1796\",\"physicalLocation\":\"Busan\",\"pairedRegion\":[{\"name\":\"koreacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\",\"name\":\"southindia\",\"displayName\":\"South + India\",\"regionalDisplayName\":\"(Asia Pacific) South India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"80.1636\",\"latitude\":\"12.9822\",\"physicalLocation\":\"Chennai\",\"pairedRegion\":[{\"name\":\"centralindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westindia\",\"name\":\"westindia\",\"displayName\":\"West + India\",\"regionalDisplayName\":\"(Asia Pacific) West India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + 
Pacific\",\"longitude\":\"72.868\",\"latitude\":\"19.088\",\"physicalLocation\":\"Mumbai\",\"pairedRegion\":[{\"name\":\"southindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast\",\"name\":\"canadaeast\",\"displayName\":\"Canada + East\",\"regionalDisplayName\":\"(Canada) Canada East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Canada\",\"longitude\":\"-71.217\",\"latitude\":\"46.817\",\"physicalLocation\":\"Quebec\",\"pairedRegion\":[{\"name\":\"canadacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth\",\"name\":\"francesouth\",\"displayName\":\"France + South\",\"regionalDisplayName\":\"(Europe) France South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"2.1972\",\"latitude\":\"43.8345\",\"physicalLocation\":\"Marseille\",\"pairedRegion\":[{\"name\":\"francecentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth\",\"name\":\"germanynorth\",\"displayName\":\"Germany + North\",\"regionalDisplayName\":\"(Europe) Germany North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.806422\",\"latitude\":\"53.073635\",\"physicalLocation\":\"Berlin\",\"pairedRegion\":[{\"name\":\"germanywestcentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest\",\"name\":\"norwaywest\",\"displayName\":\"Norway + West\",\"regionalDisplayName\":\"(Europe) Norway West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"5.733107\",\"latitude\":\"58.969975\",\"physicalLocation\":\"Norway\",\"pairedRegion\":[{\"name\":\"norwayeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest\",\"name\":\"switzerlandwest\",\"displayName\":\"Switzerland + West\",\"regionalDisplayName\":\"(Europe) Switzerland West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"6.143158\",\"latitude\":\"46.204391\",\"physicalLocation\":\"Geneva\",\"pairedRegion\":[{\"name\":\"switzerlandnorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest\",\"name\":\"ukwest\",\"displayName\":\"UK + West\",\"regionalDisplayName\":\"(Europe) UK West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"-3.084\",\"latitude\":\"53.427\",\"physicalLocation\":\"Cardiff\",\"pairedRegion\":[{\"name\":\"uksouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral\",\"name\":\"uaecentral\",\"displayName\":\"UAE + Central\",\"regionalDisplayName\":\"(Middle East) UAE 
Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Middle + East\",\"longitude\":\"54.366669\",\"latitude\":\"24.466667\",\"physicalLocation\":\"Abu + Dhabi\",\"pairedRegion\":[{\"name\":\"uaenorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsoutheast\",\"name\":\"brazilsoutheast\",\"displayName\":\"Brazil + Southeast\",\"regionalDisplayName\":\"(South America) Brazil Southeast\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"South + America\",\"longitude\":\"-43.2075\",\"latitude\":\"-22.90278\",\"physicalLocation\":\"Rio\",\"pairedRegion\":[{\"name\":\"brazilsouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth\"}]}}]}" + headers: + cache-control: + - no-cache + content-length: + - '30402' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:41:55 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"location": "eastus", "identity": {"type": "SystemAssigned"}, "properties": + {"encryption": {}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory create + Connection: + - keep-alive + Content-Length: + - '96' + Content-Type: + - application/json + ParameterSetName: + - --location --name --resource-group + User-Agent: + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 + response: + body: + string: '{"error":{"code":"MissingSubscriptionRegistration","message":"The subscription + is not registered to use namespace ''Microsoft.DataFactory''. See https://aka.ms/rps-not-found + for how to register subscriptions.","details":[{"code":"MissingSubscriptionRegistration","target":"Microsoft.DataFactory","message":"The + subscription is not registered to use namespace ''Microsoft.DataFactory''. 
+ See https://aka.ms/rps-not-found for how to register subscriptions."}]}}' + headers: + cache-control: + - no-cache + content-length: + - '454' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:41:57 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-failure-cause: + - gateway + status: + code: 409 + message: Conflict +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + User-Agent: + - python-requests/2.28.1 + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory/register?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central 
US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:41:57 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + 
CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada 
Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:42:07 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central 
US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:42:17 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central 
US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West 
Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:42:27 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central 
US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:42:37 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:42:47 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:42:57 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:43:08 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:43:18 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:43:28 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:43:38 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:43:48 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:43:58 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:44:08 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:44:18 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:44:28 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:44:38 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:44:48 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:44:58 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:45:09 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:45:19 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:45:29 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:45:39 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:45:49 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:45:59 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:46:09 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:46:19 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:46:29 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:46:39 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:46:49 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:46:59 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:47:09 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:47:19 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:47:29 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:47:39 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:47:50 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:48:00 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:48:10 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:48:20 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:48:30 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:48:40 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:48:50 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:49:00 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:49:10 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:49:20 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:49:30 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:49:40 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:49:50 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:50:00 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:50:10 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:50:20 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:50:31 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:50:41 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:50:51 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:51:00 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:51:10 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:51:20 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:51:30 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:51:40 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + 
Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registering"}' + headers: + cache-control: + - no-cache + content-length: + - '4522' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:51:51 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.28.1 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory?api-version=2016-02-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory","namespace":"Microsoft.DataFactory","authorizations":[{"applicationId":"0947a342-ab4a-43be-93b3-b8243fc161e5","roleDefinitionId":"f0a6aa2a-e9d8-4bae-bcc2-36b405e8a5da"},{"applicationId":"5d13f7d7-0567-429c-9880-320e9555e5fc","roleDefinitionId":"956a8f20-9168-4c71-8e27-3c0460ac39a4"}],"resourceTypes":[{"resourceType":"dataFactories","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview","2014-04-01"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove"},{"resourceType":"factories","locations":["East + US","East US 2","Central US","South Central US","Japan East","Canada Central","Australia + East","Switzerland North","Germany West Central","Central India","France Central","Korea + Central","Brazil South","West Europe","North Europe","UK South","West Central + US","West US","West US 2","Southeast Asia","East Asia","North Central US","South + Africa North","Australia Southeast","South India","Canada East","UK West","Japan + West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01","2017-09-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity"},{"resourceType":"factories/integrationRuntimes","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Central India","Brazil South","France Central","Korea Central","Australia + East","Switzerland North","Germany West Central","Canada Central","West Central + US","North Europe","UK 
South","West Europe","Southeast Asia","East Asia","North + Central US","South Africa North","Australia Southeast","South India","Canada + East","UK West","Japan West","Norway East","UAE North","West US 3","Jio India + West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"dataFactories/diagnosticSettings","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"dataFactories/metricDefinitions","locations":["North + Europe","East US","West US","West Central US"],"apiVersions":["2014-04-01"]},{"resourceType":"checkDataFactoryNameAvailability","locations":[],"apiVersions":["2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"checkAzureDataFactoryNameAvailability","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"dataFactorySchema","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"operations","locations":["West + US","North Europe","East US","West Central US"],"apiVersions":["2018-06-01","2017-09-01-preview","2017-03-01-preview","2015-10-01","2015-09-01","2015-08-01","2015-07-01-preview","2015-05-01-preview","2015-01-01-preview"]},{"resourceType":"locations","locations":[],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/configureFactoryRepo","locations":["East + US","East US 2","West US 2","West US","Central US","South Central US","Japan + East","Australia East","Switzerland North","Germany West Central","Canada + Central","Central India","Brazil South","France Central","Korea Central","West + Europe","North Europe","UK South","West Central US","Southeast Asia","East + Asia","North Central US","South Africa North","Australia Southeast","South + India","Canada East","UK West","Japan West","Norway East","UAE North","West + US 3","Jio India West","Switzerland West"],"apiVersions":["2018-06-01","2017-09-01-preview"]},{"resourceType":"locations/getFeatureValue","locations":["East + US","East US 2","West Europe","North Europe","UK South","West Central US","West + US","Central US","South Central US","Japan East","Australia East","Switzerland + North","Germany West Central","Canada Central","Central India","Brazil South","France + Central","Korea Central","West US 2","Southeast Asia","East Asia","North Central + US","South Africa North","Australia Southeast","South India","Canada East","UK + West","Japan West","Norway East","UAE North","West US 3","Jio India West","Switzerland + West"],"apiVersions":["2018-06-01"]}],"registrationState":"Registered"}' + headers: + cache-control: + - no-cache + content-length: + - '4521' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:52:01 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"location": "eastus", "identity": {"type": "SystemAssigned"}, "properties": + {"encryption": {}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory create + Connection: + - keep-alive + Content-Length: + - '96' + Content-Type: + - application/json + 
ParameterSetName: + - --location --name --resource-group + User-Agent: + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 + response: + body: + string: "{\n \"name\": \"exampleFa000001\",\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/examplefa4rqmquxng\",\n + \ \"type\": \"Microsoft.DataFactory/factories\",\n \"properties\": {\n \"provisioningState\": + \"Succeeded\",\n \"createTime\": \"2022-10-11T20:52:07.4135245Z\",\n \"version\": + \"2018-06-01\",\n \"encryption\": {}\n },\n \"eTag\": \"\\\"0d0027c0-0000-0100-0000-6345d7770000\\\"\",\n + \ \"location\": \"eastus\",\n \"identity\": {\n \"type\": \"SystemAssigned\",\n + \ \"principalId\": \"5639cd3e-8c37-4b3c-b16e-51faf16b8893\",\n \"tenantId\": + \"16b3c013-d300-468d-ac64-7eda0820b6d3\"\n },\n \"tags\": {}\n}" + headers: + cache-control: + - no-cache + content-length: + - '647' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:52:07 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Kestrel + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1198' + status: + code: 200 + message: OK +- request: + body: '{"tags": {"exampleTag": "exampleValue"}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory update + Connection: + - keep-alive + Content-Length: + - '40' + Content-Type: + - application/json + ParameterSetName: + - --name --tags --resource-group + User-Agent: + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: PATCH + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 + response: + body: + string: "{\n \"name\": \"exampleFa000001\",\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/examplefa4rqmquxng\",\n + \ \"type\": \"Microsoft.DataFactory/factories\",\n \"properties\": {\n \"provisioningState\": + \"Succeeded\",\n \"createTime\": \"2022-10-11T20:52:07.4135245Z\",\n \"version\": + \"2018-06-01\",\n \"encryption\": {}\n },\n \"eTag\": \"\\\"0d002ac0-0000-0100-0000-6345d7780000\\\"\",\n + \ \"location\": \"eastus\",\n \"identity\": {\n \"type\": \"SystemAssigned\",\n + \ \"principalId\": \"5639cd3e-8c37-4b3c-b16e-51faf16b8893\",\n \"tenantId\": + \"16b3c013-d300-468d-ac64-7eda0820b6d3\"\n },\n \"tags\": {\n \"exampleTag\": + \"exampleValue\"\n }\n}" + headers: + cache-control: + - no-cache + content-length: + - '683' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:52:08 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Kestrel + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 200 + message: OK +- request: 
+ body: '{"properties": {"type": "AzureStorage", "typeProperties": {"connectionString": + {"type": "SecureString", "value": "DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;AccountKey="}}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory linked-service create + Connection: + - keep-alive + Content-Length: + - '208' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --properties --name --resource-group + User-Agent: + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003?api-version=2018-06-01 + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003\",\n + \ \"name\": \"exampleLin000003\",\n \"type\": \"Microsoft.DataFactory/factories/linkedservices\",\n + \ \"properties\": {\n \"type\": \"AzureStorage\",\n \"typeProperties\": + {\n \"connectionString\": {\n \"type\": \"SecureString\",\n \"value\": + \"**********\"\n },\n \"encryptedCredential\": \"ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkRBVEFGQUNUT1JZQDlEQjFFMjJCLTBDMTUtNERERi04M0FCLTAzRTdCRTI3NTE1MF8zMGE1MmIxYy1kN2UxLTQzMzktYmE4My0yYWMyODQ0NzFkNjciDQp9\"\n + \ }\n },\n \"etag\": \"4100ee3f-0000-0100-0000-6345d7790000\"\n}" + headers: + cache-control: + - no-cache + content-length: + - '808' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:52:09 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Kestrel + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory linked-service update + Connection: + - keep-alive + ParameterSetName: + - --factory-name --description --name --resource-group + User-Agent: + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003?api-version=2018-06-01 + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003\",\n + \ \"name\": \"exampleLin000003\",\n \"type\": \"Microsoft.DataFactory/factories/linkedservices\",\n + \ \"properties\": {\n \"type\": \"AzureStorage\",\n \"typeProperties\": + {\n \"connectionString\": {\n \"type\": \"SecureString\",\n \"value\": + \"**********\"\n },\n \"encryptedCredential\": 
\"ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkRBVEFGQUNUT1JZQDlEQjFFMjJCLTBDMTUtNERERi04M0FCLTAzRTdCRTI3NTE1MF8zMGE1MmIxYy1kN2UxLTQzMzktYmE4My0yYWMyODQ0NzFkNjciDQp9\"\n + \ }\n },\n \"etag\": \"4100ee3f-0000-0100-0000-6345d7790000\"\n}" + headers: + cache-control: + - no-cache + content-length: + - '808' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:52:10 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Kestrel + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"properties": {"type": "AzureStorage", "description": "Example description", + "typeProperties": {"connectionString": {"type": "SecureString", "value": "**********"}, + "encryptedCredential": "ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkRBVEFGQUNUT1JZQDlEQjFFMjJCLTBDMTUtNERERi04M0FCLTAzRTdCRTI3NTE1MF8zMGE1MmIxYy1kN2UxLTQzMzktYmE4My0yYWMyODQ0NzFkNjciDQp9"}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory linked-service update + Connection: + - keep-alive + Content-Length: + - '466' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --description --name --resource-group + User-Agent: + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003?api-version=2018-06-01 + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003\",\n + \ \"name\": \"exampleLin000003\",\n \"type\": \"Microsoft.DataFactory/factories/linkedservices\",\n + \ \"properties\": {\n \"type\": \"AzureStorage\",\n \"description\": + \"Example description\",\n \"typeProperties\": {\n \"connectionString\": + {\n \"type\": \"SecureString\",\n \"value\": \"**********\"\n + \ },\n \"encryptedCredential\": \"ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkRBVEFGQUNUT1JZQDlEQjFFMjJCLTBDMTUtNERERi04M0FCLTAzRTdCRTI3NTE1MF8zMGE1MmIxYy1kN2UxLTQzMzktYmE4My0yYWMyODQ0NzFkNjciDQp9\"\n + \ }\n },\n \"etag\": \"4100fd3f-0000-0100-0000-6345d77e0000\"\n}" + headers: + cache-control: + - no-cache + content-length: + - '850' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:52:14 GMT expires: - '-1' pragma: - no-cache + server: + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked vary: - Accept-Encoding x-content-type-options: - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' status: code: 200 message: OK - request: - body: '{"location": "eastus", "identity": {"type": "SystemAssigned"}, "properties": - {"encryption": {}}}' + body: '{"properties": {"type": "AzureBlob", "linkedServiceName": {"type": "LinkedServiceReference", + "referenceName": 
"exampleLin000003"}, "parameters": {"MyFileName": {"type": + "String"}, "MyFolderPath": {"type": "String"}}, "typeProperties": {"folderPath": + {"type": "Expression", "value": "@dataset().MyFolderPath"}, "fileName": {"type": + "Expression", "value": "@dataset().MyFileName"}, "format": {"type": "TextFormat"}}}}' headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory create + - datafactory dataset create Connection: - keep-alive Content-Length: - - '96' + - '415' Content-Type: - application/json ParameterSetName: - - --location --name --resource-group + - --properties --name --factory-name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004?api-version=2018-06-01 response: body: - string: '{"name":"exampleFa000001","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/examplefa5qnc3dfb3","type":"Microsoft.DataFactory/factories","properties":{"provisioningState":"Succeeded","createTime":"2021-04-26T06:13:38.8060441Z","version":"2018-06-01","encryption":{}},"eTag":"\"300010fc-0000-0100-0000-60865a130000\"","location":"eastus","identity":{"type":"SystemAssigned","principalId":"06a7c2de-469c-4e53-bbc5-69adf6b29d6b","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},"tags":{}}' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004\",\n + \ \"name\": \"example000004\",\n \"type\": \"Microsoft.DataFactory/factories/datasets\",\n + \ \"properties\": {\n \"type\": \"AzureBlob\",\n \"linkedServiceName\": + {\n \"type\": \"LinkedServiceReference\",\n \"referenceName\": \"exampleLin000003\"\n + \ },\n \"parameters\": {\n \"MyFileName\": {\n \"type\": + \"String\"\n },\n \"MyFolderPath\": {\n \"type\": \"String\"\n + \ }\n },\n \"typeProperties\": {\n \"folderPath\": {\n \"type\": + \"Expression\",\n \"value\": \"@dataset().MyFolderPath\"\n },\n + \ \"fileName\": {\n \"type\": \"Expression\",\n \"value\": + \"@dataset().MyFileName\"\n },\n \"format\": {\n \"type\": + \"TextFormat\"\n }\n }\n },\n \"etag\": \"41000040-0000-0100-0000-6345d77e0000\"\n}" headers: cache-control: - no-cache content-length: - - '631' + - '899' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:13:40 GMT + - Tue, 11 Oct 2022 20:52:14 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -182,51 +5056,120 @@ interactions: - nosniff x-ms-ratelimit-remaining-subscription-writes: - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK - request: - body: '{"tags": {"exampleTag": "exampleValue"}}' + body: null headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory update + - datafactory dataset update + 
Connection: + - keep-alive + ParameterSetName: + - --description --linked-service-name --parameters --name --factory-name --resource-group + User-Agent: + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004?api-version=2018-06-01 + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004\",\n + \ \"name\": \"example000004\",\n \"type\": \"Microsoft.DataFactory/factories/datasets\",\n + \ \"properties\": {\n \"type\": \"AzureBlob\",\n \"linkedServiceName\": + {\n \"type\": \"LinkedServiceReference\",\n \"referenceName\": \"exampleLin000003\"\n + \ },\n \"parameters\": {\n \"MyFileName\": {\n \"type\": + \"String\"\n },\n \"MyFolderPath\": {\n \"type\": \"String\"\n + \ }\n },\n \"typeProperties\": {\n \"folderPath\": {\n \"type\": + \"Expression\",\n \"value\": \"@dataset().MyFolderPath\"\n },\n + \ \"fileName\": {\n \"type\": \"Expression\",\n \"value\": + \"@dataset().MyFileName\"\n },\n \"format\": {\n \"type\": + \"TextFormat\"\n }\n }\n },\n \"etag\": \"41000040-0000-0100-0000-6345d77e0000\"\n}" + headers: + cache-control: + - no-cache + content-length: + - '899' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:52:14 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Kestrel + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"properties": {"type": "AzureBlob", "description": "Example description", + "linkedServiceName": {"type": "LinkedServiceReference", "referenceName": "exampleLin000003"}, + "parameters": {"MyFileName": {"type": "String"}, "MyFolderPath": {"type": "String"}}, + "typeProperties": {"folderPath": {"type": "Expression", "value": "@dataset().MyFolderPath"}, + "fileName": {"type": "Expression", "value": "@dataset().MyFileName"}, "format": + {"type": "TextFormat"}}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory dataset update Connection: - keep-alive Content-Length: - - '40' + - '453' Content-Type: - application/json ParameterSetName: - - --name --tags --resource-group + - --description --linked-service-name --parameters --name --factory-name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: PATCH - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004?api-version=2018-06-01 response: body: - string: 
'{"name":"exampleFa000001","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/examplefa5qnc3dfb3","type":"Microsoft.DataFactory/factories","properties":{"provisioningState":"Succeeded","createTime":"2021-04-26T06:13:38.8060441Z","version":"2018-06-01","encryption":{}},"eTag":"\"30001ffc-0000-0100-0000-60865a170000\"","location":"eastus","identity":{"type":"SystemAssigned","principalId":"06a7c2de-469c-4e53-bbc5-69adf6b29d6b","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},"tags":{"exampleTag":"exampleValue"}}' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004\",\n + \ \"name\": \"example000004\",\n \"type\": \"Microsoft.DataFactory/factories/datasets\",\n + \ \"properties\": {\n \"type\": \"AzureBlob\",\n \"description\": \"Example + description\",\n \"linkedServiceName\": {\n \"type\": \"LinkedServiceReference\",\n + \ \"referenceName\": \"exampleLin000003\"\n },\n \"parameters\": + {\n \"MyFileName\": {\n \"type\": \"String\"\n },\n \"MyFolderPath\": + {\n \"type\": \"String\"\n }\n },\n \"typeProperties\": + {\n \"folderPath\": {\n \"type\": \"Expression\",\n \"value\": + \"@dataset().MyFolderPath\"\n },\n \"fileName\": {\n \"type\": + \"Expression\",\n \"value\": \"@dataset().MyFileName\"\n },\n + \ \"format\": {\n \"type\": \"TextFormat\"\n }\n }\n },\n + \ \"etag\": \"41000640-0000-0100-0000-6345d77f0000\"\n}" headers: cache-control: - no-cache content-length: - - '658' + - '941' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:13:45 GMT + - Tue, 11 Oct 2022 20:52:15 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -237,53 +5180,315 @@ interactions: - nosniff x-ms-ratelimit-remaining-subscription-writes: - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK - request: - body: '{"properties": {"type": "AzureStorage", "typeProperties": {"connectionString": - {"type": "SecureString", "value": "DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;AccountKey="}}}}' + body: '{"properties": {"activities": [{"name": "ExampleForeachActivity", "type": + "ForEach", "typeProperties": {"isSequential": true, "items": {"type": "Expression", + "value": "@pipeline().parameters.OutputBlobNameList"}, "activities": [{"name": + "ExampleCopyActivity", "type": "Copy", "inputs": [{"type": "DatasetReference", + "referenceName": "example000004", "parameters": {"MyFileName": "examplecontainer.csv", + "MyFolderPath": "examplecontainer"}}], "outputs": [{"type": "DatasetReference", + "referenceName": "example000004", "parameters": {"MyFileName": {"type": "Expression", + "value": "@item()"}, "MyFolderPath": "examplecontainer"}}], "typeProperties": + {"source": {"type": "BlobSource"}, "sink": {"type": "BlobSink"}, "dataIntegrationUnits": + 32}}]}}], "parameters": {"JobId": {"type": "String"}, "OutputBlobNameList": + {"type": "Array"}}, "variables": {"TestVariableArray": {"type": "Array"}}, "runDimensions": + {"JobId": {"type": "Expression", "value": "@pipeline().parameters.JobId"}}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory pipeline create + Connection: + - keep-alive + Content-Length: + - '980' + Content-Type: + - application/json + ParameterSetName: + - 
--factory-name --pipeline --name --resource-group + User-Agent: + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005?api-version=2018-06-01 + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005\",\n + \ \"name\": \"example000005\",\n \"type\": \"Microsoft.DataFactory/factories/pipelines\",\n + \ \"properties\": {\n \"activities\": [\n {\n \"name\": \"ExampleForeachActivity\",\n + \ \"type\": \"ForEach\",\n \"typeProperties\": {\n \"isSequential\": + true,\n \"items\": {\n \"type\": \"Expression\",\n \"value\": + \"@pipeline().parameters.OutputBlobNameList\"\n },\n \"activities\": + [\n {\n \"name\": \"ExampleCopyActivity\",\n \"type\": + \"Copy\",\n \"inputs\": [\n {\n \"type\": + \"DatasetReference\",\n \"referenceName\": \"example000004\",\n + \ \"parameters\": {\n \"MyFileName\": \"examplecontainer.csv\",\n + \ \"MyFolderPath\": \"examplecontainer\"\n }\n + \ }\n ],\n \"outputs\": [\n {\n + \ \"type\": \"DatasetReference\",\n \"referenceName\": + \"example000004\",\n \"parameters\": {\n \"MyFileName\": + {\n \"type\": \"Expression\",\n \"value\": + \"@item()\"\n },\n \"MyFolderPath\": + \"examplecontainer\"\n }\n }\n ],\n + \ \"typeProperties\": {\n \"source\": {\n \"type\": + \"BlobSource\"\n },\n \"sink\": {\n \"type\": + \"BlobSink\"\n },\n \"dataIntegrationUnits\": + 32\n }\n }\n ]\n }\n }\n ],\n + \ \"parameters\": {\n \"JobId\": {\n \"type\": \"String\"\n + \ },\n \"OutputBlobNameList\": {\n \"type\": \"Array\"\n }\n + \ },\n \"variables\": {\n \"TestVariableArray\": {\n \"type\": + \"Array\"\n }\n },\n \"runDimensions\": {\n \"JobId\": {\n + \ \"type\": \"Expression\",\n \"value\": \"@pipeline().parameters.JobId\"\n + \ }\n }\n },\n \"etag\": \"41000b40-0000-0100-0000-6345d77f0000\"\n}" + headers: + cache-control: + - no-cache + content-length: + - '2182' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:52:15 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Kestrel + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory pipeline update + Connection: + - keep-alive + ParameterSetName: + - --factory-name --description --name --resource-group + User-Agent: + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005?api-version=2018-06-01 + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005\",\n + \ \"name\": \"example000005\",\n \"type\": \"Microsoft.DataFactory/factories/pipelines\",\n + \ 
\"properties\": {\n \"activities\": [\n {\n \"name\": \"ExampleForeachActivity\",\n + \ \"type\": \"ForEach\",\n \"typeProperties\": {\n \"isSequential\": + true,\n \"items\": {\n \"type\": \"Expression\",\n \"value\": + \"@pipeline().parameters.OutputBlobNameList\"\n },\n \"activities\": + [\n {\n \"name\": \"ExampleCopyActivity\",\n \"type\": + \"Copy\",\n \"inputs\": [\n {\n \"type\": + \"DatasetReference\",\n \"referenceName\": \"example000004\",\n + \ \"parameters\": {\n \"MyFileName\": \"examplecontainer.csv\",\n + \ \"MyFolderPath\": \"examplecontainer\"\n }\n + \ }\n ],\n \"outputs\": [\n {\n + \ \"type\": \"DatasetReference\",\n \"referenceName\": + \"example000004\",\n \"parameters\": {\n \"MyFileName\": + {\n \"type\": \"Expression\",\n \"value\": + \"@item()\"\n },\n \"MyFolderPath\": + \"examplecontainer\"\n }\n }\n ],\n + \ \"typeProperties\": {\n \"source\": {\n \"type\": + \"BlobSource\"\n },\n \"sink\": {\n \"type\": + \"BlobSink\"\n },\n \"dataIntegrationUnits\": + 32\n }\n }\n ]\n }\n }\n ],\n + \ \"parameters\": {\n \"JobId\": {\n \"type\": \"String\"\n + \ },\n \"OutputBlobNameList\": {\n \"type\": \"Array\"\n }\n + \ },\n \"variables\": {\n \"TestVariableArray\": {\n \"type\": + \"Array\"\n }\n },\n \"runDimensions\": {\n \"JobId\": {\n + \ \"type\": \"Expression\",\n \"value\": \"@pipeline().parameters.JobId\"\n + \ }\n },\n \"lastPublishTime\": \"2022-10-11T20:52:15Z\"\n },\n + \ \"etag\": \"41000b40-0000-0100-0000-6345d77f0000\"\n}" + headers: + cache-control: + - no-cache + content-length: + - '2229' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:52:16 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Kestrel + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"properties": {"description": "Test Update description", "activities": + [{"name": "ExampleForeachActivity", "type": "ForEach", "typeProperties": {"isSequential": + true, "items": {"type": "Expression", "value": "@pipeline().parameters.OutputBlobNameList"}, + "activities": [{"name": "ExampleCopyActivity", "type": "Copy", "inputs": [{"type": + "DatasetReference", "referenceName": "example000004", "parameters": {"MyFileName": + "examplecontainer.csv", "MyFolderPath": "examplecontainer"}}], "outputs": [{"type": + "DatasetReference", "referenceName": "example000004", "parameters": {"MyFileName": + {"type": "Expression", "value": "@item()"}, "MyFolderPath": "examplecontainer"}}], + "typeProperties": {"source": {"type": "BlobSource"}, "sink": {"type": "BlobSink"}, + "dataIntegrationUnits": 32}}]}}], "parameters": {"JobId": {"type": "String"}, + "OutputBlobNameList": {"type": "Array"}}, "variables": {"TestVariableArray": + {"type": "Array"}}, "runDimensions": {"JobId": {"type": "Expression", "value": + "@pipeline().parameters.JobId"}}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory pipeline update + Connection: + - keep-alive + Content-Length: + - '1022' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --description --name --resource-group + User-Agent: + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: PUT + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005?api-version=2018-06-01 + response: + body: + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005\",\n + \ \"name\": \"example000005\",\n \"type\": \"Microsoft.DataFactory/factories/pipelines\",\n + \ \"properties\": {\n \"description\": \"Test Update description\",\n \"activities\": + [\n {\n \"name\": \"ExampleForeachActivity\",\n \"type\": + \"ForEach\",\n \"typeProperties\": {\n \"isSequential\": true,\n + \ \"items\": {\n \"type\": \"Expression\",\n \"value\": + \"@pipeline().parameters.OutputBlobNameList\"\n },\n \"activities\": + [\n {\n \"name\": \"ExampleCopyActivity\",\n \"type\": + \"Copy\",\n \"inputs\": [\n {\n \"type\": + \"DatasetReference\",\n \"referenceName\": \"example000004\",\n + \ \"parameters\": {\n \"MyFileName\": \"examplecontainer.csv\",\n + \ \"MyFolderPath\": \"examplecontainer\"\n }\n + \ }\n ],\n \"outputs\": [\n {\n + \ \"type\": \"DatasetReference\",\n \"referenceName\": + \"example000004\",\n \"parameters\": {\n \"MyFileName\": + {\n \"type\": \"Expression\",\n \"value\": + \"@item()\"\n },\n \"MyFolderPath\": + \"examplecontainer\"\n }\n }\n ],\n + \ \"typeProperties\": {\n \"source\": {\n \"type\": + \"BlobSource\"\n },\n \"sink\": {\n \"type\": + \"BlobSink\"\n },\n \"dataIntegrationUnits\": + 32\n }\n }\n ]\n }\n }\n ],\n + \ \"parameters\": {\n \"JobId\": {\n \"type\": \"String\"\n + \ },\n \"OutputBlobNameList\": {\n \"type\": \"Array\"\n }\n + \ },\n \"variables\": {\n \"TestVariableArray\": {\n \"type\": + \"Array\"\n }\n },\n \"runDimensions\": {\n \"JobId\": {\n + \ \"type\": \"Expression\",\n \"value\": \"@pipeline().parameters.JobId\"\n + \ }\n }\n },\n \"etag\": \"41001440-0000-0100-0000-6345d7800000\"\n}" + headers: + cache-control: + - no-cache + content-length: + - '2228' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:52:16 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Kestrel + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 200 + message: OK +- request: + body: '{"properties": {"type": "ScheduleTrigger", "pipelines": [{"pipelineReference": + {"type": "PipelineReference", "referenceName": "example000005"}, "parameters": + {"OutputBlobNameList": ["exampleoutput.csv"]}}], "typeProperties": {"recurrence": + {"frequency": "Minute", "interval": 4, "startTime": "2018-06-16T00:39:13.84418Z", + "endTime": "2018-06-16T00:55:13.84418Z", "timeZone": "UTC"}}}}' headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory linked-service create + - datafactory trigger create Connection: - keep-alive Content-Length: - - '208' + - '384' Content-Type: - application/json ParameterSetName: - - --factory-name --properties --name --resource-group + - --factory-name --resource-group --properties --name User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: PUT - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003","name":"exampleLin000003","type":"Microsoft.DataFactory/factories/linkedservices","properties":{"type":"AzureStorage","typeProperties":{"connectionString":{"type":"SecureString","value":"**********"},"encryptedCredential":"ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkVYQU1QTEVGQTVRTkMzREZCM184YWVmOGZkOC0yN2M2LTQ1NDEtOTM0MC01NmU0ZTlkNTBmZTUiDQp9"}},"etag":"1c00f24a-0000-0100-0000-60865a1c0000"}' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006\",\n + \ \"name\": \"example000006\",\n \"type\": \"Microsoft.DataFactory/factories/triggers\",\n + \ \"properties\": {\n \"type\": \"ScheduleTrigger\",\n \"pipelines\": + [\n {\n \"pipelineReference\": {\n \"type\": \"PipelineReference\",\n + \ \"referenceName\": \"example000005\"\n },\n \"parameters\": + {\n \"OutputBlobNameList\": [\n \"exampleoutput.csv\"\n + \ ]\n }\n }\n ],\n \"typeProperties\": {\n \"recurrence\": + {\n \"frequency\": \"Minute\",\n \"interval\": 4,\n \"startTime\": + \"2018-06-16T00:39:13.84418Z\",\n \"endTime\": \"2018-06-16T00:55:13.84418Z\",\n + \ \"timeZone\": \"UTC\"\n }\n },\n \"runtimeState\": \"Stopped\"\n + \ },\n \"etag\": \"41001840-0000-0100-0000-6345d7810000\"\n}" headers: cache-control: - no-cache content-length: - - '757' + - '909' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:13:48 GMT + - Tue, 11 Oct 2022 20:52:16 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -294,8 +5499,6 @@ interactions: - nosniff x-ms-ratelimit-remaining-subscription-writes: - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -307,34 +5510,43 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory linked-service update + - datafactory trigger update Connection: - keep-alive ParameterSetName: - - --factory-name --description --name --resource-group + - --factory-name --resource-group --description --name User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003?api-version=2018-06-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003","name":"exampleLin000003","type":"Microsoft.DataFactory/factories/linkedservices","properties":{"type":"AzureStorage","typeProperties":{"connectionString":{"type":"SecureString","value":"**********"},"encryptedCredential":"ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkVYQU1QTEVGQTVRTkMzREZCM184YWVmOGZkOC0yN2M2LTQ1NDEtOTM0MC01NmU0ZTlkNTBmZTUiDQp9"}},"etag":"1c00f24a-0000-0100-0000-60865a1c0000"}' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006\",\n + \ \"name\": \"example000006\",\n \"type\": \"Microsoft.DataFactory/factories/triggers\",\n + \ \"properties\": {\n \"type\": \"ScheduleTrigger\",\n \"pipelines\": + [\n {\n \"pipelineReference\": {\n \"type\": \"PipelineReference\",\n + \ \"referenceName\": \"example000005\"\n },\n \"parameters\": + {\n \"OutputBlobNameList\": [\n \"exampleoutput.csv\"\n + \ ]\n }\n }\n ],\n \"typeProperties\": {\n \"recurrence\": + {\n \"frequency\": \"Minute\",\n \"interval\": 4,\n \"startTime\": + \"2018-06-16T00:39:13.84418Z\",\n \"endTime\": \"2018-06-16T00:55:13.84418Z\",\n + \ \"timeZone\": \"UTC\"\n }\n },\n \"runtimeState\": \"Stopped\"\n + \ },\n \"etag\": \"41001840-0000-0100-0000-6345d7810000\"\n}" headers: cache-control: - no-cache content-length: - - '757' + - '909' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:13:49 GMT + - Tue, 11 Oct 2022 20:52:17 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -343,54 +5555,64 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK - request: - body: '{"properties": {"type": "AzureStorage", "description": "Example description", - "typeProperties": {"connectionString": {"type": "SecureString", "value": "**********"}, - "encryptedCredential": "ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkVYQU1QTEVGQTVRTkMzREZCM184YWVmOGZkOC0yN2M2LTQ1NDEtOTM0MC01NmU0ZTlkNTBmZTUiDQp9"}}}' + body: '{"properties": {"type": "ScheduleTrigger", "description": "Example description", + "pipelines": [{"pipelineReference": {"type": "PipelineReference", "referenceName": + "example000005"}, "parameters": {"OutputBlobNameList": ["exampleoutput.csv"]}}], + "typeProperties": {"recurrence": {"frequency": "Minute", "interval": 4, "startTime": + "2018-06-16T00:39:13.84418Z", "endTime": "2018-06-16T00:55:13.84418Z", "timeZone": + "UTC"}}}}' headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory linked-service update + - datafactory trigger update Connection: - keep-alive Content-Length: - - '426' + - '422' Content-Type: - application/json ParameterSetName: - - --factory-name --description --name --resource-group + - 
--factory-name --resource-group --description --name User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003","name":"exampleLin000003","type":"Microsoft.DataFactory/factories/linkedservices","properties":{"type":"AzureStorage","description":"Example - description","typeProperties":{"connectionString":{"type":"SecureString","value":"**********"},"encryptedCredential":"ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkVYQU1QTEVGQTVRTkMzREZCM184YWVmOGZkOC0yN2M2LTQ1NDEtOTM0MC01NmU0ZTlkNTBmZTUiDQp9"}},"etag":"1c00f54a-0000-0100-0000-60865a1e0000"}' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006\",\n + \ \"name\": \"example000006\",\n \"type\": \"Microsoft.DataFactory/factories/triggers\",\n + \ \"properties\": {\n \"type\": \"ScheduleTrigger\",\n \"description\": + \"Example description\",\n \"pipelines\": [\n {\n \"pipelineReference\": + {\n \"type\": \"PipelineReference\",\n \"referenceName\": + \"example000005\"\n },\n \"parameters\": {\n \"OutputBlobNameList\": + [\n \"exampleoutput.csv\"\n ]\n }\n }\n ],\n + \ \"typeProperties\": {\n \"recurrence\": {\n \"frequency\": + \"Minute\",\n \"interval\": 4,\n \"startTime\": \"2018-06-16T00:39:13.84418Z\",\n + \ \"endTime\": \"2018-06-16T00:55:13.84418Z\",\n \"timeZone\": + \"UTC\"\n }\n },\n \"runtimeState\": \"Stopped\"\n },\n \"etag\": + \"41001d40-0000-0100-0000-6345d7820000\"\n}" headers: cache-control: - no-cache content-length: - - '793' + - '951' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:13:50 GMT + - Tue, 11 Oct 2022 20:52:17 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -401,55 +5623,52 @@ interactions: - nosniff x-ms-ratelimit-remaining-subscription-writes: - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK - request: - body: '{"properties": {"type": "AzureBlob", "linkedServiceName": {"type": "LinkedServiceReference", - "referenceName": "exampleLin000003"}, "parameters": {"MyFileName": {"type": - "String"}, "MyFolderPath": {"type": "String"}}, "typeProperties": {"folderPath": - {"type": "Expression", "value": "@dataset().MyFolderPath"}, "fileName": {"type": - "Expression", "value": "@dataset().MyFileName"}, "format": {"type": "TextFormat"}}}}' + body: '{"properties": {"type": "SelfHosted", "description": "A selfhosted integration + runtime"}}' headers: Accept: - application/json Accept-Encoding: - gzip, 
deflate CommandName: - - datafactory dataset create + - datafactory integration-runtime self-hosted create Connection: - keep-alive Content-Length: - - '419' + - '89' Content-Type: - application/json ParameterSetName: - - --properties --name --factory-name --resource-group + - --factory-name --description --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002?api-version=2018-06-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004","name":"example000004","type":"Microsoft.DataFactory/factories/datasets","properties":{"type":"AzureBlob","linkedServiceName":{"type":"LinkedServiceReference","referenceName":"exampleLin000003"},"parameters":{"MyFileName":{"type":"String"},"MyFolderPath":{"type":"String"}},"typeProperties":{"folderPath":{"type":"Expression","value":"@dataset().MyFolderPath"},"fileName":{"type":"Expression","value":"@dataset().MyFileName"},"format":{"type":"TextFormat"}}},"etag":"1c00f74a-0000-0100-0000-60865a210000"}' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationruntimes/exampleInteg000002\",\n + \ \"name\": \"exampleInteg000002\",\n \"type\": \"Microsoft.DataFactory/factories/integrationruntimes\",\n + \ \"properties\": {\n \"type\": \"SelfHosted\",\n \"description\": \"A + selfhosted integration runtime\"\n },\n \"etag\": \"41002340-0000-0100-0000-6345d7820000\"\n}" headers: cache-control: - no-cache content-length: - - '746' + - '441' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:13:52 GMT + - Tue, 11 Oct 2022 20:52:18 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -459,48 +5678,52 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - - '1198' - x-powered-by: - - ASP.NET + - '1199' status: code: 200 message: OK - request: - body: null + body: '{"autoUpdate": "Off", "updateDelayOffset": "\"PT3H\""}' headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory dataset update + - datafactory integration-runtime update Connection: - keep-alive + Content-Length: + - '54' + Content-Type: + - application/json ParameterSetName: - - --description --linked-service-name --parameters --name --factory-name --resource-group + - --factory-name --name --resource-group --auto-update --update-delay-offset User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: PATCH + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002?api-version=2018-06-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004","name":"example000004","type":"Microsoft.DataFactory/factories/datasets","properties":{"type":"AzureBlob","linkedServiceName":{"type":"LinkedServiceReference","referenceName":"exampleLin000003"},"parameters":{"MyFileName":{"type":"String"},"MyFolderPath":{"type":"String"}},"typeProperties":{"folderPath":{"type":"Expression","value":"@dataset().MyFolderPath"},"fileName":{"type":"Expression","value":"@dataset().MyFileName"},"format":{"type":"TextFormat"}}},"etag":"1c00f74a-0000-0100-0000-60865a210000"}' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationruntimes/exampleInteg000002\",\n + \ \"name\": \"exampleInteg000002\",\n \"type\": \"Microsoft.DataFactory/factories/integrationruntimes\",\n + \ \"properties\": {\n \"type\": \"SelfHosted\",\n \"description\": \"A + selfhosted integration runtime\"\n },\n \"etag\": \"41002340-0000-0100-0000-6345d7820000\"\n}" headers: cache-control: - no-cache content-length: - - '746' + - '441' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:13:54 GMT + - Tue, 11 Oct 2022 20:52:18 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -509,57 +5732,50 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET + x-ms-ratelimit-remaining-subscription-writes: + - '1199' status: code: 200 message: OK - request: - body: '{"properties": {"type": "AzureBlob", "description": "Example description", - "linkedServiceName": {"type": "LinkedServiceReference", "referenceName": "exampleLin000003"}, - "parameters": {"MyFileName": {"type": "String"}, "MyFolderPath": {"type": "String"}}, - "typeProperties": {"folderPath": {"type": "Expression", "value": "@dataset().MyFolderPath"}, - "fileName": {"type": "Expression", "value": "@dataset().MyFileName"}, "format": - {"type": "TextFormat"}}}}' + body: '{"OutputBlobNameList": ["exampleoutput.csv"]}' headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory dataset update + - datafactory pipeline create-run Connection: - keep-alive Content-Length: - - '457' + - '45' Content-Type: - application/json ParameterSetName: - - --description --linked-service-name --parameters --name --factory-name --resource-group + - --factory-name --parameters --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: PUT - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005/createRun?api-version=2018-06-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004","name":"example000004","type":"Microsoft.DataFactory/factories/datasets","properties":{"type":"AzureBlob","description":"Example - description","linkedServiceName":{"type":"LinkedServiceReference","referenceName":"exampleLin000003"},"parameters":{"MyFileName":{"type":"String"},"MyFolderPath":{"type":"String"}},"typeProperties":{"folderPath":{"type":"Expression","value":"@dataset().MyFolderPath"},"fileName":{"type":"Expression","value":"@dataset().MyFileName"},"format":{"type":"TextFormat"}}},"etag":"1c00fb4a-0000-0100-0000-60865a240000"}' + string: "{\n \"runId\": \"8a5026de-49a6-11ed-85e9-00249b7d5bd6\"\n}" headers: cache-control: - no-cache content-length: - - '782' + - '53' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:13:55 GMT + - Tue, 11 Oct 2022 20:52:21 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -570,62 +5786,47 @@ interactions: - nosniff x-ms-ratelimit-remaining-subscription-writes: - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK - request: - body: '{"properties": {"activities": [{"name": "ExampleForeachActivity", "type": - "ForEach", "typeProperties": {"isSequential": true, "items": {"type": "Expression", - "value": "@pipeline().parameters.OutputBlobNameList"}, "activities": [{"name": - "ExampleCopyActivity", "type": "Copy", "inputs": [{"type": "DatasetReference", - "referenceName": "example000004", "parameters": {"MyFileName": "examplecontainer.csv", - "MyFolderPath": "examplecontainer"}}], "outputs": [{"type": "DatasetReference", - "referenceName": "example000004", "parameters": {"MyFileName": {"type": "Expression", - "value": "@item()"}, "MyFolderPath": "examplecontainer"}}], "typeProperties": - {"source": {"type": "BlobSource"}, "sink": {"type": "BlobSink"}, "dataIntegrationUnits": - 32}}]}}], "parameters": {"JobId": {"type": "String"}, "OutputBlobNameList": - {"type": "Array"}}, "variables": {"TestVariableArray": {"type": "Array"}}, "runDimensions": - {"JobId": {"type": "Expression", "value": "@pipeline().parameters.JobId"}}}}' + body: null headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory pipeline create + - datafactory integration-runtime show Connection: - keep-alive - Content-Length: - - '982' - Content-Type: - - application/json ParameterSetName: - - --factory-name --pipeline --name --resource-group + - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: PUT - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002?api-version=2018-06-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005","name":"example000005","type":"Microsoft.DataFactory/factories/pipelines","properties":{"activities":[{"name":"ExampleForeachActivity","type":"ForEach","typeProperties":{"isSequential":true,"items":{"type":"Expression","value":"@pipeline().parameters.OutputBlobNameList"},"activities":[{"name":"ExampleCopyActivity","type":"Copy","inputs":[{"type":"DatasetReference","referenceName":"example000004","parameters":{"MyFileName":"examplecontainer.csv","MyFolderPath":"examplecontainer"}}],"outputs":[{"type":"DatasetReference","referenceName":"example000004","parameters":{"MyFileName":{"type":"Expression","value":"@item()"},"MyFolderPath":"examplecontainer"}}],"typeProperties":{"source":{"type":"BlobSource"},"sink":{"type":"BlobSink"},"dataIntegrationUnits":32}}]}}],"parameters":{"JobId":{"type":"String"},"OutputBlobNameList":{"type":"Array"}},"variables":{"TestVariableArray":{"type":"Array"}},"runDimensions":{"JobId":{"type":"Expression","value":"@pipeline().parameters.JobId"}}},"etag":"1c00fc4a-0000-0100-0000-60865a250000"}' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationruntimes/exampleInteg000002\",\n + \ \"name\": \"exampleInteg000002\",\n \"type\": \"Microsoft.DataFactory/factories/integrationruntimes\",\n + \ \"properties\": {\n \"type\": \"SelfHosted\",\n \"description\": \"A + selfhosted integration runtime\"\n },\n \"etag\": \"41002340-0000-0100-0000-6345d7820000\"\n}" headers: cache-control: - no-cache content-length: - - '1274' + - '441' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:13:56 GMT + - Tue, 11 Oct 2022 20:52:20 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -634,10 +5835,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -649,34 +5846,39 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory pipeline update + - datafactory linked-service show Connection: - keep-alive ParameterSetName: - - --factory-name --description --name --resource-group + - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003?api-version=2018-06-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005","name":"example000005","type":"Microsoft.DataFactory/factories/pipelines","properties":{"activities":[{"name":"ExampleForeachActivity","type":"ForEach","typeProperties":{"isSequential":true,"items":{"type":"Expression","value":"@pipeline().parameters.OutputBlobNameList"},"activities":[{"name":"ExampleCopyActivity","type":"Copy","inputs":[{"type":"DatasetReference","referenceName":"example000004","parameters":{"MyFileName":"examplecontainer.csv","MyFolderPath":"examplecontainer"}}],"outputs":[{"type":"DatasetReference","referenceName":"example000004","parameters":{"MyFileName":{"type":"Expression","value":"@item()"},"MyFolderPath":"examplecontainer"}}],"typeProperties":{"source":{"type":"BlobSource"},"sink":{"type":"BlobSink"},"dataIntegrationUnits":32}}]}}],"parameters":{"JobId":{"type":"String"},"OutputBlobNameList":{"type":"Array"}},"variables":{"TestVariableArray":{"type":"Array"}},"runDimensions":{"JobId":{"type":"Expression","value":"@pipeline().parameters.JobId"}},"lastPublishTime":"2021-04-26T06:13:57Z"},"etag":"1c00fc4a-0000-0100-0000-60865a250000"}' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003\",\n + \ \"name\": \"exampleLin000003\",\n \"type\": \"Microsoft.DataFactory/factories/linkedservices\",\n + \ \"properties\": {\n \"type\": \"AzureStorage\",\n \"description\": + \"Example description\",\n \"typeProperties\": {\n \"connectionString\": + {\n \"type\": \"SecureString\",\n \"value\": \"**********\"\n + \ },\n \"encryptedCredential\": \"ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkRBVEFGQUNUT1JZQDlEQjFFMjJCLTBDMTUtNERERi04M0FCLTAzRTdCRTI3NTE1MF8zMGE1MmIxYy1kN2UxLTQzMzktYmE4My0yYWMyODQ0NzFkNjciDQp9\"\n + \ }\n },\n \"etag\": \"4100fd3f-0000-0100-0000-6345d77e0000\"\n}" headers: cache-control: - no-cache content-length: - - '1315' + - '850' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:13:58 GMT + - Tue, 11 Oct 2022 20:52:21 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -685,64 +5887,72 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK - request: - body: '{"properties": {"description": "Test Update description", "activities": - [{"name": "ExampleForeachActivity", "type": "ForEach", "typeProperties": {"isSequential": - true, "items": {"type": "Expression", "value": "@pipeline().parameters.OutputBlobNameList"}, - "activities": [{"name": "ExampleCopyActivity", "type": "Copy", "inputs": [{"type": - "DatasetReference", "referenceName": "example000004", "parameters": 
{"MyFileName": - "examplecontainer.csv", "MyFolderPath": "examplecontainer"}}], "outputs": [{"type": - "DatasetReference", "referenceName": "example000004", "parameters": {"MyFileName": - {"type": "Expression", "value": "@item()"}, "MyFolderPath": "examplecontainer"}}], - "typeProperties": {"source": {"type": "BlobSource"}, "sink": {"type": "BlobSink"}, - "dataIntegrationUnits": 32}}]}}], "parameters": {"JobId": {"type": "String"}, - "OutputBlobNameList": {"type": "Array"}}, "variables": {"TestVariableArray": - {"type": "Array"}}, "runDimensions": {"JobId": {"type": "Expression", "value": - "@pipeline().parameters.JobId"}}}}' + body: null headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory pipeline update + - datafactory pipeline show Connection: - keep-alive - Content-Length: - - '1024' - Content-Type: - - application/json ParameterSetName: - - --factory-name --description --name --resource-group + - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005?api-version=2018-06-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005","name":"example000005","type":"Microsoft.DataFactory/factories/pipelines","properties":{"description":"Test - Update description","activities":[{"name":"ExampleForeachActivity","type":"ForEach","typeProperties":{"isSequential":true,"items":{"type":"Expression","value":"@pipeline().parameters.OutputBlobNameList"},"activities":[{"name":"ExampleCopyActivity","type":"Copy","inputs":[{"type":"DatasetReference","referenceName":"example000004","parameters":{"MyFileName":"examplecontainer.csv","MyFolderPath":"examplecontainer"}}],"outputs":[{"type":"DatasetReference","referenceName":"example000004","parameters":{"MyFileName":{"type":"Expression","value":"@item()"},"MyFolderPath":"examplecontainer"}}],"typeProperties":{"source":{"type":"BlobSource"},"sink":{"type":"BlobSink"},"dataIntegrationUnits":32}}]}}],"parameters":{"JobId":{"type":"String"},"OutputBlobNameList":{"type":"Array"}},"variables":{"TestVariableArray":{"type":"Array"}},"runDimensions":{"JobId":{"type":"Expression","value":"@pipeline().parameters.JobId"}}},"etag":"1c00ff4a-0000-0100-0000-60865a270000"}' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005\",\n + \ \"name\": \"example000005\",\n \"type\": \"Microsoft.DataFactory/factories/pipelines\",\n + \ \"properties\": {\n \"description\": \"Test Update description\",\n \"activities\": + [\n {\n \"name\": \"ExampleForeachActivity\",\n \"type\": + \"ForEach\",\n \"typeProperties\": {\n \"isSequential\": true,\n + \ \"items\": {\n \"type\": \"Expression\",\n \"value\": + 
\"@pipeline().parameters.OutputBlobNameList\"\n },\n \"activities\": + [\n {\n \"name\": \"ExampleCopyActivity\",\n \"type\": + \"Copy\",\n \"inputs\": [\n {\n \"type\": + \"DatasetReference\",\n \"referenceName\": \"example000004\",\n + \ \"parameters\": {\n \"MyFileName\": \"examplecontainer.csv\",\n + \ \"MyFolderPath\": \"examplecontainer\"\n }\n + \ }\n ],\n \"outputs\": [\n {\n + \ \"type\": \"DatasetReference\",\n \"referenceName\": + \"example000004\",\n \"parameters\": {\n \"MyFileName\": + {\n \"type\": \"Expression\",\n \"value\": + \"@item()\"\n },\n \"MyFolderPath\": + \"examplecontainer\"\n }\n }\n ],\n + \ \"typeProperties\": {\n \"source\": {\n \"type\": + \"BlobSource\"\n },\n \"sink\": {\n \"type\": + \"BlobSink\"\n },\n \"dataIntegrationUnits\": + 32\n }\n }\n ]\n }\n }\n ],\n + \ \"parameters\": {\n \"JobId\": {\n \"type\": \"String\"\n + \ },\n \"OutputBlobNameList\": {\n \"type\": \"Array\"\n }\n + \ },\n \"variables\": {\n \"TestVariableArray\": {\n \"type\": + \"Array\"\n }\n },\n \"runDimensions\": {\n \"JobId\": {\n + \ \"type\": \"Expression\",\n \"value\": \"@pipeline().parameters.JobId\"\n + \ }\n },\n \"lastPublishTime\": \"2022-10-11T20:52:16Z\"\n },\n + \ \"etag\": \"41001440-0000-0100-0000-6345d7800000\"\n}" headers: cache-control: - no-cache content-length: - - '1314' + - '2275' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:13:59 GMT + - Tue, 11 Oct 2022 20:52:22 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -751,57 +5961,55 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1198' - x-powered-by: - - ASP.NET status: code: 200 message: OK - request: - body: '{"properties": {"type": "ScheduleTrigger", "pipelines": [{"pipelineReference": - {"type": "PipelineReference", "referenceName": "example000005"}, "parameters": - {"OutputBlobNameList": ["exampleoutput.csv"]}}], "typeProperties": {"recurrence": - {"frequency": "Minute", "interval": 4, "startTime": "2018-06-16T00:39:13.84418Z", - "endTime": "2018-06-16T00:55:13.84418Z", "timeZone": "UTC"}}}}' + body: null headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory trigger create + - datafactory dataset show Connection: - keep-alive - Content-Length: - - '386' - Content-Type: - - application/json ParameterSetName: - - --factory-name --resource-group --properties --name + - --name --factory-name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004?api-version=2018-06-01 response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006","name":"example000006","type":"Microsoft.DataFactory/factories/triggers","properties":{"type":"ScheduleTrigger","pipelines":[{"pipelineReference":{"type":"PipelineReference","referenceName":"example000005"},"parameters":{"OutputBlobNameList":["exampleoutput.csv"]}}],"typeProperties":{"recurrence":{"frequency":"Minute","interval":4,"startTime":"2018-06-16T00:39:13.84418Z","endTime":"2018-06-16T00:55:13.84418Z","timeZone":"UTC"}},"runtimeState":"Stopped"},"etag":"1c00004b-0000-0100-0000-60865a290000"}' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004\",\n + \ \"name\": \"example000004\",\n \"type\": \"Microsoft.DataFactory/factories/datasets\",\n + \ \"properties\": {\n \"type\": \"AzureBlob\",\n \"description\": \"Example + description\",\n \"linkedServiceName\": {\n \"type\": \"LinkedServiceReference\",\n + \ \"referenceName\": \"exampleLin000003\"\n },\n \"parameters\": + {\n \"MyFileName\": {\n \"type\": \"String\"\n },\n \"MyFolderPath\": + {\n \"type\": \"String\"\n }\n },\n \"typeProperties\": + {\n \"folderPath\": {\n \"type\": \"Expression\",\n \"value\": + \"@dataset().MyFolderPath\"\n },\n \"fileName\": {\n \"type\": + \"Expression\",\n \"value\": \"@dataset().MyFileName\"\n },\n + \ \"format\": {\n \"type\": \"TextFormat\"\n }\n }\n },\n + \ \"etag\": \"41000640-0000-0100-0000-6345d77f0000\"\n}" headers: cache-control: - no-cache content-length: - - '743' + - '941' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:01 GMT + - Tue, 11 Oct 2022 20:52:22 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -810,10 +6018,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -825,34 +6029,44 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory trigger update + - datafactory trigger show Connection: - keep-alive ParameterSetName: - - --factory-name --resource-group --description --name + - --factory-name --resource-group --name User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006","name":"example000006","type":"Microsoft.DataFactory/factories/triggers","properties":{"type":"ScheduleTrigger","pipelines":[{"pipelineReference":{"type":"PipelineReference","referenceName":"example000005"},"parameters":{"OutputBlobNameList":["exampleoutput.csv"]}}],"typeProperties":{"recurrence":{"frequency":"Minute","interval":4,"startTime":"2018-06-16T00:39:13.84418Z","endTime":"2018-06-16T00:55:13.84418Z","timeZone":"UTC"}},"runtimeState":"Stopped"},"etag":"1c00004b-0000-0100-0000-60865a290000"}' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006\",\n + \ \"name\": \"example000006\",\n \"type\": \"Microsoft.DataFactory/factories/triggers\",\n + \ \"properties\": {\n \"type\": \"ScheduleTrigger\",\n \"description\": + \"Example description\",\n \"pipelines\": [\n {\n \"pipelineReference\": + {\n \"type\": \"PipelineReference\",\n \"referenceName\": + \"example000005\"\n },\n \"parameters\": {\n \"OutputBlobNameList\": + [\n \"exampleoutput.csv\"\n ]\n }\n }\n ],\n + \ \"typeProperties\": {\n \"recurrence\": {\n \"frequency\": + \"Minute\",\n \"interval\": 4,\n \"startTime\": \"2018-06-16T00:39:13.84418Z\",\n + \ \"endTime\": \"2018-06-16T00:55:13.84418Z\",\n \"timeZone\": + \"UTC\"\n }\n },\n \"runtimeState\": \"Stopped\"\n },\n \"etag\": + \"41001d40-0000-0100-0000-6345d7820000\"\n}" headers: cache-control: - no-cache content-length: - - '743' + - '951' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:03 GMT + - Tue, 11 Oct 2022 20:52:22 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -861,57 +6075,48 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK - request: - body: '{"properties": {"type": "ScheduleTrigger", "description": "Example description", - "pipelines": [{"pipelineReference": {"type": "PipelineReference", "referenceName": - "example000005"}, "parameters": {"OutputBlobNameList": ["exampleoutput.csv"]}}], - "typeProperties": {"recurrence": {"frequency": "Minute", "interval": 4, "startTime": - "2018-06-16T00:39:13.84418Z", "endTime": "2018-06-16T00:55:13.84418Z", "timeZone": - "UTC"}}}}' + body: null headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory trigger update + - datafactory integration-runtime list Connection: - keep-alive - Content-Length: - - '424' - Content-Type: - - application/json ParameterSetName: - - --factory-name --resource-group --description --name + - --factory-name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: GET + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes?api-version=2018-06-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006","name":"example000006","type":"Microsoft.DataFactory/factories/triggers","properties":{"type":"ScheduleTrigger","description":"Example - description","pipelines":[{"pipelineReference":{"type":"PipelineReference","referenceName":"example000005"},"parameters":{"OutputBlobNameList":["exampleoutput.csv"]}}],"typeProperties":{"recurrence":{"frequency":"Minute","interval":4,"startTime":"2018-06-16T00:39:13.84418Z","endTime":"2018-06-16T00:55:13.84418Z","timeZone":"UTC"}},"runtimeState":"Stopped"},"etag":"1c00054b-0000-0100-0000-60865a2d0000"}' + string: "{\n \"value\": [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationruntimes/exampleInteg000002\",\n + \ \"name\": \"exampleInteg000002\",\n \"type\": \"Microsoft.DataFactory/factories/integrationruntimes\",\n + \ \"properties\": {\n \"type\": \"SelfHosted\",\n \"description\": + \"A selfhosted integration runtime\"\n },\n \"etag\": \"41002340-0000-0100-0000-6345d7820000\"\n + \ }\n ]\n}" headers: cache-control: - no-cache content-length: - - '779' + - '502' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:04 GMT + - Tue, 11 Oct 2022 20:52:23 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -920,55 +6125,51 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK - request: - body: '{"properties": {"type": "SelfHosted", "description": "A selfhosted integration - runtime"}}' + body: null headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory integration-runtime self-hosted create + - datafactory linked-service list Connection: - keep-alive - Content-Length: - - '89' - Content-Type: - - application/json ParameterSetName: - - --factory-name --description --name --resource-group + - --factory-name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices?api-version=2018-06-01 response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationruntimes/exampleInteg000002","name":"exampleInteg000002","type":"Microsoft.DataFactory/factories/integrationruntimes","properties":{"type":"SelfHosted","description":"A - selfhosted integration runtime"},"etag":"1c00084b-0000-0100-0000-60865a2f0000"}' + string: "{\n \"value\": [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003\",\n + \ \"name\": \"exampleLin000003\",\n \"type\": \"Microsoft.DataFactory/factories/linkedservices\",\n + \ \"properties\": {\n \"type\": \"AzureStorage\",\n \"description\": + \"Example description\",\n \"typeProperties\": {\n \"connectionString\": + {\n \"type\": \"SecureString\",\n \"value\": \"**********\"\n + \ },\n \"encryptedCredential\": \"ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkRBVEFGQUNUT1JZQDlEQjFFMjJCLTBDMTUtNERERi04M0FCLTAzRTdCRTI3NTE1MF8zMGE1MmIxYy1kN2UxLTQzMzktYmE4My0yYWMyODQ0NzFkNjciDQp9\"\n + \ }\n },\n \"etag\": \"4100fd3f-0000-0100-0000-6345d77e0000\"\n + \ }\n ]\n}" headers: cache-control: - no-cache content-length: - - '484' + - '939' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:07 GMT + - Tue, 11 Oct 2022 20:52:23 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -977,54 +6178,75 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1198' - x-powered-by: - - ASP.NET status: code: 200 message: OK - request: - body: '{"autoUpdate": "Off", "updateDelayOffset": "\"PT3H\""}' + body: null headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory integration-runtime update + - datafactory pipeline list Connection: - keep-alive - Content-Length: - - '54' - Content-Type: - - application/json ParameterSetName: - - --factory-name --name --resource-group --auto-update --update-delay-offset + - --factory-name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: PATCH - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines?api-version=2018-06-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationruntimes/exampleInteg000002","name":"exampleInteg000002","type":"Microsoft.DataFactory/factories/integrationruntimes","properties":{"type":"SelfHosted","description":"A - selfhosted integration runtime"},"etag":"1c00084b-0000-0100-0000-60865a2f0000"}' + string: "{\n \"value\": [\n {\n \"id\": 
\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005\",\n + \ \"name\": \"example000005\",\n \"type\": \"Microsoft.DataFactory/factories/pipelines\",\n + \ \"properties\": {\n \"description\": \"Test Update description\",\n + \ \"activities\": [\n {\n \"name\": \"ExampleForeachActivity\",\n + \ \"type\": \"ForEach\",\n \"typeProperties\": {\n \"isSequential\": + true,\n \"items\": {\n \"type\": \"Expression\",\n + \ \"value\": \"@pipeline().parameters.OutputBlobNameList\"\n + \ },\n \"activities\": [\n {\n \"name\": + \"ExampleCopyActivity\",\n \"type\": \"Copy\",\n \"inputs\": + [\n {\n \"type\": \"DatasetReference\",\n + \ \"referenceName\": \"example000004\",\n \"parameters\": + {\n \"MyFileName\": \"examplecontainer.csv\",\n \"MyFolderPath\": + \"examplecontainer\"\n }\n }\n ],\n + \ \"outputs\": [\n {\n \"type\": + \"DatasetReference\",\n \"referenceName\": \"example000004\",\n + \ \"parameters\": {\n \"MyFileName\": + {\n \"type\": \"Expression\",\n \"value\": + \"@item()\"\n },\n \"MyFolderPath\": + \"examplecontainer\"\n }\n }\n ],\n + \ \"typeProperties\": {\n \"source\": {\n + \ \"type\": \"BlobSource\"\n },\n \"sink\": + {\n \"type\": \"BlobSink\"\n },\n + \ \"dataIntegrationUnits\": 32\n }\n }\n + \ ]\n }\n }\n ],\n \"parameters\": + {\n \"JobId\": {\n \"type\": \"String\"\n },\n + \ \"OutputBlobNameList\": {\n \"type\": \"Array\"\n }\n + \ },\n \"variables\": {\n \"TestVariableArray\": {\n + \ \"type\": \"Array\"\n }\n },\n \"runDimensions\": + {\n \"JobId\": {\n \"type\": \"Expression\",\n \"value\": + \"@pipeline().parameters.JobId\"\n }\n },\n \"lastPublishTime\": + \"2022-10-11T20:52:16Z\"\n },\n \"etag\": \"41001440-0000-0100-0000-6345d7800000\"\n + \ }\n ]\n}" headers: cache-control: - no-cache content-length: - - '484' + - '2616' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:08 GMT + - Tue, 11 Oct 2022 20:52:24 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1033,53 +6255,56 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK - request: - body: '{"OutputBlobNameList": ["exampleoutput.csv"]}' + body: null headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory pipeline create-run + - datafactory trigger list Connection: - keep-alive - Content-Length: - - '45' - Content-Type: - - application/json ParameterSetName: - - --factory-name --parameters --name --resource-group + - --factory-name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005/createRun?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers?api-version=2018-06-01 response: body: - string: 
'{"runId":"9c51f7b2-a656-11eb-9afd-84a93e64b16e"}' + string: "{\n \"value\": [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006\",\n + \ \"name\": \"example000006\",\n \"type\": \"Microsoft.DataFactory/factories/triggers\",\n + \ \"properties\": {\n \"type\": \"ScheduleTrigger\",\n \"description\": + \"Example description\",\n \"pipelines\": [\n {\n \"pipelineReference\": + {\n \"type\": \"PipelineReference\",\n \"referenceName\": + \"example000005\"\n },\n \"parameters\": {\n \"OutputBlobNameList\": + [\n \"exampleoutput.csv\"\n ]\n }\n + \ }\n ],\n \"typeProperties\": {\n \"recurrence\": + {\n \"frequency\": \"Minute\",\n \"interval\": 4,\n + \ \"startTime\": \"2018-06-16T00:39:13.84418Z\",\n \"endTime\": + \"2018-06-16T00:55:13.84418Z\",\n \"timeZone\": \"UTC\"\n }\n + \ },\n \"runtimeState\": \"Stopped\"\n },\n \"etag\": + \"41001d40-0000-0100-0000-6345d7820000\"\n }\n ]\n}" headers: cache-control: - no-cache content-length: - - '48' + - '1104' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:11 GMT + - Tue, 11 Oct 2022 20:52:24 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1088,10 +6313,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -1103,35 +6324,45 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory integration-runtime show + - datafactory dataset list Connection: - keep-alive ParameterSetName: - - --factory-name --name --resource-group + - --factory-name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets?api-version=2018-06-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationruntimes/exampleInteg000002","name":"exampleInteg000002","type":"Microsoft.DataFactory/factories/integrationruntimes","properties":{"type":"SelfHosted","description":"A - selfhosted integration runtime"},"etag":"1c00084b-0000-0100-0000-60865a2f0000"}' + string: "{\n \"value\": [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004\",\n + \ \"name\": \"example000004\",\n \"type\": \"Microsoft.DataFactory/factories/datasets\",\n + \ \"properties\": {\n \"type\": \"AzureBlob\",\n \"description\": + \"Example description\",\n \"linkedServiceName\": {\n \"type\": + \"LinkedServiceReference\",\n \"referenceName\": \"exampleLin000003\"\n + \ },\n \"parameters\": {\n \"MyFileName\": {\n \"type\": + 
\"String\"\n },\n \"MyFolderPath\": {\n \"type\": + \"String\"\n }\n },\n \"typeProperties\": {\n \"folderPath\": + {\n \"type\": \"Expression\",\n \"value\": \"@dataset().MyFolderPath\"\n + \ },\n \"fileName\": {\n \"type\": \"Expression\",\n + \ \"value\": \"@dataset().MyFileName\"\n },\n \"format\": + {\n \"type\": \"TextFormat\"\n }\n }\n },\n + \ \"etag\": \"41000640-0000-0100-0000-6345d77f0000\"\n }\n ]\n}" headers: cache-control: - no-cache content-length: - - '484' + - '1102' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:12 GMT + - Tue, 11 Oct 2022 20:52:24 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1140,8 +6371,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -1153,35 +6382,43 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory linked-service show + - datafactory show Connection: - keep-alive ParameterSetName: - - --factory-name --name --resource-group + - --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003","name":"exampleLin000003","type":"Microsoft.DataFactory/factories/linkedservices","properties":{"type":"AzureStorage","description":"Example - description","typeProperties":{"connectionString":{"type":"SecureString","value":"**********"},"encryptedCredential":"ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkVYQU1QTEVGQTVRTkMzREZCM184YWVmOGZkOC0yN2M2LTQ1NDEtOTM0MC01NmU0ZTlkNTBmZTUiDQp9"}},"etag":"1c00f54a-0000-0100-0000-60865a1e0000"}' + string: "{\n \"name\": \"exampleFa000001\",\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/examplefa4rqmquxng\",\n + \ \"type\": \"Microsoft.DataFactory/factories\",\n \"properties\": {\n \"provisioningState\": + \"Succeeded\",\n \"createTime\": \"2022-10-11T20:52:07.4135245Z\",\n \"version\": + \"2018-06-01\",\n \"factoryStatistics\": {\n \"totalResourceCount\": + 0,\n \"maxAllowedResourceCount\": 0,\n \"factorySizeInGbUnits\": + 0,\n \"maxAllowedFactorySizeInGbUnits\": 0\n },\n \"encryption\": + {}\n },\n \"eTag\": \"\\\"0d002ac0-0000-0100-0000-6345d7780000\\\"\",\n + \ \"location\": \"eastus\",\n \"identity\": {\n \"type\": \"SystemAssigned\",\n + \ \"principalId\": \"5639cd3e-8c37-4b3c-b16e-51faf16b8893\",\n \"tenantId\": + \"16b3c013-d300-468d-ac64-7eda0820b6d3\"\n },\n \"tags\": {\n \"exampleTag\": + \"exampleValue\"\n }\n}" headers: cache-control: - 
no-cache content-length: - - '793' + - '859' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:13 GMT + - Tue, 11 Oct 2022 20:52:24 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1190,8 +6427,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -1203,35 +6438,43 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory pipeline show + - datafactory list Connection: - keep-alive ParameterSetName: - - --factory-name --name --resource-group + - --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories?api-version=2018-06-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005","name":"example000005","type":"Microsoft.DataFactory/factories/pipelines","properties":{"description":"Test - Update description","activities":[{"name":"ExampleForeachActivity","type":"ForEach","typeProperties":{"isSequential":true,"items":{"type":"Expression","value":"@pipeline().parameters.OutputBlobNameList"},"activities":[{"name":"ExampleCopyActivity","type":"Copy","inputs":[{"type":"DatasetReference","referenceName":"example000004","parameters":{"MyFileName":"examplecontainer.csv","MyFolderPath":"examplecontainer"}}],"outputs":[{"type":"DatasetReference","referenceName":"example000004","parameters":{"MyFileName":{"type":"Expression","value":"@item()"},"MyFolderPath":"examplecontainer"}}],"typeProperties":{"source":{"type":"BlobSource"},"sink":{"type":"BlobSink"},"dataIntegrationUnits":32}}]}}],"parameters":{"JobId":{"type":"String"},"OutputBlobNameList":{"type":"Array"}},"variables":{"TestVariableArray":{"type":"Array"}},"runDimensions":{"JobId":{"type":"Expression","value":"@pipeline().parameters.JobId"}},"lastPublishTime":"2021-04-26T06:13:59Z"},"etag":"1c00ff4a-0000-0100-0000-60865a270000"}' + string: "{\n \"value\": [\n {\n \"name\": \"exampleFa000001\",\n \"id\": + \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/examplefa4rqmquxng\",\n + \ \"type\": \"Microsoft.DataFactory/factories\",\n \"properties\": + {\n \"provisioningState\": \"Succeeded\",\n \"createTime\": + \"2022-10-11T20:52:07.4135245Z\",\n \"version\": \"2018-06-01\",\n + \ \"encryption\": {}\n },\n \"eTag\": \"\\\"0d002ac0-0000-0100-0000-6345d7780000\\\"\",\n + \ \"location\": \"eastus\",\n \"identity\": {\n \"type\": + \"SystemAssigned\",\n \"principalId\": \"5639cd3e-8c37-4b3c-b16e-51faf16b8893\",\n + \ \"tenantId\": \"16b3c013-d300-468d-ac64-7eda0820b6d3\"\n },\n + \ \"tags\": {\n \"exampleTag\": \"exampleValue\"\n }\n }\n + \ ]\n}" headers: cache-control: - no-cache content-length: - - '1355' + - '788' 
content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:14 GMT + - Tue, 11 Oct 2022 20:52:25 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1240,8 +6483,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -1253,35 +6494,43 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory dataset show + - datafactory list Connection: - keep-alive ParameterSetName: - - --name --factory-name --resource-group + - -g User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory/factories?api-version=2018-06-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004","name":"example000004","type":"Microsoft.DataFactory/factories/datasets","properties":{"type":"AzureBlob","description":"Example - description","linkedServiceName":{"type":"LinkedServiceReference","referenceName":"exampleLin000003"},"parameters":{"MyFileName":{"type":"String"},"MyFolderPath":{"type":"String"}},"typeProperties":{"folderPath":{"type":"Expression","value":"@dataset().MyFolderPath"},"fileName":{"type":"Expression","value":"@dataset().MyFileName"},"format":{"type":"TextFormat"}}},"etag":"1c00fb4a-0000-0100-0000-60865a240000"}' + string: "{\n \"value\": [\n {\n \"name\": \"exampleFa000001\",\n \"id\": + \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/examplefa4rqmquxng\",\n + \ \"type\": \"Microsoft.DataFactory/factories\",\n \"properties\": + {\n \"provisioningState\": \"Succeeded\",\n \"createTime\": + \"2022-10-11T20:52:07.4135245Z\",\n \"version\": \"2018-06-01\",\n + \ \"encryption\": {}\n },\n \"eTag\": \"\\\"0d002ac0-0000-0100-0000-6345d7780000\\\"\",\n + \ \"location\": \"eastus\",\n \"identity\": {\n \"type\": + \"SystemAssigned\",\n \"principalId\": \"5639cd3e-8c37-4b3c-b16e-51faf16b8893\",\n + \ \"tenantId\": \"16b3c013-d300-468d-ac64-7eda0820b6d3\"\n },\n + \ \"tags\": {\n \"exampleTag\": \"exampleValue\"\n }\n }\n + \ ]\n}" headers: cache-control: - no-cache content-length: - - '782' + - '788' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:15 GMT + - Tue, 11 Oct 2022 20:52:26 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1290,48 +6539,48 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK - request: - body: null + body: '{"keyName": "authKey2"}' headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory trigger show + - datafactory integration-runtime regenerate-auth-key Connection: - 
keep-alive + Content-Length: + - '23' + Content-Type: + - application/json ParameterSetName: - - --factory-name --resource-group --name + - --factory-name --name --key-name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/regenerateAuthKey?api-version=2018-06-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006","name":"example000006","type":"Microsoft.DataFactory/factories/triggers","properties":{"type":"ScheduleTrigger","description":"Example - description","pipelines":[{"pipelineReference":{"type":"PipelineReference","referenceName":"example000005"},"parameters":{"OutputBlobNameList":["exampleoutput.csv"]}}],"typeProperties":{"recurrence":{"frequency":"Minute","interval":4,"startTime":"2018-06-16T00:39:13.84418Z","endTime":"2018-06-16T00:55:13.84418Z","timeZone":"UTC"}},"runtimeState":"Stopped"},"etag":"1c00054b-0000-0100-0000-60865a2d0000"}' + string: "{\n \"authKey2\": \"IR@c6b916cc-668d-40d4-b46c-676c22cd963e@exampleFa000001@ServiceEndpoint=examplefa4rqmquxng.eastus.datafactory.azure.net@mUaIBKGdBqAbRiCRf45ShpSW/TEqfIiSwCQCnN4p01M=\"\n}" headers: cache-control: - no-cache content-length: - - '779' + - '184' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:17 GMT + - Tue, 11 Oct 2022 20:52:26 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1340,8 +6589,8 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET + x-ms-ratelimit-remaining-subscription-writes: + - '1199' status: code: 200 message: OK @@ -1353,45 +6602,39 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory integration-runtime list + - datafactory integration-runtime sync-credentials Connection: - keep-alive + Content-Length: + - '0' ParameterSetName: - - --factory-name --resource-group + - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/syncCredentials?api-version=2018-06-01 response: body: - string: 
'{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationruntimes/exampleInteg000002","name":"exampleInteg000002","type":"Microsoft.DataFactory/factories/integrationruntimes","properties":{"type":"SelfHosted","description":"A - selfhosted integration runtime"},"etag":"1c00084b-0000-0100-0000-60865a2f0000"}]}' + string: '' headers: cache-control: - no-cache content-length: - - '496' - content-type: - - application/json; charset=utf-8 + - '0' date: - - Mon, 26 Apr 2021 06:14:18 GMT + - Tue, 11 Oct 2022 20:52:27 GMT expires: - '-1' pragma: - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding + server: + - Kestrel + strict-transport-security: + - max-age=31536000; includeSubDomains x-content-type-options: - nosniff - x-powered-by: - - ASP.NET + x-ms-ratelimit-remaining-subscription-writes: + - '1199' status: code: 200 message: OK @@ -1403,35 +6646,35 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory linked-service list + - datafactory integration-runtime get-monitoring-data Connection: - keep-alive + Content-Length: + - '0' ParameterSetName: - - --factory-name --resource-group + - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/monitoringData?api-version=2018-06-01 response: body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003","name":"exampleLin000003","type":"Microsoft.DataFactory/factories/linkedservices","properties":{"type":"AzureStorage","description":"Example - description","typeProperties":{"connectionString":{"type":"SecureString","value":"**********"},"encryptedCredential":"ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkVYQU1QTEVGQTVRTkMzREZCM184YWVmOGZkOC0yN2M2LTQ1NDEtOTM0MC01NmU0ZTlkNTBmZTUiDQp9"}},"etag":"1c00f54a-0000-0100-0000-60865a1e0000"}]}' + string: "{\n \"name\": \"exampleInteg000002\"\n}" headers: cache-control: - no-cache content-length: - - '805' + - '34' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:19 GMT + - Tue, 11 Oct 2022 20:52:27 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1440,8 +6683,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -1453,35 +6694,36 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory pipeline list + - 
datafactory integration-runtime list-auth-key Connection: - keep-alive + Content-Length: + - '0' ParameterSetName: - - --factory-name --resource-group + - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/listAuthKeys?api-version=2018-06-01 response: body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005","name":"example000005","type":"Microsoft.DataFactory/factories/pipelines","properties":{"description":"Test - Update description","activities":[{"name":"ExampleForeachActivity","type":"ForEach","typeProperties":{"isSequential":true,"items":{"type":"Expression","value":"@pipeline().parameters.OutputBlobNameList"},"activities":[{"name":"ExampleCopyActivity","type":"Copy","inputs":[{"type":"DatasetReference","referenceName":"example000004","parameters":{"MyFileName":"examplecontainer.csv","MyFolderPath":"examplecontainer"}}],"outputs":[{"type":"DatasetReference","referenceName":"example000004","parameters":{"MyFileName":{"type":"Expression","value":"@item()"},"MyFolderPath":"examplecontainer"}}],"typeProperties":{"source":{"type":"BlobSource"},"sink":{"type":"BlobSink"},"dataIntegrationUnits":32}}]}}],"parameters":{"JobId":{"type":"String"},"OutputBlobNameList":{"type":"Array"}},"variables":{"TestVariableArray":{"type":"Array"}},"runDimensions":{"JobId":{"type":"Expression","value":"@pipeline().parameters.JobId"}},"lastPublishTime":"2021-04-26T06:13:59Z"},"etag":"1c00ff4a-0000-0100-0000-60865a270000"}]}' + string: "{\n \"authKey1\": \"IR@c6b916cc-668d-40d4-b46c-676c22cd963e@exampleFa000001@ServiceEndpoint=examplefa4rqmquxng.eastus.datafactory.azure.net@ZPU6+OKLcgyhsH+rofeuFbUROtjyOhHVGtAL40qLI5E=\",\n + \ \"authKey2\": \"IR@c6b916cc-668d-40d4-b46c-676c22cd963e@exampleFa000001@ServiceEndpoint=examplefa4rqmquxng.eastus.datafactory.azure.net@mUaIBKGdBqAbRiCRf45ShpSW/TEqfIiSwCQCnN4p01M=\"\n}" headers: cache-control: - no-cache content-length: - - '1367' + - '366' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:20 GMT + - Tue, 11 Oct 2022 20:52:27 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1490,58 +6732,54 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET + x-ms-ratelimit-remaining-subscription-writes: + - '1199' status: code: 200 message: OK - request: - body: null + body: '{"factoryName": "exampleFactoryName-linked"}' headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory trigger list + - datafactory integration-runtime remove-link Connection: - keep-alive + Content-Length: + - '44' + Content-Type: + - application/json ParameterSetName: - - --factory-name 
--resource-group + - --factory-name --name --linked-factory-name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/removeLinks?api-version=2018-06-01 response: body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006","name":"example000006","type":"Microsoft.DataFactory/factories/triggers","properties":{"type":"ScheduleTrigger","description":"Example - description","pipelines":[{"pipelineReference":{"type":"PipelineReference","referenceName":"example000005"},"parameters":{"OutputBlobNameList":["exampleoutput.csv"]}}],"typeProperties":{"recurrence":{"frequency":"Minute","interval":4,"startTime":"2018-06-16T00:39:13.84418Z","endTime":"2018-06-16T00:55:13.84418Z","timeZone":"UTC"}},"runtimeState":"Stopped"},"etag":"1c00054b-0000-0100-0000-60865a2d0000"}]}' + string: '' headers: cache-control: - no-cache content-length: - - '791' - content-type: - - application/json; charset=utf-8 + - '0' date: - - Mon, 26 Apr 2021 06:14:21 GMT + - Tue, 11 Oct 2022 20:52:27 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET + x-ms-ratelimit-remaining-subscription-writes: + - '1199' status: code: 200 message: OK @@ -1553,35 +6791,45 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory dataset list + - datafactory integration-runtime get-status Connection: - keep-alive + Content-Length: + - '0' ParameterSetName: - - --factory-name --resource-group + - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/getStatus?api-version=2018-06-01 response: body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004","name":"example000004","type":"Microsoft.DataFactory/factories/datasets","properties":{"type":"AzureBlob","description":"Example - 
description","linkedServiceName":{"type":"LinkedServiceReference","referenceName":"exampleLin000003"},"parameters":{"MyFileName":{"type":"String"},"MyFolderPath":{"type":"String"}},"typeProperties":{"folderPath":{"type":"Expression","value":"@dataset().MyFolderPath"},"fileName":{"type":"Expression","value":"@dataset().MyFileName"},"format":{"type":"TextFormat"}}},"etag":"1c00fb4a-0000-0100-0000-60865a240000"}]}' + string: "{\n \"name\": \"exampleInteg000002\",\n \"properties\": {\n \"dataFactoryName\": + \"exampleFa000001\",\n \"state\": \"NeedRegistration\",\n \"type\": + \"SelfHosted\",\n \"typeProperties\": {\n \"serviceRegion\": \"eu\",\n + \ \"autoUpdate\": \"Off\",\n \"internalChannelEncryption\": \"NotSet\",\n + \ \"taskQueueId\": \"c6b916cc-668d-40d4-b46c-676c22cd963e\",\n \"nodes\": + [],\n \"updateDelayOffset\": \"PT3H\",\n \"serviceUrls\": [\n \"examplefa4rqmquxng.eastus.datafactory.azure.net\"\n + \ ],\n \"links\": [],\n \"versionStatus\": \"None\",\n \"capabilities\": + {},\n \"latestVersion\": \"5.20.8244.2\",\n \"newerVersions\": [\n + \ \"5.20.8244.2\",\n \"5.20.8235.2\",\n \"5.20.8227.2\",\n + \ \"5.19.8214.2\",\n \"5.17.8189.1\"\n ],\n \"createTime\": + \"2022-10-11T20:52:18.6516685Z\"\n }\n }\n}" headers: cache-control: - no-cache content-length: - - '794' + - '807' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:23 GMT + - Tue, 11 Oct 2022 20:52:28 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1590,8 +6838,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -1603,34 +6849,35 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory show + - datafactory trigger get-event-subscription-status Connection: - keep-alive + Content-Length: + - '0' ParameterSetName: - - --name --resource-group + - --factory-name --resource-group --name User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/getEventSubscriptionStatus?api-version=2018-06-01 response: body: - string: '{"name":"exampleFa000001","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/examplefa5qnc3dfb3","type":"Microsoft.DataFactory/factories","properties":{"provisioningState":"Succeeded","createTime":"2021-04-26T06:13:38.8060441Z","version":"2018-06-01","factoryStatistics":{"totalResourceCount":0,"maxAllowedResourceCount":0,"factorySizeInGbUnits":0,"maxAllowedFactorySizeInGbUnits":0},"encryption":{}},"eTag":"\"30001ffc-0000-0100-0000-60865a170000\"","location":"eastus","identity":{"type":"SystemAssigned","principalId":"06a7c2de-469c-4e53-bbc5-69adf6b29d6b","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},"tags":{"exampleTag":"exampleValue"}}' + string: "{\n \"triggerName\": \"example000006\",\n \"status\": 
\"Enabled\"\n}" headers: cache-control: - no-cache content-length: - - '791' + - '59' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:24 GMT + - Tue, 11 Oct 2022 20:52:28 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1639,8 +6886,8 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET + x-ms-ratelimit-remaining-subscription-writes: + - '1199' status: code: 200 message: OK @@ -1652,34 +6899,35 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory list + - datafactory trigger unsubscribe-from-event Connection: - keep-alive + Content-Length: + - '0' ParameterSetName: - - --resource-group + - --factory-name --resource-group --name User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/unsubscribeFromEvents?api-version=2018-06-01 response: body: - string: '{"value":[{"name":"exampleFa000001","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/examplefa5qnc3dfb3","type":"Microsoft.DataFactory/factories","properties":{"provisioningState":"Succeeded","createTime":"2021-04-26T06:13:38.8060441Z","version":"2018-06-01","encryption":{}},"eTag":"\"30001ffc-0000-0100-0000-60865a170000\"","location":"eastus","identity":{"type":"SystemAssigned","principalId":"06a7c2de-469c-4e53-bbc5-69adf6b29d6b","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},"tags":{"exampleTag":"exampleValue"}}]}' + string: "{\n \"triggerName\": \"example000006\",\n \"status\": \"Disabled\"\n}" headers: cache-control: - no-cache content-length: - - '670' + - '60' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:24 GMT + - Tue, 11 Oct 2022 20:52:29 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1688,8 +6936,8 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET + x-ms-ratelimit-remaining-subscription-writes: + - '1199' status: code: 200 message: OK @@ -1701,45 +6949,35 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory list + - datafactory trigger subscribe-to-event Connection: - keep-alive + Content-Length: + - '0' ParameterSetName: - - -g + - --factory-name --resource-group --name User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory/factories?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: POST + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/subscribeToEvents?api-version=2018-06-01 response: body: - string: "{\r\n \"value\": [\r\n {\r\n \"name\": \"exampleFa000001\",\r\n - \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/examplefa5qnc3dfb3\",\r\n - \ \"type\": \"Microsoft.DataFactory/factories\",\r\n \"properties\": - {\r\n \"provisioningState\": \"Succeeded\",\r\n \"createTime\": - \"2021-04-26T06:13:38.8060441Z\",\r\n \"version\": \"2018-06-01\",\r\n - \ \"factoryStatistics\": null,\r\n \"encryption\": {}\r\n },\r\n - \ \"eTag\": \"\\\"30001ffc-0000-0100-0000-60865a170000\\\"\",\r\n \"location\": - \"eastus\",\r\n \"identity\": {\r\n \"type\": \"SystemAssigned\",\r\n - \ \"principalId\": \"06a7c2de-469c-4e53-bbc5-69adf6b29d6b\",\r\n \"tenantId\": - \"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a\",\r\n \"userAssignedIdentities\": - null\r\n },\r\n \"tags\": {\r\n \"exampleTag\": \"exampleValue\"\r\n - \ }\r\n }\r\n ],\r\n \"nextLink\": null\r\n}" + string: "{\n \"triggerName\": \"example000006\",\n \"status\": \"Enabled\"\n}" headers: cache-control: - no-cache content-length: - - '975' + - '59' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:26 GMT + - Tue, 11 Oct 2022 20:52:30 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1748,51 +6986,48 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET + x-ms-ratelimit-remaining-subscription-writes: + - '1199' status: code: 200 message: OK - request: - body: '{"keyName": "authKey2"}' + body: null headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory integration-runtime regenerate-auth-key + - datafactory trigger start Connection: - keep-alive Content-Length: - - '23' - Content-Type: - - application/json + - '0' ParameterSetName: - - --factory-name --name --key-name --resource-group + - --factory-name --resource-group --name User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/regenerateAuthKey?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/start?api-version=2018-06-01 response: body: - string: '{"authKey2":"IR@a9a706fd-86c1-4717-9b1d-8c451e820276@exampleFa000001@ServiceEndpoint=examplefa5qnc3dfb3.eastus.datafactory.azure.net@ylfb9tUmUb8VxkygSirkWE9RAr5C7EXesqOgJjGsyGU="}' + string: '{}' headers: cache-control: - no-cache content-length: - - '182' + - '2' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:27 GMT + - Tue, 11 Oct 2022 20:52:37 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains 
transfer-encoding: @@ -1803,8 +7038,6 @@ interactions: - nosniff x-ms-ratelimit-remaining-subscription-writes: - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -1816,83 +7049,93 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory integration-runtime sync-credentials + - datafactory trigger stop Connection: - keep-alive Content-Length: - '0' ParameterSetName: - - --factory-name --name --resource-group + - --factory-name --resource-group --name User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/syncCredentials?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/stop?api-version=2018-06-01 response: body: - string: '' + string: '{}' headers: cache-control: - no-cache content-length: - - '0' + - '2' + content-type: + - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:29 GMT + - Tue, 11 Oct 2022 20:52:38 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK - request: - body: null + body: '{"permissions": "r", "accessResourcePath": "", "profileName": "DefaultProfile", + "startTime": "2018-11-10T02:46:20.2659347Z", "expireTime": "2018-11-10T09:46:20.2659347Z"}' headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory integration-runtime get-monitoring-data + - datafactory get-data-plane-access Connection: - keep-alive Content-Length: - - '0' + - '170' + Content-Type: + - application/json ParameterSetName: - - --factory-name --name --resource-group + - --name --access-resource-path --expire-time --permissions --profile-name --start-time + --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/monitoringData?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/getDataPlaneAccess?api-version=2018-06-01 response: body: - string: '{"name":"exampleInteg000002"}' + string: "{\n \"policy\": {\n \"permissions\": \"r\",\n \"accessResourcePath\": + \"\",\n \"profileName\": \"DefaultProfile\",\n \"startTime\": \"2018-11-10T02:46:20.2659347Z\",\n + \ \"expireTime\": \"2018-11-10T09:46:20.2659347Z\"\n },\n \"dataPlaneUrl\": + \"https://dpeastus.svc.datafactory.azure.com/dataplane\",\n \"accessToken\": + 
\"EAAAAI/pGFPFZoNYNrvn/e8ZTP+wAQAAPXN3JSAYuZksliAQOAO2FPQgDafeotL510O4szgnL2KueRObRATDQd/HkW2z/WFovbkWCiPLoPMsqwynR2Vu+MCYvXxBs2dgaqcphEyst/eEBDTBPA5ugPfG3U6W2xBf4ymwjwY4i1VE9Ezm7H61LnLAKlyRMSTlmT7TaPPlpTOe5st3qRM9g1dokCmhMZDbZ5/8TIg0NdhV+lPA+Yo8EJ2IaLaWbMf9dxW/v5u6azw39e6aq/EKpbv2OxBkWDVBlM1pHj80vK3AGxhGsrn1YT17ufrFQXh8zPvFVV5gf4hIZKKKlCsTAt03n7HdGHKY4lDLPFiZkyyp5+KcBk3vgAnt6wxo5isNb1rtH+rO/xYdAMaoU20efuxo1QD0RzcoZhTrSFZRreQkTQikz0MAa1FtrzMFnOzBVVdY281fV37Ze9wACn7Osj2yHBfgcMjdGQcAMkOay3O4Z7YwNpksqRi8RTno0WaGMdroflarvnx51L2rKQwcOr1SRCvF0kyfHoHEJt4L7Qo5e0BLhtl8VfXhT33npw1qkUNYBiPTbZLqNXYZww3x2aHpGH3CCLBqIAAAAPiEK8kYNLljHwmoxrgWa6QQYFUepeoXBtgTVTFKl3sd\"\n}" headers: cache-control: - no-cache content-length: - - '36' + - '961' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:30 GMT + - Tue, 11 Oct 2022 20:52:39 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1901,49 +7144,50 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET + x-ms-ratelimit-remaining-subscription-writes: + - '1199' status: code: 200 message: OK - request: - body: null + body: '{"lastUpdatedAfter": "2022-10-11T20:41:31.000Z", "lastUpdatedBefore": "2022-10-11T21:41:31.000Z"}' headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory integration-runtime list-auth-key + - datafactory trigger-run query-by-factory Connection: - keep-alive Content-Length: - - '0' + - '97' + Content-Type: + - application/json ParameterSetName: - - --factory-name --name --resource-group + - --factory-name --last-updated-after --last-updated-before --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/listAuthKeys?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/queryTriggerRuns?api-version=2018-06-01 response: body: - string: '{"authKey1":"IR@a9a706fd-86c1-4717-9b1d-8c451e820276@exampleFa000001@ServiceEndpoint=examplefa5qnc3dfb3.eastus.datafactory.azure.net@RaoFzpk8dYtoc+PJqfn0M1Vxak6imWEc6JrR+tHgM74=","authKey2":"IR@a9a706fd-86c1-4717-9b1d-8c451e820276@exampleFa000001@ServiceEndpoint=examplefa5qnc3dfb3.eastus.datafactory.azure.net@ylfb9tUmUb8VxkygSirkWE9RAr5C7EXesqOgJjGsyGU="}' + string: "{\n \"value\": []\n}" headers: cache-control: - no-cache content-length: - - '363' + - '17' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:30 GMT + - Tue, 11 Oct 2022 20:52:38 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -1952,100 +7196,197 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK - request: - body: '{"factoryName": "exampleFactoryName-linked"}' + body: null headers: Accept: - 
application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory integration-runtime remove-link + - datafactory trigger-run query-by-factory Connection: - keep-alive - Content-Length: - - '44' - Content-Type: - - application/json ParameterSetName: - - --factory-name --name --linked-factory-name --resource-group + - --factory-name --last-updated-after --last-updated-before --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/removeLinks?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/locations?api-version=2019-11-01 response: body: - string: '' + string: "{\"value\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\",\"name\":\"eastus\",\"displayName\":\"East + US\",\"regionalDisplayName\":\"(US) East US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-79.8164\",\"latitude\":\"37.3719\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\":\"westus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2\",\"name\":\"eastus2\",\"displayName\":\"East + US 2\",\"regionalDisplayName\":\"(US) East US 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-78.3889\",\"latitude\":\"36.6681\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\":\"centralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\",\"name\":\"southcentralus\",\"displayName\":\"South + Central US\",\"regionalDisplayName\":\"(US) South Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-98.5\",\"latitude\":\"29.4167\",\"physicalLocation\":\"Texas\",\"pairedRegion\":[{\"name\":\"northcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2\",\"name\":\"westus2\",\"displayName\":\"West + US 2\",\"regionalDisplayName\":\"(US) West US 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-119.852\",\"latitude\":\"47.233\",\"physicalLocation\":\"Washington\",\"pairedRegion\":[{\"name\":\"westcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus3\",\"name\":\"westus3\",\"displayName\":\"West + US 3\",\"regionalDisplayName\":\"(US) West US 
3\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-112.074036\",\"latitude\":\"33.448376\",\"physicalLocation\":\"Phoenix\",\"pairedRegion\":[{\"name\":\"eastus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast\",\"name\":\"australiaeast\",\"displayName\":\"Australia + East\",\"regionalDisplayName\":\"(Asia Pacific) Australia East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"151.2094\",\"latitude\":\"-33.86\",\"physicalLocation\":\"New + South Wales\",\"pairedRegion\":[{\"name\":\"australiasoutheast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia\",\"name\":\"southeastasia\",\"displayName\":\"Southeast + Asia\",\"regionalDisplayName\":\"(Asia Pacific) Southeast Asia\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"103.833\",\"latitude\":\"1.283\",\"physicalLocation\":\"Singapore\",\"pairedRegion\":[{\"name\":\"eastasia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope\",\"name\":\"northeurope\",\"displayName\":\"North + Europe\",\"regionalDisplayName\":\"(Europe) North Europe\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"-6.2597\",\"latitude\":\"53.3478\",\"physicalLocation\":\"Ireland\",\"pairedRegion\":[{\"name\":\"westeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedencentral\",\"name\":\"swedencentral\",\"displayName\":\"Sweden + Central\",\"regionalDisplayName\":\"(Europe) Sweden Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"17.14127\",\"latitude\":\"60.67488\",\"physicalLocation\":\"G\xE4vle\",\"pairedRegion\":[{\"name\":\"swedensouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedensouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth\",\"name\":\"uksouth\",\"displayName\":\"UK + South\",\"regionalDisplayName\":\"(Europe) UK South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"-0.799\",\"latitude\":\"50.941\",\"physicalLocation\":\"London\",\"pairedRegion\":[{\"name\":\"ukwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\",\"name\":\"westeurope\",\"displayName\":\"West + Europe\",\"regionalDisplayName\":\"(Europe) West 
Europe\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"4.9\",\"latitude\":\"52.3667\",\"physicalLocation\":\"Netherlands\",\"pairedRegion\":[{\"name\":\"northeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus\",\"name\":\"centralus\",\"displayName\":\"Central + US\",\"regionalDisplayName\":\"(US) Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-93.6208\",\"latitude\":\"41.5908\",\"physicalLocation\":\"Iowa\",\"pairedRegion\":[{\"name\":\"eastus2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth\",\"name\":\"southafricanorth\",\"displayName\":\"South + Africa North\",\"regionalDisplayName\":\"(Africa) South Africa North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Africa\",\"longitude\":\"28.218370\",\"latitude\":\"-25.731340\",\"physicalLocation\":\"Johannesburg\",\"pairedRegion\":[{\"name\":\"southafricawest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia\",\"name\":\"centralindia\",\"displayName\":\"Central + India\",\"regionalDisplayName\":\"(Asia Pacific) Central India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"73.9197\",\"latitude\":\"18.5822\",\"physicalLocation\":\"Pune\",\"pairedRegion\":[{\"name\":\"southindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia\",\"name\":\"eastasia\",\"displayName\":\"East + Asia\",\"regionalDisplayName\":\"(Asia Pacific) East Asia\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"114.188\",\"latitude\":\"22.267\",\"physicalLocation\":\"Hong + Kong\",\"pairedRegion\":[{\"name\":\"southeastasia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast\",\"name\":\"japaneast\",\"displayName\":\"Japan + East\",\"regionalDisplayName\":\"(Asia Pacific) Japan East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"139.77\",\"latitude\":\"35.68\",\"physicalLocation\":\"Tokyo, + Saitama\",\"pairedRegion\":[{\"name\":\"japanwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral\",\"name\":\"koreacentral\",\"displayName\":\"Korea + Central\",\"regionalDisplayName\":\"(Asia Pacific) Korea Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + 
Pacific\",\"longitude\":\"126.9780\",\"latitude\":\"37.5665\",\"physicalLocation\":\"Seoul\",\"pairedRegion\":[{\"name\":\"koreasouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral\",\"name\":\"canadacentral\",\"displayName\":\"Canada + Central\",\"regionalDisplayName\":\"(Canada) Canada Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Canada\",\"longitude\":\"-79.383\",\"latitude\":\"43.653\",\"physicalLocation\":\"Toronto\",\"pairedRegion\":[{\"name\":\"canadaeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral\",\"name\":\"francecentral\",\"displayName\":\"France + Central\",\"regionalDisplayName\":\"(Europe) France Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"2.3730\",\"latitude\":\"46.3772\",\"physicalLocation\":\"Paris\",\"pairedRegion\":[{\"name\":\"francesouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral\",\"name\":\"germanywestcentral\",\"displayName\":\"Germany + West Central\",\"regionalDisplayName\":\"(Europe) Germany West Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.682127\",\"latitude\":\"50.110924\",\"physicalLocation\":\"Frankfurt\",\"pairedRegion\":[{\"name\":\"germanynorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast\",\"name\":\"norwayeast\",\"displayName\":\"Norway + East\",\"regionalDisplayName\":\"(Europe) Norway East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"10.752245\",\"latitude\":\"59.913868\",\"physicalLocation\":\"Norway\",\"pairedRegion\":[{\"name\":\"norwaywest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth\",\"name\":\"switzerlandnorth\",\"displayName\":\"Switzerland + North\",\"regionalDisplayName\":\"(Europe) Switzerland North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.564572\",\"latitude\":\"47.451542\",\"physicalLocation\":\"Zurich\",\"pairedRegion\":[{\"name\":\"switzerlandwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth\",\"name\":\"uaenorth\",\"displayName\":\"UAE + North\",\"regionalDisplayName\":\"(Middle East) UAE North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Middle + East\",\"longitude\":\"55.316666\",\"latitude\":\"25.266666\",\"physicalLocation\":\"Dubai\",\"pairedRegion\":[{\"name\":\"uaecentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth\",\"name\":\"brazilsouth\",\"displayName\":\"Brazil 
+ South\",\"regionalDisplayName\":\"(South America) Brazil South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"South + America\",\"longitude\":\"-46.633\",\"latitude\":\"-23.55\",\"physicalLocation\":\"Sao + Paulo State\",\"pairedRegion\":[{\"name\":\"southcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap\",\"name\":\"eastus2euap\",\"displayName\":\"East + US 2 EUAP\",\"regionalDisplayName\":\"(US) East US 2 EUAP\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-78.3889\",\"latitude\":\"36.6681\",\"pairedRegion\":[{\"name\":\"centraluseuap\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/qatarcentral\",\"name\":\"qatarcentral\",\"displayName\":\"Qatar + Central\",\"regionalDisplayName\":\"(Middle East) Qatar Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Middle + East\",\"longitude\":\"51.439327\",\"latitude\":\"25.551462\",\"physicalLocation\":\"Doha\",\"pairedRegion\":[{\"name\":\"westeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage\",\"name\":\"centralusstage\",\"displayName\":\"Central + US (Stage)\",\"regionalDisplayName\":\"(US) Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstage\",\"name\":\"eastusstage\",\"displayName\":\"East + US (Stage)\",\"regionalDisplayName\":\"(US) East US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2stage\",\"name\":\"eastus2stage\",\"displayName\":\"East + US 2 (Stage)\",\"regionalDisplayName\":\"(US) East US 2 (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralusstage\",\"name\":\"northcentralusstage\",\"displayName\":\"North + Central US (Stage)\",\"regionalDisplayName\":\"(US) North Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstage\",\"name\":\"southcentralusstage\",\"displayName\":\"South + Central US (Stage)\",\"regionalDisplayName\":\"(US) South Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westusstage\",\"name\":\"westusstage\",\"displayName\":\"West + US (Stage)\",\"regionalDisplayName\":\"(US) West US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2stage\",\"name\":\"westus2stage\",\"displayName\":\"West + US 2 (Stage)\",\"regionalDisplayName\":\"(US) West US 2 
(Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asia\",\"name\":\"asia\",\"displayName\":\"Asia\",\"regionalDisplayName\":\"Asia\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asiapacific\",\"name\":\"asiapacific\",\"displayName\":\"Asia + Pacific\",\"regionalDisplayName\":\"Asia Pacific\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia\",\"name\":\"australia\",\"displayName\":\"Australia\",\"regionalDisplayName\":\"Australia\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil\",\"name\":\"brazil\",\"displayName\":\"Brazil\",\"regionalDisplayName\":\"Brazil\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada\",\"name\":\"canada\",\"displayName\":\"Canada\",\"regionalDisplayName\":\"Canada\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe\",\"name\":\"europe\",\"displayName\":\"Europe\",\"regionalDisplayName\":\"Europe\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/france\",\"name\":\"france\",\"displayName\":\"France\",\"regionalDisplayName\":\"France\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germany\",\"name\":\"germany\",\"displayName\":\"Germany\",\"regionalDisplayName\":\"Germany\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global\",\"name\":\"global\",\"displayName\":\"Global\",\"regionalDisplayName\":\"Global\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india\",\"name\":\"india\",\"displayName\":\"India\",\"regionalDisplayName\":\"India\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan\",\"name\":\"japan\",\"displayName\":\"Japan\",\"regionalDisplayName\":\"Japan\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/korea\",\"name\":\"korea\",\"displayName\":\"Korea\",\"regionalDisplayName\":\"Korea\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norway\",\"name\":\"norway\",\"displayName\":\"Norway\",\"regionalDisplayName\":\"Norway\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/singapore\",\"name\":\"singapore\",\"displayName\":\"Singapore\",\"regionalDisplayName\":\"Singapore\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafrica\",\"name\":\"southafrica\",\"displayName\
":\"South + Africa\",\"regionalDisplayName\":\"South Africa\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerland\",\"name\":\"switzerland\",\"displayName\":\"Switzerland\",\"regionalDisplayName\":\"Switzerland\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uae\",\"name\":\"uae\",\"displayName\":\"United + Arab Emirates\",\"regionalDisplayName\":\"United Arab Emirates\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk\",\"name\":\"uk\",\"displayName\":\"United + Kingdom\",\"regionalDisplayName\":\"United Kingdom\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstates\",\"name\":\"unitedstates\",\"displayName\":\"United + States\",\"regionalDisplayName\":\"United States\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstateseuap\",\"name\":\"unitedstateseuap\",\"displayName\":\"United + States EUAP\",\"regionalDisplayName\":\"United States EUAP\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage\",\"name\":\"eastasiastage\",\"displayName\":\"East + Asia (Stage)\",\"regionalDisplayName\":\"(Asia Pacific) East Asia (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasiastage\",\"name\":\"southeastasiastage\",\"displayName\":\"Southeast + Asia (Stage)\",\"regionalDisplayName\":\"(Asia Pacific) Southeast Asia (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstg\",\"name\":\"eastusstg\",\"displayName\":\"East + US STG\",\"regionalDisplayName\":\"(US) East US STG\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-79.8164\",\"latitude\":\"37.3719\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\":\"southcentralusstg\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstg\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstg\",\"name\":\"southcentralusstg\",\"displayName\":\"South + Central US STG\",\"regionalDisplayName\":\"(US) South Central US STG\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-98.5\",\"latitude\":\"29.4167\",\"physicalLocation\":\"Texas\",\"pairedRegion\":[{\"name\":\"eastusstg\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstg\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus\",\"name\":\"northcentralus\",\"displayName\":\"North + Central US\",\"regionalDisplayName\":\"(US) North Central 
US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-87.6278\",\"latitude\":\"41.8819\",\"physicalLocation\":\"Illinois\",\"pairedRegion\":[{\"name\":\"southcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus\",\"name\":\"westus\",\"displayName\":\"West + US\",\"regionalDisplayName\":\"(US) West US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-122.417\",\"latitude\":\"37.783\",\"physicalLocation\":\"California\",\"pairedRegion\":[{\"name\":\"eastus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest\",\"name\":\"jioindiawest\",\"displayName\":\"Jio + India West\",\"regionalDisplayName\":\"(Asia Pacific) Jio India West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"70.05773\",\"latitude\":\"22.470701\",\"physicalLocation\":\"Jamnagar\",\"pairedRegion\":[{\"name\":\"jioindiacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap\",\"name\":\"centraluseuap\",\"displayName\":\"Central + US EUAP\",\"regionalDisplayName\":\"(US) Central US EUAP\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-93.6208\",\"latitude\":\"41.5908\",\"pairedRegion\":[{\"name\":\"eastus2euap\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus\",\"name\":\"westcentralus\",\"displayName\":\"West + Central US\",\"regionalDisplayName\":\"(US) West Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-110.234\",\"latitude\":\"40.890\",\"physicalLocation\":\"Wyoming\",\"pairedRegion\":[{\"name\":\"westus2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest\",\"name\":\"southafricawest\",\"displayName\":\"South + Africa West\",\"regionalDisplayName\":\"(Africa) South Africa West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Africa\",\"longitude\":\"18.843266\",\"latitude\":\"-34.075691\",\"physicalLocation\":\"Cape + Town\",\"pairedRegion\":[{\"name\":\"southafricanorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral\",\"name\":\"australiacentral\",\"displayName\":\"Australia + Central\",\"regionalDisplayName\":\"(Asia Pacific) Australia Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + 
Pacific\",\"longitude\":\"149.1244\",\"latitude\":\"-35.3075\",\"physicalLocation\":\"Canberra\",\"pairedRegion\":[{\"name\":\"australiacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2\",\"name\":\"australiacentral2\",\"displayName\":\"Australia + Central 2\",\"regionalDisplayName\":\"(Asia Pacific) Australia Central 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"149.1244\",\"latitude\":\"-35.3075\",\"physicalLocation\":\"Canberra\",\"pairedRegion\":[{\"name\":\"australiacentral2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast\",\"name\":\"australiasoutheast\",\"displayName\":\"Australia + Southeast\",\"regionalDisplayName\":\"(Asia Pacific) Australia Southeast\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"144.9631\",\"latitude\":\"-37.8136\",\"physicalLocation\":\"Victoria\",\"pairedRegion\":[{\"name\":\"australiaeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest\",\"name\":\"japanwest\",\"displayName\":\"Japan + West\",\"regionalDisplayName\":\"(Asia Pacific) Japan West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"135.5022\",\"latitude\":\"34.6939\",\"physicalLocation\":\"Osaka\",\"pairedRegion\":[{\"name\":\"japaneast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral\",\"name\":\"jioindiacentral\",\"displayName\":\"Jio + India Central\",\"regionalDisplayName\":\"(Asia Pacific) Jio India Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"79.08886\",\"latitude\":\"21.146633\",\"physicalLocation\":\"Nagpur\",\"pairedRegion\":[{\"name\":\"jioindiawest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth\",\"name\":\"koreasouth\",\"displayName\":\"Korea + South\",\"regionalDisplayName\":\"(Asia Pacific) Korea South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"129.0756\",\"latitude\":\"35.1796\",\"physicalLocation\":\"Busan\",\"pairedRegion\":[{\"name\":\"koreacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\",\"name\":\"southindia\",\"displayName\":\"South + India\",\"regionalDisplayName\":\"(Asia Pacific) South India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + 
Pacific\",\"longitude\":\"80.1636\",\"latitude\":\"12.9822\",\"physicalLocation\":\"Chennai\",\"pairedRegion\":[{\"name\":\"centralindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westindia\",\"name\":\"westindia\",\"displayName\":\"West + India\",\"regionalDisplayName\":\"(Asia Pacific) West India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"72.868\",\"latitude\":\"19.088\",\"physicalLocation\":\"Mumbai\",\"pairedRegion\":[{\"name\":\"southindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast\",\"name\":\"canadaeast\",\"displayName\":\"Canada + East\",\"regionalDisplayName\":\"(Canada) Canada East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Canada\",\"longitude\":\"-71.217\",\"latitude\":\"46.817\",\"physicalLocation\":\"Quebec\",\"pairedRegion\":[{\"name\":\"canadacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth\",\"name\":\"francesouth\",\"displayName\":\"France + South\",\"regionalDisplayName\":\"(Europe) France South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"2.1972\",\"latitude\":\"43.8345\",\"physicalLocation\":\"Marseille\",\"pairedRegion\":[{\"name\":\"francecentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth\",\"name\":\"germanynorth\",\"displayName\":\"Germany + North\",\"regionalDisplayName\":\"(Europe) Germany North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.806422\",\"latitude\":\"53.073635\",\"physicalLocation\":\"Berlin\",\"pairedRegion\":[{\"name\":\"germanywestcentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest\",\"name\":\"norwaywest\",\"displayName\":\"Norway + West\",\"regionalDisplayName\":\"(Europe) Norway West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"5.733107\",\"latitude\":\"58.969975\",\"physicalLocation\":\"Norway\",\"pairedRegion\":[{\"name\":\"norwayeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest\",\"name\":\"switzerlandwest\",\"displayName\":\"Switzerland + West\",\"regionalDisplayName\":\"(Europe) Switzerland West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"6.143158\",\"latitude\":\"46.204391\",\"physicalLocation\":\"Geneva\",\"pairedRegion\":[{\"name\":\"switzerlandnorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest\",\"name\":\"ukwest\",\"displayName\":\"UK + West\",\"regionalDisplayName\":\"(Europe) UK 
West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"-3.084\",\"latitude\":\"53.427\",\"physicalLocation\":\"Cardiff\",\"pairedRegion\":[{\"name\":\"uksouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral\",\"name\":\"uaecentral\",\"displayName\":\"UAE + Central\",\"regionalDisplayName\":\"(Middle East) UAE Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Middle + East\",\"longitude\":\"54.366669\",\"latitude\":\"24.466667\",\"physicalLocation\":\"Abu + Dhabi\",\"pairedRegion\":[{\"name\":\"uaenorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsoutheast\",\"name\":\"brazilsoutheast\",\"displayName\":\"Brazil + Southeast\",\"regionalDisplayName\":\"(South America) Brazil Southeast\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"South + America\",\"longitude\":\"-43.2075\",\"latitude\":\"-22.90278\",\"physicalLocation\":\"Rio\",\"pairedRegion\":[{\"name\":\"brazilsouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth\"}]}}]}" headers: cache-control: - no-cache content-length: - - '0' + - '30402' + content-type: + - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:31 GMT + - Tue, 11 Oct 2022 20:52:39 GMT expires: - '-1' pragma: - no-cache - server: - - Microsoft-IIS/10.0 strict-transport-security: - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK - request: - body: null + body: '{"factoryResourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001", + "repoConfiguration": {"type": "FactoryVSTSConfiguration", "accountName": "ADF", + "repositoryName": "repo", "collaborationBranch": "master", "rootFolder": "/", + "lastCommitId": "", "projectName": "project", "tenantId": ""}}' headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory integration-runtime get-status + - datafactory configure-factory-repo Connection: - keep-alive Content-Length: - - '0' + - '384' + Content-Type: + - application/json ParameterSetName: - - --factory-name --name --resource-group + - --factory-resource-id --factory-vsts-configuration --location User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/getStatus?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory/locations/eastus/configureFactoryRepo?api-version=2018-06-01 response: body: - string: 
'{"name":"exampleInteg000002","properties":{"dataFactoryName":"exampleFa000001","state":"NeedRegistration","type":"SelfHosted","typeProperties":{"serviceRegion":"eu","autoUpdate":"Off","internalChannelEncryption":"NotSet","taskQueueId":"a9a706fd-86c1-4717-9b1d-8c451e820276","nodes":[],"updateDelayOffset":"PT3H","serviceUrls":["examplefa5qnc3dfb3.eastus.datafactory.azure.net"],"links":[],"versionStatus":"None","capabilities":{},"latestVersion":"5.5.7762.1","newerVersions":["5.5.7762.1","5.4.7749.1","5.4.7741.1","5.2.7740.4","5.4.7732.1"],"createTime":"2021-04-26T06:14:06.5886459Z"}}}' + string: "{\n \"name\": \"exampleFa000001\",\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/examplefa4rqmquxng\",\n + \ \"type\": \"Microsoft.DataFactory/factories\",\n \"properties\": {\n \"provisioningState\": + \"Succeeded\",\n \"createTime\": \"2022-10-11T20:52:07.4135245Z\",\n \"version\": + \"2018-06-01\",\n \"repoConfiguration\": {\n \"type\": \"FactoryVSTSConfiguration\",\n + \ \"accountName\": \"ADF\",\n \"repositoryName\": \"repo\",\n \"collaborationBranch\": + \"master\",\n \"rootFolder\": \"/\",\n \"lastCommitId\": \"\",\n + \ \"projectName\": \"project\",\n \"tenantId\": \"\"\n },\n \"encryption\": + {}\n },\n \"eTag\": \"\\\"0d004ec0-0000-0100-0000-6345d7980000\\\"\",\n + \ \"location\": \"eastus\",\n \"identity\": {\n \"type\": \"SystemAssigned\",\n + \ \"principalId\": \"5639cd3e-8c37-4b3c-b16e-51faf16b8893\",\n \"tenantId\": + \"16b3c013-d300-468d-ac64-7eda0820b6d3\"\n },\n \"tags\": {\n \"exampleTag\": + \"exampleValue\"\n }\n}" headers: cache-control: - no-cache content-length: - - '598' + - '962' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:32 GMT + - Tue, 11 Oct 2022 20:52:40 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -2054,8 +7395,8 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET + x-ms-ratelimit-remaining-subscription-writes: + - '1199' status: code: 200 message: OK @@ -2067,48 +7408,39 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory trigger get-event-subscription-status + - datafactory integration-runtime delete Connection: - keep-alive Content-Length: - '0' ParameterSetName: - - --factory-name --resource-group --name + - -y --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/getEventSubscriptionStatus?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002?api-version=2018-06-01 response: body: - string: '{"triggerName":"example000006","status":"Enabled"}' + string: '' headers: cache-control: - no-cache content-length: - - '51' - content-type: - - application/json; charset=utf-8 + - '0' date: - - Mon, 26 Apr 2021 06:14:32 GMT + - Tue, 11 Oct 2022 
20:52:42 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - x-powered-by: - - ASP.NET + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' status: code: 200 message: OK @@ -2120,48 +7452,39 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory trigger unsubscribe-from-event + - datafactory trigger delete Connection: - keep-alive Content-Length: - '0' ParameterSetName: - - --factory-name --resource-group --name + - -y --factory-name --resource-group --name User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/unsubscribeFromEvents?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 response: body: - string: '{"triggerName":"example000006","status":"Disabled"}' + string: '' headers: cache-control: - no-cache content-length: - - '52' - content-type: - - application/json; charset=utf-8 + - '0' date: - - Mon, 26 Apr 2021 06:14:33 GMT + - Tue, 11 Oct 2022 20:52:44 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - x-powered-by: - - ASP.NET + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' status: code: 200 message: OK @@ -2173,48 +7496,39 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory trigger subscribe-to-event + - datafactory pipeline delete Connection: - keep-alive Content-Length: - '0' ParameterSetName: - - --factory-name --resource-group --name + - -y --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/subscribeToEvents?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005?api-version=2018-06-01 response: body: - string: '{"triggerName":"example000006","status":"Enabled"}' + string: '' headers: cache-control: - no-cache content-length: - - '51' - content-type: - - application/json; charset=utf-8 + - '0' date: - - Mon, 26 Apr 2021 06:14:35 GMT + - Tue, 11 Oct 2022 20:52:45 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - 
Kestrel strict-transport-security: - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - x-powered-by: - - ASP.NET + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' status: code: 200 message: OK @@ -2226,18 +7540,17 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory trigger start + - datafactory dataset delete Connection: - keep-alive Content-Length: - '0' ParameterSetName: - - --factory-name --resource-group --name + - -y --name --factory-name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/start?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004?api-version=2018-06-01 response: body: string: '' @@ -2247,21 +7560,19 @@ interactions: content-length: - '0' date: - - Mon, 26 Apr 2021 06:14:38 GMT + - Tue, 11 Oct 2022 20:52:45 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains x-content-type-options: - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - x-powered-by: - - ASP.NET + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' status: code: 200 message: OK @@ -2273,18 +7584,17 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory trigger stop + - datafactory linked-service delete Connection: - keep-alive Content-Length: - '0' ParameterSetName: - - --factory-name --resource-group --name + - -y --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/stop?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003?api-version=2018-06-01 response: body: string: '' @@ -2294,322 +7604,247 @@ interactions: content-length: - '0' date: - - Mon, 26 Apr 2021 06:14:41 GMT + - Tue, 11 Oct 2022 20:52:47 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains x-content-type-options: - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - x-powered-by: - - ASP.NET + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' status: code: 200 message: OK - request: - body: '{"permissions": "r", "accessResourcePath": "", "profileName": "DefaultProfile", - "startTime": "2018-11-10T02:46:20.2659347Z", 
"expireTime": "2018-11-10T09:46:20.2659347Z"}' + body: null headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory get-data-plane-access + - datafactory delete Connection: - keep-alive Content-Length: - - '170' - Content-Type: - - application/json + - '0' ParameterSetName: - - --name --access-resource-path --expire-time --permissions --profile-name --start-time - --resource-group + - -y --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/getDataPlaneAccess?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 response: body: - string: '{"policy":{"permissions":"r","accessResourcePath":"","profileName":"DefaultProfile","startTime":"2018-11-10T02:46:20.2659347Z","expireTime":"2018-11-10T09:46:20.2659347Z"},"dataPlaneUrl":"https://dpeastus.svc.datafactory.azure.com/dataplane","accessToken":"EAAAAOzMyR0nKhAsquydok/3FiewAQAAldolR6fZ/QXURYf8LmzIQhlbZ5KPPrhxUN3NtNVOyPfj0LU16xDxhzhPRzibayyb+le1ujMNo6y6cGvIuZ2pghzrqca4ZBXBuTKEr2luQEMttXb36oGBn3CNGXKEeZBYxOt6QEvYvBsrkalH+LhOD2kbGqxhkoWIj58mwG2oW0YD39cuosVcP5NYPGHJ5/dSfCC6y/x9mVYTdwAlAjgyo7eQd/Sj2tJh+WxPnLyft3l+BnXmBZWDU5qyV8SHlHUlKVG9vAuCnc8YTkdLH3+mLOJFU+lLUDHnjf+9AWN9CqLZZ+HX0Vth9MC0HMYXLtF6Kfm0sb4B10nBO38nVcKx2W+pYl3IN2CTaMxLC7SsbWp0VQ/YE9mUG1hgNOtLJpULJ99kVlZBBdSkYwLBOCNR/8nXK+O9y4QVkgf00pZiZmRhZz+HaQq2IwflWvN/AHgGA3Jx61J2XhMebqIQg2+qx3o6n0TLVuz7GE0lQpX5V/pX4iCePb82o6uxIgIdoztYahDNPcCXvwuL1nVWPOHjWc/Cm9EyvYmk0uDHmz3nQ294jRKciOElQg1LyvdEPRx2IAAAAPKNLZM+2jvM64e+RlV+ZRONovuxThkqtN4FTVuhVXQZ"}' + string: '' headers: cache-control: - no-cache content-length: - - '915' - content-type: - - application/json; charset=utf-8 + - '0' date: - - Mon, 26 Apr 2021 06:14:41 GMT + - Tue, 11 Oct 2022 20:52:50 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1199' - x-powered-by: - - ASP.NET + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' status: code: 200 message: OK - request: - body: '{"lastUpdatedAfter": "2021-04-26T06:13:26.000Z", "lastUpdatedBefore": "2021-04-26T07:13:26.000Z"}' + body: null headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory trigger-run query-by-factory + - datafactory delete Connection: - keep-alive - Content-Length: - - '97' - Content-Type: - - application/json ParameterSetName: - - --factory-name --last-updated-after --last-updated-before --resource-group + - -y --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/queryTriggerRuns?api-version=2018-06-01 + - 
AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/locations?api-version=2019-11-01 response: body: - string: '{"value":[]}' + string: "{\"value\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\",\"name\":\"eastus\",\"displayName\":\"East + US\",\"regionalDisplayName\":\"(US) East US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-79.8164\",\"latitude\":\"37.3719\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\":\"westus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2\",\"name\":\"eastus2\",\"displayName\":\"East + US 2\",\"regionalDisplayName\":\"(US) East US 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-78.3889\",\"latitude\":\"36.6681\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\":\"centralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\",\"name\":\"southcentralus\",\"displayName\":\"South + Central US\",\"regionalDisplayName\":\"(US) South Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-98.5\",\"latitude\":\"29.4167\",\"physicalLocation\":\"Texas\",\"pairedRegion\":[{\"name\":\"northcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2\",\"name\":\"westus2\",\"displayName\":\"West + US 2\",\"regionalDisplayName\":\"(US) West US 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-119.852\",\"latitude\":\"47.233\",\"physicalLocation\":\"Washington\",\"pairedRegion\":[{\"name\":\"westcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus3\",\"name\":\"westus3\",\"displayName\":\"West + US 3\",\"regionalDisplayName\":\"(US) West US 3\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-112.074036\",\"latitude\":\"33.448376\",\"physicalLocation\":\"Phoenix\",\"pairedRegion\":[{\"name\":\"eastus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast\",\"name\":\"australiaeast\",\"displayName\":\"Australia + East\",\"regionalDisplayName\":\"(Asia Pacific) Australia East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"151.2094\",\"latitude\":\"-33.86\",\"physicalLocation\":\"New + South Wales\",\"pairedRegion\":[{\"name\":\"australiasoutheast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia\",\"name\":\"southeastasia\",\"displayName\":\"Southeast + 
Asia\",\"regionalDisplayName\":\"(Asia Pacific) Southeast Asia\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"103.833\",\"latitude\":\"1.283\",\"physicalLocation\":\"Singapore\",\"pairedRegion\":[{\"name\":\"eastasia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope\",\"name\":\"northeurope\",\"displayName\":\"North + Europe\",\"regionalDisplayName\":\"(Europe) North Europe\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"-6.2597\",\"latitude\":\"53.3478\",\"physicalLocation\":\"Ireland\",\"pairedRegion\":[{\"name\":\"westeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedencentral\",\"name\":\"swedencentral\",\"displayName\":\"Sweden + Central\",\"regionalDisplayName\":\"(Europe) Sweden Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"17.14127\",\"latitude\":\"60.67488\",\"physicalLocation\":\"G\xE4vle\",\"pairedRegion\":[{\"name\":\"swedensouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedensouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth\",\"name\":\"uksouth\",\"displayName\":\"UK + South\",\"regionalDisplayName\":\"(Europe) UK South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"-0.799\",\"latitude\":\"50.941\",\"physicalLocation\":\"London\",\"pairedRegion\":[{\"name\":\"ukwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\",\"name\":\"westeurope\",\"displayName\":\"West + Europe\",\"regionalDisplayName\":\"(Europe) West Europe\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"4.9\",\"latitude\":\"52.3667\",\"physicalLocation\":\"Netherlands\",\"pairedRegion\":[{\"name\":\"northeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus\",\"name\":\"centralus\",\"displayName\":\"Central + US\",\"regionalDisplayName\":\"(US) Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-93.6208\",\"latitude\":\"41.5908\",\"physicalLocation\":\"Iowa\",\"pairedRegion\":[{\"name\":\"eastus2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth\",\"name\":\"southafricanorth\",\"displayName\":\"South + Africa North\",\"regionalDisplayName\":\"(Africa) South Africa 
North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Africa\",\"longitude\":\"28.218370\",\"latitude\":\"-25.731340\",\"physicalLocation\":\"Johannesburg\",\"pairedRegion\":[{\"name\":\"southafricawest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia\",\"name\":\"centralindia\",\"displayName\":\"Central + India\",\"regionalDisplayName\":\"(Asia Pacific) Central India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"73.9197\",\"latitude\":\"18.5822\",\"physicalLocation\":\"Pune\",\"pairedRegion\":[{\"name\":\"southindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia\",\"name\":\"eastasia\",\"displayName\":\"East + Asia\",\"regionalDisplayName\":\"(Asia Pacific) East Asia\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"114.188\",\"latitude\":\"22.267\",\"physicalLocation\":\"Hong + Kong\",\"pairedRegion\":[{\"name\":\"southeastasia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast\",\"name\":\"japaneast\",\"displayName\":\"Japan + East\",\"regionalDisplayName\":\"(Asia Pacific) Japan East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"139.77\",\"latitude\":\"35.68\",\"physicalLocation\":\"Tokyo, + Saitama\",\"pairedRegion\":[{\"name\":\"japanwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral\",\"name\":\"koreacentral\",\"displayName\":\"Korea + Central\",\"regionalDisplayName\":\"(Asia Pacific) Korea Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"126.9780\",\"latitude\":\"37.5665\",\"physicalLocation\":\"Seoul\",\"pairedRegion\":[{\"name\":\"koreasouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral\",\"name\":\"canadacentral\",\"displayName\":\"Canada + Central\",\"regionalDisplayName\":\"(Canada) Canada Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Canada\",\"longitude\":\"-79.383\",\"latitude\":\"43.653\",\"physicalLocation\":\"Toronto\",\"pairedRegion\":[{\"name\":\"canadaeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral\",\"name\":\"francecentral\",\"displayName\":\"France + Central\",\"regionalDisplayName\":\"(Europe) France 
Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"2.3730\",\"latitude\":\"46.3772\",\"physicalLocation\":\"Paris\",\"pairedRegion\":[{\"name\":\"francesouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral\",\"name\":\"germanywestcentral\",\"displayName\":\"Germany + West Central\",\"regionalDisplayName\":\"(Europe) Germany West Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.682127\",\"latitude\":\"50.110924\",\"physicalLocation\":\"Frankfurt\",\"pairedRegion\":[{\"name\":\"germanynorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast\",\"name\":\"norwayeast\",\"displayName\":\"Norway + East\",\"regionalDisplayName\":\"(Europe) Norway East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"10.752245\",\"latitude\":\"59.913868\",\"physicalLocation\":\"Norway\",\"pairedRegion\":[{\"name\":\"norwaywest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth\",\"name\":\"switzerlandnorth\",\"displayName\":\"Switzerland + North\",\"regionalDisplayName\":\"(Europe) Switzerland North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.564572\",\"latitude\":\"47.451542\",\"physicalLocation\":\"Zurich\",\"pairedRegion\":[{\"name\":\"switzerlandwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth\",\"name\":\"uaenorth\",\"displayName\":\"UAE + North\",\"regionalDisplayName\":\"(Middle East) UAE North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Middle + East\",\"longitude\":\"55.316666\",\"latitude\":\"25.266666\",\"physicalLocation\":\"Dubai\",\"pairedRegion\":[{\"name\":\"uaecentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth\",\"name\":\"brazilsouth\",\"displayName\":\"Brazil + South\",\"regionalDisplayName\":\"(South America) Brazil South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"South + America\",\"longitude\":\"-46.633\",\"latitude\":\"-23.55\",\"physicalLocation\":\"Sao + Paulo State\",\"pairedRegion\":[{\"name\":\"southcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap\",\"name\":\"eastus2euap\",\"displayName\":\"East + US 2 EUAP\",\"regionalDisplayName\":\"(US) East US 2 
EUAP\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-78.3889\",\"latitude\":\"36.6681\",\"pairedRegion\":[{\"name\":\"centraluseuap\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/qatarcentral\",\"name\":\"qatarcentral\",\"displayName\":\"Qatar + Central\",\"regionalDisplayName\":\"(Middle East) Qatar Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Middle + East\",\"longitude\":\"51.439327\",\"latitude\":\"25.551462\",\"physicalLocation\":\"Doha\",\"pairedRegion\":[{\"name\":\"westeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage\",\"name\":\"centralusstage\",\"displayName\":\"Central + US (Stage)\",\"regionalDisplayName\":\"(US) Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstage\",\"name\":\"eastusstage\",\"displayName\":\"East + US (Stage)\",\"regionalDisplayName\":\"(US) East US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2stage\",\"name\":\"eastus2stage\",\"displayName\":\"East + US 2 (Stage)\",\"regionalDisplayName\":\"(US) East US 2 (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralusstage\",\"name\":\"northcentralusstage\",\"displayName\":\"North + Central US (Stage)\",\"regionalDisplayName\":\"(US) North Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstage\",\"name\":\"southcentralusstage\",\"displayName\":\"South + Central US (Stage)\",\"regionalDisplayName\":\"(US) South Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westusstage\",\"name\":\"westusstage\",\"displayName\":\"West + US (Stage)\",\"regionalDisplayName\":\"(US) West US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2stage\",\"name\":\"westus2stage\",\"displayName\":\"West + US 2 (Stage)\",\"regionalDisplayName\":\"(US) West US 2 (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asia\",\"name\":\"asia\",\"displayName\":\"Asia\",\"regionalDisplayName\":\"Asia\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asiapacific\",\"name\":\"asiapacific\",\"displayName\":\"Asia + Pacific\",\"regionalDisplayName\":\"Asia 
Pacific\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia\",\"name\":\"australia\",\"displayName\":\"Australia\",\"regionalDisplayName\":\"Australia\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil\",\"name\":\"brazil\",\"displayName\":\"Brazil\",\"regionalDisplayName\":\"Brazil\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada\",\"name\":\"canada\",\"displayName\":\"Canada\",\"regionalDisplayName\":\"Canada\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe\",\"name\":\"europe\",\"displayName\":\"Europe\",\"regionalDisplayName\":\"Europe\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/france\",\"name\":\"france\",\"displayName\":\"France\",\"regionalDisplayName\":\"France\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germany\",\"name\":\"germany\",\"displayName\":\"Germany\",\"regionalDisplayName\":\"Germany\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global\",\"name\":\"global\",\"displayName\":\"Global\",\"regionalDisplayName\":\"Global\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india\",\"name\":\"india\",\"displayName\":\"India\",\"regionalDisplayName\":\"India\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan\",\"name\":\"japan\",\"displayName\":\"Japan\",\"regionalDisplayName\":\"Japan\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/korea\",\"name\":\"korea\",\"displayName\":\"Korea\",\"regionalDisplayName\":\"Korea\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norway\",\"name\":\"norway\",\"displayName\":\"Norway\",\"regionalDisplayName\":\"Norway\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/singapore\",\"name\":\"singapore\",\"displayName\":\"Singapore\",\"regionalDisplayName\":\"Singapore\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafrica\",\"name\":\"southafrica\",\"displayName\":\"South + Africa\",\"regionalDisplayName\":\"South Africa\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerland\",\"name\":\"switzerland\",\"displayName\":\"Switzerland\",\"regionalDisplayName\":\"Switzerland\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uae\",\"name\":\"uae\",\"displayName\":\"United + 
Arab Emirates\",\"regionalDisplayName\":\"United Arab Emirates\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk\",\"name\":\"uk\",\"displayName\":\"United + Kingdom\",\"regionalDisplayName\":\"United Kingdom\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstates\",\"name\":\"unitedstates\",\"displayName\":\"United + States\",\"regionalDisplayName\":\"United States\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstateseuap\",\"name\":\"unitedstateseuap\",\"displayName\":\"United + States EUAP\",\"regionalDisplayName\":\"United States EUAP\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage\",\"name\":\"eastasiastage\",\"displayName\":\"East + Asia (Stage)\",\"regionalDisplayName\":\"(Asia Pacific) East Asia (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasiastage\",\"name\":\"southeastasiastage\",\"displayName\":\"Southeast + Asia (Stage)\",\"regionalDisplayName\":\"(Asia Pacific) Southeast Asia (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstg\",\"name\":\"eastusstg\",\"displayName\":\"East + US STG\",\"regionalDisplayName\":\"(US) East US STG\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-79.8164\",\"latitude\":\"37.3719\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\":\"southcentralusstg\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstg\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstg\",\"name\":\"southcentralusstg\",\"displayName\":\"South + Central US STG\",\"regionalDisplayName\":\"(US) South Central US STG\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-98.5\",\"latitude\":\"29.4167\",\"physicalLocation\":\"Texas\",\"pairedRegion\":[{\"name\":\"eastusstg\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstg\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus\",\"name\":\"northcentralus\",\"displayName\":\"North + Central US\",\"regionalDisplayName\":\"(US) North Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-87.6278\",\"latitude\":\"41.8819\",\"physicalLocation\":\"Illinois\",\"pairedRegion\":[{\"name\":\"southcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus\",\"name\":\"westus\",\"displayName\":\"West + US\",\"regionalDisplayName\":\"(US) West 
US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-122.417\",\"latitude\":\"37.783\",\"physicalLocation\":\"California\",\"pairedRegion\":[{\"name\":\"eastus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest\",\"name\":\"jioindiawest\",\"displayName\":\"Jio + India West\",\"regionalDisplayName\":\"(Asia Pacific) Jio India West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"70.05773\",\"latitude\":\"22.470701\",\"physicalLocation\":\"Jamnagar\",\"pairedRegion\":[{\"name\":\"jioindiacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap\",\"name\":\"centraluseuap\",\"displayName\":\"Central + US EUAP\",\"regionalDisplayName\":\"(US) Central US EUAP\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-93.6208\",\"latitude\":\"41.5908\",\"pairedRegion\":[{\"name\":\"eastus2euap\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus\",\"name\":\"westcentralus\",\"displayName\":\"West + Central US\",\"regionalDisplayName\":\"(US) West Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-110.234\",\"latitude\":\"40.890\",\"physicalLocation\":\"Wyoming\",\"pairedRegion\":[{\"name\":\"westus2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest\",\"name\":\"southafricawest\",\"displayName\":\"South + Africa West\",\"regionalDisplayName\":\"(Africa) South Africa West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Africa\",\"longitude\":\"18.843266\",\"latitude\":\"-34.075691\",\"physicalLocation\":\"Cape + Town\",\"pairedRegion\":[{\"name\":\"southafricanorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral\",\"name\":\"australiacentral\",\"displayName\":\"Australia + Central\",\"regionalDisplayName\":\"(Asia Pacific) Australia Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"149.1244\",\"latitude\":\"-35.3075\",\"physicalLocation\":\"Canberra\",\"pairedRegion\":[{\"name\":\"australiacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2\",\"name\":\"australiacentral2\",\"displayName\":\"Australia + Central 2\",\"regionalDisplayName\":\"(Asia Pacific) Australia Central 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + 
Pacific\",\"longitude\":\"149.1244\",\"latitude\":\"-35.3075\",\"physicalLocation\":\"Canberra\",\"pairedRegion\":[{\"name\":\"australiacentral2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast\",\"name\":\"australiasoutheast\",\"displayName\":\"Australia + Southeast\",\"regionalDisplayName\":\"(Asia Pacific) Australia Southeast\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"144.9631\",\"latitude\":\"-37.8136\",\"physicalLocation\":\"Victoria\",\"pairedRegion\":[{\"name\":\"australiaeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest\",\"name\":\"japanwest\",\"displayName\":\"Japan + West\",\"regionalDisplayName\":\"(Asia Pacific) Japan West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"135.5022\",\"latitude\":\"34.6939\",\"physicalLocation\":\"Osaka\",\"pairedRegion\":[{\"name\":\"japaneast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral\",\"name\":\"jioindiacentral\",\"displayName\":\"Jio + India Central\",\"regionalDisplayName\":\"(Asia Pacific) Jio India Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"79.08886\",\"latitude\":\"21.146633\",\"physicalLocation\":\"Nagpur\",\"pairedRegion\":[{\"name\":\"jioindiawest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth\",\"name\":\"koreasouth\",\"displayName\":\"Korea + South\",\"regionalDisplayName\":\"(Asia Pacific) Korea South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"129.0756\",\"latitude\":\"35.1796\",\"physicalLocation\":\"Busan\",\"pairedRegion\":[{\"name\":\"koreacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\",\"name\":\"southindia\",\"displayName\":\"South + India\",\"regionalDisplayName\":\"(Asia Pacific) South India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"80.1636\",\"latitude\":\"12.9822\",\"physicalLocation\":\"Chennai\",\"pairedRegion\":[{\"name\":\"centralindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westindia\",\"name\":\"westindia\",\"displayName\":\"West + India\",\"regionalDisplayName\":\"(Asia Pacific) West India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + 
Pacific\",\"longitude\":\"72.868\",\"latitude\":\"19.088\",\"physicalLocation\":\"Mumbai\",\"pairedRegion\":[{\"name\":\"southindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast\",\"name\":\"canadaeast\",\"displayName\":\"Canada + East\",\"regionalDisplayName\":\"(Canada) Canada East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Canada\",\"longitude\":\"-71.217\",\"latitude\":\"46.817\",\"physicalLocation\":\"Quebec\",\"pairedRegion\":[{\"name\":\"canadacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth\",\"name\":\"francesouth\",\"displayName\":\"France + South\",\"regionalDisplayName\":\"(Europe) France South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"2.1972\",\"latitude\":\"43.8345\",\"physicalLocation\":\"Marseille\",\"pairedRegion\":[{\"name\":\"francecentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth\",\"name\":\"germanynorth\",\"displayName\":\"Germany + North\",\"regionalDisplayName\":\"(Europe) Germany North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.806422\",\"latitude\":\"53.073635\",\"physicalLocation\":\"Berlin\",\"pairedRegion\":[{\"name\":\"germanywestcentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest\",\"name\":\"norwaywest\",\"displayName\":\"Norway + West\",\"regionalDisplayName\":\"(Europe) Norway West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"5.733107\",\"latitude\":\"58.969975\",\"physicalLocation\":\"Norway\",\"pairedRegion\":[{\"name\":\"norwayeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest\",\"name\":\"switzerlandwest\",\"displayName\":\"Switzerland + West\",\"regionalDisplayName\":\"(Europe) Switzerland West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"6.143158\",\"latitude\":\"46.204391\",\"physicalLocation\":\"Geneva\",\"pairedRegion\":[{\"name\":\"switzerlandnorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest\",\"name\":\"ukwest\",\"displayName\":\"UK + West\",\"regionalDisplayName\":\"(Europe) UK West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"-3.084\",\"latitude\":\"53.427\",\"physicalLocation\":\"Cardiff\",\"pairedRegion\":[{\"name\":\"uksouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral\",\"name\":\"uaecentral\",\"displayName\":\"UAE + Central\",\"regionalDisplayName\":\"(Middle East) UAE 
Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Middle + East\",\"longitude\":\"54.366669\",\"latitude\":\"24.466667\",\"physicalLocation\":\"Abu + Dhabi\",\"pairedRegion\":[{\"name\":\"uaenorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsoutheast\",\"name\":\"brazilsoutheast\",\"displayName\":\"Brazil + Southeast\",\"regionalDisplayName\":\"(South America) Brazil Southeast\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"South + America\",\"longitude\":\"-43.2075\",\"latitude\":\"-22.90278\",\"physicalLocation\":\"Rio\",\"pairedRegion\":[{\"name\":\"brazilsouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth\"}]}}]}" headers: cache-control: - no-cache content-length: - - '12' + - '30402' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:42 GMT + - Tue, 11 Oct 2022 20:52:50 GMT expires: - '-1' pragma: - no-cache - server: - - Microsoft-IIS/10.0 strict-transport-security: - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked vary: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK - request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - datafactory trigger-run query-by-factory - Connection: - - keep-alive - ParameterSetName: - - --factory-name --last-updated-after --last-updated-before --resource-group - User-Agent: - - python/3.8.0 (Windows-10-10.0.19041-SP0) msrest/0.6.21 msrest_azure/0.6.3 - azure-mgmt-resource/12.1.0 Azure-SDK-For-Python AZURECLI/2.22.1 - accept-language: - - en-US - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/locations?api-version=2019-11-01 - response: - body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus","name":"eastus","displayName":"East - US","regionalDisplayName":"(US) East US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-79.8164","latitude":"37.3719","physicalLocation":"Virginia","pairedRegion":[{"name":"westus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2","name":"eastus2","displayName":"East - US 2","regionalDisplayName":"(US) East US 2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","physicalLocation":"Virginia","pairedRegion":[{"name":"centralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus","name":"southcentralus","displayName":"South - Central US","regionalDisplayName":"(US) South Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-98.5","latitude":"29.4167","physicalLocation":"Texas","pairedRegion":[{"name":"northcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2","name":"westus2","displayName":"West - US 2","regionalDisplayName":"(US) West US 
2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-119.852","latitude":"47.233","physicalLocation":"Washington","pairedRegion":[{"name":"westcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast","name":"australiaeast","displayName":"Australia - East","regionalDisplayName":"(Asia Pacific) Australia East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"151.2094","latitude":"-33.86","physicalLocation":"New - South Wales","pairedRegion":[{"name":"australiasoutheast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia","name":"southeastasia","displayName":"Southeast - Asia","regionalDisplayName":"(Asia Pacific) Southeast Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"103.833","latitude":"1.283","physicalLocation":"Singapore","pairedRegion":[{"name":"eastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope","name":"northeurope","displayName":"North - Europe","regionalDisplayName":"(Europe) North Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-6.2597","latitude":"53.3478","physicalLocation":"Ireland","pairedRegion":[{"name":"westeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth","name":"uksouth","displayName":"UK - South","regionalDisplayName":"(Europe) UK South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-0.799","latitude":"50.941","physicalLocation":"London","pairedRegion":[{"name":"ukwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope","name":"westeurope","displayName":"West - Europe","regionalDisplayName":"(Europe) West Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"4.9","latitude":"52.3667","physicalLocation":"Netherlands","pairedRegion":[{"name":"northeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus","name":"centralus","displayName":"Central - US","regionalDisplayName":"(US) Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","physicalLocation":"Iowa","pairedRegion":[{"name":"eastus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus","name":"northcentralus","displayName":"North - Central US","regionalDisplayName":"(US) North Central 
US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-87.6278","latitude":"41.8819","physicalLocation":"Illinois","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus","name":"westus","displayName":"West - US","regionalDisplayName":"(US) West US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-122.417","latitude":"37.783","physicalLocation":"California","pairedRegion":[{"name":"eastus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth","name":"southafricanorth","displayName":"South - Africa North","regionalDisplayName":"(Africa) South Africa North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Africa","longitude":"28.218370","latitude":"-25.731340","physicalLocation":"Johannesburg","pairedRegion":[{"name":"southafricawest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia","name":"centralindia","displayName":"Central - India","regionalDisplayName":"(Asia Pacific) Central India","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"73.9197","latitude":"18.5822","physicalLocation":"Pune","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia","name":"eastasia","displayName":"East - Asia","regionalDisplayName":"(Asia Pacific) East Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"114.188","latitude":"22.267","physicalLocation":"Hong - Kong","pairedRegion":[{"name":"southeastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast","name":"japaneast","displayName":"Japan - East","regionalDisplayName":"(Asia Pacific) Japan East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"139.77","latitude":"35.68","physicalLocation":"Tokyo, - Saitama","pairedRegion":[{"name":"japanwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest","name":"jioindiawest","displayName":"JIO - India West","regionalDisplayName":"(Asia Pacific) JIO India West","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"70.05773","latitude":"22.470701","physicalLocation":"Jamnagar","pairedRegion":[{"name":"jioindiacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral","name":"koreacentral","displayName":"Korea - Central","regionalDisplayName":"(Asia Pacific) Korea Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - 
Pacific","longitude":"126.9780","latitude":"37.5665","physicalLocation":"Seoul","pairedRegion":[{"name":"koreasouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral","name":"canadacentral","displayName":"Canada - Central","regionalDisplayName":"(Canada) Canada Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Canada","longitude":"-79.383","latitude":"43.653","physicalLocation":"Toronto","pairedRegion":[{"name":"canadaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral","name":"francecentral","displayName":"France - Central","regionalDisplayName":"(Europe) France Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"2.3730","latitude":"46.3772","physicalLocation":"Paris","pairedRegion":[{"name":"francesouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral","name":"germanywestcentral","displayName":"Germany - West Central","regionalDisplayName":"(Europe) Germany West Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.682127","latitude":"50.110924","physicalLocation":"Frankfurt","pairedRegion":[{"name":"germanynorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast","name":"norwayeast","displayName":"Norway - East","regionalDisplayName":"(Europe) Norway East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"10.752245","latitude":"59.913868","physicalLocation":"Norway","pairedRegion":[{"name":"norwaywest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth","name":"switzerlandnorth","displayName":"Switzerland - North","regionalDisplayName":"(Europe) Switzerland North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.564572","latitude":"47.451542","physicalLocation":"Zurich","pairedRegion":[{"name":"switzerlandwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth","name":"uaenorth","displayName":"UAE - North","regionalDisplayName":"(Middle East) UAE North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Middle - East","longitude":"55.316666","latitude":"25.266666","physicalLocation":"Dubai","pairedRegion":[{"name":"uaecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth","name":"brazilsouth","displayName":"Brazil - South","regionalDisplayName":"(South America) Brazil South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"South - America","longitude":"-46.633","latitude":"-23.55","physicalLocation":"Sao - Paulo 
State","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage","name":"centralusstage","displayName":"Central - US (Stage)","regionalDisplayName":"(US) Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstage","name":"eastusstage","displayName":"East - US (Stage)","regionalDisplayName":"(US) East US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2stage","name":"eastus2stage","displayName":"East - US 2 (Stage)","regionalDisplayName":"(US) East US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralusstage","name":"northcentralusstage","displayName":"North - Central US (Stage)","regionalDisplayName":"(US) North Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstage","name":"southcentralusstage","displayName":"South - Central US (Stage)","regionalDisplayName":"(US) South Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westusstage","name":"westusstage","displayName":"West - US (Stage)","regionalDisplayName":"(US) West US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2stage","name":"westus2stage","displayName":"West - US 2 (Stage)","regionalDisplayName":"(US) West US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asia","name":"asia","displayName":"Asia","regionalDisplayName":"Asia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asiapacific","name":"asiapacific","displayName":"Asia - Pacific","regionalDisplayName":"Asia 
Pacific","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia","name":"australia","displayName":"Australia","regionalDisplayName":"Australia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil","name":"brazil","displayName":"Brazil","regionalDisplayName":"Brazil","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada","name":"canada","displayName":"Canada","regionalDisplayName":"Canada","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe","name":"europe","displayName":"Europe","regionalDisplayName":"Europe","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global","name":"global","displayName":"Global","regionalDisplayName":"Global","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india","name":"india","displayName":"India","regionalDisplayName":"India","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan","name":"japan","displayName":"Japan","regionalDisplayName":"Japan","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk","name":"uk","displayName":"United - Kingdom","regionalDisplayName":"United Kingdom","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstates","name":"unitedstates","displayName":"United - States","regionalDisplayName":"United States","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage","name":"eastasiastage","displayName":"East - Asia (Stage)","regionalDisplayName":"(Asia Pacific) East Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia - Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasiastage","name":"southeastasiastage","displayName":"Southeast - Asia (Stage)","regionalDisplayName":"(Asia Pacific) Southeast Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia - Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap","name":"centraluseuap","displayName":"Central - US EUAP","regionalDisplayName":"(US) Central US EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","pairedRegion":[{"name":"eastus2euap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap","name":"eastus2euap","displayName":"East - US 2 EUAP","regionalDisplayName":"(US) East US 2 
EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","pairedRegion":[{"name":"centraluseuap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus","name":"westcentralus","displayName":"West - Central US","regionalDisplayName":"(US) West Central US","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-110.234","latitude":"40.890","physicalLocation":"Wyoming","pairedRegion":[{"name":"westus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus3","name":"westus3","displayName":"West - US 3","regionalDisplayName":"(US) West US 3","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-112.074036","latitude":"33.448376","physicalLocation":"Phoenix","pairedRegion":[{"name":"eastus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest","name":"southafricawest","displayName":"South - Africa West","regionalDisplayName":"(Africa) South Africa West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Africa","longitude":"18.843266","latitude":"-34.075691","physicalLocation":"Cape - Town","pairedRegion":[{"name":"southafricanorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral","name":"australiacentral","displayName":"Australia - Central","regionalDisplayName":"(Asia Pacific) Australia Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2","name":"australiacentral2","displayName":"Australia - Central 2","regionalDisplayName":"(Asia Pacific) Australia Central 2","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast","name":"australiasoutheast","displayName":"Australia - Southeast","regionalDisplayName":"(Asia Pacific) Australia Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"144.9631","latitude":"-37.8136","physicalLocation":"Victoria","pairedRegion":[{"name":"australiaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest","name":"japanwest","displayName":"Japan - West","regionalDisplayName":"(Asia Pacific) Japan West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - 
Pacific","longitude":"135.5022","latitude":"34.6939","physicalLocation":"Osaka","pairedRegion":[{"name":"japaneast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral","name":"jioindiacentral","displayName":"JIO - India Central","regionalDisplayName":"(Asia Pacific) JIO India Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"79.08886","latitude":"21.146633","physicalLocation":"Nagpur","pairedRegion":[{"name":"jioindiawest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth","name":"koreasouth","displayName":"Korea - South","regionalDisplayName":"(Asia Pacific) Korea South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"129.0756","latitude":"35.1796","physicalLocation":"Busan","pairedRegion":[{"name":"koreacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia","name":"southindia","displayName":"South - India","regionalDisplayName":"(Asia Pacific) South India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"80.1636","latitude":"12.9822","physicalLocation":"Chennai","pairedRegion":[{"name":"centralindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westindia","name":"westindia","displayName":"West - India","regionalDisplayName":"(Asia Pacific) West India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"72.868","latitude":"19.088","physicalLocation":"Mumbai","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast","name":"canadaeast","displayName":"Canada - East","regionalDisplayName":"(Canada) Canada East","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Canada","longitude":"-71.217","latitude":"46.817","physicalLocation":"Quebec","pairedRegion":[{"name":"canadacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth","name":"francesouth","displayName":"France - South","regionalDisplayName":"(Europe) France South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"2.1972","latitude":"43.8345","physicalLocation":"Marseille","pairedRegion":[{"name":"francecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth","name":"germanynorth","displayName":"Germany - North","regionalDisplayName":"(Europe) Germany 
North","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"8.806422","latitude":"53.073635","physicalLocation":"Berlin","pairedRegion":[{"name":"germanywestcentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest","name":"norwaywest","displayName":"Norway - West","regionalDisplayName":"(Europe) Norway West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"5.733107","latitude":"58.969975","physicalLocation":"Norway","pairedRegion":[{"name":"norwayeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest","name":"switzerlandwest","displayName":"Switzerland - West","regionalDisplayName":"(Europe) Switzerland West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"6.143158","latitude":"46.204391","physicalLocation":"Geneva","pairedRegion":[{"name":"switzerlandnorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest","name":"ukwest","displayName":"UK - West","regionalDisplayName":"(Europe) UK West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"-3.084","latitude":"53.427","physicalLocation":"Cardiff","pairedRegion":[{"name":"uksouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral","name":"uaecentral","displayName":"UAE - Central","regionalDisplayName":"(Middle East) UAE Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Middle - East","longitude":"54.366669","latitude":"24.466667","physicalLocation":"Abu - Dhabi","pairedRegion":[{"name":"uaenorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsoutheast","name":"brazilsoutheast","displayName":"Brazil - Southeast","regionalDisplayName":"(South America) Brazil Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"South - America","longitude":"-43.2075","latitude":"-22.90278","physicalLocation":"Rio","pairedRegion":[{"name":"brazilsouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv","name":"eastusslv","displayName":"East - US SLV","regionalDisplayName":"(South America) East US SLV","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"South - America","longitude":"-43.2075","latitude":"-22.90278","physicalLocation":"Silverstone","pairedRegion":[{"name":"eastusslv","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv"}]}}]}' - headers: - cache-control: - - no-cache - content-length: - - '26993' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 26 Apr 2021 06:14:44 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: '{"factoryResourceId": 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001", - "repoConfiguration": {"type": "FactoryVSTSConfiguration", "accountName": "ADF", - "repositoryName": "repo", "collaborationBranch": "master", "rootFolder": "/", - "lastCommitId": "", "projectName": "project", "tenantId": ""}}' + body: '{"location": "eastus", "identity": {"type": "SystemAssigned"}, "properties": + {"encryption": {}}}' headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory configure-factory-repo + - datafactory create Connection: - keep-alive Content-Length: - - '449' + - '96' Content-Type: - application/json ParameterSetName: - - --factory-resource-id --factory-vsts-configuration --location + - --location --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory/locations/eastus/configureFactoryRepo?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 response: body: - string: "{\r\n \"name\": \"exampleFa000001\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/examplefa5qnc3dfb3\",\r\n - \ \"type\": \"Microsoft.DataFactory/factories\",\r\n \"properties\": {\r\n - \ \"provisioningState\": \"Succeeded\",\r\n \"createTime\": \"2021-04-26T06:13:38.8060441Z\",\r\n - \ \"version\": \"2018-06-01\",\r\n \"factoryStatistics\": null,\r\n \"repoConfiguration\": - {\r\n \"type\": \"FactoryVSTSConfiguration\",\r\n \"accountName\": - \"ADF\",\r\n \"repositoryName\": \"repo\",\r\n \"collaborationBranch\": - \"master\",\r\n \"rootFolder\": \"/\",\r\n \"lastCommitId\": \"\",\r\n - \ \"projectName\": \"project\",\r\n \"tenantId\": \"\"\r\n },\r\n - \ \"encryption\": {}\r\n },\r\n \"eTag\": \"\\\"3000bdfc-0000-0100-0000-60865a550000\\\"\",\r\n - \ \"location\": \"eastus\",\r\n \"identity\": {\r\n \"type\": \"SystemAssigned\",\r\n - \ \"principalId\": \"06a7c2de-469c-4e53-bbc5-69adf6b29d6b\",\r\n \"tenantId\": - \"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a\",\r\n \"userAssignedIdentities\": - null\r\n },\r\n \"tags\": {\r\n \"exampleTag\": \"exampleValue\"\r\n - \ }\r\n}" + string: "{\n \"name\": \"exampleFa000001\",\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/examplefa4rqmquxng\",\n + \ \"type\": \"Microsoft.DataFactory/factories\",\n \"properties\": {\n \"provisioningState\": + \"Succeeded\",\n \"createTime\": \"2022-10-11T20:52:56.4648566Z\",\n \"version\": + \"2018-06-01\",\n \"encryption\": {}\n },\n \"eTag\": \"\\\"0d005ac0-0000-0100-0000-6345d7a80000\\\"\",\n + \ \"location\": \"eastus\",\n \"identity\": {\n \"type\": \"SystemAssigned\",\n + \ \"principalId\": \"132db0ba-5a3d-40fd-b544-d7801c5a4c0e\",\n \"tenantId\": + \"16b3c013-d300-468d-ac64-7eda0820b6d3\"\n },\n \"tags\": {}\n}" headers: cache-control: - no-cache content-length: - - '1126' + - '647' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:45 
GMT + - Tue, 11 Oct 2022 20:52:56 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -2619,56 +7854,66 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - - '1199' - x-powered-by: - - ASP.NET + - '1198' status: code: 200 message: OK - request: - body: null + body: '{"properties": {"type": "MappingDataFlow", "description": "Example Text", + "annotations": [], "typeProperties": {"sources": [], "sinks": [], "transformations": + [], "scriptLines": []}}}' headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory integration-runtime delete + - datafactory data-flow create Connection: - keep-alive Content-Length: - - '0' + - '183' + Content-Type: + - application/json ParameterSetName: - - -y --factory-name --name --resource-group + - --factory-name --resource-group --name --flow-type --properties User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/dataflows/example000007?api-version=2018-06-01 response: body: - string: '' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/dataflows/example000007\",\n + \ \"name\": \"example000007\",\n \"type\": \"Microsoft.DataFactory/factories/dataflows\",\n + \ \"properties\": {\n \"type\": \"MappingDataFlow\",\n \"description\": + \"Example Text\",\n \"annotations\": [],\n \"typeProperties\": {\n \"sources\": + [],\n \"sinks\": [],\n \"transformations\": [],\n \"scriptLines\": + []\n }\n },\n \"etag\": \"d1001434-0000-0100-0000-6345d7a90000\"\n}" headers: cache-control: - no-cache content-length: - - '0' + - '543' + content-type: + - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:47 GMT + - Tue, 11 Oct 2022 20:52:56 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-subscription-deletes: - - '14999' - x-powered-by: - - ASP.NET + x-ms-ratelimit-remaining-subscription-writes: + - '1199' status: code: 200 message: OK @@ -2680,89 +7925,105 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory trigger delete + - datafactory data-flow show Connection: - keep-alive - Content-Length: - - '0' ParameterSetName: - - -y --factory-name --resource-group --name + - --factory-name --resource-group --name User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: DELETE - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/dataflows/example000007?api-version=2018-06-01 response: body: - string: '' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/dataflows/example000007\",\n + \ \"name\": \"example000007\",\n \"type\": \"Microsoft.DataFactory/factories/dataflows\",\n + \ \"properties\": {\n \"type\": \"MappingDataFlow\",\n \"description\": + \"Example Text\",\n \"annotations\": [],\n \"typeProperties\": {\n \"sources\": + [],\n \"sinks\": [],\n \"transformations\": [],\n \"scriptLines\": + []\n }\n },\n \"etag\": \"d1001434-0000-0100-0000-6345d7a90000\"\n}" headers: cache-control: - no-cache content-length: - - '0' + - '543' + content-type: + - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:50 GMT + - Tue, 11 Oct 2022 20:52:57 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-subscription-deletes: - - '14999' - x-powered-by: - - ASP.NET status: code: 200 message: OK - request: - body: null + body: '{"properties": {"type": "Flowlet", "description": "Example Text", "annotations": + [], "typeProperties": {"sources": [], "sinks": [], "transformations": [], "scriptLines": + []}}}' headers: Accept: - application/json Accept-Encoding: - gzip, deflate CommandName: - - datafactory pipeline delete + - datafactory data-flow create Connection: - keep-alive Content-Length: - - '0' + - '175' + Content-Type: + - application/json ParameterSetName: - - -y --factory-name --name --resource-group + - --factory-name --resource-group --name --flow-type --properties User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/dataflows/example000008?api-version=2018-06-01 response: body: - string: '' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/dataflows/example000008\",\n + \ \"name\": \"example000008\",\n \"type\": \"Microsoft.DataFactory/factories/dataflows\",\n + \ \"properties\": {\n \"type\": \"Flowlet\",\n \"description\": \"Example + Text\",\n \"annotations\": [],\n \"typeProperties\": {\n \"sources\": + [],\n \"sinks\": [],\n \"transformations\": [],\n \"scriptLines\": + []\n }\n },\n 
\"etag\": \"d1004934-0000-0100-0000-6345d7aa0000\"\n}" headers: cache-control: - no-cache content-length: - - '0' + - '535' + content-type: + - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:55 GMT + - Tue, 11 Oct 2022 20:52:58 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-subscription-deletes: - - '14999' - x-powered-by: - - ASP.NET + x-ms-ratelimit-remaining-subscription-writes: + - '1199' status: code: 200 message: OK @@ -2774,42 +8035,53 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory dataset delete + - datafactory data-flow list Connection: - keep-alive - Content-Length: - - '0' ParameterSetName: - - -y --name --factory-name --resource-group + - --factory-name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) - method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004?api-version=2018-06-01 + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/dataflows?api-version=2018-06-01 response: body: - string: '' + string: "{\n \"value\": [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/dataflows/example000007\",\n + \ \"name\": \"example000007\",\n \"type\": \"Microsoft.DataFactory/factories/dataflows\",\n + \ \"properties\": {\n \"type\": \"MappingDataFlow\",\n \"description\": + \"Example Text\",\n \"annotations\": [],\n \"typeProperties\": + {\n \"sources\": [],\n \"sinks\": [],\n \"transformations\": + [],\n \"scriptLines\": []\n }\n },\n \"etag\": \"d1001434-0000-0100-0000-6345d7a90000\"\n + \ },\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/dataflows/example000008\",\n + \ \"name\": \"example000008\",\n \"type\": \"Microsoft.DataFactory/factories/dataflows\",\n + \ \"properties\": {\n \"type\": \"Flowlet\",\n \"description\": + \"Example Text\",\n \"annotations\": [],\n \"typeProperties\": + {\n \"sources\": [],\n \"sinks\": [],\n \"transformations\": + [],\n \"scriptLines\": []\n }\n },\n \"etag\": \"d1004934-0000-0100-0000-6345d7aa0000\"\n + \ }\n ]\n}" headers: cache-control: - no-cache content-length: - - '0' + - '1237' + content-type: + - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 06:14:56 GMT + - Tue, 11 Oct 2022 20:52:58 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-subscription-deletes: - - '14999' - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -2821,18 +8093,17 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - datafactory linked-service delete + - datafactory 
data-flow delete Connection: - keep-alive Content-Length: - '0' ParameterSetName: - - -y --factory-name --name --resource-group + - --factory-name --resource-group --name User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/dataflows/example000007?api-version=2018-06-01 response: body: string: '' @@ -2842,21 +8113,19 @@ interactions: content-length: - '0' date: - - Mon, 26 Apr 2021 06:14:58 GMT + - Tue, 11 Oct 2022 20:52:59 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-deletes: - '14999' - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -2876,10 +8145,9 @@ interactions: ParameterSetName: - -y --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 response: body: string: '' @@ -2889,21 +8157,19 @@ interactions: content-length: - '0' date: - - Mon, 26 Apr 2021 06:15:04 GMT + - Tue, 11 Oct 2022 20:53:03 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-deletes: - '14999' - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -2915,117 +8181,120 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - account list + - datafactory delete Connection: - keep-alive ParameterSetName: - - --query -o + - -y --name --resource-group User-Agent: - - python/3.8.0 (Windows-10-10.0.19041-SP0) msrest/0.6.21 msrest_azure/0.6.3 - azure-mgmt-resource/12.1.0 Azure-SDK-For-Python AZURECLI/2.22.1 - accept-language: - - en-US + - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/locations?api-version=2019-11-01 response: body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus","name":"eastus","displayName":"East - US","regionalDisplayName":"(US) East 
US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-79.8164","latitude":"37.3719","physicalLocation":"Virginia","pairedRegion":[{"name":"westus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2","name":"eastus2","displayName":"East - US 2","regionalDisplayName":"(US) East US 2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","physicalLocation":"Virginia","pairedRegion":[{"name":"centralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus","name":"southcentralus","displayName":"South - Central US","regionalDisplayName":"(US) South Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-98.5","latitude":"29.4167","physicalLocation":"Texas","pairedRegion":[{"name":"northcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2","name":"westus2","displayName":"West - US 2","regionalDisplayName":"(US) West US 2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-119.852","latitude":"47.233","physicalLocation":"Washington","pairedRegion":[{"name":"westcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast","name":"australiaeast","displayName":"Australia - East","regionalDisplayName":"(Asia Pacific) Australia East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"151.2094","latitude":"-33.86","physicalLocation":"New - South Wales","pairedRegion":[{"name":"australiasoutheast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia","name":"southeastasia","displayName":"Southeast - Asia","regionalDisplayName":"(Asia Pacific) Southeast Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"103.833","latitude":"1.283","physicalLocation":"Singapore","pairedRegion":[{"name":"eastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope","name":"northeurope","displayName":"North - Europe","regionalDisplayName":"(Europe) North Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-6.2597","latitude":"53.3478","physicalLocation":"Ireland","pairedRegion":[{"name":"westeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth","name":"uksouth","displayName":"UK - South","regionalDisplayName":"(Europe) UK 
South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-0.799","latitude":"50.941","physicalLocation":"London","pairedRegion":[{"name":"ukwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope","name":"westeurope","displayName":"West - Europe","regionalDisplayName":"(Europe) West Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"4.9","latitude":"52.3667","physicalLocation":"Netherlands","pairedRegion":[{"name":"northeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus","name":"centralus","displayName":"Central - US","regionalDisplayName":"(US) Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","physicalLocation":"Iowa","pairedRegion":[{"name":"eastus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus","name":"northcentralus","displayName":"North - Central US","regionalDisplayName":"(US) North Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-87.6278","latitude":"41.8819","physicalLocation":"Illinois","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus","name":"westus","displayName":"West - US","regionalDisplayName":"(US) West US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-122.417","latitude":"37.783","physicalLocation":"California","pairedRegion":[{"name":"eastus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth","name":"southafricanorth","displayName":"South - Africa North","regionalDisplayName":"(Africa) South Africa North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Africa","longitude":"28.218370","latitude":"-25.731340","physicalLocation":"Johannesburg","pairedRegion":[{"name":"southafricawest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia","name":"centralindia","displayName":"Central - India","regionalDisplayName":"(Asia Pacific) Central India","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"73.9197","latitude":"18.5822","physicalLocation":"Pune","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia","name":"eastasia","displayName":"East - Asia","regionalDisplayName":"(Asia Pacific) East Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"114.188","latitude":"22.267","physicalLocation":"Hong - 
Kong","pairedRegion":[{"name":"southeastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast","name":"japaneast","displayName":"Japan - East","regionalDisplayName":"(Asia Pacific) Japan East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"139.77","latitude":"35.68","physicalLocation":"Tokyo, - Saitama","pairedRegion":[{"name":"japanwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest","name":"jioindiawest","displayName":"JIO - India West","regionalDisplayName":"(Asia Pacific) JIO India West","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"70.05773","latitude":"22.470701","physicalLocation":"Jamnagar","pairedRegion":[{"name":"jioindiacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral","name":"koreacentral","displayName":"Korea - Central","regionalDisplayName":"(Asia Pacific) Korea Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"126.9780","latitude":"37.5665","physicalLocation":"Seoul","pairedRegion":[{"name":"koreasouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral","name":"canadacentral","displayName":"Canada - Central","regionalDisplayName":"(Canada) Canada Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Canada","longitude":"-79.383","latitude":"43.653","physicalLocation":"Toronto","pairedRegion":[{"name":"canadaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral","name":"francecentral","displayName":"France - Central","regionalDisplayName":"(Europe) France Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"2.3730","latitude":"46.3772","physicalLocation":"Paris","pairedRegion":[{"name":"francesouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral","name":"germanywestcentral","displayName":"Germany - West Central","regionalDisplayName":"(Europe) Germany West Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.682127","latitude":"50.110924","physicalLocation":"Frankfurt","pairedRegion":[{"name":"germanynorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast","name":"norwayeast","displayName":"Norway - East","regionalDisplayName":"(Europe) Norway 
East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"10.752245","latitude":"59.913868","physicalLocation":"Norway","pairedRegion":[{"name":"norwaywest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth","name":"switzerlandnorth","displayName":"Switzerland - North","regionalDisplayName":"(Europe) Switzerland North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.564572","latitude":"47.451542","physicalLocation":"Zurich","pairedRegion":[{"name":"switzerlandwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth","name":"uaenorth","displayName":"UAE - North","regionalDisplayName":"(Middle East) UAE North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Middle - East","longitude":"55.316666","latitude":"25.266666","physicalLocation":"Dubai","pairedRegion":[{"name":"uaecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth","name":"brazilsouth","displayName":"Brazil - South","regionalDisplayName":"(South America) Brazil South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"South - America","longitude":"-46.633","latitude":"-23.55","physicalLocation":"Sao - Paulo State","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage","name":"centralusstage","displayName":"Central - US (Stage)","regionalDisplayName":"(US) Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstage","name":"eastusstage","displayName":"East - US (Stage)","regionalDisplayName":"(US) East US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2stage","name":"eastus2stage","displayName":"East - US 2 (Stage)","regionalDisplayName":"(US) East US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralusstage","name":"northcentralusstage","displayName":"North - Central US (Stage)","regionalDisplayName":"(US) North Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstage","name":"southcentralusstage","displayName":"South - Central US (Stage)","regionalDisplayName":"(US) South Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westusstage","name":"westusstage","displayName":"West - US (Stage)","regionalDisplayName":"(US) West US 
(Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2stage","name":"westus2stage","displayName":"West - US 2 (Stage)","regionalDisplayName":"(US) West US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asia","name":"asia","displayName":"Asia","regionalDisplayName":"Asia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asiapacific","name":"asiapacific","displayName":"Asia - Pacific","regionalDisplayName":"Asia Pacific","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia","name":"australia","displayName":"Australia","regionalDisplayName":"Australia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil","name":"brazil","displayName":"Brazil","regionalDisplayName":"Brazil","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada","name":"canada","displayName":"Canada","regionalDisplayName":"Canada","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe","name":"europe","displayName":"Europe","regionalDisplayName":"Europe","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global","name":"global","displayName":"Global","regionalDisplayName":"Global","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india","name":"india","displayName":"India","regionalDisplayName":"India","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan","name":"japan","displayName":"Japan","regionalDisplayName":"Japan","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk","name":"uk","displayName":"United - Kingdom","regionalDisplayName":"United Kingdom","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstates","name":"unitedstates","displayName":"United - States","regionalDisplayName":"United States","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage","name":"eastasiastage","displayName":"East - Asia (Stage)","regionalDisplayName":"(Asia Pacific) East Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia - Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasiastage","name":"southeastasiastage","displayName":"Southeast - Asia (Stage)","regionalDisplayName":"(Asia Pacific) Southeast Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia - Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap","name":"centraluseuap","displayName":"Central - US EUAP","regionalDisplayName":"(US) Central US 
EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","pairedRegion":[{"name":"eastus2euap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap","name":"eastus2euap","displayName":"East - US 2 EUAP","regionalDisplayName":"(US) East US 2 EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","pairedRegion":[{"name":"centraluseuap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus","name":"westcentralus","displayName":"West - Central US","regionalDisplayName":"(US) West Central US","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-110.234","latitude":"40.890","physicalLocation":"Wyoming","pairedRegion":[{"name":"westus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus3","name":"westus3","displayName":"West - US 3","regionalDisplayName":"(US) West US 3","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-112.074036","latitude":"33.448376","physicalLocation":"Phoenix","pairedRegion":[{"name":"eastus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest","name":"southafricawest","displayName":"South - Africa West","regionalDisplayName":"(Africa) South Africa West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Africa","longitude":"18.843266","latitude":"-34.075691","physicalLocation":"Cape - Town","pairedRegion":[{"name":"southafricanorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral","name":"australiacentral","displayName":"Australia - Central","regionalDisplayName":"(Asia Pacific) Australia Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2","name":"australiacentral2","displayName":"Australia - Central 2","regionalDisplayName":"(Asia Pacific) Australia Central 2","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast","name":"australiasoutheast","displayName":"Australia - Southeast","regionalDisplayName":"(Asia Pacific) Australia Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - 
Pacific","longitude":"144.9631","latitude":"-37.8136","physicalLocation":"Victoria","pairedRegion":[{"name":"australiaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest","name":"japanwest","displayName":"Japan - West","regionalDisplayName":"(Asia Pacific) Japan West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"135.5022","latitude":"34.6939","physicalLocation":"Osaka","pairedRegion":[{"name":"japaneast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral","name":"jioindiacentral","displayName":"JIO - India Central","regionalDisplayName":"(Asia Pacific) JIO India Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"79.08886","latitude":"21.146633","physicalLocation":"Nagpur","pairedRegion":[{"name":"jioindiawest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth","name":"koreasouth","displayName":"Korea - South","regionalDisplayName":"(Asia Pacific) Korea South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"129.0756","latitude":"35.1796","physicalLocation":"Busan","pairedRegion":[{"name":"koreacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia","name":"southindia","displayName":"South - India","regionalDisplayName":"(Asia Pacific) South India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"80.1636","latitude":"12.9822","physicalLocation":"Chennai","pairedRegion":[{"name":"centralindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westindia","name":"westindia","displayName":"West - India","regionalDisplayName":"(Asia Pacific) West India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"72.868","latitude":"19.088","physicalLocation":"Mumbai","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast","name":"canadaeast","displayName":"Canada - East","regionalDisplayName":"(Canada) Canada East","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Canada","longitude":"-71.217","latitude":"46.817","physicalLocation":"Quebec","pairedRegion":[{"name":"canadacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth","name":"francesouth","displayName":"France - South","regionalDisplayName":"(Europe) France 
South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"2.1972","latitude":"43.8345","physicalLocation":"Marseille","pairedRegion":[{"name":"francecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth","name":"germanynorth","displayName":"Germany - North","regionalDisplayName":"(Europe) Germany North","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"8.806422","latitude":"53.073635","physicalLocation":"Berlin","pairedRegion":[{"name":"germanywestcentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest","name":"norwaywest","displayName":"Norway - West","regionalDisplayName":"(Europe) Norway West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"5.733107","latitude":"58.969975","physicalLocation":"Norway","pairedRegion":[{"name":"norwayeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest","name":"switzerlandwest","displayName":"Switzerland - West","regionalDisplayName":"(Europe) Switzerland West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"6.143158","latitude":"46.204391","physicalLocation":"Geneva","pairedRegion":[{"name":"switzerlandnorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest","name":"ukwest","displayName":"UK - West","regionalDisplayName":"(Europe) UK West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"-3.084","latitude":"53.427","physicalLocation":"Cardiff","pairedRegion":[{"name":"uksouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral","name":"uaecentral","displayName":"UAE - Central","regionalDisplayName":"(Middle East) UAE Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Middle - East","longitude":"54.366669","latitude":"24.466667","physicalLocation":"Abu - Dhabi","pairedRegion":[{"name":"uaenorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsoutheast","name":"brazilsoutheast","displayName":"Brazil - Southeast","regionalDisplayName":"(South America) Brazil Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"South - America","longitude":"-43.2075","latitude":"-22.90278","physicalLocation":"Rio","pairedRegion":[{"name":"brazilsouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv","name":"eastusslv","displayName":"East - US SLV","regionalDisplayName":"(South America) East US SLV","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"South - 
America","longitude":"-43.2075","latitude":"-22.90278","physicalLocation":"Silverstone","pairedRegion":[{"name":"eastusslv","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv"}]}}]}' - headers: - cache-control: - - no-cache - content-length: - - '26993' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 26 Apr 2021 03:35:43 GMT + string: "{\"value\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\",\"name\":\"eastus\",\"displayName\":\"East + US\",\"regionalDisplayName\":\"(US) East US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-79.8164\",\"latitude\":\"37.3719\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\":\"westus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2\",\"name\":\"eastus2\",\"displayName\":\"East + US 2\",\"regionalDisplayName\":\"(US) East US 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-78.3889\",\"latitude\":\"36.6681\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\":\"centralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\",\"name\":\"southcentralus\",\"displayName\":\"South + Central US\",\"regionalDisplayName\":\"(US) South Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-98.5\",\"latitude\":\"29.4167\",\"physicalLocation\":\"Texas\",\"pairedRegion\":[{\"name\":\"northcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2\",\"name\":\"westus2\",\"displayName\":\"West + US 2\",\"regionalDisplayName\":\"(US) West US 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-119.852\",\"latitude\":\"47.233\",\"physicalLocation\":\"Washington\",\"pairedRegion\":[{\"name\":\"westcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus3\",\"name\":\"westus3\",\"displayName\":\"West + US 3\",\"regionalDisplayName\":\"(US) West US 3\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-112.074036\",\"latitude\":\"33.448376\",\"physicalLocation\":\"Phoenix\",\"pairedRegion\":[{\"name\":\"eastus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast\",\"name\":\"australiaeast\",\"displayName\":\"Australia + East\",\"regionalDisplayName\":\"(Asia Pacific) Australia East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"151.2094\",\"latitude\":\"-33.86\",\"physicalLocation\":\"New + South 
Wales\",\"pairedRegion\":[{\"name\":\"australiasoutheast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia\",\"name\":\"southeastasia\",\"displayName\":\"Southeast + Asia\",\"regionalDisplayName\":\"(Asia Pacific) Southeast Asia\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"103.833\",\"latitude\":\"1.283\",\"physicalLocation\":\"Singapore\",\"pairedRegion\":[{\"name\":\"eastasia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope\",\"name\":\"northeurope\",\"displayName\":\"North + Europe\",\"regionalDisplayName\":\"(Europe) North Europe\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"-6.2597\",\"latitude\":\"53.3478\",\"physicalLocation\":\"Ireland\",\"pairedRegion\":[{\"name\":\"westeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedencentral\",\"name\":\"swedencentral\",\"displayName\":\"Sweden + Central\",\"regionalDisplayName\":\"(Europe) Sweden Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"17.14127\",\"latitude\":\"60.67488\",\"physicalLocation\":\"G\xE4vle\",\"pairedRegion\":[{\"name\":\"swedensouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedensouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth\",\"name\":\"uksouth\",\"displayName\":\"UK + South\",\"regionalDisplayName\":\"(Europe) UK South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"-0.799\",\"latitude\":\"50.941\",\"physicalLocation\":\"London\",\"pairedRegion\":[{\"name\":\"ukwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\",\"name\":\"westeurope\",\"displayName\":\"West + Europe\",\"regionalDisplayName\":\"(Europe) West Europe\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"4.9\",\"latitude\":\"52.3667\",\"physicalLocation\":\"Netherlands\",\"pairedRegion\":[{\"name\":\"northeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus\",\"name\":\"centralus\",\"displayName\":\"Central + US\",\"regionalDisplayName\":\"(US) Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-93.6208\",\"latitude\":\"41.5908\",\"physicalLocation\":\"Iowa\",\"pairedRegion\":[{\"name\":\"eastus2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth\",\"name\":\"southafricanorth\",\"displayName\":\"South + Africa North\",\"regionalDisplayName\":\"(Africa) South Africa 
North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Africa\",\"longitude\":\"28.218370\",\"latitude\":\"-25.731340\",\"physicalLocation\":\"Johannesburg\",\"pairedRegion\":[{\"name\":\"southafricawest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia\",\"name\":\"centralindia\",\"displayName\":\"Central + India\",\"regionalDisplayName\":\"(Asia Pacific) Central India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"73.9197\",\"latitude\":\"18.5822\",\"physicalLocation\":\"Pune\",\"pairedRegion\":[{\"name\":\"southindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia\",\"name\":\"eastasia\",\"displayName\":\"East + Asia\",\"regionalDisplayName\":\"(Asia Pacific) East Asia\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"114.188\",\"latitude\":\"22.267\",\"physicalLocation\":\"Hong + Kong\",\"pairedRegion\":[{\"name\":\"southeastasia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast\",\"name\":\"japaneast\",\"displayName\":\"Japan + East\",\"regionalDisplayName\":\"(Asia Pacific) Japan East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"139.77\",\"latitude\":\"35.68\",\"physicalLocation\":\"Tokyo, + Saitama\",\"pairedRegion\":[{\"name\":\"japanwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral\",\"name\":\"koreacentral\",\"displayName\":\"Korea + Central\",\"regionalDisplayName\":\"(Asia Pacific) Korea Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"126.9780\",\"latitude\":\"37.5665\",\"physicalLocation\":\"Seoul\",\"pairedRegion\":[{\"name\":\"koreasouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral\",\"name\":\"canadacentral\",\"displayName\":\"Canada + Central\",\"regionalDisplayName\":\"(Canada) Canada Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Canada\",\"longitude\":\"-79.383\",\"latitude\":\"43.653\",\"physicalLocation\":\"Toronto\",\"pairedRegion\":[{\"name\":\"canadaeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral\",\"name\":\"francecentral\",\"displayName\":\"France + Central\",\"regionalDisplayName\":\"(Europe) France 
Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"2.3730\",\"latitude\":\"46.3772\",\"physicalLocation\":\"Paris\",\"pairedRegion\":[{\"name\":\"francesouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral\",\"name\":\"germanywestcentral\",\"displayName\":\"Germany + West Central\",\"regionalDisplayName\":\"(Europe) Germany West Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.682127\",\"latitude\":\"50.110924\",\"physicalLocation\":\"Frankfurt\",\"pairedRegion\":[{\"name\":\"germanynorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast\",\"name\":\"norwayeast\",\"displayName\":\"Norway + East\",\"regionalDisplayName\":\"(Europe) Norway East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"10.752245\",\"latitude\":\"59.913868\",\"physicalLocation\":\"Norway\",\"pairedRegion\":[{\"name\":\"norwaywest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth\",\"name\":\"switzerlandnorth\",\"displayName\":\"Switzerland + North\",\"regionalDisplayName\":\"(Europe) Switzerland North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.564572\",\"latitude\":\"47.451542\",\"physicalLocation\":\"Zurich\",\"pairedRegion\":[{\"name\":\"switzerlandwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth\",\"name\":\"uaenorth\",\"displayName\":\"UAE + North\",\"regionalDisplayName\":\"(Middle East) UAE North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Middle + East\",\"longitude\":\"55.316666\",\"latitude\":\"25.266666\",\"physicalLocation\":\"Dubai\",\"pairedRegion\":[{\"name\":\"uaecentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth\",\"name\":\"brazilsouth\",\"displayName\":\"Brazil + South\",\"regionalDisplayName\":\"(South America) Brazil South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"South + America\",\"longitude\":\"-46.633\",\"latitude\":\"-23.55\",\"physicalLocation\":\"Sao + Paulo State\",\"pairedRegion\":[{\"name\":\"southcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap\",\"name\":\"eastus2euap\",\"displayName\":\"East + US 2 EUAP\",\"regionalDisplayName\":\"(US) East US 2 
EUAP\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-78.3889\",\"latitude\":\"36.6681\",\"pairedRegion\":[{\"name\":\"centraluseuap\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/qatarcentral\",\"name\":\"qatarcentral\",\"displayName\":\"Qatar + Central\",\"regionalDisplayName\":\"(Middle East) Qatar Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Middle + East\",\"longitude\":\"51.439327\",\"latitude\":\"25.551462\",\"physicalLocation\":\"Doha\",\"pairedRegion\":[{\"name\":\"westeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage\",\"name\":\"centralusstage\",\"displayName\":\"Central + US (Stage)\",\"regionalDisplayName\":\"(US) Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstage\",\"name\":\"eastusstage\",\"displayName\":\"East + US (Stage)\",\"regionalDisplayName\":\"(US) East US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2stage\",\"name\":\"eastus2stage\",\"displayName\":\"East + US 2 (Stage)\",\"regionalDisplayName\":\"(US) East US 2 (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralusstage\",\"name\":\"northcentralusstage\",\"displayName\":\"North + Central US (Stage)\",\"regionalDisplayName\":\"(US) North Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstage\",\"name\":\"southcentralusstage\",\"displayName\":\"South + Central US (Stage)\",\"regionalDisplayName\":\"(US) South Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westusstage\",\"name\":\"westusstage\",\"displayName\":\"West + US (Stage)\",\"regionalDisplayName\":\"(US) West US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2stage\",\"name\":\"westus2stage\",\"displayName\":\"West + US 2 (Stage)\",\"regionalDisplayName\":\"(US) West US 2 (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asia\",\"name\":\"asia\",\"displayName\":\"Asia\",\"regionalDisplayName\":\"Asia\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asiapacific\",\"name\":\"asiapacific\",\"displayName\":\"Asia + Pacific\",\"regionalDisplayName\":\"Asia 
Pacific\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia\",\"name\":\"australia\",\"displayName\":\"Australia\",\"regionalDisplayName\":\"Australia\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil\",\"name\":\"brazil\",\"displayName\":\"Brazil\",\"regionalDisplayName\":\"Brazil\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada\",\"name\":\"canada\",\"displayName\":\"Canada\",\"regionalDisplayName\":\"Canada\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe\",\"name\":\"europe\",\"displayName\":\"Europe\",\"regionalDisplayName\":\"Europe\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/france\",\"name\":\"france\",\"displayName\":\"France\",\"regionalDisplayName\":\"France\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germany\",\"name\":\"germany\",\"displayName\":\"Germany\",\"regionalDisplayName\":\"Germany\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global\",\"name\":\"global\",\"displayName\":\"Global\",\"regionalDisplayName\":\"Global\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india\",\"name\":\"india\",\"displayName\":\"India\",\"regionalDisplayName\":\"India\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan\",\"name\":\"japan\",\"displayName\":\"Japan\",\"regionalDisplayName\":\"Japan\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/korea\",\"name\":\"korea\",\"displayName\":\"Korea\",\"regionalDisplayName\":\"Korea\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norway\",\"name\":\"norway\",\"displayName\":\"Norway\",\"regionalDisplayName\":\"Norway\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/singapore\",\"name\":\"singapore\",\"displayName\":\"Singapore\",\"regionalDisplayName\":\"Singapore\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafrica\",\"name\":\"southafrica\",\"displayName\":\"South + Africa\",\"regionalDisplayName\":\"South Africa\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerland\",\"name\":\"switzerland\",\"displayName\":\"Switzerland\",\"regionalDisplayName\":\"Switzerland\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uae\",\"name\":\"uae\",\"displayName\":\"United + 
Arab Emirates\",\"regionalDisplayName\":\"United Arab Emirates\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk\",\"name\":\"uk\",\"displayName\":\"United + Kingdom\",\"regionalDisplayName\":\"United Kingdom\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstates\",\"name\":\"unitedstates\",\"displayName\":\"United + States\",\"regionalDisplayName\":\"United States\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstateseuap\",\"name\":\"unitedstateseuap\",\"displayName\":\"United + States EUAP\",\"regionalDisplayName\":\"United States EUAP\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage\",\"name\":\"eastasiastage\",\"displayName\":\"East + Asia (Stage)\",\"regionalDisplayName\":\"(Asia Pacific) East Asia (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasiastage\",\"name\":\"southeastasiastage\",\"displayName\":\"Southeast + Asia (Stage)\",\"regionalDisplayName\":\"(Asia Pacific) Southeast Asia (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstg\",\"name\":\"eastusstg\",\"displayName\":\"East + US STG\",\"regionalDisplayName\":\"(US) East US STG\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-79.8164\",\"latitude\":\"37.3719\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\":\"southcentralusstg\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstg\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstg\",\"name\":\"southcentralusstg\",\"displayName\":\"South + Central US STG\",\"regionalDisplayName\":\"(US) South Central US STG\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-98.5\",\"latitude\":\"29.4167\",\"physicalLocation\":\"Texas\",\"pairedRegion\":[{\"name\":\"eastusstg\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstg\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus\",\"name\":\"northcentralus\",\"displayName\":\"North + Central US\",\"regionalDisplayName\":\"(US) North Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-87.6278\",\"latitude\":\"41.8819\",\"physicalLocation\":\"Illinois\",\"pairedRegion\":[{\"name\":\"southcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus\",\"name\":\"westus\",\"displayName\":\"West + US\",\"regionalDisplayName\":\"(US) West 
US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-122.417\",\"latitude\":\"37.783\",\"physicalLocation\":\"California\",\"pairedRegion\":[{\"name\":\"eastus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest\",\"name\":\"jioindiawest\",\"displayName\":\"Jio + India West\",\"regionalDisplayName\":\"(Asia Pacific) Jio India West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"70.05773\",\"latitude\":\"22.470701\",\"physicalLocation\":\"Jamnagar\",\"pairedRegion\":[{\"name\":\"jioindiacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap\",\"name\":\"centraluseuap\",\"displayName\":\"Central + US EUAP\",\"regionalDisplayName\":\"(US) Central US EUAP\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-93.6208\",\"latitude\":\"41.5908\",\"pairedRegion\":[{\"name\":\"eastus2euap\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus\",\"name\":\"westcentralus\",\"displayName\":\"West + Central US\",\"regionalDisplayName\":\"(US) West Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-110.234\",\"latitude\":\"40.890\",\"physicalLocation\":\"Wyoming\",\"pairedRegion\":[{\"name\":\"westus2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest\",\"name\":\"southafricawest\",\"displayName\":\"South + Africa West\",\"regionalDisplayName\":\"(Africa) South Africa West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Africa\",\"longitude\":\"18.843266\",\"latitude\":\"-34.075691\",\"physicalLocation\":\"Cape + Town\",\"pairedRegion\":[{\"name\":\"southafricanorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral\",\"name\":\"australiacentral\",\"displayName\":\"Australia + Central\",\"regionalDisplayName\":\"(Asia Pacific) Australia Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"149.1244\",\"latitude\":\"-35.3075\",\"physicalLocation\":\"Canberra\",\"pairedRegion\":[{\"name\":\"australiacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2\",\"name\":\"australiacentral2\",\"displayName\":\"Australia + Central 2\",\"regionalDisplayName\":\"(Asia Pacific) Australia Central 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + 
Pacific\",\"longitude\":\"149.1244\",\"latitude\":\"-35.3075\",\"physicalLocation\":\"Canberra\",\"pairedRegion\":[{\"name\":\"australiacentral2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast\",\"name\":\"australiasoutheast\",\"displayName\":\"Australia + Southeast\",\"regionalDisplayName\":\"(Asia Pacific) Australia Southeast\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"144.9631\",\"latitude\":\"-37.8136\",\"physicalLocation\":\"Victoria\",\"pairedRegion\":[{\"name\":\"australiaeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest\",\"name\":\"japanwest\",\"displayName\":\"Japan + West\",\"regionalDisplayName\":\"(Asia Pacific) Japan West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"135.5022\",\"latitude\":\"34.6939\",\"physicalLocation\":\"Osaka\",\"pairedRegion\":[{\"name\":\"japaneast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral\",\"name\":\"jioindiacentral\",\"displayName\":\"Jio + India Central\",\"regionalDisplayName\":\"(Asia Pacific) Jio India Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"79.08886\",\"latitude\":\"21.146633\",\"physicalLocation\":\"Nagpur\",\"pairedRegion\":[{\"name\":\"jioindiawest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth\",\"name\":\"koreasouth\",\"displayName\":\"Korea + South\",\"regionalDisplayName\":\"(Asia Pacific) Korea South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"129.0756\",\"latitude\":\"35.1796\",\"physicalLocation\":\"Busan\",\"pairedRegion\":[{\"name\":\"koreacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\",\"name\":\"southindia\",\"displayName\":\"South + India\",\"regionalDisplayName\":\"(Asia Pacific) South India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"80.1636\",\"latitude\":\"12.9822\",\"physicalLocation\":\"Chennai\",\"pairedRegion\":[{\"name\":\"centralindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westindia\",\"name\":\"westindia\",\"displayName\":\"West + India\",\"regionalDisplayName\":\"(Asia Pacific) West India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + 
Pacific\",\"longitude\":\"72.868\",\"latitude\":\"19.088\",\"physicalLocation\":\"Mumbai\",\"pairedRegion\":[{\"name\":\"southindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast\",\"name\":\"canadaeast\",\"displayName\":\"Canada + East\",\"regionalDisplayName\":\"(Canada) Canada East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Canada\",\"longitude\":\"-71.217\",\"latitude\":\"46.817\",\"physicalLocation\":\"Quebec\",\"pairedRegion\":[{\"name\":\"canadacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth\",\"name\":\"francesouth\",\"displayName\":\"France + South\",\"regionalDisplayName\":\"(Europe) France South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"2.1972\",\"latitude\":\"43.8345\",\"physicalLocation\":\"Marseille\",\"pairedRegion\":[{\"name\":\"francecentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth\",\"name\":\"germanynorth\",\"displayName\":\"Germany + North\",\"regionalDisplayName\":\"(Europe) Germany North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.806422\",\"latitude\":\"53.073635\",\"physicalLocation\":\"Berlin\",\"pairedRegion\":[{\"name\":\"germanywestcentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest\",\"name\":\"norwaywest\",\"displayName\":\"Norway + West\",\"regionalDisplayName\":\"(Europe) Norway West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"5.733107\",\"latitude\":\"58.969975\",\"physicalLocation\":\"Norway\",\"pairedRegion\":[{\"name\":\"norwayeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest\",\"name\":\"switzerlandwest\",\"displayName\":\"Switzerland + West\",\"regionalDisplayName\":\"(Europe) Switzerland West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"6.143158\",\"latitude\":\"46.204391\",\"physicalLocation\":\"Geneva\",\"pairedRegion\":[{\"name\":\"switzerlandnorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest\",\"name\":\"ukwest\",\"displayName\":\"UK + West\",\"regionalDisplayName\":\"(Europe) UK West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"-3.084\",\"latitude\":\"53.427\",\"physicalLocation\":\"Cardiff\",\"pairedRegion\":[{\"name\":\"uksouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral\",\"name\":\"uaecentral\",\"displayName\":\"UAE + Central\",\"regionalDisplayName\":\"(Middle East) UAE 
Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Middle + East\",\"longitude\":\"54.366669\",\"latitude\":\"24.466667\",\"physicalLocation\":\"Abu + Dhabi\",\"pairedRegion\":[{\"name\":\"uaenorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsoutheast\",\"name\":\"brazilsoutheast\",\"displayName\":\"Brazil + Southeast\",\"regionalDisplayName\":\"(South America) Brazil Southeast\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"South + America\",\"longitude\":\"-43.2075\",\"latitude\":\"-22.90278\",\"physicalLocation\":\"Rio\",\"pairedRegion\":[{\"name\":\"brazilsouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth\"}]}}]}" + headers: + cache-control: + - no-cache + content-length: + - '30402' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:53:03 GMT expires: - '-1' pragma: @@ -3058,28 +8327,33 @@ interactions: ParameterSetName: - --location --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 response: body: - string: '{"name":"exampleFa000001","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/examplefaakcpqyyqy","type":"Microsoft.DataFactory/factories","properties":{"provisioningState":"Succeeded","createTime":"2021-04-26T03:35:56.3240034Z","version":"2018-06-01","encryption":{}},"eTag":"\"22000c62-0000-0100-0000-6086351c0000\"","location":"eastus","identity":{"type":"SystemAssigned","principalId":"0b4983b4-7dc6-426f-b5ff-fc26da9b2cdc","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},"tags":{}}' + string: "{\n \"name\": \"exampleFa000001\",\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/examplefa4rqmquxng\",\n + \ \"type\": \"Microsoft.DataFactory/factories\",\n \"properties\": {\n \"provisioningState\": + \"Succeeded\",\n \"createTime\": \"2022-10-11T20:53:10.1952115Z\",\n \"version\": + \"2018-06-01\",\n \"encryption\": {}\n },\n \"eTag\": \"\\\"0d0092c0-0000-0100-0000-6345d7b60000\\\"\",\n + \ \"location\": \"eastus\",\n \"identity\": {\n \"type\": \"SystemAssigned\",\n + \ \"principalId\": \"8a2c3420-30b3-4fd8-a9ff-558c24b948c8\",\n \"tenantId\": + \"16b3c013-d300-468d-ac64-7eda0820b6d3\"\n },\n \"tags\": {}\n}" headers: cache-control: - no-cache content-length: - - '631' + - '647' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 03:35:58 GMT + - Tue, 11 Oct 2022 20:53:10 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3090,8 +8364,6 @@ interactions: - nosniff 
x-ms-ratelimit-remaining-subscription-writes: - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -3117,30 +8389,35 @@ interactions: - --factory-name --name --resource-group --description --compute-properties --ssis-properties User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002?api-version=2018-06-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationruntimes/exampleInteg000002","name":"exampleInteg000002","type":"Microsoft.DataFactory/factories/integrationruntimes","properties":{"type":"Managed","description":"Managed - Integration Runtime","typeProperties":{"computeProperties":{"location":"East - US 2","nodeSize":"Standard_D2_v3","numberOfNodes":1,"maxParallelExecutionsPerNode":2},"ssisProperties":{"licenseType":"LicenseIncluded","edition":"Standard"}},"state":"Initial"},"etag":"3100568e-0000-0100-0000-608635200000"}' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationruntimes/exampleInteg000002\",\n + \ \"name\": \"exampleInteg000002\",\n \"type\": \"Microsoft.DataFactory/factories/integrationruntimes\",\n + \ \"properties\": {\n \"type\": \"Managed\",\n \"description\": \"Managed + Integration Runtime\",\n \"typeProperties\": {\n \"computeProperties\": + {\n \"location\": \"East US 2\",\n \"nodeSize\": \"Standard_D2_v3\",\n + \ \"numberOfNodes\": 1,\n \"maxParallelExecutionsPerNode\": 2\n + \ },\n \"ssisProperties\": {\n \"licenseType\": \"LicenseIncluded\",\n + \ \"edition\": \"Standard\"\n }\n },\n \"state\": \"Initial\"\n + \ },\n \"etag\": \"9a01a1fc-0000-0100-0000-6345d7b70000\"\n}" headers: cache-control: - no-cache content-length: - - '709' + - '773' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 03:36:00 GMT + - Tue, 11 Oct 2022 20:53:11 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3150,9 +8427,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - - '1198' - x-powered-by: - - ASP.NET + - '1199' status: code: 200 message: OK @@ -3170,30 +8445,35 @@ interactions: ParameterSetName: - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002?api-version=2018-06-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002?api-version=2018-06-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationruntimes/exampleInteg000002","name":"exampleInteg000002","type":"Microsoft.DataFactory/factories/integrationruntimes","properties":{"type":"Managed","description":"Managed - Integration Runtime","typeProperties":{"computeProperties":{"location":"East - US 2","nodeSize":"Standard_D2_v3","numberOfNodes":1,"maxParallelExecutionsPerNode":2},"ssisProperties":{"licenseType":"LicenseIncluded","edition":"Standard"}},"state":"Initial"},"etag":"3100568e-0000-0100-0000-608635200000"}' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationruntimes/exampleInteg000002\",\n + \ \"name\": \"exampleInteg000002\",\n \"type\": \"Microsoft.DataFactory/factories/integrationruntimes\",\n + \ \"properties\": {\n \"type\": \"Managed\",\n \"description\": \"Managed + Integration Runtime\",\n \"typeProperties\": {\n \"computeProperties\": + {\n \"location\": \"East US 2\",\n \"nodeSize\": \"Standard_D2_v3\",\n + \ \"numberOfNodes\": 1,\n \"maxParallelExecutionsPerNode\": 2\n + \ },\n \"ssisProperties\": {\n \"licenseType\": \"LicenseIncluded\",\n + \ \"edition\": \"Standard\"\n }\n },\n \"state\": \"Initial\"\n + \ },\n \"etag\": \"9a01a1fc-0000-0100-0000-6345d7b70000\"\n}" headers: cache-control: - no-cache content-length: - - '709' + - '773' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 03:36:01 GMT + - Tue, 11 Oct 2022 20:53:11 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3202,8 +8482,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -3223,38 +8501,35 @@ interactions: ParameterSetName: - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/start?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/start?api-version=2018-06-01 response: body: string: '' headers: azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/start/operationstatuses/4acc0fceb75e485985c62f8f9282673a?api-version=2018-06-01 + - 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/start/operationstatuses/040d7ea8e5ed4611ae71fff27c1487b4?api-version=2018-06-01 cache-control: - no-cache content-length: - '0' date: - - Mon, 26 Apr 2021 03:36:04 GMT + - Tue, 11 Oct 2022 20:53:12 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/start/operationresults/4acc0fceb75e485985c62f8f9282673a?api-version=2018-06-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/start/operationresults/040d7ea8e5ed4611ae71fff27c1487b4?api-version=2018-06-01 pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - '1199' - x-powered-by: - - ASP.NET status: code: 202 message: Accepted @@ -3272,28 +8547,28 @@ interactions: ParameterSetName: - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/start/operationstatuses/4acc0fceb75e485985c62f8f9282673a?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/start/operationstatuses/040d7ea8e5ed4611ae71fff27c1487b4?api-version=2018-06-01 response: body: - string: '{"status":"InProgress","name":"4acc0fceb75e485985c62f8f9282673a","properties":null,"error":null}' + string: "{\n \"status\": \"InProgress\",\n \"name\": \"040d7ea8e5ed4611ae71fff27c1487b4\",\n + \ \"properties\": null,\n \"error\": null\n}" headers: cache-control: - no-cache content-length: - - '96' + - '113' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 03:36:19 GMT + - Tue, 11 Oct 2022 20:53:28 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3302,8 +8577,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -3321,28 +8594,28 @@ interactions: ParameterSetName: - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/start/operationstatuses/4acc0fceb75e485985c62f8f9282673a?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/start/operationstatuses/040d7ea8e5ed4611ae71fff27c1487b4?api-version=2018-06-01 response: body: - string: '{"status":"InProgress","name":"4acc0fceb75e485985c62f8f9282673a","properties":null,"error":null}' + string: "{\n \"status\": \"InProgress\",\n \"name\": \"040d7ea8e5ed4611ae71fff27c1487b4\",\n + \ \"properties\": null,\n \"error\": null\n}" headers: cache-control: - no-cache content-length: - - '96' + - '113' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 03:36:49 GMT + - Tue, 11 Oct 2022 20:53:58 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3351,8 +8624,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -3370,28 +8641,34 @@ interactions: ParameterSetName: - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/start/operationstatuses/4acc0fceb75e485985c62f8f9282673a?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/start/operationstatuses/040d7ea8e5ed4611ae71fff27c1487b4?api-version=2018-06-01 response: body: - string: '{"status":"Succeeded","name":"4acc0fceb75e485985c62f8f9282673a","properties":{"name":"exampleInteg000002","properties":{"dataFactoryName":"exampleFa000001","state":"Started","type":"Managed","dataFactoryLocation":null,"resourceUri":null,"typeProperties":{"nodes":[],"otherErrors":[],"createTime":"2021-04-26T03:36:03.6784667Z"},"dataFactoryTags":null,"managedVirtualNetwork":null}},"error":null}' + string: "{\n \"status\": \"Succeeded\",\n \"name\": \"040d7ea8e5ed4611ae71fff27c1487b4\",\n + \ \"properties\": {\n \"name\": \"exampleInteg000002\",\n \"properties\": + {\n \"dataFactoryName\": \"exampleFa000001\",\n \"state\": \"Started\",\n + \ \"type\": \"Managed\",\n \"dataFactoryLocation\": null,\n \"resourceUri\": + null,\n \"typeProperties\": {\n \"nodes\": [],\n \"otherErrors\": + [],\n \"createTime\": \"2022-10-11T20:53:12.1913318Z\"\n },\n + \ \"dataFactoryTags\": null,\n \"managedVirtualNetwork\": null\n + \ }\n },\n \"error\": null\n}" headers: cache-control: - no-cache content-length: - - '405' + - '533' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 03:37:20 GMT + - Tue, 11 Oct 2022 20:54:28 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - 
max-age=31536000; includeSubDomains transfer-encoding: @@ -3400,8 +8677,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -3419,28 +8694,32 @@ interactions: ParameterSetName: - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/start/operationresults/4acc0fceb75e485985c62f8f9282673a?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/start/operationresults/040d7ea8e5ed4611ae71fff27c1487b4?api-version=2018-06-01 response: body: - string: '{"name":"exampleInteg000002","properties":{"dataFactoryName":"exampleFa000001","state":"Started","type":"Managed","dataFactoryLocation":null,"resourceUri":null,"typeProperties":{"nodes":[],"otherErrors":[],"createTime":"2021-04-26T03:36:03.6784667Z"},"dataFactoryTags":null,"managedVirtualNetwork":null}}' + string: "{\n \"name\": \"exampleInteg000002\",\n \"properties\": {\n \"dataFactoryName\": + \"exampleFa000001\",\n \"state\": \"Started\",\n \"type\": \"Managed\",\n + \ \"dataFactoryLocation\": null,\n \"resourceUri\": null,\n \"typeProperties\": + {\n \"nodes\": [],\n \"otherErrors\": [],\n \"createTime\": + \"2022-10-11T20:53:12.1913318Z\"\n },\n \"dataFactoryTags\": null,\n + \ \"managedVirtualNetwork\": null\n }\n}" headers: cache-control: - no-cache content-length: - - '314' + - '393' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 03:37:20 GMT + - Tue, 11 Oct 2022 20:54:28 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3449,8 +8728,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -3470,38 +8747,35 @@ interactions: ParameterSetName: - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop?api-version=2018-06-01 response: body: string: '' headers: azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + - 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/259ffcdffd0342ae8e903cdb2d9031d5?api-version=2018-06-01 cache-control: - no-cache content-length: - '0' date: - - Mon, 26 Apr 2021 03:37:22 GMT + - Tue, 11 Oct 2022 20:54:29 GMT expires: - '-1' location: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationresults/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationresults/259ffcdffd0342ae8e903cdb2d9031d5?api-version=2018-06-01 pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - '1199' - x-powered-by: - - ASP.NET status: code: 202 message: Accepted @@ -3519,28 +8793,28 @@ interactions: ParameterSetName: - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/259ffcdffd0342ae8e903cdb2d9031d5?api-version=2018-06-01 response: body: - string: '{"status":"InProgress","name":"0cfd1f0e21614269807f6a5f699d32f2","properties":null,"error":null}' + string: "{\n \"status\": \"InProgress\",\n \"name\": \"259ffcdffd0342ae8e903cdb2d9031d5\",\n + \ \"properties\": null,\n \"error\": null\n}" headers: cache-control: - no-cache content-length: - - '96' + - '113' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 03:37:38 GMT + - Tue, 11 Oct 2022 20:54:46 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3549,8 +8823,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -3568,28 +8840,28 @@ interactions: ParameterSetName: - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/259ffcdffd0342ae8e903cdb2d9031d5?api-version=2018-06-01 response: body: - string: '{"status":"InProgress","name":"0cfd1f0e21614269807f6a5f699d32f2","properties":null,"error":null}' + string: "{\n \"status\": \"InProgress\",\n \"name\": \"259ffcdffd0342ae8e903cdb2d9031d5\",\n + \ \"properties\": null,\n \"error\": null\n}" headers: cache-control: - no-cache content-length: - - '96' + - '113' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 03:38:08 GMT + - Tue, 11 Oct 2022 20:55:16 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3598,8 +8870,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -3617,28 +8887,28 @@ interactions: ParameterSetName: - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/259ffcdffd0342ae8e903cdb2d9031d5?api-version=2018-06-01 response: body: - string: '{"status":"InProgress","name":"0cfd1f0e21614269807f6a5f699d32f2","properties":null,"error":null}' + string: "{\n \"status\": \"InProgress\",\n \"name\": \"259ffcdffd0342ae8e903cdb2d9031d5\",\n + \ \"properties\": null,\n \"error\": null\n}" headers: cache-control: - no-cache content-length: - - '96' + - '113' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 03:38:38 GMT + - Tue, 11 Oct 2022 20:55:45 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3647,8 +8917,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -3666,28 +8934,28 @@ interactions: ParameterSetName: - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/259ffcdffd0342ae8e903cdb2d9031d5?api-version=2018-06-01 response: body: - string: '{"status":"InProgress","name":"0cfd1f0e21614269807f6a5f699d32f2","properties":null,"error":null}' + string: "{\n \"status\": \"InProgress\",\n \"name\": \"259ffcdffd0342ae8e903cdb2d9031d5\",\n + \ \"properties\": null,\n \"error\": null\n}" headers: cache-control: - no-cache content-length: - - '96' + - '113' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 03:39:09 GMT + - Tue, 11 Oct 2022 20:56:16 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3696,8 +8964,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -3715,28 +8981,28 @@ interactions: ParameterSetName: - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/259ffcdffd0342ae8e903cdb2d9031d5?api-version=2018-06-01 response: body: - string: '{"status":"InProgress","name":"0cfd1f0e21614269807f6a5f699d32f2","properties":null,"error":null}' + string: "{\n \"status\": \"InProgress\",\n \"name\": \"259ffcdffd0342ae8e903cdb2d9031d5\",\n + \ \"properties\": null,\n \"error\": null\n}" headers: cache-control: - no-cache content-length: - - '96' + - '113' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 03:39:39 GMT + - Tue, 11 Oct 2022 20:56:46 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3745,8 +9011,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -3764,28 +9028,28 @@ interactions: ParameterSetName: - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/259ffcdffd0342ae8e903cdb2d9031d5?api-version=2018-06-01 response: body: - string: '{"status":"InProgress","name":"0cfd1f0e21614269807f6a5f699d32f2","properties":null,"error":null}' + string: "{\n \"status\": \"InProgress\",\n \"name\": \"259ffcdffd0342ae8e903cdb2d9031d5\",\n + \ \"properties\": null,\n \"error\": null\n}" headers: cache-control: - no-cache content-length: - - '96' + - '113' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 03:40:10 GMT + - Tue, 11 Oct 2022 20:57:17 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3794,8 +9058,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -3813,28 +9075,28 @@ interactions: ParameterSetName: - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/259ffcdffd0342ae8e903cdb2d9031d5?api-version=2018-06-01 response: body: - string: '{"status":"InProgress","name":"0cfd1f0e21614269807f6a5f699d32f2","properties":null,"error":null}' + string: "{\n \"status\": \"InProgress\",\n \"name\": \"259ffcdffd0342ae8e903cdb2d9031d5\",\n + \ \"properties\": null,\n \"error\": null\n}" headers: cache-control: - no-cache content-length: - - '96' + - '113' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 03:40:41 GMT + - Tue, 11 Oct 2022 20:57:47 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3843,8 +9105,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -3862,28 +9122,75 @@ interactions: ParameterSetName: - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/259ffcdffd0342ae8e903cdb2d9031d5?api-version=2018-06-01 response: body: - string: '{"status":"InProgress","name":"0cfd1f0e21614269807f6a5f699d32f2","properties":null,"error":null}' + string: "{\n \"status\": \"InProgress\",\n \"name\": \"259ffcdffd0342ae8e903cdb2d9031d5\",\n + \ \"properties\": null,\n \"error\": null\n}" headers: cache-control: - no-cache content-length: - - '96' + - '113' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:58:17 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Kestrel + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime stop + Connection: + - keep-alive + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/259ffcdffd0342ae8e903cdb2d9031d5?api-version=2018-06-01 + response: + body: + string: "{\n \"status\": \"InProgress\",\n \"name\": \"259ffcdffd0342ae8e903cdb2d9031d5\",\n + \ \"properties\": null,\n \"error\": null\n}" + headers: + cache-control: + - no-cache + content-length: + - '113' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 03:41:11 GMT + - Tue, 11 Oct 2022 20:58:47 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3892,8 +9199,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -3911,28 +9216,28 @@ interactions: ParameterSetName: - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/259ffcdffd0342ae8e903cdb2d9031d5?api-version=2018-06-01 response: body: - string: 
'{"status":"Succeeded","name":"0cfd1f0e21614269807f6a5f699d32f2","properties":null,"error":null}' + string: "{\n \"status\": \"Succeeded\",\n \"name\": \"259ffcdffd0342ae8e903cdb2d9031d5\",\n + \ \"properties\": null,\n \"error\": null\n}" headers: cache-control: - no-cache content-length: - - '95' + - '112' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 03:41:41 GMT + - Tue, 11 Oct 2022 20:59:17 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -3941,8 +9246,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -3960,10 +9263,9 @@ interactions: ParameterSetName: - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationresults/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationresults/259ffcdffd0342ae8e903cdb2d9031d5?api-version=2018-06-01 response: body: string: '' @@ -3973,19 +9275,17 @@ interactions: content-length: - '0' date: - - Mon, 26 Apr 2021 03:41:42 GMT + - Tue, 11 Oct 2022 20:59:17 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -4005,10 +9305,9 @@ interactions: ParameterSetName: - -y --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002?api-version=2018-06-01 response: body: string: '' @@ -4018,21 +9317,19 @@ interactions: content-length: - '0' date: - - Mon, 26 Apr 2021 03:41:43 GMT + - Tue, 11 Oct 2022 20:59:19 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-deletes: - '14999' - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -4052,10 +9349,9 @@ interactions: ParameterSetName: - -y --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 
azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 response: body: string: '' @@ -4065,21 +9361,19 @@ interactions: content-length: - '0' date: - - Mon, 26 Apr 2021 03:41:52 GMT + - Tue, 11 Oct 2022 20:59:22 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-deletes: - '14999' - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -4091,117 +9385,120 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - account list + - datafactory delete Connection: - keep-alive ParameterSetName: - - --query -o + - -y --name --resource-group User-Agent: - - python/3.8.0 (Windows-10-10.0.19041-SP0) msrest/0.6.21 msrest_azure/0.6.3 - azure-mgmt-resource/12.1.0 Azure-SDK-For-Python AZURECLI/2.22.1 - accept-language: - - en-US + - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/locations?api-version=2019-11-01 response: body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus","name":"eastus","displayName":"East - US","regionalDisplayName":"(US) East US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-79.8164","latitude":"37.3719","physicalLocation":"Virginia","pairedRegion":[{"name":"westus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2","name":"eastus2","displayName":"East - US 2","regionalDisplayName":"(US) East US 2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","physicalLocation":"Virginia","pairedRegion":[{"name":"centralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus","name":"southcentralus","displayName":"South - Central US","regionalDisplayName":"(US) South Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-98.5","latitude":"29.4167","physicalLocation":"Texas","pairedRegion":[{"name":"northcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2","name":"westus2","displayName":"West - US 2","regionalDisplayName":"(US) West US 2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-119.852","latitude":"47.233","physicalLocation":"Washington","pairedRegion":[{"name":"westcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast","name":"australiaeast","displayName":"Australia 
- East","regionalDisplayName":"(Asia Pacific) Australia East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"151.2094","latitude":"-33.86","physicalLocation":"New - South Wales","pairedRegion":[{"name":"australiasoutheast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia","name":"southeastasia","displayName":"Southeast - Asia","regionalDisplayName":"(Asia Pacific) Southeast Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"103.833","latitude":"1.283","physicalLocation":"Singapore","pairedRegion":[{"name":"eastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope","name":"northeurope","displayName":"North - Europe","regionalDisplayName":"(Europe) North Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-6.2597","latitude":"53.3478","physicalLocation":"Ireland","pairedRegion":[{"name":"westeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth","name":"uksouth","displayName":"UK - South","regionalDisplayName":"(Europe) UK South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-0.799","latitude":"50.941","physicalLocation":"London","pairedRegion":[{"name":"ukwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope","name":"westeurope","displayName":"West - Europe","regionalDisplayName":"(Europe) West Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"4.9","latitude":"52.3667","physicalLocation":"Netherlands","pairedRegion":[{"name":"northeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus","name":"centralus","displayName":"Central - US","regionalDisplayName":"(US) Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","physicalLocation":"Iowa","pairedRegion":[{"name":"eastus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus","name":"northcentralus","displayName":"North - Central US","regionalDisplayName":"(US) North Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-87.6278","latitude":"41.8819","physicalLocation":"Illinois","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus","name":"westus","displayName":"West - US","regionalDisplayName":"(US) West 
US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-122.417","latitude":"37.783","physicalLocation":"California","pairedRegion":[{"name":"eastus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth","name":"southafricanorth","displayName":"South - Africa North","regionalDisplayName":"(Africa) South Africa North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Africa","longitude":"28.218370","latitude":"-25.731340","physicalLocation":"Johannesburg","pairedRegion":[{"name":"southafricawest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia","name":"centralindia","displayName":"Central - India","regionalDisplayName":"(Asia Pacific) Central India","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"73.9197","latitude":"18.5822","physicalLocation":"Pune","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia","name":"eastasia","displayName":"East - Asia","regionalDisplayName":"(Asia Pacific) East Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"114.188","latitude":"22.267","physicalLocation":"Hong - Kong","pairedRegion":[{"name":"southeastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast","name":"japaneast","displayName":"Japan - East","regionalDisplayName":"(Asia Pacific) Japan East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"139.77","latitude":"35.68","physicalLocation":"Tokyo, - Saitama","pairedRegion":[{"name":"japanwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest","name":"jioindiawest","displayName":"JIO - India West","regionalDisplayName":"(Asia Pacific) JIO India West","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"70.05773","latitude":"22.470701","physicalLocation":"Jamnagar","pairedRegion":[{"name":"jioindiacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral","name":"koreacentral","displayName":"Korea - Central","regionalDisplayName":"(Asia Pacific) Korea Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia - Pacific","longitude":"126.9780","latitude":"37.5665","physicalLocation":"Seoul","pairedRegion":[{"name":"koreasouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral","name":"canadacentral","displayName":"Canada - Central","regionalDisplayName":"(Canada) Canada 
Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Canada","longitude":"-79.383","latitude":"43.653","physicalLocation":"Toronto","pairedRegion":[{"name":"canadaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral","name":"francecentral","displayName":"France - Central","regionalDisplayName":"(Europe) France Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"2.3730","latitude":"46.3772","physicalLocation":"Paris","pairedRegion":[{"name":"francesouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral","name":"germanywestcentral","displayName":"Germany - West Central","regionalDisplayName":"(Europe) Germany West Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.682127","latitude":"50.110924","physicalLocation":"Frankfurt","pairedRegion":[{"name":"germanynorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast","name":"norwayeast","displayName":"Norway - East","regionalDisplayName":"(Europe) Norway East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"10.752245","latitude":"59.913868","physicalLocation":"Norway","pairedRegion":[{"name":"norwaywest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth","name":"switzerlandnorth","displayName":"Switzerland - North","regionalDisplayName":"(Europe) Switzerland North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.564572","latitude":"47.451542","physicalLocation":"Zurich","pairedRegion":[{"name":"switzerlandwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth","name":"uaenorth","displayName":"UAE - North","regionalDisplayName":"(Middle East) UAE North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Middle - East","longitude":"55.316666","latitude":"25.266666","physicalLocation":"Dubai","pairedRegion":[{"name":"uaecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth","name":"brazilsouth","displayName":"Brazil - South","regionalDisplayName":"(South America) Brazil South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"South - America","longitude":"-46.633","latitude":"-23.55","physicalLocation":"Sao - Paulo State","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage","name":"centralusstage","displayName":"Central - US (Stage)","regionalDisplayName":"(US) Central US 
(Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstage","name":"eastusstage","displayName":"East - US (Stage)","regionalDisplayName":"(US) East US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2stage","name":"eastus2stage","displayName":"East - US 2 (Stage)","regionalDisplayName":"(US) East US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralusstage","name":"northcentralusstage","displayName":"North - Central US (Stage)","regionalDisplayName":"(US) North Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstage","name":"southcentralusstage","displayName":"South - Central US (Stage)","regionalDisplayName":"(US) South Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westusstage","name":"westusstage","displayName":"West - US (Stage)","regionalDisplayName":"(US) West US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2stage","name":"westus2stage","displayName":"West - US 2 (Stage)","regionalDisplayName":"(US) West US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asia","name":"asia","displayName":"Asia","regionalDisplayName":"Asia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asiapacific","name":"asiapacific","displayName":"Asia - Pacific","regionalDisplayName":"Asia 
Pacific","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia","name":"australia","displayName":"Australia","regionalDisplayName":"Australia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil","name":"brazil","displayName":"Brazil","regionalDisplayName":"Brazil","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada","name":"canada","displayName":"Canada","regionalDisplayName":"Canada","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe","name":"europe","displayName":"Europe","regionalDisplayName":"Europe","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global","name":"global","displayName":"Global","regionalDisplayName":"Global","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india","name":"india","displayName":"India","regionalDisplayName":"India","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan","name":"japan","displayName":"Japan","regionalDisplayName":"Japan","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk","name":"uk","displayName":"United - Kingdom","regionalDisplayName":"United Kingdom","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstates","name":"unitedstates","displayName":"United - States","regionalDisplayName":"United States","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage","name":"eastasiastage","displayName":"East - Asia (Stage)","regionalDisplayName":"(Asia Pacific) East Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia - Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasiastage","name":"southeastasiastage","displayName":"Southeast - Asia (Stage)","regionalDisplayName":"(Asia Pacific) Southeast Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia - Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap","name":"centraluseuap","displayName":"Central - US EUAP","regionalDisplayName":"(US) Central US EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","pairedRegion":[{"name":"eastus2euap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap","name":"eastus2euap","displayName":"East - US 2 EUAP","regionalDisplayName":"(US) East US 2 
EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","pairedRegion":[{"name":"centraluseuap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus","name":"westcentralus","displayName":"West - Central US","regionalDisplayName":"(US) West Central US","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-110.234","latitude":"40.890","physicalLocation":"Wyoming","pairedRegion":[{"name":"westus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus3","name":"westus3","displayName":"West - US 3","regionalDisplayName":"(US) West US 3","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-112.074036","latitude":"33.448376","physicalLocation":"Phoenix","pairedRegion":[{"name":"eastus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest","name":"southafricawest","displayName":"South - Africa West","regionalDisplayName":"(Africa) South Africa West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Africa","longitude":"18.843266","latitude":"-34.075691","physicalLocation":"Cape - Town","pairedRegion":[{"name":"southafricanorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral","name":"australiacentral","displayName":"Australia - Central","regionalDisplayName":"(Asia Pacific) Australia Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2","name":"australiacentral2","displayName":"Australia - Central 2","regionalDisplayName":"(Asia Pacific) Australia Central 2","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast","name":"australiasoutheast","displayName":"Australia - Southeast","regionalDisplayName":"(Asia Pacific) Australia Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"144.9631","latitude":"-37.8136","physicalLocation":"Victoria","pairedRegion":[{"name":"australiaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest","name":"japanwest","displayName":"Japan - West","regionalDisplayName":"(Asia Pacific) Japan West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - 
Pacific","longitude":"135.5022","latitude":"34.6939","physicalLocation":"Osaka","pairedRegion":[{"name":"japaneast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral","name":"jioindiacentral","displayName":"JIO - India Central","regionalDisplayName":"(Asia Pacific) JIO India Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"79.08886","latitude":"21.146633","physicalLocation":"Nagpur","pairedRegion":[{"name":"jioindiawest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth","name":"koreasouth","displayName":"Korea - South","regionalDisplayName":"(Asia Pacific) Korea South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"129.0756","latitude":"35.1796","physicalLocation":"Busan","pairedRegion":[{"name":"koreacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia","name":"southindia","displayName":"South - India","regionalDisplayName":"(Asia Pacific) South India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"80.1636","latitude":"12.9822","physicalLocation":"Chennai","pairedRegion":[{"name":"centralindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westindia","name":"westindia","displayName":"West - India","regionalDisplayName":"(Asia Pacific) West India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia - Pacific","longitude":"72.868","latitude":"19.088","physicalLocation":"Mumbai","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast","name":"canadaeast","displayName":"Canada - East","regionalDisplayName":"(Canada) Canada East","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Canada","longitude":"-71.217","latitude":"46.817","physicalLocation":"Quebec","pairedRegion":[{"name":"canadacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth","name":"francesouth","displayName":"France - South","regionalDisplayName":"(Europe) France South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"2.1972","latitude":"43.8345","physicalLocation":"Marseille","pairedRegion":[{"name":"francecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth","name":"germanynorth","displayName":"Germany - North","regionalDisplayName":"(Europe) Germany 
North","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"8.806422","latitude":"53.073635","physicalLocation":"Berlin","pairedRegion":[{"name":"germanywestcentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest","name":"norwaywest","displayName":"Norway - West","regionalDisplayName":"(Europe) Norway West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"5.733107","latitude":"58.969975","physicalLocation":"Norway","pairedRegion":[{"name":"norwayeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest","name":"switzerlandwest","displayName":"Switzerland - West","regionalDisplayName":"(Europe) Switzerland West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"6.143158","latitude":"46.204391","physicalLocation":"Geneva","pairedRegion":[{"name":"switzerlandnorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest","name":"ukwest","displayName":"UK - West","regionalDisplayName":"(Europe) UK West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"-3.084","latitude":"53.427","physicalLocation":"Cardiff","pairedRegion":[{"name":"uksouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral","name":"uaecentral","displayName":"UAE - Central","regionalDisplayName":"(Middle East) UAE Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Middle - East","longitude":"54.366669","latitude":"24.466667","physicalLocation":"Abu - Dhabi","pairedRegion":[{"name":"uaenorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsoutheast","name":"brazilsoutheast","displayName":"Brazil - Southeast","regionalDisplayName":"(South America) Brazil Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"South - America","longitude":"-43.2075","latitude":"-22.90278","physicalLocation":"Rio","pairedRegion":[{"name":"brazilsouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv","name":"eastusslv","displayName":"East - US SLV","regionalDisplayName":"(South America) East US SLV","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"South - America","longitude":"-43.2075","latitude":"-22.90278","physicalLocation":"Silverstone","pairedRegion":[{"name":"eastusslv","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv"}]}}]}' - headers: - cache-control: - - no-cache - content-length: - - '26993' - content-type: - - application/json; charset=utf-8 - date: - - Mon, 26 Apr 2021 04:35:24 GMT + string: "{\"value\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\",\"name\":\"eastus\",\"displayName\":\"East + US\",\"regionalDisplayName\":\"(US) East 
US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-79.8164\",\"latitude\":\"37.3719\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\":\"westus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2\",\"name\":\"eastus2\",\"displayName\":\"East + US 2\",\"regionalDisplayName\":\"(US) East US 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-78.3889\",\"latitude\":\"36.6681\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\":\"centralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\",\"name\":\"southcentralus\",\"displayName\":\"South + Central US\",\"regionalDisplayName\":\"(US) South Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-98.5\",\"latitude\":\"29.4167\",\"physicalLocation\":\"Texas\",\"pairedRegion\":[{\"name\":\"northcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2\",\"name\":\"westus2\",\"displayName\":\"West + US 2\",\"regionalDisplayName\":\"(US) West US 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-119.852\",\"latitude\":\"47.233\",\"physicalLocation\":\"Washington\",\"pairedRegion\":[{\"name\":\"westcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus3\",\"name\":\"westus3\",\"displayName\":\"West + US 3\",\"regionalDisplayName\":\"(US) West US 3\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-112.074036\",\"latitude\":\"33.448376\",\"physicalLocation\":\"Phoenix\",\"pairedRegion\":[{\"name\":\"eastus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast\",\"name\":\"australiaeast\",\"displayName\":\"Australia + East\",\"regionalDisplayName\":\"(Asia Pacific) Australia East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"151.2094\",\"latitude\":\"-33.86\",\"physicalLocation\":\"New + South Wales\",\"pairedRegion\":[{\"name\":\"australiasoutheast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia\",\"name\":\"southeastasia\",\"displayName\":\"Southeast + Asia\",\"regionalDisplayName\":\"(Asia Pacific) Southeast Asia\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + 
Pacific\",\"longitude\":\"103.833\",\"latitude\":\"1.283\",\"physicalLocation\":\"Singapore\",\"pairedRegion\":[{\"name\":\"eastasia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope\",\"name\":\"northeurope\",\"displayName\":\"North + Europe\",\"regionalDisplayName\":\"(Europe) North Europe\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"-6.2597\",\"latitude\":\"53.3478\",\"physicalLocation\":\"Ireland\",\"pairedRegion\":[{\"name\":\"westeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedencentral\",\"name\":\"swedencentral\",\"displayName\":\"Sweden + Central\",\"regionalDisplayName\":\"(Europe) Sweden Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"17.14127\",\"latitude\":\"60.67488\",\"physicalLocation\":\"G\xE4vle\",\"pairedRegion\":[{\"name\":\"swedensouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedensouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth\",\"name\":\"uksouth\",\"displayName\":\"UK + South\",\"regionalDisplayName\":\"(Europe) UK South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"-0.799\",\"latitude\":\"50.941\",\"physicalLocation\":\"London\",\"pairedRegion\":[{\"name\":\"ukwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\",\"name\":\"westeurope\",\"displayName\":\"West + Europe\",\"regionalDisplayName\":\"(Europe) West Europe\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"4.9\",\"latitude\":\"52.3667\",\"physicalLocation\":\"Netherlands\",\"pairedRegion\":[{\"name\":\"northeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus\",\"name\":\"centralus\",\"displayName\":\"Central + US\",\"regionalDisplayName\":\"(US) Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-93.6208\",\"latitude\":\"41.5908\",\"physicalLocation\":\"Iowa\",\"pairedRegion\":[{\"name\":\"eastus2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth\",\"name\":\"southafricanorth\",\"displayName\":\"South + Africa North\",\"regionalDisplayName\":\"(Africa) South Africa North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Africa\",\"longitude\":\"28.218370\",\"latitude\":\"-25.731340\",\"physicalLocation\":\"Johannesburg\",\"pairedRegion\":[{\"name\":\"southafricawest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia\",\"name\":\"centralindia\",\"displayName\":\"Central + India\",\"regionalDisplayName\":\"(Asia Pacific) Central 
India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"73.9197\",\"latitude\":\"18.5822\",\"physicalLocation\":\"Pune\",\"pairedRegion\":[{\"name\":\"southindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia\",\"name\":\"eastasia\",\"displayName\":\"East + Asia\",\"regionalDisplayName\":\"(Asia Pacific) East Asia\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"114.188\",\"latitude\":\"22.267\",\"physicalLocation\":\"Hong + Kong\",\"pairedRegion\":[{\"name\":\"southeastasia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast\",\"name\":\"japaneast\",\"displayName\":\"Japan + East\",\"regionalDisplayName\":\"(Asia Pacific) Japan East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"139.77\",\"latitude\":\"35.68\",\"physicalLocation\":\"Tokyo, + Saitama\",\"pairedRegion\":[{\"name\":\"japanwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral\",\"name\":\"koreacentral\",\"displayName\":\"Korea + Central\",\"regionalDisplayName\":\"(Asia Pacific) Korea Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"126.9780\",\"latitude\":\"37.5665\",\"physicalLocation\":\"Seoul\",\"pairedRegion\":[{\"name\":\"koreasouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral\",\"name\":\"canadacentral\",\"displayName\":\"Canada + Central\",\"regionalDisplayName\":\"(Canada) Canada Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Canada\",\"longitude\":\"-79.383\",\"latitude\":\"43.653\",\"physicalLocation\":\"Toronto\",\"pairedRegion\":[{\"name\":\"canadaeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral\",\"name\":\"francecentral\",\"displayName\":\"France + Central\",\"regionalDisplayName\":\"(Europe) France Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"2.3730\",\"latitude\":\"46.3772\",\"physicalLocation\":\"Paris\",\"pairedRegion\":[{\"name\":\"francesouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral\",\"name\":\"germanywestcentral\",\"displayName\":\"Germany + West Central\",\"regionalDisplayName\":\"(Europe) Germany West 
Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.682127\",\"latitude\":\"50.110924\",\"physicalLocation\":\"Frankfurt\",\"pairedRegion\":[{\"name\":\"germanynorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast\",\"name\":\"norwayeast\",\"displayName\":\"Norway + East\",\"regionalDisplayName\":\"(Europe) Norway East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"10.752245\",\"latitude\":\"59.913868\",\"physicalLocation\":\"Norway\",\"pairedRegion\":[{\"name\":\"norwaywest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth\",\"name\":\"switzerlandnorth\",\"displayName\":\"Switzerland + North\",\"regionalDisplayName\":\"(Europe) Switzerland North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.564572\",\"latitude\":\"47.451542\",\"physicalLocation\":\"Zurich\",\"pairedRegion\":[{\"name\":\"switzerlandwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth\",\"name\":\"uaenorth\",\"displayName\":\"UAE + North\",\"regionalDisplayName\":\"(Middle East) UAE North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Middle + East\",\"longitude\":\"55.316666\",\"latitude\":\"25.266666\",\"physicalLocation\":\"Dubai\",\"pairedRegion\":[{\"name\":\"uaecentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth\",\"name\":\"brazilsouth\",\"displayName\":\"Brazil + South\",\"regionalDisplayName\":\"(South America) Brazil South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"South + America\",\"longitude\":\"-46.633\",\"latitude\":\"-23.55\",\"physicalLocation\":\"Sao + Paulo State\",\"pairedRegion\":[{\"name\":\"southcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap\",\"name\":\"eastus2euap\",\"displayName\":\"East + US 2 EUAP\",\"regionalDisplayName\":\"(US) East US 2 EUAP\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-78.3889\",\"latitude\":\"36.6681\",\"pairedRegion\":[{\"name\":\"centraluseuap\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/qatarcentral\",\"name\":\"qatarcentral\",\"displayName\":\"Qatar + Central\",\"regionalDisplayName\":\"(Middle East) Qatar Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Middle + 
East\",\"longitude\":\"51.439327\",\"latitude\":\"25.551462\",\"physicalLocation\":\"Doha\",\"pairedRegion\":[{\"name\":\"westeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage\",\"name\":\"centralusstage\",\"displayName\":\"Central + US (Stage)\",\"regionalDisplayName\":\"(US) Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstage\",\"name\":\"eastusstage\",\"displayName\":\"East + US (Stage)\",\"regionalDisplayName\":\"(US) East US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2stage\",\"name\":\"eastus2stage\",\"displayName\":\"East + US 2 (Stage)\",\"regionalDisplayName\":\"(US) East US 2 (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralusstage\",\"name\":\"northcentralusstage\",\"displayName\":\"North + Central US (Stage)\",\"regionalDisplayName\":\"(US) North Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstage\",\"name\":\"southcentralusstage\",\"displayName\":\"South + Central US (Stage)\",\"regionalDisplayName\":\"(US) South Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westusstage\",\"name\":\"westusstage\",\"displayName\":\"West + US (Stage)\",\"regionalDisplayName\":\"(US) West US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2stage\",\"name\":\"westus2stage\",\"displayName\":\"West + US 2 (Stage)\",\"regionalDisplayName\":\"(US) West US 2 (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asia\",\"name\":\"asia\",\"displayName\":\"Asia\",\"regionalDisplayName\":\"Asia\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asiapacific\",\"name\":\"asiapacific\",\"displayName\":\"Asia + Pacific\",\"regionalDisplayName\":\"Asia 
Pacific\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia\",\"name\":\"australia\",\"displayName\":\"Australia\",\"regionalDisplayName\":\"Australia\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil\",\"name\":\"brazil\",\"displayName\":\"Brazil\",\"regionalDisplayName\":\"Brazil\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada\",\"name\":\"canada\",\"displayName\":\"Canada\",\"regionalDisplayName\":\"Canada\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe\",\"name\":\"europe\",\"displayName\":\"Europe\",\"regionalDisplayName\":\"Europe\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/france\",\"name\":\"france\",\"displayName\":\"France\",\"regionalDisplayName\":\"France\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germany\",\"name\":\"germany\",\"displayName\":\"Germany\",\"regionalDisplayName\":\"Germany\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global\",\"name\":\"global\",\"displayName\":\"Global\",\"regionalDisplayName\":\"Global\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india\",\"name\":\"india\",\"displayName\":\"India\",\"regionalDisplayName\":\"India\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan\",\"name\":\"japan\",\"displayName\":\"Japan\",\"regionalDisplayName\":\"Japan\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/korea\",\"name\":\"korea\",\"displayName\":\"Korea\",\"regionalDisplayName\":\"Korea\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norway\",\"name\":\"norway\",\"displayName\":\"Norway\",\"regionalDisplayName\":\"Norway\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/singapore\",\"name\":\"singapore\",\"displayName\":\"Singapore\",\"regionalDisplayName\":\"Singapore\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafrica\",\"name\":\"southafrica\",\"displayName\":\"South + Africa\",\"regionalDisplayName\":\"South Africa\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerland\",\"name\":\"switzerland\",\"displayName\":\"Switzerland\",\"regionalDisplayName\":\"Switzerland\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uae\",\"name\":\"uae\",\"displayName\":\"United + 
Arab Emirates\",\"regionalDisplayName\":\"United Arab Emirates\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk\",\"name\":\"uk\",\"displayName\":\"United + Kingdom\",\"regionalDisplayName\":\"United Kingdom\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstates\",\"name\":\"unitedstates\",\"displayName\":\"United + States\",\"regionalDisplayName\":\"United States\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstateseuap\",\"name\":\"unitedstateseuap\",\"displayName\":\"United + States EUAP\",\"regionalDisplayName\":\"United States EUAP\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage\",\"name\":\"eastasiastage\",\"displayName\":\"East + Asia (Stage)\",\"regionalDisplayName\":\"(Asia Pacific) East Asia (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasiastage\",\"name\":\"southeastasiastage\",\"displayName\":\"Southeast + Asia (Stage)\",\"regionalDisplayName\":\"(Asia Pacific) Southeast Asia (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstg\",\"name\":\"eastusstg\",\"displayName\":\"East + US STG\",\"regionalDisplayName\":\"(US) East US STG\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-79.8164\",\"latitude\":\"37.3719\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\":\"southcentralusstg\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstg\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstg\",\"name\":\"southcentralusstg\",\"displayName\":\"South + Central US STG\",\"regionalDisplayName\":\"(US) South Central US STG\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-98.5\",\"latitude\":\"29.4167\",\"physicalLocation\":\"Texas\",\"pairedRegion\":[{\"name\":\"eastusstg\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstg\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus\",\"name\":\"northcentralus\",\"displayName\":\"North + Central US\",\"regionalDisplayName\":\"(US) North Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-87.6278\",\"latitude\":\"41.8819\",\"physicalLocation\":\"Illinois\",\"pairedRegion\":[{\"name\":\"southcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus\",\"name\":\"westus\",\"displayName\":\"West + US\",\"regionalDisplayName\":\"(US) West 
US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-122.417\",\"latitude\":\"37.783\",\"physicalLocation\":\"California\",\"pairedRegion\":[{\"name\":\"eastus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest\",\"name\":\"jioindiawest\",\"displayName\":\"Jio + India West\",\"regionalDisplayName\":\"(Asia Pacific) Jio India West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"70.05773\",\"latitude\":\"22.470701\",\"physicalLocation\":\"Jamnagar\",\"pairedRegion\":[{\"name\":\"jioindiacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap\",\"name\":\"centraluseuap\",\"displayName\":\"Central + US EUAP\",\"regionalDisplayName\":\"(US) Central US EUAP\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-93.6208\",\"latitude\":\"41.5908\",\"pairedRegion\":[{\"name\":\"eastus2euap\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus\",\"name\":\"westcentralus\",\"displayName\":\"West + Central US\",\"regionalDisplayName\":\"(US) West Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-110.234\",\"latitude\":\"40.890\",\"physicalLocation\":\"Wyoming\",\"pairedRegion\":[{\"name\":\"westus2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest\",\"name\":\"southafricawest\",\"displayName\":\"South + Africa West\",\"regionalDisplayName\":\"(Africa) South Africa West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Africa\",\"longitude\":\"18.843266\",\"latitude\":\"-34.075691\",\"physicalLocation\":\"Cape + Town\",\"pairedRegion\":[{\"name\":\"southafricanorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral\",\"name\":\"australiacentral\",\"displayName\":\"Australia + Central\",\"regionalDisplayName\":\"(Asia Pacific) Australia Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"149.1244\",\"latitude\":\"-35.3075\",\"physicalLocation\":\"Canberra\",\"pairedRegion\":[{\"name\":\"australiacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2\",\"name\":\"australiacentral2\",\"displayName\":\"Australia + Central 2\",\"regionalDisplayName\":\"(Asia Pacific) Australia Central 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + 
Pacific\",\"longitude\":\"149.1244\",\"latitude\":\"-35.3075\",\"physicalLocation\":\"Canberra\",\"pairedRegion\":[{\"name\":\"australiacentral2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast\",\"name\":\"australiasoutheast\",\"displayName\":\"Australia + Southeast\",\"regionalDisplayName\":\"(Asia Pacific) Australia Southeast\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"144.9631\",\"latitude\":\"-37.8136\",\"physicalLocation\":\"Victoria\",\"pairedRegion\":[{\"name\":\"australiaeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest\",\"name\":\"japanwest\",\"displayName\":\"Japan + West\",\"regionalDisplayName\":\"(Asia Pacific) Japan West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"135.5022\",\"latitude\":\"34.6939\",\"physicalLocation\":\"Osaka\",\"pairedRegion\":[{\"name\":\"japaneast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral\",\"name\":\"jioindiacentral\",\"displayName\":\"Jio + India Central\",\"regionalDisplayName\":\"(Asia Pacific) Jio India Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"79.08886\",\"latitude\":\"21.146633\",\"physicalLocation\":\"Nagpur\",\"pairedRegion\":[{\"name\":\"jioindiawest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth\",\"name\":\"koreasouth\",\"displayName\":\"Korea + South\",\"regionalDisplayName\":\"(Asia Pacific) Korea South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"129.0756\",\"latitude\":\"35.1796\",\"physicalLocation\":\"Busan\",\"pairedRegion\":[{\"name\":\"koreacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\",\"name\":\"southindia\",\"displayName\":\"South + India\",\"regionalDisplayName\":\"(Asia Pacific) South India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"80.1636\",\"latitude\":\"12.9822\",\"physicalLocation\":\"Chennai\",\"pairedRegion\":[{\"name\":\"centralindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westindia\",\"name\":\"westindia\",\"displayName\":\"West + India\",\"regionalDisplayName\":\"(Asia Pacific) West India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + 
Pacific\",\"longitude\":\"72.868\",\"latitude\":\"19.088\",\"physicalLocation\":\"Mumbai\",\"pairedRegion\":[{\"name\":\"southindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast\",\"name\":\"canadaeast\",\"displayName\":\"Canada + East\",\"regionalDisplayName\":\"(Canada) Canada East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Canada\",\"longitude\":\"-71.217\",\"latitude\":\"46.817\",\"physicalLocation\":\"Quebec\",\"pairedRegion\":[{\"name\":\"canadacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth\",\"name\":\"francesouth\",\"displayName\":\"France + South\",\"regionalDisplayName\":\"(Europe) France South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"2.1972\",\"latitude\":\"43.8345\",\"physicalLocation\":\"Marseille\",\"pairedRegion\":[{\"name\":\"francecentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth\",\"name\":\"germanynorth\",\"displayName\":\"Germany + North\",\"regionalDisplayName\":\"(Europe) Germany North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.806422\",\"latitude\":\"53.073635\",\"physicalLocation\":\"Berlin\",\"pairedRegion\":[{\"name\":\"germanywestcentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest\",\"name\":\"norwaywest\",\"displayName\":\"Norway + West\",\"regionalDisplayName\":\"(Europe) Norway West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"5.733107\",\"latitude\":\"58.969975\",\"physicalLocation\":\"Norway\",\"pairedRegion\":[{\"name\":\"norwayeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest\",\"name\":\"switzerlandwest\",\"displayName\":\"Switzerland + West\",\"regionalDisplayName\":\"(Europe) Switzerland West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"6.143158\",\"latitude\":\"46.204391\",\"physicalLocation\":\"Geneva\",\"pairedRegion\":[{\"name\":\"switzerlandnorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest\",\"name\":\"ukwest\",\"displayName\":\"UK + West\",\"regionalDisplayName\":\"(Europe) UK West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"-3.084\",\"latitude\":\"53.427\",\"physicalLocation\":\"Cardiff\",\"pairedRegion\":[{\"name\":\"uksouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral\",\"name\":\"uaecentral\",\"displayName\":\"UAE + Central\",\"regionalDisplayName\":\"(Middle East) UAE 
Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Middle + East\",\"longitude\":\"54.366669\",\"latitude\":\"24.466667\",\"physicalLocation\":\"Abu + Dhabi\",\"pairedRegion\":[{\"name\":\"uaenorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsoutheast\",\"name\":\"brazilsoutheast\",\"displayName\":\"Brazil + Southeast\",\"regionalDisplayName\":\"(South America) Brazil Southeast\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"South + America\",\"longitude\":\"-43.2075\",\"latitude\":\"-22.90278\",\"physicalLocation\":\"Rio\",\"pairedRegion\":[{\"name\":\"brazilsouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth\"}]}}]}" + headers: + cache-control: + - no-cache + content-length: + - '30402' + content-type: + - application/json; charset=utf-8 + date: + - Tue, 11 Oct 2022 20:59:22 GMT expires: - '-1' pragma: @@ -4234,28 +9531,33 @@ interactions: ParameterSetName: - --location --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 response: body: - string: '{"name":"exampleFa000001","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/examplefataiszwk32","type":"Microsoft.DataFactory/factories","properties":{"provisioningState":"Succeeded","createTime":"2021-04-26T04:35:35.9320014Z","version":"2018-06-01","encryption":{}},"eTag":"\"30000750-0000-0100-0000-608643180000\"","location":"eastus","identity":{"type":"SystemAssigned","principalId":"7d2cdddd-762b-41b6-a3cb-b2798bee84bf","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},"tags":{}}' + string: "{\n \"name\": \"exampleFa000001\",\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/examplefa4rqmquxng\",\n + \ \"type\": \"Microsoft.DataFactory/factories\",\n \"properties\": {\n \"provisioningState\": + \"Succeeded\",\n \"createTime\": \"2022-10-11T20:59:28.2978843Z\",\n \"version\": + \"2018-06-01\",\n \"encryption\": {}\n },\n \"eTag\": \"\\\"0d0055cb-0000-0100-0000-6345d9300000\\\"\",\n + \ \"location\": \"eastus\",\n \"identity\": {\n \"type\": \"SystemAssigned\",\n + \ \"principalId\": \"cdd99071-3a87-4b2b-aa21-a132d4a3ce5b\",\n \"tenantId\": + \"16b3c013-d300-468d-ac64-7eda0820b6d3\"\n },\n \"tags\": {}\n}" headers: cache-control: - no-cache content-length: - - '631' + - '647' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 04:35:38 GMT + - Tue, 11 Oct 2022 20:59:28 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -4265,9 +9567,7 @@ interactions: x-content-type-options: - nosniff 
x-ms-ratelimit-remaining-subscription-writes: - - '1197' - x-powered-by: - - ASP.NET + - '1199' status: code: 200 message: OK @@ -4291,28 +9591,32 @@ interactions: ParameterSetName: - --factory-name --pipeline --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005?api-version=2018-06-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005","name":"example000005","type":"Microsoft.DataFactory/factories/pipelines","properties":{"activities":[{"name":"Wait1","type":"Wait","dependsOn":[],"userProperties":[],"typeProperties":{"waitTimeInSeconds":5}}],"annotations":[]},"etag":"5b0004ed-0000-0100-0000-6086431d0000"}' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005\",\n + \ \"name\": \"example000005\",\n \"type\": \"Microsoft.DataFactory/factories/pipelines\",\n + \ \"properties\": {\n \"activities\": [\n {\n \"name\": \"Wait1\",\n + \ \"type\": \"Wait\",\n \"dependsOn\": [],\n \"userProperties\": + [],\n \"typeProperties\": {\n \"waitTimeInSeconds\": 5\n }\n + \ }\n ],\n \"annotations\": []\n },\n \"etag\": \"01000377-0000-0100-0000-6345d9310000\"\n}" headers: cache-control: - no-cache content-length: - - '514' + - '571' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 04:35:40 GMT + - Tue, 11 Oct 2022 20:59:28 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -4323,8 +9627,6 @@ interactions: - nosniff x-ms-ratelimit-remaining-subscription-writes: - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -4346,28 +9648,27 @@ interactions: ParameterSetName: - --factory-name --parameters --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005/createRun?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005/createRun?api-version=2018-06-01 response: body: - string: '{"runId":"db9d3cb1-a648-11eb-b950-84a93e64b16e"}' + string: "{\n \"runId\": \"8ab11cb8-49a7-11ed-85e9-00249b7d5bd6\"\n}" headers: cache-control: - no-cache content-length: - - '48' + - '53' content-type: - application/json; 
charset=utf-8 date: - - Mon, 26 Apr 2021 04:35:44 GMT + - Tue, 11 Oct 2022 20:59:30 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -4377,9 +9678,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - - '1198' - x-powered-by: - - ASP.NET + - '1199' status: code: 200 message: OK @@ -4397,28 +9696,35 @@ interactions: ParameterSetName: - --factory-name --resource-group --run-id User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelineruns/db9d3cb1-a648-11eb-b950-84a93e64b16e?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelineruns/8ab11cb8-49a7-11ed-85e9-00249b7d5bd6?api-version=2018-06-01 response: body: - string: '{"id":"/SUBSCRIPTIONS/00000000-0000-0000-0000-000000000000/RESOURCEGROUPS/CLITESTUREUOWB2FIDALQDHZBZQFN5YX3MAHYIR2OCULQUBA2QXHMFOYTDLJ5C3QD7OFFHGGZI3/PROVIDERS/MICROSOFT.DATAFACTORY/FACTORIES/EXAMPLEFATAISZWK32/pipelineruns/db9d3cb1-a648-11eb-b950-84a93e64b16e","runId":"db9d3cb1-a648-11eb-b950-84a93e64b16e","debugRunId":null,"runGroupId":"db9d3cb1-a648-11eb-b950-84a93e64b16e","pipelineName":"example000005","parameters":{},"invokedBy":{"id":"d5b6b289661d402c8f76621689c77f40","name":"Manual","invokedByType":"Manual"},"runStart":"2021-04-26T04:35:43.0610781Z","runEnd":"2021-04-26T04:35:51.3152593Z","durationInMs":8254,"status":"Succeeded","message":"","lastUpdated":"2021-04-26T04:35:51.3152593Z","annotations":[],"runDimension":{},"isLatest":true}' + string: "{\n \"id\": \"/SUBSCRIPTIONS/00000000-0000-0000-0000-000000000000/RESOURCEGROUPS/CLITESTEH2CNXAN5LYGO3EPRCFDIYK6YAKNFABF4WJJVU4F3KSNZPDL67OKJHWS5KJ6MBCA44EQ/PROVIDERS/MICROSOFT.DATAFACTORY/FACTORIES/EXAMPLEFA4RQMQUXNG/pipelineruns/8ab11cb8-49a7-11ed-85e9-00249b7d5bd6\",\n + \ \"runId\": \"8ab11cb8-49a7-11ed-85e9-00249b7d5bd6\",\n \"debugRunId\": + null,\n \"runGroupId\": \"8ab11cb8-49a7-11ed-85e9-00249b7d5bd6\",\n \"pipelineName\": + \"example000005\",\n \"parameters\": {},\n \"invokedBy\": {\n \"id\": + \"d83c0ec47c50410e99e4027762775fc5\",\n \"name\": \"Manual\",\n \"invokedByType\": + \"Manual\"\n },\n \"runStart\": \"2022-10-11T20:59:29.9492575Z\",\n \"runEnd\": + null,\n \"durationInMs\": null,\n \"status\": \"InProgress\",\n \"message\": + \"\",\n \"pipelineReturnValue\": null,\n \"lastUpdated\": \"2022-10-11T20:59:31.0581041Z\",\n + \ \"annotations\": [],\n \"runDimension\": {},\n \"isLatest\": true\n}" headers: cache-control: - no-cache content-length: - - '755' + - '845' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 04:35:51 GMT + - Tue, 11 Oct 2022 20:59:35 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -4427,8 +9733,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -4452,28 +9756,27 @@ interactions: - --factory-name 
--last-updated-after --last-updated-before --resource-group --run-id User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelineruns/2f7fdb90-5df1-4b8e-ac2f-064cfa58202b/queryActivityruns?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelineruns/2f7fdb90-5df1-4b8e-ac2f-064cfa58202b/queryActivityruns?api-version=2018-06-01 response: body: - string: '{"value":[]}' + string: "{\n \"value\": []\n}" headers: cache-control: - no-cache content-length: - - '12' + - '17' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 04:35:51 GMT + - Tue, 11 Oct 2022 20:59:36 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -4482,8 +9785,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -4505,40 +9806,33 @@ interactions: ParameterSetName: - --factory-name --parameters --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005/createRun?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005/createRun?api-version=2018-06-01 response: body: - string: '{"runId":"e21653a1-a648-11eb-a1d9-84a93e64b16e"}' + string: "{\n \"runId\": \"8f06e8ce-49a7-11ed-85e9-00249b7d5bd6\"\n}" headers: cache-control: - no-cache content-length: - - '48' + - '53' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 04:35:52 GMT + - Tue, 11 Oct 2022 20:59:36 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -4558,34 +9852,37 @@ interactions: ParameterSetName: - --factory-name --resource-group --run-id User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelineruns/e21653a1-a648-11eb-a1d9-84a93e64b16e/cancel?api-version=2018-06-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelineruns/8f06e8ce-49a7-11ed-85e9-00249b7d5bd6/cancel?api-version=2018-06-01 response: body: - string: '' + string: '""' headers: cache-control: - no-cache content-length: - - '0' + - '2' + content-type: + - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 04:35:54 GMT + - Tue, 11 Oct 2022 20:59:38 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -4593,7 +9890,7 @@ interactions: body: '{"properties": {"type": "TumblingWindowTrigger", "description": "trumblingwindowtrigger", "annotations": [], "pipeline": {"pipelineReference": {"type": "PipelineReference", "referenceName": "example000005"}}, "typeProperties": {"frequency": "Minute", - "interval": 5, "startTime": "2021-04-26T04:35:23.000Z", "endTime": "2021-04-26T05:35:23.000Z", + "interval": 5, "startTime": "2022-10-11T20:41:31.000Z", "endTime": "2022-10-11T21:41:31.000Z", "delay": "00:00:00", "maxConcurrency": 50, "retryPolicy": {"intervalInSeconds": 30}, "dependsOn": []}}}' headers: @@ -4606,34 +9903,43 @@ interactions: Connection: - keep-alive Content-Length: - - '451' + - '449' Content-Type: - application/json ParameterSetName: - --resource-group --properties --factory-name --name User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006","name":"example000006","type":"Microsoft.DataFactory/factories/triggers","properties":{"type":"TumblingWindowTrigger","description":"trumblingwindowtrigger","annotations":[],"pipeline":{"pipelineReference":{"type":"PipelineReference","referenceName":"example000005"}},"typeProperties":{"frequency":"Minute","interval":5,"startTime":"2021-04-26T04:35:23Z","endTime":"2021-04-26T05:35:23Z","delay":"00:00:00","maxConcurrency":50,"retryPolicy":{"intervalInSeconds":30},"dependsOn":[]},"runtimeState":"Stopped"},"etag":"5b00f6ee-0000-0100-0000-6086432d0000"}' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006\",\n + \ \"name\": \"example000006\",\n \"type\": \"Microsoft.DataFactory/factories/triggers\",\n + \ \"properties\": {\n \"type\": \"TumblingWindowTrigger\",\n \"description\": + \"trumblingwindowtrigger\",\n \"annotations\": [],\n \"pipeline\": {\n + \ \"pipelineReference\": {\n \"type\": 
\"PipelineReference\",\n + \ \"referenceName\": \"example000005\"\n }\n },\n \"typeProperties\": + {\n \"frequency\": \"Minute\",\n \"interval\": 5,\n \"startTime\": + \"2022-10-11T20:41:31Z\",\n \"endTime\": \"2022-10-11T21:41:31Z\",\n + \ \"delay\": \"00:00:00\",\n \"maxConcurrency\": 50,\n \"retryPolicy\": + {\n \"intervalInSeconds\": 30\n },\n \"dependsOn\": []\n + \ },\n \"runtimeState\": \"Stopped\"\n },\n \"etag\": \"01000977-0000-0100-0000-6345d93a0000\"\n}" headers: cache-control: - no-cache content-length: - - '793' + - '910' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 04:35:56 GMT + - Tue, 11 Oct 2022 20:59:38 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -4643,9 +9949,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - - '1198' - x-powered-by: - - ASP.NET + - '1199' status: code: 200 message: OK @@ -4665,34 +9969,37 @@ interactions: ParameterSetName: - --factory-name --resource-group --name User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/start?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/start?api-version=2018-06-01 response: body: - string: '' + string: '{}' headers: cache-control: - no-cache content-length: - - '0' + - '2' + content-type: + - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 04:36:00 GMT + - Tue, 11 Oct 2022 20:59:40 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -4710,28 +10017,37 @@ interactions: ParameterSetName: - --factory-name --resource-group --name User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006","name":"example000006","type":"Microsoft.DataFactory/factories/triggers","properties":{"type":"TumblingWindowTrigger","description":"trumblingwindowtrigger","annotations":[],"pipeline":{"pipelineReference":{"type":"PipelineReference","referenceName":"example000005"}},"typeProperties":{"frequency":"Minute","interval":5,"startTime":"2021-04-26T04:35:23Z","endTime":"2021-04-26T05:35:23Z","delay":"00:00:00","maxConcurrency":50,"retryPolicy":{"intervalInSeconds":30},"dependsOn":[]},"runtimeState":"Started"},"etag":"5b008aef-0000-0100-0000-608643310000"}' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006\",\n + \ \"name\": \"example000006\",\n \"type\": \"Microsoft.DataFactory/factories/triggers\",\n + \ \"properties\": {\n \"type\": \"TumblingWindowTrigger\",\n \"description\": + \"trumblingwindowtrigger\",\n \"annotations\": [],\n \"pipeline\": {\n + \ \"pipelineReference\": {\n \"type\": \"PipelineReference\",\n + \ \"referenceName\": \"example000005\"\n }\n },\n \"typeProperties\": + {\n \"frequency\": \"Minute\",\n \"interval\": 5,\n \"startTime\": + \"2022-10-11T20:41:31Z\",\n \"endTime\": \"2022-10-11T21:41:31Z\",\n + \ \"delay\": \"00:00:00\",\n \"maxConcurrency\": 50,\n \"retryPolicy\": + {\n \"intervalInSeconds\": 30\n },\n \"dependsOn\": []\n + \ },\n \"runtimeState\": \"Started\"\n },\n \"etag\": \"01000e77-0000-0100-0000-6345d93d0000\"\n}" headers: cache-control: - no-cache content-length: - - '793' + - '910' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 04:36:01 GMT + - Tue, 11 Oct 2022 20:59:41 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -4740,13 +10056,11 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK - request: - body: '{"lastUpdatedAfter": "2021-04-26T04:35:23.000Z", "lastUpdatedBefore": "2021-04-26T05:35:23.000Z"}' + body: '{"lastUpdatedAfter": "2022-10-11T20:41:31.000Z", "lastUpdatedBefore": "2022-10-11T21:41:31.000Z"}' headers: Accept: - application/json @@ -4763,28 +10077,48 @@ interactions: ParameterSetName: - --factory-name --last-updated-after --last-updated-before --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/queryTriggerRuns?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/queryTriggerRuns?api-version=2018-06-01 response: body: - string: '{"value":[]}' + string: "{\n \"value\": [\n {\n \"triggerName\": \"example000006\",\n + \ \"triggerRunId\": \"08585360849045338661560259885CU87\",\n \"triggerType\": + \"TumblingWindowTrigger\",\n \"triggerRunTimestamp\": \"2022-10-11T20:59:40.9423133Z\",\n + \ \"status\": 
\"Waiting\",\n \"message\": null,\n \"properties\": + {\n \"TriggerTime\": \"10/11/2022 20:59:40\",\n \"windowStartTime\": + \"10/11/2022 20:41:31\",\n \"windowEndTime\": \"10/11/2022 20:46:31\"\n + \ },\n \"triggeredPipelines\": {},\n \"groupId\": \"08585360849045338661560259885CU87\",\n + \ \"dependencyStatus\": {}\n },\n {\n \"triggerName\": \"example000006\",\n + \ \"triggerRunId\": \"08585360849043347091298881340CU87\",\n \"triggerType\": + \"TumblingWindowTrigger\",\n \"triggerRunTimestamp\": \"2022-10-11T20:59:41.1610634Z\",\n + \ \"status\": \"Waiting\",\n \"message\": null,\n \"properties\": + {\n \"TriggerTime\": \"10/11/2022 20:59:41\",\n \"windowStartTime\": + \"10/11/2022 20:51:31\",\n \"windowEndTime\": \"10/11/2022 20:56:31\"\n + \ },\n \"triggeredPipelines\": {},\n \"groupId\": \"08585360849043347091298881340CU87\",\n + \ \"dependencyStatus\": {}\n },\n {\n \"triggerName\": \"example000006\",\n + \ \"triggerRunId\": \"08585360849044311646911277910CU16\",\n \"triggerType\": + \"TumblingWindowTrigger\",\n \"triggerRunTimestamp\": \"2022-10-11T20:59:41.0829891Z\",\n + \ \"status\": \"Waiting\",\n \"message\": null,\n \"properties\": + {\n \"TriggerTime\": \"10/11/2022 20:59:41\",\n \"windowStartTime\": + \"10/11/2022 20:46:31\",\n \"windowEndTime\": \"10/11/2022 20:51:31\"\n + \ },\n \"triggeredPipelines\": {},\n \"groupId\": \"08585360849044311646911277910CU16\",\n + \ \"dependencyStatus\": {}\n }\n ]\n}" headers: cache-control: - no-cache content-length: - - '12' + - '1687' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 04:36:03 GMT + - Tue, 11 Oct 2022 20:59:41 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -4793,13 +10127,11 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK - request: - body: '{"lastUpdatedAfter": "2021-04-26T04:35:23.000Z", "lastUpdatedBefore": "2021-04-26T05:35:23.000Z"}' + body: '{"lastUpdatedAfter": "2022-10-11T20:41:31.000Z", "lastUpdatedBefore": "2022-10-11T21:41:31.000Z"}' headers: Accept: - application/json @@ -4816,30 +10148,59 @@ interactions: ParameterSetName: - --factory-name --last-updated-after --last-updated-before --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/queryTriggerRuns?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/queryTriggerRuns?api-version=2018-06-01 response: body: - string: '{"value":[{"triggerName":"example000006","triggerRunId":"08585821951252995598172155524CU16","triggerType":"TumblingWindowTrigger","triggerRunTimestamp":"2021-04-26T04:40:23.2861718Z","status":"Succeeded","message":null,"properties":{"TriggerTime":"4/26/2021 - 4:40:23 AM","windowStartTime":"4/26/2021 4:35:23 AM","windowEndTime":"4/26/2021 - 4:40:23 AM"},"triggeredPipelines":{"example000005":"904d185a-ceed-43cb-9812-639b5e92ac89"},"groupId":"08585821951252995598172155524CU16","dependencyStatus":{}}]}' + string: 
"{\n \"value\": [\n {\n \"triggerName\": \"example000006\",\n + \ \"triggerRunId\": \"08585360849045338661560259885CU87\",\n \"triggerType\": + \"TumblingWindowTrigger\",\n \"triggerRunTimestamp\": \"2022-10-11T20:59:40.9423133Z\",\n + \ \"status\": \"Succeeded\",\n \"message\": null,\n \"properties\": + {\n \"TriggerTime\": \"10/11/2022 20:59:40\",\n \"windowStartTime\": + \"10/11/2022 20:41:31\",\n \"windowEndTime\": \"10/11/2022 20:46:31\"\n + \ },\n \"triggeredPipelines\": {\n \"example000005\": \"f91e7f15-0e56-4f30-a390-6588035ee495\"\n + \ },\n \"groupId\": \"08585360849045338661560259885CU87\",\n \"dependencyStatus\": + {}\n },\n {\n \"triggerName\": \"example000006\",\n \"triggerRunId\": + \"08585360849043347091298881340CU87\",\n \"triggerType\": \"TumblingWindowTrigger\",\n + \ \"triggerRunTimestamp\": \"2022-10-11T20:59:41.1610634Z\",\n \"status\": + \"Succeeded\",\n \"message\": null,\n \"properties\": {\n \"TriggerTime\": + \"10/11/2022 20:59:41\",\n \"windowStartTime\": \"10/11/2022 20:51:31\",\n + \ \"windowEndTime\": \"10/11/2022 20:56:31\"\n },\n \"triggeredPipelines\": + {\n \"example000005\": \"169f939a-fcfa-4339-b14b-2a2e6ecaa36a\"\n },\n + \ \"groupId\": \"08585360849043347091298881340CU87\",\n \"dependencyStatus\": + {}\n },\n {\n \"triggerName\": \"example000006\",\n \"triggerRunId\": + \"08585360849044311646911277910CU16\",\n \"triggerType\": \"TumblingWindowTrigger\",\n + \ \"triggerRunTimestamp\": \"2022-10-11T20:59:41.0829891Z\",\n \"status\": + \"Succeeded\",\n \"message\": null,\n \"properties\": {\n \"TriggerTime\": + \"10/11/2022 20:59:41\",\n \"windowStartTime\": \"10/11/2022 20:46:31\",\n + \ \"windowEndTime\": \"10/11/2022 20:51:31\"\n },\n \"triggeredPipelines\": + {\n \"example000005\": \"0ad433ee-e659-4f27-b545-9325d119ddd1\"\n },\n + \ \"groupId\": \"08585360849044311646911277910CU16\",\n \"dependencyStatus\": + {}\n },\n {\n \"triggerName\": \"example000006\",\n \"triggerRunId\": + \"08585360849042515124897705358CU81\",\n \"triggerType\": \"TumblingWindowTrigger\",\n + \ \"triggerRunTimestamp\": \"2022-10-11T21:01:31.5358258Z\",\n \"status\": + \"Succeeded\",\n \"message\": null,\n \"properties\": {\n \"TriggerTime\": + \"10/11/2022 21:01:31\",\n \"windowStartTime\": \"10/11/2022 20:56:31\",\n + \ \"windowEndTime\": \"10/11/2022 21:01:31\"\n },\n \"triggeredPipelines\": + {\n \"example000005\": \"c608d0aa-89da-4ec8-8d64-da1cf2518c9c\"\n },\n + \ \"groupId\": \"08585360849042515124897705358CU81\",\n \"dependencyStatus\": + {}\n }\n ]\n}" headers: cache-control: - no-cache content-length: - - '502' + - '2535' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 04:41:05 GMT + - Tue, 11 Oct 2022 21:04:41 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -4848,8 +10209,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -4869,41 +10228,42 @@ interactions: ParameterSetName: - --factory-name --resource-group --trigger-name --run-id User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: POST - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/triggerRuns/08585821951252995598172155524CU16/rerun?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/triggerRuns/08585360849045338661560259885CU87/rerun?api-version=2018-06-01 response: body: - string: '' + string: '""' headers: cache-control: - no-cache content-length: - - '0' + - '2' + content-type: + - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 04:41:07 GMT + - Tue, 11 Oct 2022 21:04:42 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK - request: - body: '{"lastUpdatedAfter": "2018-06-16T00:36:44.334575Z", "lastUpdatedBefore": - "2018-06-16T00:49:48.368647Z", "filters": [{"operand": "TriggerName", "operator": - "Equals", "values": ["example000006"]}]}' + body: '{"lastUpdatedAfter": "2022-10-11T20:41:31.000Z", "lastUpdatedBefore": "2022-10-11T21:41:31.000Z"}' headers: Accept: - application/json @@ -4914,34 +10274,65 @@ interactions: Connection: - keep-alive Content-Length: - - '196' + - '97' Content-Type: - application/json ParameterSetName: - - --factory-name --filters --last-updated-after --last-updated-before --resource-group + - --factory-name --last-updated-after --last-updated-before --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/queryTriggerRuns?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/queryTriggerRuns?api-version=2018-06-01 response: body: - string: '{"value":[]}' + string: "{\n \"value\": [\n {\n \"triggerName\": \"example000006\",\n + \ \"triggerRunId\": \"08585360849045338661560259885CU87\",\n \"triggerType\": + \"TumblingWindowTrigger\",\n \"triggerRunTimestamp\": \"2022-10-11T20:59:40.9423133Z\",\n + \ \"status\": \"Succeeded\",\n \"message\": null,\n \"properties\": + {\n \"TriggerTime\": \"10/11/2022 20:59:40\",\n \"windowStartTime\": + \"10/11/2022 20:41:31\",\n \"windowEndTime\": \"10/11/2022 20:46:31\"\n + \ },\n \"triggeredPipelines\": {\n \"example000005\": \"f91e7f15-0e56-4f30-a390-6588035ee495\"\n + \ },\n \"groupId\": \"08585360849045338661560259885CU87\",\n \"dependencyStatus\": + {}\n },\n {\n \"triggerName\": \"example000006\",\n \"triggerRunId\": + \"08585360849043347091298881340CU87\",\n \"triggerType\": \"TumblingWindowTrigger\",\n + \ \"triggerRunTimestamp\": \"2022-10-11T20:59:41.1610634Z\",\n \"status\": + \"Succeeded\",\n \"message\": null,\n \"properties\": {\n \"TriggerTime\": + \"10/11/2022 20:59:41\",\n \"windowStartTime\": \"10/11/2022 20:51:31\",\n + \ 
\"windowEndTime\": \"10/11/2022 20:56:31\"\n },\n \"triggeredPipelines\": + {\n \"example000005\": \"169f939a-fcfa-4339-b14b-2a2e6ecaa36a\"\n },\n + \ \"groupId\": \"08585360849043347091298881340CU87\",\n \"dependencyStatus\": + {}\n },\n {\n \"triggerName\": \"example000006\",\n \"triggerRunId\": + \"08585360849044311646911277910CU16\",\n \"triggerType\": \"TumblingWindowTrigger\",\n + \ \"triggerRunTimestamp\": \"2022-10-11T20:59:41.0829891Z\",\n \"status\": + \"Succeeded\",\n \"message\": null,\n \"properties\": {\n \"TriggerTime\": + \"10/11/2022 20:59:41\",\n \"windowStartTime\": \"10/11/2022 20:46:31\",\n + \ \"windowEndTime\": \"10/11/2022 20:51:31\"\n },\n \"triggeredPipelines\": + {\n \"example000005\": \"0ad433ee-e659-4f27-b545-9325d119ddd1\"\n },\n + \ \"groupId\": \"08585360849044311646911277910CU16\",\n \"dependencyStatus\": + {}\n },\n {\n \"triggerName\": \"example000006\",\n \"triggerRunId\": + \"08585360849042515124897705358CU81\",\n \"triggerType\": \"TumblingWindowTrigger\",\n + \ \"triggerRunTimestamp\": \"2022-10-11T21:01:31.5358258Z\",\n \"status\": + \"Succeeded\",\n \"message\": null,\n \"properties\": {\n \"TriggerTime\": + \"10/11/2022 21:01:31\",\n \"windowStartTime\": \"10/11/2022 20:56:31\",\n + \ \"windowEndTime\": \"10/11/2022 21:01:31\"\n },\n \"triggeredPipelines\": + {\n \"example000005\": \"c608d0aa-89da-4ec8-8d64-da1cf2518c9c\"\n },\n + \ \"groupId\": \"08585360849042515124897705358CU81\",\n \"dependencyStatus\": + {}\n }\n ]\n}" headers: cache-control: - no-cache content-length: - - '12' + - '2535' content-type: - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 04:41:08 GMT + - Tue, 11 Oct 2022 21:04:42 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -4950,8 +10341,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -4971,34 +10360,37 @@ interactions: ParameterSetName: - --factory-name --resource-group --name User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/stop?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/stop?api-version=2018-06-01 response: body: - string: '' + string: '{}' headers: cache-control: - no-cache content-length: - - '0' + - '2' + content-type: + - application/json; charset=utf-8 date: - - Mon, 26 Apr 2021 04:41:11 GMT + - Tue, 11 Oct 2022 21:04:44 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -5018,10 +10410,9 @@ interactions: ParameterSetName: - -y --factory-name --resource-group --name User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown 
Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 response: body: string: '' @@ -5031,21 +10422,19 @@ interactions: content-length: - '0' date: - - Mon, 26 Apr 2021 04:41:16 GMT + - Tue, 11 Oct 2022 21:04:46 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-deletes: - '14999' - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -5065,10 +10454,9 @@ interactions: ParameterSetName: - -y --factory-name --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005?api-version=2018-06-01 response: body: string: '' @@ -5078,21 +10466,19 @@ interactions: content-length: - '0' date: - - Mon, 26 Apr 2021 04:41:20 GMT + - Tue, 11 Oct 2022 21:04:48 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-deletes: - '14999' - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -5112,10 +10498,9 @@ interactions: ParameterSetName: - -y --name --resource-group User-Agent: - - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 - (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: DELETE - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000009/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 response: body: string: '' @@ -5125,21 +10510,19 @@ interactions: content-length: - '0' date: - - Mon, 26 Apr 2021 04:41:27 GMT + - Tue, 11 Oct 2022 21:04:52 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-deletes: - '14999' - x-powered-by: - - ASP.NET status: code: 200 message: OK diff --git 
a/src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory_managedPrivateEndpoint.yaml b/src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory_managedPrivateEndpoint.yaml index f6596ff4246..b0925977d1c 100644 --- a/src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory_managedPrivateEndpoint.yaml +++ b/src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory_managedPrivateEndpoint.yaml @@ -13,7 +13,7 @@ interactions: ParameterSetName: - --query -o User-Agent: - - AZURECLI/2.27.0 azsdk-python-azure-mgmt-resource/18.0.0 Python/3.7.9 (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/locations?api-version=2019-11-01 response: @@ -33,9 +33,7 @@ interactions: Central\",\"regionalDisplayName\":\"(Europe) Sweden Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"17.14127\",\"latitude\":\"60.67488\",\"physicalLocation\":\"G\xE4vle\",\"pairedRegion\":[{\"name\":\"swedensouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedensouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth\",\"name\":\"uksouth\",\"displayName\":\"UK South\",\"regionalDisplayName\":\"(Europe) UK South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"-0.799\",\"latitude\":\"50.941\",\"physicalLocation\":\"London\",\"pairedRegion\":[{\"name\":\"ukwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\",\"name\":\"westeurope\",\"displayName\":\"West Europe\",\"regionalDisplayName\":\"(Europe) West Europe\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"4.9\",\"latitude\":\"52.3667\",\"physicalLocation\":\"Netherlands\",\"pairedRegion\":[{\"name\":\"northeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus\",\"name\":\"centralus\",\"displayName\":\"Central - US\",\"regionalDisplayName\":\"(US) Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-93.6208\",\"latitude\":\"41.5908\",\"physicalLocation\":\"Iowa\",\"pairedRegion\":[{\"name\":\"eastus2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus\",\"name\":\"northcentralus\",\"displayName\":\"North - Central US\",\"regionalDisplayName\":\"(US) North Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-87.6278\",\"latitude\":\"41.8819\",\"physicalLocation\":\"Illinois\",\"pairedRegion\":[{\"name\":\"southcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus\",\"name\":\"westus\",\"displayName\":\"West - US\",\"regionalDisplayName\":\"(US) West 
US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-122.417\",\"latitude\":\"37.783\",\"physicalLocation\":\"California\",\"pairedRegion\":[{\"name\":\"eastus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth\",\"name\":\"southafricanorth\",\"displayName\":\"South + US\",\"regionalDisplayName\":\"(US) Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-93.6208\",\"latitude\":\"41.5908\",\"physicalLocation\":\"Iowa\",\"pairedRegion\":[{\"name\":\"eastus2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth\",\"name\":\"southafricanorth\",\"displayName\":\"South Africa North\",\"regionalDisplayName\":\"(Africa) South Africa North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Africa\",\"longitude\":\"28.218370\",\"latitude\":\"-25.731340\",\"physicalLocation\":\"Johannesburg\",\"pairedRegion\":[{\"name\":\"southafricawest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia\",\"name\":\"centralindia\",\"displayName\":\"Central India\",\"regionalDisplayName\":\"(Asia Pacific) Central India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia Pacific\",\"longitude\":\"73.9197\",\"latitude\":\"18.5822\",\"physicalLocation\":\"Pune\",\"pairedRegion\":[{\"name\":\"southindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia\",\"name\":\"eastasia\",\"displayName\":\"East @@ -44,9 +42,7 @@ interactions: Kong\",\"pairedRegion\":[{\"name\":\"southeastasia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast\",\"name\":\"japaneast\",\"displayName\":\"Japan East\",\"regionalDisplayName\":\"(Asia Pacific) Japan East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia Pacific\",\"longitude\":\"139.77\",\"latitude\":\"35.68\",\"physicalLocation\":\"Tokyo, - Saitama\",\"pairedRegion\":[{\"name\":\"japanwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest\",\"name\":\"jioindiawest\",\"displayName\":\"Jio - India West\",\"regionalDisplayName\":\"(Asia Pacific) Jio India West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia - Pacific\",\"longitude\":\"70.05773\",\"latitude\":\"22.470701\",\"physicalLocation\":\"Jamnagar\",\"pairedRegion\":[{\"name\":\"jioindiacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral\",\"name\":\"koreacentral\",\"displayName\":\"Korea + 
Saitama\",\"pairedRegion\":[{\"name\":\"japanwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral\",\"name\":\"koreacentral\",\"displayName\":\"Korea Central\",\"regionalDisplayName\":\"(Asia Pacific) Korea Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia Pacific\",\"longitude\":\"126.9780\",\"latitude\":\"37.5665\",\"physicalLocation\":\"Seoul\",\"pairedRegion\":[{\"name\":\"koreasouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral\",\"name\":\"canadacentral\",\"displayName\":\"Canada Central\",\"regionalDisplayName\":\"(Canada) Canada Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Canada\",\"longitude\":\"-79.383\",\"latitude\":\"43.653\",\"physicalLocation\":\"Toronto\",\"pairedRegion\":[{\"name\":\"canadaeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral\",\"name\":\"francecentral\",\"displayName\":\"France @@ -58,7 +54,10 @@ interactions: East\",\"longitude\":\"55.316666\",\"latitude\":\"25.266666\",\"physicalLocation\":\"Dubai\",\"pairedRegion\":[{\"name\":\"uaecentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth\",\"name\":\"brazilsouth\",\"displayName\":\"Brazil South\",\"regionalDisplayName\":\"(South America) Brazil South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"South America\",\"longitude\":\"-46.633\",\"latitude\":\"-23.55\",\"physicalLocation\":\"Sao - Paulo State\",\"pairedRegion\":[{\"name\":\"southcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage\",\"name\":\"centralusstage\",\"displayName\":\"Central + Paulo State\",\"pairedRegion\":[{\"name\":\"southcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap\",\"name\":\"eastus2euap\",\"displayName\":\"East + US 2 EUAP\",\"regionalDisplayName\":\"(US) East US 2 EUAP\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-78.3889\",\"latitude\":\"36.6681\",\"pairedRegion\":[{\"name\":\"centraluseuap\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/qatarcentral\",\"name\":\"qatarcentral\",\"displayName\":\"Qatar + Central\",\"regionalDisplayName\":\"(Middle East) Qatar Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Middle + 
East\",\"longitude\":\"51.439327\",\"latitude\":\"25.551462\",\"physicalLocation\":\"Doha\",\"pairedRegion\":[{\"name\":\"westeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage\",\"name\":\"centralusstage\",\"displayName\":\"Central US (Stage)\",\"regionalDisplayName\":\"(US) Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstage\",\"name\":\"eastusstage\",\"displayName\":\"East US (Stage)\",\"regionalDisplayName\":\"(US) East US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2stage\",\"name\":\"eastus2stage\",\"displayName\":\"East US 2 (Stage)\",\"regionalDisplayName\":\"(US) East US 2 (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralusstage\",\"name\":\"northcentralusstage\",\"displayName\":\"North @@ -66,15 +65,23 @@ interactions: Central US (Stage)\",\"regionalDisplayName\":\"(US) South Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westusstage\",\"name\":\"westusstage\",\"displayName\":\"West US (Stage)\",\"regionalDisplayName\":\"(US) West US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2stage\",\"name\":\"westus2stage\",\"displayName\":\"West US 2 (Stage)\",\"regionalDisplayName\":\"(US) West US 2 (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asia\",\"name\":\"asia\",\"displayName\":\"Asia\",\"regionalDisplayName\":\"Asia\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asiapacific\",\"name\":\"asiapacific\",\"displayName\":\"Asia - Pacific\",\"regionalDisplayName\":\"Asia 
Pacific\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia\",\"name\":\"australia\",\"displayName\":\"Australia\",\"regionalDisplayName\":\"Australia\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil\",\"name\":\"brazil\",\"displayName\":\"Brazil\",\"regionalDisplayName\":\"Brazil\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada\",\"name\":\"canada\",\"displayName\":\"Canada\",\"regionalDisplayName\":\"Canada\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe\",\"name\":\"europe\",\"displayName\":\"Europe\",\"regionalDisplayName\":\"Europe\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global\",\"name\":\"global\",\"displayName\":\"Global\",\"regionalDisplayName\":\"Global\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india\",\"name\":\"india\",\"displayName\":\"India\",\"regionalDisplayName\":\"India\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan\",\"name\":\"japan\",\"displayName\":\"Japan\",\"regionalDisplayName\":\"Japan\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk\",\"name\":\"uk\",\"displayName\":\"United + Pacific\",\"regionalDisplayName\":\"Asia 
Pacific\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia\",\"name\":\"australia\",\"displayName\":\"Australia\",\"regionalDisplayName\":\"Australia\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil\",\"name\":\"brazil\",\"displayName\":\"Brazil\",\"regionalDisplayName\":\"Brazil\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada\",\"name\":\"canada\",\"displayName\":\"Canada\",\"regionalDisplayName\":\"Canada\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe\",\"name\":\"europe\",\"displayName\":\"Europe\",\"regionalDisplayName\":\"Europe\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/france\",\"name\":\"france\",\"displayName\":\"France\",\"regionalDisplayName\":\"France\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germany\",\"name\":\"germany\",\"displayName\":\"Germany\",\"regionalDisplayName\":\"Germany\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global\",\"name\":\"global\",\"displayName\":\"Global\",\"regionalDisplayName\":\"Global\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india\",\"name\":\"india\",\"displayName\":\"India\",\"regionalDisplayName\":\"India\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan\",\"name\":\"japan\",\"displayName\":\"Japan\",\"regionalDisplayName\":\"Japan\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/korea\",\"name\":\"korea\",\"displayName\":\"Korea\",\"regionalDisplayName\":\"Korea\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norway\",\"name\":\"norway\",\"displayName\":\"Norway\",\"regionalDisplayName\":\"Norway\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/singapore\",\"name\":\"singapore\",\"displayName\":\"Singapore\",\"regionalDisplayName\":\"Singapore\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafrica\",\"name\":\"southafrica\",\"displayName\":\"South + Africa\",\"regionalDisplayName\":\"South Africa\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerland\",\"name\":\"switzerland\",\"displayName\":\"Switzerland\",\"regionalDisplayName\":\"Switzerland\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uae\",\"name\":\"uae\",\"displayName\":\"United + 
Arab Emirates\",\"regionalDisplayName\":\"United Arab Emirates\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk\",\"name\":\"uk\",\"displayName\":\"United Kingdom\",\"regionalDisplayName\":\"United Kingdom\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstates\",\"name\":\"unitedstates\",\"displayName\":\"United - States\",\"regionalDisplayName\":\"United States\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage\",\"name\":\"eastasiastage\",\"displayName\":\"East + States\",\"regionalDisplayName\":\"United States\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstateseuap\",\"name\":\"unitedstateseuap\",\"displayName\":\"United + States EUAP\",\"regionalDisplayName\":\"United States EUAP\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage\",\"name\":\"eastasiastage\",\"displayName\":\"East Asia (Stage)\",\"regionalDisplayName\":\"(Asia Pacific) East Asia (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia Pacific\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasiastage\",\"name\":\"southeastasiastage\",\"displayName\":\"Southeast Asia (Stage)\",\"regionalDisplayName\":\"(Asia Pacific) Southeast Asia (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia - Pacific\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap\",\"name\":\"centraluseuap\",\"displayName\":\"Central - US EUAP\",\"regionalDisplayName\":\"(US) Central US EUAP\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-93.6208\",\"latitude\":\"41.5908\",\"pairedRegion\":[{\"name\":\"eastus2euap\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap\",\"name\":\"eastus2euap\",\"displayName\":\"East - US 2 EUAP\",\"regionalDisplayName\":\"(US) East US 2 EUAP\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-78.3889\",\"latitude\":\"36.6681\",\"pairedRegion\":[{\"name\":\"centraluseuap\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus\",\"name\":\"westcentralus\",\"displayName\":\"West + Pacific\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstg\",\"name\":\"eastusstg\",\"displayName\":\"East + US STG\",\"regionalDisplayName\":\"(US) East US 
STG\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-79.8164\",\"latitude\":\"37.3719\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\":\"southcentralusstg\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstg\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstg\",\"name\":\"southcentralusstg\",\"displayName\":\"South + Central US STG\",\"regionalDisplayName\":\"(US) South Central US STG\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-98.5\",\"latitude\":\"29.4167\",\"physicalLocation\":\"Texas\",\"pairedRegion\":[{\"name\":\"eastusstg\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstg\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus\",\"name\":\"northcentralus\",\"displayName\":\"North + Central US\",\"regionalDisplayName\":\"(US) North Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-87.6278\",\"latitude\":\"41.8819\",\"physicalLocation\":\"Illinois\",\"pairedRegion\":[{\"name\":\"southcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus\",\"name\":\"westus\",\"displayName\":\"West + US\",\"regionalDisplayName\":\"(US) West US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-122.417\",\"latitude\":\"37.783\",\"physicalLocation\":\"California\",\"pairedRegion\":[{\"name\":\"eastus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest\",\"name\":\"jioindiawest\",\"displayName\":\"Jio + India West\",\"regionalDisplayName\":\"(Asia Pacific) Jio India West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"70.05773\",\"latitude\":\"22.470701\",\"physicalLocation\":\"Jamnagar\",\"pairedRegion\":[{\"name\":\"jioindiacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap\",\"name\":\"centraluseuap\",\"displayName\":\"Central + US EUAP\",\"regionalDisplayName\":\"(US) Central US EUAP\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-93.6208\",\"latitude\":\"41.5908\",\"pairedRegion\":[{\"name\":\"eastus2euap\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus\",\"name\":\"westcentralus\",\"displayName\":\"West Central US\",\"regionalDisplayName\":\"(US) West Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-110.234\",\"latitude\":\"40.890\",\"physicalLocation\":\"Wyoming\",\"pairedRegion\":[{\"name\":\"westus2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest\",\"name\":\"southafricawest\",\"displayName\":\"South Africa 
West\",\"regionalDisplayName\":\"(Africa) South Africa West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Africa\",\"longitude\":\"18.843266\",\"latitude\":\"-34.075691\",\"physicalLocation\":\"Cape Town\",\"pairedRegion\":[{\"name\":\"southafricanorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral\",\"name\":\"australiacentral\",\"displayName\":\"Australia @@ -97,27 +104,23 @@ interactions: East\",\"regionalDisplayName\":\"(Canada) Canada East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Canada\",\"longitude\":\"-71.217\",\"latitude\":\"46.817\",\"physicalLocation\":\"Quebec\",\"pairedRegion\":[{\"name\":\"canadacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth\",\"name\":\"francesouth\",\"displayName\":\"France South\",\"regionalDisplayName\":\"(Europe) France South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"2.1972\",\"latitude\":\"43.8345\",\"physicalLocation\":\"Marseille\",\"pairedRegion\":[{\"name\":\"francecentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth\",\"name\":\"germanynorth\",\"displayName\":\"Germany North\",\"regionalDisplayName\":\"(Europe) Germany North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.806422\",\"latitude\":\"53.073635\",\"physicalLocation\":\"Berlin\",\"pairedRegion\":[{\"name\":\"germanywestcentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest\",\"name\":\"norwaywest\",\"displayName\":\"Norway - West\",\"regionalDisplayName\":\"(Europe) Norway West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"5.733107\",\"latitude\":\"58.969975\",\"physicalLocation\":\"Norway\",\"pairedRegion\":[{\"name\":\"norwayeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedensouth\",\"name\":\"swedensouth\",\"displayName\":\"Sweden - South\",\"regionalDisplayName\":\"(Europe) Sweden South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"13.0007\",\"latitude\":\"55.6059\",\"physicalLocation\":\"Malmo\",\"pairedRegion\":[{\"name\":\"swedencentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedencentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest\",\"name\":\"switzerlandwest\",\"displayName\":\"Switzerland + West\",\"regionalDisplayName\":\"(Europe) Norway 
West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"5.733107\",\"latitude\":\"58.969975\",\"physicalLocation\":\"Norway\",\"pairedRegion\":[{\"name\":\"norwayeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest\",\"name\":\"switzerlandwest\",\"displayName\":\"Switzerland West\",\"regionalDisplayName\":\"(Europe) Switzerland West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"6.143158\",\"latitude\":\"46.204391\",\"physicalLocation\":\"Geneva\",\"pairedRegion\":[{\"name\":\"switzerlandnorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest\",\"name\":\"ukwest\",\"displayName\":\"UK West\",\"regionalDisplayName\":\"(Europe) UK West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"-3.084\",\"latitude\":\"53.427\",\"physicalLocation\":\"Cardiff\",\"pairedRegion\":[{\"name\":\"uksouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral\",\"name\":\"uaecentral\",\"displayName\":\"UAE Central\",\"regionalDisplayName\":\"(Middle East) UAE Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Middle East\",\"longitude\":\"54.366669\",\"latitude\":\"24.466667\",\"physicalLocation\":\"Abu Dhabi\",\"pairedRegion\":[{\"name\":\"uaenorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsoutheast\",\"name\":\"brazilsoutheast\",\"displayName\":\"Brazil Southeast\",\"regionalDisplayName\":\"(South America) Brazil Southeast\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"South - America\",\"longitude\":\"-43.2075\",\"latitude\":\"-22.90278\",\"physicalLocation\":\"Rio\",\"pairedRegion\":[{\"name\":\"brazilsouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv\",\"name\":\"eastusslv\",\"displayName\":\"East - US SLV\",\"regionalDisplayName\":\"(South America) East US SLV\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"South - America\",\"longitude\":\"-43.2075\",\"latitude\":\"-22.90278\",\"physicalLocation\":\"Silverstone\",\"pairedRegion\":[{\"name\":\"eastusslv\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/qatarcentral\",\"name\":\"qatarcentral\",\"displayName\":\"Qatar - Central\",\"regionalDisplayName\":\"(Europe) Qatar Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"51.439327\",\"latitude\":\"25.551462\",\"physicalLocation\":\"Doha\",\"pairedRegion\":[{\"name\":\"westeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\"}]}}]}" + 
America\",\"longitude\":\"-43.2075\",\"latitude\":\"-22.90278\",\"physicalLocation\":\"Rio\",\"pairedRegion\":[{\"name\":\"brazilsouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth\"}]}}]}" headers: cache-control: - no-cache content-length: - - '28399' + - '30402' content-type: - application/json; charset=utf-8 date: - - Mon, 16 Aug 2021 07:27:17 GMT + - Tue, 11 Oct 2022 20:43:31 GMT expires: - '-1' pragma: @@ -150,27 +153,33 @@ interactions: ParameterSetName: - --location --name --resource-group User-Agent: - - AZURECLI/2.27.0 azsdk-python-mgmt-datafactory/1.0.0 Python/3.7.9 (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 response: body: - string: '{"name":"exampleFa000001","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/examplefaag7y7rdu5","type":"Microsoft.DataFactory/factories","properties":{"provisioningState":"Succeeded","createTime":"2021-08-16T07:27:31.2087066Z","version":"2018-06-01","encryption":{}},"eTag":"\"08004aba-0000-0100-0000-611a13630000\"","location":"eastus","identity":{"type":"SystemAssigned","principalId":"e059b2b7-5d2c-44f5-81c2-3662b3cbdeb4","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},"tags":{}}' + string: "{\n \"name\": \"exampleFa000001\",\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/examplefawcy3nwo4q\",\n + \ \"type\": \"Microsoft.DataFactory/factories\",\n \"properties\": {\n \"provisioningState\": + \"Succeeded\",\n \"createTime\": \"2022-10-11T20:43:36.2361508Z\",\n \"version\": + \"2018-06-01\",\n \"encryption\": {}\n },\n \"eTag\": \"\\\"0d009bb5-0000-0100-0000-6345d5780000\\\"\",\n + \ \"location\": \"eastus\",\n \"identity\": {\n \"type\": \"SystemAssigned\",\n + \ \"principalId\": \"42bf85ee-d628-4b15-8fb1-fe990570596c\",\n \"tenantId\": + \"16b3c013-d300-468d-ac64-7eda0820b6d3\"\n },\n \"tags\": {}\n}" headers: cache-control: - no-cache content-length: - - '631' + - '647' content-type: - application/json; charset=utf-8 date: - - Mon, 16 Aug 2021 07:27:33 GMT + - Tue, 11 Oct 2022 20:43:36 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -181,8 +190,6 @@ interactions: - nosniff x-ms-ratelimit-remaining-subscription-writes: - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -204,27 +211,31 @@ interactions: ParameterSetName: - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.27.0 azsdk-python-mgmt-datafactory/1.0.0 Python/3.7.9 (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedVirtualNetworks/exampleManagedVi000002?api-version=2018-06-01 response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedvirtualnetworks/exampleManagedVi000002","name":"exampleManagedVi000002","type":"Microsoft.DataFactory/factories/managedvirtualnetworks","properties":{"vNetId":"3eb3a45b-f6c4-4ab8-887d-88eaf048162b","preventDataExfiltration":false,"alias":"examplefaag7y7rdu5"},"etag":"07001564-0000-0100-0000-611a13680000"}' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedvirtualnetworks/exampleManagedVi000002\",\n + \ \"name\": \"exampleManagedVi000002\",\n \"type\": \"Microsoft.DataFactory/factories/managedvirtualnetworks\",\n + \ \"properties\": {\n \"vNetId\": \"4b0c0bce-c230-4460-afdf-267bf6cc87fa\",\n + \ \"preventDataExfiltration\": false,\n \"alias\": \"examplefawcy3nwo4q\"\n + \ },\n \"etag\": \"06007aa3-0000-0100-0000-6345d57a0000\"\n}" headers: cache-control: - no-cache content-length: - - '544' + - '501' content-type: - application/json; charset=utf-8 date: - - Mon, 16 Aug 2021 07:27:36 GMT + - Tue, 11 Oct 2022 20:43:37 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -235,8 +246,6 @@ interactions: - nosniff x-ms-ratelimit-remaining-subscription-writes: - '1199' - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -254,27 +263,32 @@ interactions: ParameterSetName: - --factory-name --resource-group User-Agent: - - AZURECLI/2.27.0 azsdk-python-mgmt-datafactory/1.0.0 Python/3.7.9 (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedVirtualNetworks?api-version=2018-06-01 response: body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedvirtualnetworks/exampleManagedVi000002","name":"exampleManagedVi000002","type":"Microsoft.DataFactory/factories/managedvirtualnetworks","properties":{"vNetId":"3eb3a45b-f6c4-4ab8-887d-88eaf048162b","preventDataExfiltration":false,"alias":"examplefaag7y7rdu5"},"etag":"07001564-0000-0100-0000-611a13680000"}]}' + string: "{\n \"value\": [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedvirtualnetworks/exampleManagedVi000002\",\n + \ \"name\": \"exampleManagedVi000002\",\n \"type\": \"Microsoft.DataFactory/factories/managedvirtualnetworks\",\n + \ \"properties\": {\n \"vNetId\": \"4b0c0bce-c230-4460-afdf-267bf6cc87fa\",\n + \ \"preventDataExfiltration\": false,\n \"alias\": \"examplefawcy3nwo4q\"\n + \ },\n \"etag\": \"06007aa3-0000-0100-0000-6345d57a0000\"\n }\n + \ ]\n}" headers: cache-control: - no-cache content-length: - - '556' + - '566' content-type: - application/json; charset=utf-8 date: - - Mon, 16 Aug 2021 07:27:37 GMT + - Tue, 11 Oct 2022 20:43:37 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -283,8 +297,6 @@ interactions: - Accept-Encoding 
x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -302,27 +314,31 @@ interactions: ParameterSetName: - --factory-name --name --resource-group User-Agent: - - AZURECLI/2.27.0 azsdk-python-mgmt-datafactory/1.0.0 Python/3.7.9 (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedVirtualNetworks/exampleManagedVi000002?api-version=2018-06-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedvirtualnetworks/exampleManagedVi000002","name":"exampleManagedVi000002","type":"Microsoft.DataFactory/factories/managedvirtualnetworks","properties":{"vNetId":"3eb3a45b-f6c4-4ab8-887d-88eaf048162b","preventDataExfiltration":false,"alias":"examplefaag7y7rdu5"},"etag":"07001564-0000-0100-0000-611a13680000"}' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedvirtualnetworks/exampleManagedVi000002\",\n + \ \"name\": \"exampleManagedVi000002\",\n \"type\": \"Microsoft.DataFactory/factories/managedvirtualnetworks\",\n + \ \"properties\": {\n \"vNetId\": \"4b0c0bce-c230-4460-afdf-267bf6cc87fa\",\n + \ \"preventDataExfiltration\": false,\n \"alias\": \"examplefawcy3nwo4q\"\n + \ },\n \"etag\": \"06007aa3-0000-0100-0000-6345d57a0000\"\n}" headers: cache-control: - no-cache content-length: - - '544' + - '501' content-type: - application/json; charset=utf-8 date: - - Mon, 16 Aug 2021 07:27:38 GMT + - Tue, 11 Oct 2022 20:43:38 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -331,8 +347,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -348,34 +362,41 @@ interactions: Connection: - keep-alive Content-Length: - - '275' + - '202' Content-Type: - application/json ParameterSetName: - --factory-name --group-id --private-link-resource-id --name --managed-virtual-network-name --resource-group User-Agent: - - AZURECLI/2.27.0 azsdk-python-mgmt-datafactory/1.0.0 Python/3.7.9 (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedVirtualNetworks/exampleManagedVi000002/managedPrivateEndpoints/exampleManagedPr000003?api-version=2018-06-01 response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedvirtualnetworks/exampleManagedVi000002/managedprivateendpoints/exampleManagedPr000003","name":"exampleManagedPr000003","type":"Microsoft.DataFactory/factories/managedvirtualnetworks/managedprivateendpoints","properties":{"provisioningState":"Provisioning","privateLinkResourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.Storage/storageAccounts/clitest000005","groupId":"blob","fqdns":[],"connectionState":{"status":"","description":"","actionsRequired":""}}}' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedvirtualnetworks/exampleManagedVi000002/managedprivateendpoints/exampleManagedPr000003\",\n + \ \"name\": \"exampleManagedPr000003\",\n \"type\": \"Microsoft.DataFactory/factories/managedvirtualnetworks/managedprivateendpoints\",\n + \ \"properties\": {\n \"error\": null,\n \"provisioningState\": \"Provisioning\",\n + \ \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/vnet-87108e66-eastus-116-rg/providers/Microsoft.Network/privateEndpoints/examplefawcy3nwo4q.exampleManagedPr000003\",\n + \ \"privateLinkResourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.Storage/storageAccounts/clitest000005\",\n + \ \"groupId\": \"blob\",\n \"fqdns\": [],\n \"ipAddress\": null,\n + \ \"linkId\": null,\n \"connectionState\": {\n \"status\": \"\",\n + \ \"description\": \"\",\n \"actionsRequired\": \"\"\n }\n }\n}" headers: cache-control: - no-cache content-length: - - '843' + - '1017' content-type: - application/json; charset=utf-8 date: - - Mon, 16 Aug 2021 07:27:39 GMT + - Tue, 11 Oct 2022 20:43:39 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -385,9 +406,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - - '1198' - x-powered-by: - - ASP.NET + - '1199' status: code: 200 message: OK @@ -405,27 +424,35 @@ interactions: ParameterSetName: - --factory-name --managed-virtual-network-name --resource-group User-Agent: - - AZURECLI/2.27.0 azsdk-python-mgmt-datafactory/1.0.0 Python/3.7.9 (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedVirtualNetworks/exampleManagedVi000002/managedPrivateEndpoints?api-version=2018-06-01 response: body: - string: 
'{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedvirtualnetworks/exampleManagedVi000002/managedprivateendpoints/exampleManagedPr000003","name":"exampleManagedPr000003","type":"Microsoft.DataFactory/factories/managedvirtualnetworks/managedprivateendpoints","properties":{"provisioningState":"Provisioning","privateLinkResourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.Storage/storageAccounts/clitest000005","groupId":"blob","fqdns":[],"connectionState":{"status":"","description":"","actionsRequired":""}}}]}' + string: "{\n \"value\": [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedvirtualnetworks/exampleManagedVi000002/managedprivateendpoints/exampleManagedPr000003\",\n + \ \"name\": \"exampleManagedPr000003\",\n \"type\": \"Microsoft.DataFactory/factories/managedvirtualnetworks/managedprivateendpoints\",\n + \ \"properties\": {\n \"error\": null,\n \"provisioningState\": + \"Provisioning\",\n \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/vnet-87108e66-eastus-116-rg/providers/Microsoft.Network/privateEndpoints/examplefawcy3nwo4q.exampleManagedPr000003\",\n + \ \"privateLinkResourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.Storage/storageAccounts/clitest000005\",\n + \ \"groupId\": \"blob\",\n \"fqdns\": [],\n \"ipAddress\": + null,\n \"linkId\": null,\n \"connectionState\": {\n \"status\": + \"\",\n \"description\": \"\",\n \"actionsRequired\": \"\"\n + \ }\n }\n }\n ]\n}" headers: cache-control: - no-cache content-length: - - '855' + - '1118' content-type: - application/json; charset=utf-8 date: - - Mon, 16 Aug 2021 07:27:41 GMT + - Tue, 11 Oct 2022 20:43:39 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -434,8 +461,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -453,27 +478,34 @@ interactions: ParameterSetName: - --factory-name --name --managed-virtual-network-name --resource-group User-Agent: - - AZURECLI/2.27.0 azsdk-python-mgmt-datafactory/1.0.0 Python/3.7.9 (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedVirtualNetworks/exampleManagedVi000002/managedPrivateEndpoints/exampleManagedPr000003?api-version=2018-06-01 response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedvirtualnetworks/exampleManagedVi000002/managedprivateendpoints/exampleManagedPr000003","name":"exampleManagedPr000003","type":"Microsoft.DataFactory/factories/managedvirtualnetworks/managedprivateendpoints","properties":{"provisioningState":"Provisioning","privateLinkResourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.Storage/storageAccounts/clitest000005","groupId":"blob","fqdns":[],"connectionState":{"status":"","description":"","actionsRequired":""}}}' + string: "{\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedvirtualnetworks/exampleManagedVi000002/managedprivateendpoints/exampleManagedPr000003\",\n + \ \"name\": \"exampleManagedPr000003\",\n \"type\": \"Microsoft.DataFactory/factories/managedvirtualnetworks/managedprivateendpoints\",\n + \ \"properties\": {\n \"error\": null,\n \"provisioningState\": \"Provisioning\",\n + \ \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/vnet-87108e66-eastus-116-rg/providers/Microsoft.Network/privateEndpoints/examplefawcy3nwo4q.exampleManagedPr000003\",\n + \ \"privateLinkResourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.Storage/storageAccounts/clitest000005\",\n + \ \"groupId\": \"blob\",\n \"fqdns\": [],\n \"ipAddress\": null,\n + \ \"linkId\": null,\n \"connectionState\": {\n \"status\": \"\",\n + \ \"description\": \"\",\n \"actionsRequired\": \"\"\n }\n }\n}" headers: cache-control: - no-cache content-length: - - '843' + - '1017' content-type: - application/json; charset=utf-8 date: - - Mon, 16 Aug 2021 07:27:42 GMT + - Tue, 11 Oct 2022 20:43:40 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -482,8 +514,6 @@ interactions: - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET status: code: 200 message: OK @@ -503,7 +533,7 @@ interactions: ParameterSetName: - -y --name --resource-group User-Agent: - - AZURECLI/2.27.0 azsdk-python-mgmt-datafactory/1.0.0 Python/3.7.9 (Windows-10-10.0.19041-SP0) + - AZURECLI/2.40.0 azsdk-python-mgmt-datafactory/2.8.0 Python/3.10.6 (macOS-12.6-x86_64-i386-64bit) method: DELETE uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 response: @@ -515,21 +545,19 @@ interactions: content-length: - '0' date: - - Mon, 16 Aug 2021 07:27:48 GMT + - Tue, 11 Oct 2022 20:43:44 GMT expires: - '-1' pragma: - no-cache server: - - Microsoft-IIS/10.0 + - Kestrel strict-transport-security: - max-age=31536000; includeSubDomains x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-deletes: - '14999' - x-powered-by: - - ASP.NET status: code: 200 message: OK diff --git a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py index 6bfdf3885a3..f5a027e97dd 100644 --- a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py +++ 
b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py @@ -9,9 +9,11 @@ # -------------------------------------------------------------------------- import os +from sys import prefix from azure.cli.testsdk import ScenarioTest from azure.cli.testsdk import ResourceGroupPreparer from azure.cli.testsdk import StorageAccountPreparer + from .example_steps import step_create from .example_steps import step_update from .example_steps import step_linked_service_create @@ -70,14 +72,10 @@ from .example_steps import step_managed_private_endpoint_create from .example_steps import step_managed_private_endpoint_list from .example_steps import step_managed_private_endpoint_show -from .. import ( - try_manual, - raise_if, - calc_coverage -) +from .. import try_manual, raise_if, calc_coverage -TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..')) +TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), "..")) # Env setup_main @@ -165,26 +163,49 @@ def call_main(test): class DatafactorymainTest(ScenarioTest): def __init__(self, *args, **kwargs): super(DatafactorymainTest, self).__init__(*args, **kwargs) - self.kwargs.update({ - 'subscription_id': self.get_subscription_id() - }) - - self.kwargs.update({ - 'myFactory': self.create_random_name(prefix='exampleFactoryName'[:9], length=18), - 'myIntegrationRuntime': self.create_random_name(prefix='exampleIntegrationRuntime'[:12], length=25), - 'myIntegrationRuntime2': 'exampleManagedIntegrationRuntime', - 'myLinkedService': self.create_random_name(prefix='exampleLinkedService'[:10], length=20), - 'myDataset': self.create_random_name(prefix='exampleDataset'[:7], length=14), - 'myPipeline': self.create_random_name(prefix='examplePipeline'[:7], length=15), - 'myTrigger': self.create_random_name(prefix='exampleTrigger'[:7], length=14), - }) - - @ResourceGroupPreparer(name_prefix='clitestdatafactory_exampleResourceGroup'[:7], key='rg', parameter_name='rg') + self.kwargs.update({"subscription_id": self.get_subscription_id()}) + + self.kwargs.update( + { + "myFactory": self.create_random_name( + prefix="exampleFactoryName"[:9], length=18 + ), + "myIntegrationRuntime": self.create_random_name( + prefix="exampleIntegrationRuntime"[:12], length=25 + ), + "myIntegrationRuntime2": "exampleManagedIntegrationRuntime", + "myLinkedService": self.create_random_name( + prefix="exampleLinkedService"[:10], length=20 + ), + "myDataset": self.create_random_name( + prefix="exampleDataset"[:7], length=14 + ), + "myPipeline": self.create_random_name( + prefix="examplePipeline"[:7], length=15 + ), + "myTrigger": self.create_random_name( + prefix="exampleTrigger"[:7], length=14 + ), + "myMappingDataFlow": self.create_random_name( + prefix="exampleDataFlow"[:7], length=14 + ), + "myFlowletDataFlow": self.create_random_name( + prefix="exampleDataFlow"[:7], length=14 + ), + } + ) + + @ResourceGroupPreparer( + name_prefix="clitestdatafactory_exampleResourceGroup"[:7], + key="rg", + parameter_name="rg", + ) def test_datafactory_main(self, rg): call_main(self) calc_coverage(__file__) raise_if() + # Env setup_managedprivateendpoint @try_manual def setup_managedprivateendpoint(test): @@ -217,21 +238,32 @@ def call_managedprivateendpoint(test): class DatafactorymanagedPrivateEndpointTest(ScenarioTest): def __init__(self, *args, **kwargs): super(DatafactorymanagedPrivateEndpointTest, self).__init__(*args, **kwargs) - self.kwargs.update({ - 'subscription_id': self.get_subscription_id() - }) - - self.kwargs.update({ - 'myFactory': 
self.create_random_name(prefix='exampleFactoryName'[:9], length=18), - 'myManagedVirtualNetwork': self.create_random_name(prefix='exampleManagedVirtualNetworkName'[:16], - length=32), - 'myManagedPrivateEndpoint': self.create_random_name(prefix='exampleManagedPrivateEndpointName'[:16], - length=33), - }) - - @ResourceGroupPreparer(name_prefix='clitestdatafactory_exampleResourceGroup'[:7], key='rg', parameter_name='rg') - @StorageAccountPreparer(name_prefix='clitestdatafactory_exampleBlobStorage'[:7], key='sa', - resource_group_parameter_name='rg') + self.kwargs.update({"subscription_id": self.get_subscription_id()}) + + self.kwargs.update( + { + "myFactory": self.create_random_name( + prefix="exampleFactoryName"[:9], length=18 + ), + "myManagedVirtualNetwork": self.create_random_name( + prefix="exampleManagedVirtualNetworkName"[:16], length=32 + ), + "myManagedPrivateEndpoint": self.create_random_name( + prefix="exampleManagedPrivateEndpointName"[:16], length=33 + ), + } + ) + + @ResourceGroupPreparer( + name_prefix="clitestdatafactory_exampleResourceGroup"[:7], + key="rg", + parameter_name="rg", + ) + @StorageAccountPreparer( + name_prefix="clitestdatafaƒfsctory_exampleBlobStorage"[:7], + key="sa", + resource_group_parameter_name="rg", + ) def test_datafactory_managedPrivateEndpoint(self, rg): call_managedprivateendpoint(self) calc_coverage(__file__) diff --git a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md index ca7eec23d45..cbf45d849be 100644 --- a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md +++ b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md @@ -1,10 +1,45 @@ |Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt| -|step_create|successed||||2021-08-16 07:27:16.493725|2021-08-16 07:27:34.207925| -|step_managed_virtual_network_create|successed||||2021-08-16 07:27:34.207925|2021-08-16 07:27:36.767592| -|step_managed_virtual_network_list|successed||||2021-08-16 07:27:36.767592|2021-08-16 07:27:37.485625| -|step_managed_virtual_network_show|successed||||2021-08-16 07:27:37.486610|2021-08-16 07:27:38.936934| -|step_managed_private_endpoint_create|successed||||2021-08-16 07:27:38.936934|2021-08-16 07:27:40.441373| -|step_managed_private_endpoint_list|successed||||2021-08-16 07:27:40.441373|2021-08-16 07:27:41.849929| -|step_managed_private_endpoint_show|successed||||2021-08-16 07:27:41.849929|2021-08-16 07:27:43.250730| -|step_delete|successed||||2021-08-16 07:27:43.250730|2021-08-16 07:27:48.809653| -Coverage: 8/8 +|step_create|successed||||2022-10-11 20:58:58.870214|2022-10-11 20:59:04.881006| +|step_update|successed||||2022-10-11 20:51:43.633502|2022-10-11 20:51:45.064990| +|step_linked_service_create|successed||||2022-10-11 20:51:45.065259|2022-10-11 20:51:45.709025| +|step_dataset_create|successed||||2022-10-11 20:51:49.975034|2022-10-11 20:51:50.442502| +|step_pipeline_create|successed||||2022-10-11 20:51:51.282427|2022-10-11 20:51:51.788205| +|step_trigger_create|successed||||2022-10-11 20:51:52.549569|2022-10-11 20:51:53.037292| +|step_integration_runtime_self_hosted_create|successed||||2022-10-11 20:51:53.891475|2022-10-11 20:51:54.668545| +|step_integration_runtime_update|successed||||2022-10-11 20:51:54.668794|2022-10-11 20:51:55.224695| +|step_integration_runtime_show|successed||||2022-10-11 20:52:46.986948|2022-10-11 20:52:47.329958| 
+|step_linked_service_show|successed||||2022-10-11 20:51:57.504173|2022-10-11 20:51:57.847822| +|step_pipeline_show|successed||||2022-10-11 20:51:57.848068|2022-10-11 20:51:58.440734| +|step_dataset_show|successed||||2022-10-11 20:51:58.440982|2022-10-11 20:51:58.760415| +|step_trigger_show|successed||||2022-10-11 20:59:17.100039|2022-10-11 20:59:17.526346| +|step_integration_runtime_list|successed||||2022-10-11 20:51:59.300029|2022-10-11 20:51:59.736363| +|step_linked_service_list|successed||||2022-10-11 20:51:59.736616|2022-10-11 20:52:00.119698| +|step_pipeline_list|successed||||2022-10-11 20:52:00.119953|2022-10-11 20:52:00.520802| +|step_trigger_list|successed||||2022-10-11 20:52:00.521048|2022-10-11 20:52:00.897153| +|step_dataset_list|successed||||2022-10-11 20:52:00.897393|2022-10-11 20:52:01.294760| +|step_show|successed||||2022-10-11 20:52:01.295002|2022-10-11 20:52:01.626740| +|step_list2|successed||||2022-10-11 20:52:01.626983|2022-10-11 20:52:01.927502| +|step_list|successed||||2022-10-11 20:52:01.927739|2022-10-11 20:52:02.204280| +|step_integration_runtime_regenerate_auth_key|successed||||2022-10-11 20:52:02.204525|2022-10-11 20:52:02.769945| +|step_integration_runtime_sync_credentials|successed||||2022-10-11 20:52:02.770194|2022-10-11 20:52:03.200032| +|step_integration_runtime_get_monitoring_data|successed||||2022-10-11 20:52:03.200338|2022-10-11 20:52:03.567813| +|step_integration_runtime_list_auth_key|successed||||2022-10-11 20:52:03.568055|2022-10-11 20:52:04.068878| +|step_integration_runtime_remove_link|successed||||2022-10-11 20:52:04.069125|2022-10-11 20:52:04.556767| +|step_integration_runtime_get_status|successed||||2022-10-11 20:52:04.557058|2022-10-11 20:52:04.972822| +|step_trigger_get_event_subscription_status|successed||||2022-10-11 20:52:04.973076|2022-10-11 20:52:05.445863| +|step_trigger_unsubscribe_from_event|successed||||2022-10-11 20:52:05.446104|2022-10-11 20:52:05.924384| +|step_trigger_subscribe_to_event|successed||||2022-10-11 20:52:05.924568|2022-10-11 20:52:06.361782| +|step_trigger_start|successed||||2022-10-11 20:59:14.786636|2022-10-11 20:59:17.099790| +|step_trigger_stop|successed||||2022-10-11 21:04:19.322723|2022-10-11 21:04:20.936735| +|step_get_data_plane_access|successed||||2022-10-11 20:52:14.607217|2022-10-11 20:52:15.001074| +|step_configure_factory_repo|successed||||2022-10-11 20:52:15.366459|2022-10-11 20:52:16.652870| +|step_integration_runtime_delete|successed||||2022-10-11 20:58:54.552469|2022-10-11 20:58:55.149449| +|step_trigger_delete|successed||||2022-10-11 21:04:20.936979|2022-10-11 21:04:23.045543| +|step_pipeline_delete|successed||||2022-10-11 21:04:23.045848|2022-10-11 21:04:24.350932| +|step_dataset_delete|successed||||2022-10-11 20:52:22.039657|2022-10-11 20:52:22.543616| +|step_linked_service_delete|successed||||2022-10-11 20:52:22.543916|2022-10-11 20:52:23.235743| +|step_delete|successed||||2022-10-11 21:04:24.351246|2022-10-11 21:04:28.515132| +|step_integration_runtime_start|successed||||2022-10-11 20:52:47.330206|2022-10-11 20:54:05.151899| +|step_integration_runtime_stop|successed||||2022-10-11 20:54:05.152243|2022-10-11 20:58:54.552144| +|step_activity_run_query_by_pipeline_run|successed||||2022-10-11 20:59:12.188838|2022-10-11 20:59:12.629985| +Coverage: 43/43 diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/__init__.py index 7b501d880bb..041b0591e4d 100644 --- 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/__init__.py @@ -17,7 +17,8 @@ except ImportError: _patch_all = [] from ._patch import patch_sdk as _patch_sdk -__all__ = ['DataFactoryManagementClient'] + +__all__ = ["DataFactoryManagementClient"] __all__.extend([p for p in _patch_all if p not in __all__]) _patch_sdk() diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py index d006bbdc9eb..bee9053930a 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py @@ -25,23 +25,18 @@ class DataFactoryManagementClientConfiguration(Configuration): # pylint: disabl Note that all parameters used to create this instance are saved as instance attributes. - :param credential: Credential needed for the client to connect to Azure. + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials.TokenCredential - :param subscription_id: The subscription identifier. + :param subscription_id: The subscription identifier. Required. :type subscription_id: str :keyword api_version: Api Version. Default value is "2018-06-01". Note that overriding this default value may result in unsupported behavior. :paramtype api_version: str """ - def __init__( - self, - credential: "TokenCredential", - subscription_id: str, - **kwargs: Any - ) -> None: + def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None: super(DataFactoryManagementClientConfiguration, self).__init__(**kwargs) - api_version = kwargs.pop('api_version', "2018-06-01") # type: str + api_version = kwargs.pop("api_version", "2018-06-01") # type: str if credential is None: raise ValueError("Parameter 'credential' must not be None.") @@ -51,23 +46,24 @@ def __init__( self.credential = credential self.subscription_id = subscription_id self.api_version = api_version - self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) - kwargs.setdefault('sdk_moniker', 'mgmt-datafactory/{}'.format(VERSION)) + self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "mgmt-datafactory/{}".format(VERSION)) self._configure(**kwargs) def _configure( - self, - **kwargs # type: Any + self, **kwargs # type: Any ): # type: (...) 
-> None - self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) - self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) - self.authentication_policy = kwargs.get('authentication_policy') + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") if self.credential and not self.authentication_policy: - self.authentication_policy = ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs) + self.authentication_policy = ARMChallengeAuthenticationPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py index ced35099e81..5273db8e582 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py @@ -9,20 +9,42 @@ from copy import deepcopy from typing import Any, TYPE_CHECKING -from msrest import Deserializer, Serializer - from azure.core.rest import HttpRequest, HttpResponse from azure.mgmt.core import ARMPipelineClient from . 
import models from ._configuration import DataFactoryManagementClientConfiguration -from .operations import ActivityRunsOperations, DataFlowDebugSessionOperations, DataFlowsOperations, DatasetsOperations, ExposureControlOperations, FactoriesOperations, GlobalParametersOperations, IntegrationRuntimeNodesOperations, IntegrationRuntimeObjectMetadataOperations, IntegrationRuntimesOperations, LinkedServicesOperations, ManagedPrivateEndpointsOperations, ManagedVirtualNetworksOperations, Operations, PipelineRunsOperations, PipelinesOperations, PrivateEndPointConnectionsOperations, PrivateEndpointConnectionOperations, PrivateLinkResourcesOperations, TriggerRunsOperations, TriggersOperations +from ._serialization import Deserializer, Serializer +from .operations import ( + ActivityRunsOperations, + DataFlowDebugSessionOperations, + DataFlowsOperations, + DatasetsOperations, + ExposureControlOperations, + FactoriesOperations, + GlobalParametersOperations, + IntegrationRuntimeNodesOperations, + IntegrationRuntimeObjectMetadataOperations, + IntegrationRuntimesOperations, + LinkedServicesOperations, + ManagedPrivateEndpointsOperations, + ManagedVirtualNetworksOperations, + Operations, + PipelineRunsOperations, + PipelinesOperations, + PrivateEndPointConnectionsOperations, + PrivateEndpointConnectionOperations, + PrivateLinkResourcesOperations, + TriggerRunsOperations, + TriggersOperations, +) if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials import TokenCredential -class DataFactoryManagementClient: # pylint: disable=too-many-instance-attributes + +class DataFactoryManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes """The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. @@ -77,9 +99,9 @@ class DataFactoryManagementClient: # pylint: disable=too-many-instance-attrib azure.mgmt.datafactory.operations.PrivateLinkResourcesOperations :ivar global_parameters: GlobalParametersOperations operations :vartype global_parameters: azure.mgmt.datafactory.operations.GlobalParametersOperations - :param credential: Credential needed for the client to connect to Azure. + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials.TokenCredential - :param subscription_id: The subscription identifier. + :param subscription_id: The subscription identifier. Required. :type subscription_id: str :param base_url: Service URL. Default value is "https://management.azure.com". 
:type base_url: str @@ -97,19 +119,17 @@ def __init__( base_url: str = "https://management.azure.com", **kwargs: Any ) -> None: - self._config = DataFactoryManagementClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs) + self._config = DataFactoryManagementClientConfiguration( + credential=credential, subscription_id=subscription_id, **kwargs + ) self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) self._serialize.client_side_validation = False - self.operations = Operations( - self._client, self._config, self._serialize, self._deserialize - ) - self.factories = FactoriesOperations( - self._client, self._config, self._serialize, self._deserialize - ) + self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) + self.factories = FactoriesOperations(self._client, self._config, self._serialize, self._deserialize) self.exposure_control = ExposureControlOperations( self._client, self._config, self._serialize, self._deserialize ) @@ -122,30 +142,14 @@ def __init__( self.integration_runtime_nodes = IntegrationRuntimeNodesOperations( self._client, self._config, self._serialize, self._deserialize ) - self.linked_services = LinkedServicesOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.datasets = DatasetsOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.pipelines = PipelinesOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.pipeline_runs = PipelineRunsOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.activity_runs = ActivityRunsOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.triggers = TriggersOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.trigger_runs = TriggerRunsOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.data_flows = DataFlowsOperations( - self._client, self._config, self._serialize, self._deserialize - ) + self.linked_services = LinkedServicesOperations(self._client, self._config, self._serialize, self._deserialize) + self.datasets = DatasetsOperations(self._client, self._config, self._serialize, self._deserialize) + self.pipelines = PipelinesOperations(self._client, self._config, self._serialize, self._deserialize) + self.pipeline_runs = PipelineRunsOperations(self._client, self._config, self._serialize, self._deserialize) + self.activity_runs = ActivityRunsOperations(self._client, self._config, self._serialize, self._deserialize) + self.triggers = TriggersOperations(self._client, self._config, self._serialize, self._deserialize) + self.trigger_runs = TriggerRunsOperations(self._client, self._config, self._serialize, self._deserialize) + self.data_flows = DataFlowsOperations(self._client, self._config, self._serialize, self._deserialize) self.data_flow_debug_session = DataFlowDebugSessionOperations( self._client, self._config, self._serialize, self._deserialize ) @@ -168,12 +172,7 @@ def __init__( self._client, self._config, self._serialize, self._deserialize ) - - def _send_request( - self, - request: HttpRequest, - **kwargs: Any - ) -> HttpResponse: + def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse: """Runs the network 
request through the client's chained policies. >>> from azure.core.rest import HttpRequest @@ -182,7 +181,7 @@ def _send_request( >>> response = client._send_request(request) - For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request :param request: The network request you want to make. Required. :type request: ~azure.core.rest.HttpRequest diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_patch.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_patch.py index 74e48ecd07c..f99e77fef98 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_patch.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_patch.py @@ -28,4 +28,4 @@ # This file is used for handwritten extensions to the generated code. Example: # https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md def patch_sdk(): - pass \ No newline at end of file + pass diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_serialization.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_serialization.py new file mode 100644 index 00000000000..7c1dedb5133 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_serialization.py @@ -0,0 +1,1970 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +# pylint: skip-file + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +import codecs + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote # type: ignore +import xml.etree.ElementTree as ET + +import isodate + +from typing import Dict, Any, cast, TYPE_CHECKING + +from azure.core.exceptions import DeserializationError, SerializationError, raise_with_traceback + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +if TYPE_CHECKING: + from typing import Optional, Union, AnyStr, IO, Mapping + + +class RawDeserializer: + + # Accept "text" because we're open minded people... 
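For orientation, a minimal sketch of how this vendored RawDeserializer is typically used once the methods just below are defined: given a recorded response body and its content type, deserialize_from_text is expected to hand back native Python objects. The import path and the payload are assumptions for the sketch, not part of this patch.

    from azext_datafactory.vendored_sdks.datafactory._serialization import RawDeserializer

    # Hypothetical recorded body; bytes are decoded as UTF-8 and parsed with json.loads
    payload = b'{"name": "exampleFa000001", "properties": {"provisioningState": "Succeeded"}}'
    data = RawDeserializer.deserialize_from_text(payload, "application/json")
    assert data["properties"]["provisioningState"] == "Succeeded"
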
+ JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text(cls, data, content_type=None): + # type: (Optional[Union[AnyStr, IO]], Optional[str]) -> Any + """Decode data according to content-type. + + Accept a stream of data as well, but will be load at once in memory for now. + + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + """ + if hasattr(data, "read"): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding="utf-8-sig") + else: + # Explain to mypy the correct type. + data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) # nosec + except ET.ParseError: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... + # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. + _LOGGER.critical("Wasn't XML not JSON, failing") + raise_with_traceback(DeserializationError, "XML is invalid") + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) + + @classmethod + def deserialize_from_http_generics(cls, body_bytes, headers): + # type: (Optional[Union[AnyStr, IO]], Mapping) -> Any + """Deserialize from HTTP response. + + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + """ + # Try to use content-type from headers if available + content_type = None + if "content-type" in headers: + content_type = headers["content-type"].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... 
+ else: + content_type = "application/json" + + if body_bytes: + return cls.deserialize_from_text(body_bytes, content_type) + return None + + +try: + basestring # type: ignore + unicode_str = unicode # type: ignore +except NameError: + basestring = str # type: ignore + unicode_str = str # type: ignore + +_LOGGER = logging.getLogger(__name__) + +try: + _long_type = long # type: ignore +except NameError: + _long_type = int + + +class UTC(datetime.tzinfo): + """Time Zone info for handling UTC""" + + def utcoffset(self, dt): + """UTF offset for UTC is 0.""" + return datetime.timedelta(0) + + def tzname(self, dt): + """Timestamp representation.""" + return "Z" + + def dst(self, dt): + """No daylight saving for UTC.""" + return datetime.timedelta(hours=1) + + +try: + from datetime import timezone as _FixedOffset +except ImportError: # Python 2.7 + + class _FixedOffset(datetime.tzinfo): # type: ignore + """Fixed offset in minutes east from UTC. + Copy/pasted from Python doc + :param datetime.timedelta offset: offset in timedelta format + """ + + def __init__(self, offset): + self.__offset = offset + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return str(self.__offset.total_seconds() / 3600) + + def __repr__(self): + return "".format(self.tzname(None)) + + def dst(self, dt): + return datetime.timedelta(0) + + def __getinitargs__(self): + return (self.__offset,) + + +try: + from datetime import timezone + + TZ_UTC = timezone.utc # type: ignore +except ImportError: + TZ_UTC = UTC() # type: ignore + +_FLATTEN = re.compile(r"(? y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes=None): + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + def _serialize(self, target_obj, data_type=None, **kwargs): + """Serialize data into a string according to type. + + :param target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises: SerializationError if serialization fails. 
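As a hedged usage sketch of this Serializer for the simple cases the generated operations exercise (the url, query, and header helpers appear further down in this file); the import path is assumed from the vendored layout and the values are illustrative only:

    from azext_datafactory.vendored_sdks.datafactory._serialization import Serializer

    serializer = Serializer()
    serializer.url("factory_name", "exampleFa000001", "str")   # percent-encoded path segment
    serializer.query("api_version", "2018-06-01", "str")       # -> '2018-06-01'
    serializer.header("if_match", True, "bool")                 # json.dumps -> 'true'
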
+ """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() + try: + attributes = target_obj._attribute_map + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized.update(target_obj.additional_properties) + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) + continue + if xml_desc.get("text", False): + serialized.text = new_attr + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces. + if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = unicode_str(new_attr) + serialized.append(local_node) + else: # JSON + for k in reversed(keys): + unflattened = {k: new_attr} + new_attr = unflattened + + _new_attr = new_attr + _serialized = serialized + for k in keys: + if k not in _serialized: + _serialized.update(_new_attr) + _new_attr = _new_attr[k] + _serialized = _serialized[k] + except ValueError: + continue + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise_with_traceback(SerializationError, msg, err) + else: + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. 
+ :rtype: dict + :raises: SerializationError if serialization fails. + :raises: ValueError if data is None + """ + + # Just in case this is a dict + internal_data_type = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) + except DeserializationError as err: + raise_with_traceback(SerializationError, "Unable to build a model: " + str(err), err) + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + data = [self.serialize_data(d, internal_data_type, **kwargs) if d is not None else "" for d in data] + if not kwargs.get("skip_quote", False): + data = [quote(str(d), safe="") for d in data] + return str(self.serialize_iter(data, internal_data_type, **kwargs)) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. 
+ :raises: ValueError if data is None + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :param bool required: Whether it's essential that the data not be + empty or None + :raises: AttributeError if required data is None. + :raises: ValueError if data is None + :raises: SerializationError if serialization fails. + """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + elif data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, data.__class__) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." + raise_with_traceback(SerializationError, msg.format(data, data_type), err) + else: + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param data: Object to be serialized. + :param str data_type: Type of object in the iterable. + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + return eval(data_type)(data) # nosec + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param data: Object to be serialized. + :rtype: str + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + else: + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. + + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. 
+ - is_xml bool : If set, serialize as XML + + :param list attr: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param bool required: Whether the objects in the iterable must + not be None or empty. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. + :rtype: list, str + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError: + serialized.append(None) + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :param bool required: Whether the objects in the dictionary must + not be None or empty. + :rtype: dict + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is unicode_str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + elif obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) + return result + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) + + @staticmethod + def serialize_bytearray(attr, **kwargs): + """Serialize bytearray into base-64 string. + + :param attr: Object to be serialized. + :rtype: str + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): + """Serialize str into base-64 string. + + :param attr: Object to be serialized. + :rtype: str + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): + """Serialize Decimal object to float. + + :param attr: Object to be serialized. + :rtype: float + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): + """Serialize long (Py2) or int (Py3). + + :param attr: Object to be serialized. + :rtype: int/long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. + :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): + """Serialize TimeDelta object into ISO-8601 formatted string. + + :param TimeDelta attr: Object to be serialized. 
+ :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises: TypeError if format invalid. + """ + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError: + raise TypeError("RFC1123 object must be valid Datetime object.") + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], + utc.tm_mday, + Serializer.months[utc.tm_mon], + utc.tm_year, + utc.tm_hour, + utc.tm_min, + utc.tm_sec, + ) + + @staticmethod + def serialize_iso(attr, **kwargs): + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises: SerializationError if format invalid. + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") + if microseconds: + microseconds = "." + microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) + return date + microseconds + "Z" + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." + raise_with_traceback(SerializationError, msg, err) + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise_with_traceback(TypeError, msg, err) + + @staticmethod + def serialize_unix(attr, **kwargs): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises: SerializationError if format invalid + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError: + raise TypeError("Unix time object must be valid Datetime object.") + + +def rest_key_extractor(attr, attr_desc, data): + key = attr_desc["key"] + working_data = data + + while "." in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + # https://github.com/Azure/msrest-for-python/issues/197 + return None + key = ".".join(dict_keys[1:]) + + return working_data.get(key) + + +def rest_key_case_insensitive_extractor(attr, attr_desc, data): + key = attr_desc["key"] + working_data = data + + while "." 
in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + # https://github.com/Azure/msrest-for-python/issues/197 + return None + key = ".".join(dict_keys[1:]) + + if working_data: + return attribute_key_case_insensitive_extractor(key, None, working_data) + + +def last_rest_key_extractor(attr, attr_desc, data): + """Extract the attribute in "data" based on the last part of the JSON path key.""" + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_extractor(dict_keys[-1], None, data) + + +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): + """Extract the attribute in "data" based on the last part of the JSON path key. + + This is the case insensitive version of "last_rest_key_extractor" + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) + + +def attribute_key_extractor(attr, _, data): + return data.get(attr) + + +def attribute_key_case_insensitive_extractor(attr, _, data): + found_key = None + lower_attr = attr.lower() + for key in data: + if lower_attr == key.lower(): + found_key = key + break + + return data.get(found_key) + + +def _extract_name_from_internal_type(internal_type): + """Given an internal type XML description, extract correct XML name with namespace. + + :param dict internal_type: An model type + :rtype: tuple + :returns: A tuple XML name + namespace dict + """ + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + xml_name = internal_type_xml_map.get("name", internal_type.__name__) + xml_ns = internal_type_xml_map.get("ns", None) + if xml_ns: + xml_name = "{}{}".format(xml_ns, xml_name) + return xml_name + + +def xml_key_extractor(attr, attr_desc, data): + if isinstance(data, dict): + return None + + # Test if this model is XML ready first + if not isinstance(data, ET.Element): + return None + + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + + # Look for a children + is_iter_type = attr_desc["type"].startswith("[") + is_wrapped = xml_desc.get("wrapped", False) + internal_type = attr_desc.get("internalType", None) + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + + # Integrate namespace if necessary + xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) + if xml_ns: + xml_name = "{}{}".format(xml_ns, xml_name) + + # If it's an attribute, that's simple + if xml_desc.get("attr", False): + return data.get(xml_name) + + # If it's x-ms-text, that's simple too + if xml_desc.get("text", False): + return data.text + + # Scenario where I take the local name: + # - Wrapped node + # - Internal type is an enum (considered basic types) + # - Internal type has no XML/Name node + if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): + children = data.findall(xml_name) + # If internal type has a local name and it's not a list, I use that name + elif not is_iter_type and internal_type and "name" in internal_type_xml_map: + xml_name = _extract_name_from_internal_type(internal_type) + children = data.findall(xml_name) + # That's an array + else: + if 
internal_type: # Complex type, ignore itemsName and use the complex type name + items_name = _extract_name_from_internal_type(internal_type) + else: + items_name = xml_desc.get("itemsName", xml_name) + children = data.findall(items_name) + + if len(children) == 0: + if is_iter_type: + if is_wrapped: + return None # is_wrapped no node, we want None + else: + return [] # not wrapped, assume empty list + return None # Assume it's not there, maybe an optional node. + + # If is_iter_type and not wrapped, return all found children + if is_iter_type: + if not is_wrapped: + return children + else: # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name + ) + ) + return list(children[0]) # Might be empty list and that's ok. + + # Here it's not a itertype, we should have found one element only or empty + if len(children) > 1: + raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) + return children[0] + + +class Deserializer(object): + """Response object model deserializer. + + :param dict classes: Class type dictionary for deserializing complex types. + :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. + """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes=None): + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. + :param str content_type: Swagger "produces" if available. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): + """Call the deserializer on a model. 
+ + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, basestring): + return self.deserialize_data(data, response) + elif isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None: + return data + try: + attributes = response._attribute_map + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... + if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name + raise_with_traceback(DeserializationError, msg, err) + else: + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. 
+ + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deseralize. + """ + if target is None: + return None, None + + if isinstance(target, basestring): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deseralize. + :param str content_type: Swagger "produces" if available. + """ + try: + return self(target_obj, data, content_type=content_type) + except: + _LOGGER.debug( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param raw_data: Data to be processed. + :param content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param response: The response model class. + :param d_attrs: The deserialized response attributes. 
+ """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [k for k, v in response._validation.items() if v.get("readonly")] + const = [k for k, v in response._validation.items() if v.get("constant")] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) + raise DeserializationError(msg + str(err)) + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) + + def deserialize_data(self, data, data_type): + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise_with_traceback(DeserializationError, msg, err) + else: + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. + :param str dict_type: The object type of the items in the dictionary. 
+ :rtype: dict + """ + if isinstance(attr, list): + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :rtype: dict + :raises: TypeError if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, basestring): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + else: + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. + + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :rtype: str, int, float or bool + :raises: TypeError if string format is not valid. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + else: + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + elif isinstance(attr, basestring): + if attr.lower() in ["true", "1"]: + return True + elif attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + return eval(data_type)(attr) # nosec + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): + return data + except NameError: + return str(data) + else: + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. 
If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + # https://github.com/Azure/azure-rest-api-specs/issues/141 + try: + return list(enum_obj.__members__.values())[data] + except IndexError: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) + attr = attr + padding + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :rtype: Decimal + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(attr) + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise_with_traceback(DeserializationError, msg, err) + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :rtype: long or int + :raises: ValueError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. + :rtype: TimeDelta + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + duration = isodate.parse_duration(attr) + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize duration object." + raise_with_traceback(DeserializationError, msg, err) + else: + return duration + + @staticmethod + def deserialize_date(attr): + """Deserialize ISO-8601 formatted string into Date object. + + :param str attr: response string to be deserialized. + :rtype: Date + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + # This must NOT use defaultmonth/defaultday. 
Using None ensure this raises an exception. + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) + + @staticmethod + def deserialize_time(attr): + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :rtype: datetime.time + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + return isodate.parse_time(attr) + + @staticmethod + def deserialize_rfc(attr): + """Deserialize RFC-1123 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: Datetime + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + parsed_date = email.utils.parsedate_tz(attr) + date_obj = datetime.datetime( + *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + ) + if not date_obj.tzinfo: + date_obj = date_obj.astimezone(tz=TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to rfc datetime object." + raise_with_traceback(DeserializationError, msg, err) + else: + return date_obj + + @staticmethod + def deserialize_iso(attr): + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: Datetime + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + attr = attr.upper() + match = Deserializer.valid_date.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize datetime object." + raise_with_traceback(DeserializationError, msg, err) + else: + return date_obj + + @staticmethod + def deserialize_unix(attr): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param int attr: Object to be serialized. + :rtype: Datetime + :raises: DeserializationError if format invalid + """ + if isinstance(attr, ET.Element): + attr = int(attr.text) + try: + date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to unix datetime object." 
+ raise_with_traceback(DeserializationError, msg, err) + else: + return date_obj diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_vendor.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_vendor.py index 138f663c53a..9aad73fc743 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_vendor.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_vendor.py @@ -7,6 +7,7 @@ from azure.core.pipeline.transport import HttpRequest + def _convert_request(request, files=None): data = request.content if not files else None request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) @@ -14,6 +15,7 @@ def _convert_request(request, files=None): request.set_formdata_body(files) return request + def _format_url_section(template, **kwargs): components = template.split("/") while components: @@ -21,7 +23,5 @@ def _format_url_section(template, **kwargs): return template.format(**kwargs) except KeyError as key: formatted_components = template.split("/") - components = [ - c for c in formatted_components if "{}".format(key.args[0]) not in c - ] + components = [c for c in formatted_components if "{}".format(key.args[0]) not in c] template = "/".join(components) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_version.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_version.py index b457a55ac9d..40f18e7a20f 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_version.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "2.7.0" +VERSION = "2.8.0" diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/__init__.py index fba7666a306..3cbee83abd5 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/__init__.py @@ -14,7 +14,8 @@ except ImportError: _patch_all = [] from ._patch import patch_sdk as _patch_sdk -__all__ = ['DataFactoryManagementClient'] + +__all__ = ["DataFactoryManagementClient"] __all__.extend([p for p in _patch_all if p not in __all__]) _patch_sdk() diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration.py index ae06026ecb9..87477edc745 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration.py @@ -25,23 +25,18 @@ class DataFactoryManagementClientConfiguration(Configuration): # pylint: disabl Note that all parameters used to create this instance are saved as instance attributes. - :param credential: Credential needed for the client to connect to Azure. + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: The subscription identifier. + :param subscription_id: The subscription identifier. Required. :type subscription_id: str :keyword api_version: Api Version. Default value is "2018-06-01". 
Note that overriding this default value may result in unsupported behavior. :paramtype api_version: str """ - def __init__( - self, - credential: "AsyncTokenCredential", - subscription_id: str, - **kwargs: Any - ) -> None: + def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None: super(DataFactoryManagementClientConfiguration, self).__init__(**kwargs) - api_version = kwargs.pop('api_version', "2018-06-01") # type: str + api_version = kwargs.pop("api_version", "2018-06-01") # type: str if credential is None: raise ValueError("Parameter 'credential' must not be None.") @@ -51,22 +46,21 @@ def __init__( self.credential = credential self.subscription_id = subscription_id self.api_version = api_version - self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) - kwargs.setdefault('sdk_moniker', 'mgmt-datafactory/{}'.format(VERSION)) + self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "mgmt-datafactory/{}".format(VERSION)) self._configure(**kwargs) - def _configure( - self, - **kwargs: Any - ) -> None: - self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) - self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) - self.authentication_policy = kwargs.get('authentication_policy') + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") if self.credential and not self.authentication_policy: - self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs) + self.authentication_policy = AsyncARMChallengeAuthenticationPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py index 202e597e087..6892262f922 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py +++ 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py @@ -9,20 +9,42 @@ from copy import deepcopy from typing import Any, Awaitable, TYPE_CHECKING -from msrest import Deserializer, Serializer - from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.mgmt.core import AsyncARMPipelineClient from .. import models +from .._serialization import Deserializer, Serializer from ._configuration import DataFactoryManagementClientConfiguration -from .operations import ActivityRunsOperations, DataFlowDebugSessionOperations, DataFlowsOperations, DatasetsOperations, ExposureControlOperations, FactoriesOperations, GlobalParametersOperations, IntegrationRuntimeNodesOperations, IntegrationRuntimeObjectMetadataOperations, IntegrationRuntimesOperations, LinkedServicesOperations, ManagedPrivateEndpointsOperations, ManagedVirtualNetworksOperations, Operations, PipelineRunsOperations, PipelinesOperations, PrivateEndPointConnectionsOperations, PrivateEndpointConnectionOperations, PrivateLinkResourcesOperations, TriggerRunsOperations, TriggersOperations +from .operations import ( + ActivityRunsOperations, + DataFlowDebugSessionOperations, + DataFlowsOperations, + DatasetsOperations, + ExposureControlOperations, + FactoriesOperations, + GlobalParametersOperations, + IntegrationRuntimeNodesOperations, + IntegrationRuntimeObjectMetadataOperations, + IntegrationRuntimesOperations, + LinkedServicesOperations, + ManagedPrivateEndpointsOperations, + ManagedVirtualNetworksOperations, + Operations, + PipelineRunsOperations, + PipelinesOperations, + PrivateEndPointConnectionsOperations, + PrivateEndpointConnectionOperations, + PrivateLinkResourcesOperations, + TriggerRunsOperations, + TriggersOperations, +) if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -class DataFactoryManagementClient: # pylint: disable=too-many-instance-attributes + +class DataFactoryManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes """The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. @@ -78,9 +100,9 @@ class DataFactoryManagementClient: # pylint: disable=too-many-instance-attrib azure.mgmt.datafactory.aio.operations.PrivateLinkResourcesOperations :ivar global_parameters: GlobalParametersOperations operations :vartype global_parameters: azure.mgmt.datafactory.aio.operations.GlobalParametersOperations - :param credential: Credential needed for the client to connect to Azure. + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: The subscription identifier. + :param subscription_id: The subscription identifier. Required. :type subscription_id: str :param base_url: Service URL. Default value is "https://management.azure.com". 
:type base_url: str @@ -98,19 +120,17 @@ def __init__( base_url: str = "https://management.azure.com", **kwargs: Any ) -> None: - self._config = DataFactoryManagementClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs) + self._config = DataFactoryManagementClientConfiguration( + credential=credential, subscription_id=subscription_id, **kwargs + ) self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) self._serialize.client_side_validation = False - self.operations = Operations( - self._client, self._config, self._serialize, self._deserialize - ) - self.factories = FactoriesOperations( - self._client, self._config, self._serialize, self._deserialize - ) + self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) + self.factories = FactoriesOperations(self._client, self._config, self._serialize, self._deserialize) self.exposure_control = ExposureControlOperations( self._client, self._config, self._serialize, self._deserialize ) @@ -123,30 +143,14 @@ def __init__( self.integration_runtime_nodes = IntegrationRuntimeNodesOperations( self._client, self._config, self._serialize, self._deserialize ) - self.linked_services = LinkedServicesOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.datasets = DatasetsOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.pipelines = PipelinesOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.pipeline_runs = PipelineRunsOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.activity_runs = ActivityRunsOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.triggers = TriggersOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.trigger_runs = TriggerRunsOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.data_flows = DataFlowsOperations( - self._client, self._config, self._serialize, self._deserialize - ) + self.linked_services = LinkedServicesOperations(self._client, self._config, self._serialize, self._deserialize) + self.datasets = DatasetsOperations(self._client, self._config, self._serialize, self._deserialize) + self.pipelines = PipelinesOperations(self._client, self._config, self._serialize, self._deserialize) + self.pipeline_runs = PipelineRunsOperations(self._client, self._config, self._serialize, self._deserialize) + self.activity_runs = ActivityRunsOperations(self._client, self._config, self._serialize, self._deserialize) + self.triggers = TriggersOperations(self._client, self._config, self._serialize, self._deserialize) + self.trigger_runs = TriggerRunsOperations(self._client, self._config, self._serialize, self._deserialize) + self.data_flows = DataFlowsOperations(self._client, self._config, self._serialize, self._deserialize) self.data_flow_debug_session = DataFlowDebugSessionOperations( self._client, self._config, self._serialize, self._deserialize ) @@ -169,12 +173,7 @@ def __init__( self._client, self._config, self._serialize, self._deserialize ) - - def _send_request( - self, - request: HttpRequest, - **kwargs: Any - ) -> Awaitable[AsyncHttpResponse]: + def _send_request(self, request: HttpRequest, **kwargs: Any) -> 
Awaitable[AsyncHttpResponse]: """Runs the network request through the client's chained policies. >>> from azure.core.rest import HttpRequest @@ -183,7 +182,7 @@ def _send_request( >>> response = await client._send_request(request) - For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request :param request: The network request you want to make. Required. :type request: ~azure.core.rest.HttpRequest diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_patch.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_patch.py index 74e48ecd07c..f99e77fef98 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_patch.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_patch.py @@ -28,4 +28,4 @@ # This file is used for handwritten extensions to the generated code. Example: # https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md def patch_sdk(): - pass \ No newline at end of file + pass diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/__init__.py index 61caa433ff9..a20c42cd8bf 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/__init__.py @@ -31,28 +31,29 @@ from ._patch import __all__ as _patch_all from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import from ._patch import patch_sdk as _patch_sdk + __all__ = [ - 'Operations', - 'FactoriesOperations', - 'ExposureControlOperations', - 'IntegrationRuntimesOperations', - 'IntegrationRuntimeObjectMetadataOperations', - 'IntegrationRuntimeNodesOperations', - 'LinkedServicesOperations', - 'DatasetsOperations', - 'PipelinesOperations', - 'PipelineRunsOperations', - 'ActivityRunsOperations', - 'TriggersOperations', - 'TriggerRunsOperations', - 'DataFlowsOperations', - 'DataFlowDebugSessionOperations', - 'ManagedVirtualNetworksOperations', - 'ManagedPrivateEndpointsOperations', - 'PrivateEndPointConnectionsOperations', - 'PrivateEndpointConnectionOperations', - 'PrivateLinkResourcesOperations', - 'GlobalParametersOperations', + "Operations", + "FactoriesOperations", + "ExposureControlOperations", + "IntegrationRuntimesOperations", + "IntegrationRuntimeObjectMetadataOperations", + "IntegrationRuntimeNodesOperations", + "LinkedServicesOperations", + "DatasetsOperations", + "PipelinesOperations", + "PipelineRunsOperations", + "ActivityRunsOperations", + "TriggersOperations", + "TriggerRunsOperations", + "DataFlowsOperations", + "DataFlowDebugSessionOperations", + "ManagedVirtualNetworksOperations", + "ManagedPrivateEndpointsOperations", + "PrivateEndPointConnectionsOperations", + "PrivateEndpointConnectionOperations", + "PrivateLinkResourcesOperations", + "GlobalParametersOperations", ] __all__.extend([p for p in _patch_all if p not in __all__]) -_patch_sdk() \ No newline at end of file +_patch_sdk() diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_activity_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_activity_runs_operations.py index bf771e46735..0677d5916d5 100644 --- 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_activity_runs_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_activity_runs_operations.py @@ -6,9 +6,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Optional, TypeVar - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest @@ -19,9 +25,11 @@ from ... import models as _models from ..._vendor import _convert_request from ...operations._activity_runs_operations import build_query_by_pipeline_run_request -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + class ActivityRunsOperations: """ .. warning:: @@ -41,54 +49,122 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async + @overload async def query_by_pipeline_run( self, resource_group_name: str, factory_name: str, run_id: str, filter_parameters: _models.RunFilterParameters, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.ActivityRunsQueryResponse: """Query activity runs based on input filter conditions. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param run_id: The pipeline run identifier. + :param run_id: The pipeline run identifier. Required. :type run_id: str - :param filter_parameters: Parameters to filter the activity runs. + :param filter_parameters: Parameters to filter the activity runs. Required. :type filter_parameters: ~azure.mgmt.datafactory.models.RunFilterParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ActivityRunsQueryResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ActivityRunsQueryResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def query_by_pipeline_run( + self, + resource_group_name: str, + factory_name: str, + run_id: str, + filter_parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ActivityRunsQueryResponse: + """Query activity runs based on input filter conditions. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. 
+ :type factory_name: str + :param run_id: The pipeline run identifier. Required. + :type run_id: str + :param filter_parameters: Parameters to filter the activity runs. Required. + :type filter_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ActivityRunsQueryResponse, or the result of cls(response) + :return: ActivityRunsQueryResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.ActivityRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @distributed_trace_async + async def query_by_pipeline_run( + self, + resource_group_name: str, + factory_name: str, + run_id: str, + filter_parameters: Union[_models.RunFilterParameters, IO], + **kwargs: Any + ) -> _models.ActivityRunsQueryResponse: + """Query activity runs based on input filter conditions. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param run_id: The pipeline run identifier. Required. + :type run_id: str + :param filter_parameters: Parameters to filter the activity runs. Is either a model type or a + IO type. Required. + :type filter_parameters: ~azure.mgmt.datafactory.models.RunFilterParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ActivityRunsQueryResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ActivityRunsQueryResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.ActivityRunsQueryResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ActivityRunsQueryResponse] - _json = self._serialize.body(filter_parameters, 'RunFilterParameters') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(filter_parameters, (IO, bytes)): + _content = filter_parameters + else: + _json = self._serialize.body(filter_parameters, "RunFilterParameters") request = build_query_by_pipeline_run_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, run_id=run_id, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.query_by_pipeline_run.metadata['url'], + content=_content, + template_url=self.query_by_pipeline_run.metadata["url"], headers=_headers, params=_params, ) @@ -96,22 +172,20 @@ async def query_by_pipeline_run( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('ActivityRunsQueryResponse', pipeline_response) + deserialized = self._deserialize("ActivityRunsQueryResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - query_by_pipeline_run.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns"} # type: ignore - + query_by_pipeline_run.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_debug_session_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_debug_session_operations.py index e15ad93f47a..166f1538a59 100644 --- 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_debug_session_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_debug_session_operations.py @@ -6,10 +6,17 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -22,10 +29,18 @@ from ... import models as _models from ..._vendor import _convert_request -from ...operations._data_flow_debug_session_operations import build_add_data_flow_request, build_create_request_initial, build_delete_request, build_execute_command_request_initial, build_query_by_factory_request -T = TypeVar('T') +from ...operations._data_flow_debug_session_operations import ( + build_add_data_flow_request, + build_create_request, + build_delete_request, + build_execute_command_request, + build_query_by_factory_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + class DataFlowDebugSessionOperations: """ .. 
warning:: @@ -45,36 +60,40 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - async def _create_initial( self, resource_group_name: str, factory_name: str, - request: _models.CreateDataFlowDebugSessionRequest, + request: Union[_models.CreateDataFlowDebugSessionRequest, IO], **kwargs: Any ) -> Optional[_models.CreateDataFlowDebugSessionResponse]: - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.CreateDataFlowDebugSessionResponse]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.CreateDataFlowDebugSessionResponse]] - _json = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(request, (IO, bytes)): + _content = request + else: + _json = self._serialize.body(request, "CreateDataFlowDebugSessionRequest") - request = build_create_request_initial( - subscription_id=self._config.subscription_id, + request = build_create_request( resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_initial.metadata['url'], + content=_content, + template_url=self._create_initial.metadata["url"], headers=_headers, params=_params, ) @@ -82,10 +101,9 @@ async def _create_initial( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: @@ -95,36 +113,110 @@ async def _create_initial( deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', pipeline_response) + deserialized = self._deserialize("CreateDataFlowDebugSessionResponse", pipeline_response) if response.status_code == 202: - response_headers['location']=self._deserialize('str', response.headers.get('location')) - + response_headers["location"] = self._deserialize("str", response.headers.get("location")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _create_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession"} # type: ignore - 
+ _create_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession"} # type: ignore - @distributed_trace_async + @overload async def begin_create( self, resource_group_name: str, factory_name: str, request: _models.CreateDataFlowDebugSessionRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> AsyncLROPoller[_models.CreateDataFlowDebugSessionResponse]: """Creates a data flow debug session. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param request: Data flow debug session definition. + :param request: Data flow debug session definition. Required. :type request: ~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either CreateDataFlowDebugSessionResponse + or the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create( + self, + resource_group_name: str, + factory_name: str, + request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.CreateDataFlowDebugSessionResponse]: + """Creates a data flow debug session. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param request: Data flow debug session definition. Required. + :type request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either CreateDataFlowDebugSessionResponse + or the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create( + self, + resource_group_name: str, + factory_name: str, + request: Union[_models.CreateDataFlowDebugSessionRequest, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.CreateDataFlowDebugSessionResponse]: + """Creates a data flow debug session. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param request: Data flow debug session definition. Is either a model type or a IO type. + Required. + :type request: ~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for @@ -137,20 +229,17 @@ async def begin_create( or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.CreateDataFlowDebugSessionResponse] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CreateDataFlowDebugSessionResponse] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: raw_result = await self._create_initial( # type: ignore resource_group_name=resource_group_name, @@ -158,79 +247,71 @@ async def begin_create( request=request, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', pipeline_response) + deserialized = self._deserialize("CreateDataFlowDebugSessionResponse", pipeline_response) if cls: return 
cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: - polling_method = cast(AsyncPollingMethod, AsyncARMPolling( - lro_delay, - - - **kwargs - )) # type: AsyncPollingMethod - elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: polling_method = polling + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_create.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession"} # type: ignore + begin_create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession"} # type: ignore @distributed_trace def query_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any - ) -> AsyncIterable[_models.QueryDataFlowDebugSessionsResponse]: + self, resource_group_name: str, factory_name: str, **kwargs: Any + ) -> AsyncIterable["_models.DataFlowDebugSessionInfo"]: """Query all active data flow debug sessions. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either QueryDataFlowDebugSessionsResponse or the result - of cls(response) + :return: An iterator like instance of either DataFlowDebugSessionInfo or the result of + cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.QueryDataFlowDebugSessionsResponse] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.DataFlowDebugSessionInfo] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.QueryDataFlowDebugSessionsResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.QueryDataFlowDebugSessionsResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_query_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.query_by_factory.metadata['url'], + template_url=self.query_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -238,16 +319,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_query_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -263,10 +339,8 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -276,55 +350,116 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - query_by_factory.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryDataFlowDebugSessions"} # type: ignore + query_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryDataFlowDebugSessions"} # type: ignore - @distributed_trace_async + @overload async def add_data_flow( self, resource_group_name: str, factory_name: str, request: _models.DataFlowDebugPackage, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.AddDataFlowToDebugSessionResponse: """Add a data flow into debug session. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param request: Data flow debug session definition with debug content. + :param request: Data flow debug session definition with debug content. Required. :type request: ~azure.mgmt.datafactory.models.DataFlowDebugPackage + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AddDataFlowToDebugSessionResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.AddDataFlowToDebugSessionResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def add_data_flow( + self, + resource_group_name: str, + factory_name: str, + request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.AddDataFlowToDebugSessionResponse: + """Add a data flow into debug session. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param request: Data flow debug session definition with debug content. Required. + :type request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AddDataFlowToDebugSessionResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.AddDataFlowToDebugSessionResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def add_data_flow( + self, + resource_group_name: str, + factory_name: str, + request: Union[_models.DataFlowDebugPackage, IO], + **kwargs: Any + ) -> _models.AddDataFlowToDebugSessionResponse: + """Add a data flow into debug session. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param request: Data flow debug session definition with debug content. Is either a model type + or a IO type. Required. + :type request: ~azure.mgmt.datafactory.models.DataFlowDebugPackage or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: AddDataFlowToDebugSessionResponse, or the result of cls(response) + :return: AddDataFlowToDebugSessionResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.AddDataFlowToDebugSessionResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.AddDataFlowToDebugSessionResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AddDataFlowToDebugSessionResponse] - _json = self._serialize.body(request, 'DataFlowDebugPackage') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(request, (IO, bytes)): + _content = request + else: + _json = self._serialize.body(request, "DataFlowDebugPackage") request = build_add_data_flow_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.add_data_flow.metadata['url'], + content=_content, + template_url=self.add_data_flow.metadata["url"], headers=_headers, params=_params, ) @@ -332,69 +467,130 @@ async def add_data_flow( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('AddDataFlowToDebugSessionResponse', pipeline_response) + deserialized = self._deserialize("AddDataFlowToDebugSessionResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - add_data_flow.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/addDataFlowToDebugSession"} # type: ignore - + add_data_flow.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/addDataFlowToDebugSession"} # type: ignore - @distributed_trace_async + @overload async def delete( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, 
factory_name: str, request: _models.DeleteDataFlowDebugSessionRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> None: """Deletes a data flow debug session. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param request: Data flow debug session definition for deletion. + :param request: Data flow debug session definition for deletion. Required. :type request: ~azure.mgmt.datafactory.models.DeleteDataFlowDebugSessionRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + factory_name: str, + request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Deletes a data flow debug session. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param request: Data flow debug session definition for deletion. Required. + :type request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + factory_name: str, + request: Union[_models.DeleteDataFlowDebugSessionRequest, IO], + **kwargs: Any + ) -> None: + """Deletes a data flow debug session. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param request: Data flow debug session definition for deletion. Is either a model type or a IO + type. Required. + :type request: ~azure.mgmt.datafactory.models.DeleteDataFlowDebugSessionRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] - _json = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(request, (IO, bytes)): + _content = request + else: + _json = self._serialize.body(request, "DeleteDataFlowDebugSessionRequest") request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.delete.metadata['url'], + content=_content, + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -402,10 +598,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -415,38 +610,42 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/deleteDataFlowDebugSession"} # type: ignore - + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/deleteDataFlowDebugSession"} # type: ignore async def _execute_command_initial( self, resource_group_name: str, factory_name: str, - request: _models.DataFlowDebugCommandRequest, + request: Union[_models.DataFlowDebugCommandRequest, IO], **kwargs: Any ) -> Optional[_models.DataFlowDebugCommandResponse]: - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = 
kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.DataFlowDebugCommandResponse]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.DataFlowDebugCommandResponse]] - _json = self._serialize.body(request, 'DataFlowDebugCommandRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(request, (IO, bytes)): + _content = request + else: + _json = self._serialize.body(request, "DataFlowDebugCommandRequest") - request = build_execute_command_request_initial( - subscription_id=self._config.subscription_id, + request = build_execute_command_request( resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._execute_command_initial.metadata['url'], + content=_content, + template_url=self._execute_command_initial.metadata["url"], headers=_headers, params=_params, ) @@ -454,10 +653,9 @@ async def _execute_command_initial( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: @@ -467,36 +665,75 @@ async def _execute_command_initial( deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('DataFlowDebugCommandResponse', pipeline_response) + deserialized = self._deserialize("DataFlowDebugCommandResponse", pipeline_response) if response.status_code == 202: - response_headers['location']=self._deserialize('str', response.headers.get('location')) - + response_headers["location"] = self._deserialize("str", response.headers.get("location")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _execute_command_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand"} # type: ignore + _execute_command_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand"} # type: ignore - - @distributed_trace_async + @overload async def begin_execute_command( self, resource_group_name: str, factory_name: str, request: _models.DataFlowDebugCommandRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> AsyncLROPoller[_models.DataFlowDebugCommandResponse]: """Execute a data flow debug command. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param request: Data flow debug command definition. + :param request: Data flow debug command definition. Required. 
:type request: ~azure.mgmt.datafactory.models.DataFlowDebugCommandRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either DataFlowDebugCommandResponse or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.DataFlowDebugCommandResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_execute_command( + self, + resource_group_name: str, + factory_name: str, + request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DataFlowDebugCommandResponse]: + """Execute a data flow debug command. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param request: Data flow debug command definition. Required. + :type request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for @@ -509,20 +746,52 @@ async def begin_execute_command( result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.DataFlowDebugCommandResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_execute_command( + self, + resource_group_name: str, + factory_name: str, + request: Union[_models.DataFlowDebugCommandRequest, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.DataFlowDebugCommandResponse]: + """Execute a data flow debug command. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param request: Data flow debug command definition. Is either a model type or a IO type. + Required. + :type request: ~azure.mgmt.datafactory.models.DataFlowDebugCommandRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either DataFlowDebugCommandResponse or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.DataFlowDebugCommandResponse] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.DataFlowDebugCommandResponse] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DataFlowDebugCommandResponse] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: raw_result = await self._execute_command_initial( # type: ignore resource_group_name=resource_group_name, @@ -530,36 +799,32 @@ async def begin_execute_command( request=request, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize('DataFlowDebugCommandResponse', pipeline_response) + deserialized = self._deserialize("DataFlowDebugCommandResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: - polling_method = cast(AsyncPollingMethod, AsyncARMPolling( - lro_delay, - - - **kwargs - )) # type: AsyncPollingMethod - elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: polling_method = polling + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_execute_command.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand"} # type: 
ignore + begin_execute_command.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flows_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flows_operations.py index cc5fd671f45..3671dca7b1a 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flows_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flows_operations.py @@ -6,10 +6,17 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest @@ -20,10 +27,17 @@ from ... import models as _models from ..._vendor import _convert_request -from ...operations._data_flows_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_by_factory_request -T = TypeVar('T') +from ...operations._data_flows_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_by_factory_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + class DataFlowsOperations: """ .. warning:: @@ -43,8 +57,7 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async + @overload async def create_or_update( self, resource_group_name: str, @@ -52,50 +65,126 @@ async def create_or_update( data_flow_name: str, data_flow: _models.DataFlowResource, if_match: Optional[str] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.DataFlowResource: """Creates or updates a data flow. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param data_flow_name: The data flow name. + :param data_flow_name: The data flow name. Required. :type data_flow_name: str - :param data_flow: Data flow resource definition. + :param data_flow: Data flow resource definition. Required. :type data_flow: ~azure.mgmt.datafactory.models.DataFlowResource :param if_match: ETag of the data flow entity. 
Should only be specified for update, for which it should match existing entity or can be * for unconditional update. Default value is None. :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataFlowResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.DataFlowResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + data_flow_name: str, + data_flow: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DataFlowResource: + """Creates or updates a data flow. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param data_flow_name: The data flow name. Required. + :type data_flow_name: str + :param data_flow: Data flow resource definition. Required. + :type data_flow: IO + :param if_match: ETag of the data flow entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. Default value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataFlowResource, or the result of cls(response) + :return: DataFlowResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.DataFlowResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + data_flow_name: str, + data_flow: Union[_models.DataFlowResource, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.DataFlowResource: + """Creates or updates a data flow. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param data_flow_name: The data flow name. Required. + :type data_flow_name: str + :param data_flow: Data flow resource definition. Is either a model type or a IO type. Required. + :type data_flow: ~azure.mgmt.datafactory.models.DataFlowResource or IO + :param if_match: ETag of the data flow entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. Default value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataFlowResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.DataFlowResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.DataFlowResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DataFlowResource] - _json = self._serialize.body(data_flow, 'DataFlowResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(data_flow, (IO, bytes)): + _content = data_flow + else: + _json = self._serialize.body(data_flow, "DataFlowResource") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, data_flow_name=data_flow_name, + subscription_id=self._config.subscription_id, + if_match=if_match, api_version=api_version, content_type=content_type, json=_json, - if_match=if_match, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ -103,25 +192,23 @@ async def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('DataFlowResource', pipeline_response) + deserialized = self._deserialize("DataFlowResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}"} # type: ignore - + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}"} # type: ignore @distributed_trace_async async def get( @@ -134,41 +221,38 @@ async def get( ) -> _models.DataFlowResource: """Gets a data flow. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param data_flow_name: The data flow name. 
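A usage sketch for the model-or-IO overloads of create_or_update above, under the same assumptions (public azure-mgmt-datafactory client, placeholder names); passing a file object opened in binary mode instead of a DataFlowResource would exercise the IO path.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.datafactory.aio import DataFactoryManagementClient
from azure.mgmt.datafactory.models import DataFlowResource, MappingDataFlow


async def upsert_data_flow() -> None:
    async with DefaultAzureCredential() as credential, DataFactoryManagementClient(
        credential, "<subscription-id>"
    ) as client:
        # Model path: the SDK serializes the DataFlowResource body to JSON.
        resource = DataFlowResource(properties=MappingDataFlow(description="example flow"))
        result = await client.data_flows.create_or_update(
            "<resource-group>", "<factory-name>", "<data-flow-name>", resource
        )
        print(result.name)
        # IO path (alternative): supply an already-serialized JSON payload.
        # with open("data_flow.json", "rb") as payload:
        #     await client.data_flows.create_or_update(
        #         "<resource-group>", "<factory-name>", "<data-flow-name>", payload
        #     )


asyncio.run(upsert_data_flow())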
+ :param data_flow_name: The data flow name. Required. :type data_flow_name: str :param if_none_match: ETag of the data flow entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. Default value is None. :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataFlowResource, or the result of cls(response) + :return: DataFlowResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.DataFlowResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.DataFlowResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DataFlowResource] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, data_flow_name=data_flow_name, - api_version=api_version, + subscription_id=self._config.subscription_id, if_none_match=if_none_match, - template_url=self.get.metadata['url'], + api_version=api_version, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -176,66 +260,57 @@ async def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('DataFlowResource', pipeline_response) + deserialized = self._deserialize("DataFlowResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}"} # type: ignore @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - data_flow_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, data_flow_name: str, **kwargs: Any ) -> None: """Deletes a data flow. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. 
+ :param factory_name: The factory name. Required. :type factory_name: str - :param data_flow_name: The data flow name. + :param data_flow_name: The data flow name. Required. :type data_flow_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, data_flow_name=data_flow_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -243,10 +318,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -256,48 +330,42 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}"} # type: ignore - + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}"} # type: ignore @distributed_trace def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any - ) -> AsyncIterable[_models.DataFlowListResponse]: + self, resource_group_name: str, factory_name: str, **kwargs: Any + ) -> AsyncIterable["_models.DataFlowResource"]: """Lists data flows. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DataFlowListResponse or the result of - cls(response) + :return: An iterator like instance of either DataFlowResource or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.DataFlowListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.DataFlowResource] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.DataFlowListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DataFlowListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_factory.metadata['url'], + template_url=self.list_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -305,16 +373,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -330,10 +393,8 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -343,8 +404,6 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows"} # type: ignore + list_by_factory.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_datasets_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_datasets_operations.py index 3aec4a349a8..b5e2d3a7ba3 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_datasets_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_datasets_operations.py @@ -6,10 +6,17 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest @@ -20,10 +27,17 @@ from ... import models as _models from ..._vendor import _convert_request -from ...operations._datasets_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_by_factory_request -T = TypeVar('T') +from ...operations._datasets_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_by_factory_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + class DatasetsOperations: """ .. warning:: @@ -43,45 +57,39 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any - ) -> AsyncIterable[_models.DatasetListResponse]: + self, resource_group_name: str, factory_name: str, **kwargs: Any + ) -> AsyncIterable["_models.DatasetResource"]: """Lists datasets. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DatasetListResponse or the result of cls(response) - :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.DatasetListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either DatasetResource or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.DatasetResource] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.DatasetListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DatasetListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_factory.metadata['url'], + template_url=self.list_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -89,16 +97,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -114,10 +117,8 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -127,13 +128,11 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets"} # type: ignore + list_by_factory.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets"} # type: ignore - @distributed_trace_async + @overload async def create_or_update( self, resource_group_name: str, @@ -141,50 +140,126 @@ async def create_or_update( dataset_name: str, dataset: _models.DatasetResource, if_match: Optional[str] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.DatasetResource: """Creates or updates a dataset. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param dataset_name: The dataset name. + :param dataset_name: The dataset name. Required. :type dataset_name: str - :param dataset: Dataset resource definition. + :param dataset: Dataset resource definition. Required. :type dataset: ~azure.mgmt.datafactory.models.DatasetResource :param if_match: ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. Default value is None. :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DatasetResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.DatasetResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + dataset_name: str, + dataset: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DatasetResource: + """Creates or updates a dataset. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param dataset_name: The dataset name. Required. + :type dataset_name: str + :param dataset: Dataset resource definition. Required. + :type dataset: IO + :param if_match: ETag of the dataset entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. Default value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DatasetResource, or the result of cls(response) + :return: DatasetResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.DatasetResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + dataset_name: str, + dataset: Union[_models.DatasetResource, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.DatasetResource: + """Creates or updates a dataset. 
+ + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param dataset_name: The dataset name. Required. + :type dataset_name: str + :param dataset: Dataset resource definition. Is either a model type or a IO type. Required. + :type dataset: ~azure.mgmt.datafactory.models.DatasetResource or IO + :param if_match: ETag of the dataset entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. Default value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DatasetResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.DatasetResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.DatasetResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DatasetResource] - _json = self._serialize.body(dataset, 'DatasetResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(dataset, (IO, bytes)): + _content = dataset + else: + _json = self._serialize.body(dataset, "DatasetResource") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, dataset_name=dataset_name, + subscription_id=self._config.subscription_id, + if_match=if_match, api_version=api_version, content_type=content_type, json=_json, - if_match=if_match, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ -192,25 +267,23 @@ async def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('DatasetResource', pipeline_response) + deserialized = self._deserialize("DatasetResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}"} # type: ignore - + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}"} # type: ignore @distributed_trace_async async def get( @@ -223,41 +296,38 @@ async def get( ) -> Optional[_models.DatasetResource]: """Gets a dataset. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param dataset_name: The dataset name. + :param dataset_name: The dataset name. Required. :type dataset_name: str :param if_none_match: ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. Default value is None. :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DatasetResource, or the result of cls(response) + :return: DatasetResource or None or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.DatasetResource or None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.DatasetResource]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.DatasetResource]] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, dataset_name=dataset_name, - api_version=api_version, + subscription_id=self._config.subscription_id, if_none_match=if_none_match, - template_url=self.get.metadata['url'], + api_version=api_version, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -265,10 +335,9 @@ async def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 304]: @@ -277,56 +346,48 @@ async def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('DatasetResource', pipeline_response) + deserialized = self._deserialize("DatasetResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}"} # type: ignore @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - dataset_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, dataset_name: str, **kwargs: Any ) -> None: """Deletes a dataset. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param dataset_name: The dataset name. + :param dataset_name: The dataset name. Required. :type dataset_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, dataset_name=dataset_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -334,10 +395,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -347,5 +407,4 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}"} # type: ignore - + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_exposure_control_operations.py 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_exposure_control_operations.py index 9a5fb12bd30..b3e087fda45 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_exposure_control_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_exposure_control_operations.py @@ -6,9 +6,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Optional, TypeVar - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest @@ -18,10 +24,16 @@ from ... import models as _models from ..._vendor import _convert_request -from ...operations._exposure_control_operations import build_get_feature_value_by_factory_request, build_get_feature_value_request, build_query_feature_values_by_factory_request -T = TypeVar('T') +from ...operations._exposure_control_operations import ( + build_get_feature_value_by_factory_request, + build_get_feature_value_request, + build_query_feature_values_by_factory_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + class ExposureControlOperations: """ .. warning:: @@ -41,46 +53,94 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async + @overload async def get_feature_value( self, location_id: str, exposure_control_request: _models.ExposureControlRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.ExposureControlResponse: """Get exposure control feature for specific location. - :param location_id: The location identifier. + :param location_id: The location identifier. Required. :type location_id: str - :param exposure_control_request: The exposure control request. + :param exposure_control_request: The exposure control request. Required. :type exposure_control_request: ~azure.mgmt.datafactory.models.ExposureControlRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ExposureControlResponse, or the result of cls(response) + :return: ExposureControlResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + async def get_feature_value( + self, location_id: str, exposure_control_request: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.ExposureControlResponse: + """Get exposure control feature for specific location. + + :param location_id: The location identifier. Required. + :type location_id: str + :param exposure_control_request: The exposure control request. Required. + :type exposure_control_request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExposureControlResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def get_feature_value( + self, location_id: str, exposure_control_request: Union[_models.ExposureControlRequest, IO], **kwargs: Any + ) -> _models.ExposureControlResponse: + """Get exposure control feature for specific location. + + :param location_id: The location identifier. Required. + :type location_id: str + :param exposure_control_request: The exposure control request. Is either a model type or a IO + type. Required. + :type exposure_control_request: ~azure.mgmt.datafactory.models.ExposureControlRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
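A sketch of calling get_feature_value with the model overload above, under the same assumptions; the feature name and location are placeholders.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.datafactory.aio import DataFactoryManagementClient
from azure.mgmt.datafactory.models import ExposureControlRequest


async def check_feature() -> None:
    async with DefaultAzureCredential() as credential, DataFactoryManagementClient(
        credential, "<subscription-id>"
    ) as client:
        request = ExposureControlRequest(
            feature_name="<feature-name>", feature_type="Feature"
        )
        response = await client.exposure_control.get_feature_value("<location-id>", request)
        print(response.feature_name, response.value)


asyncio.run(check_feature())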
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExposureControlResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.ExposureControlResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ExposureControlResponse] - _json = self._serialize.body(exposure_control_request, 'ExposureControlRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(exposure_control_request, (IO, bytes)): + _content = exposure_control_request + else: + _json = self._serialize.body(exposure_control_request, "ExposureControlRequest") request = build_get_feature_value_request( - subscription_id=self._config.subscription_id, location_id=location_id, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.get_feature_value.metadata['url'], + content=_content, + template_url=self.get_feature_value.metadata["url"], headers=_headers, params=_params, ) @@ -88,69 +148,130 @@ async def get_feature_value( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('ExposureControlResponse', pipeline_response) + deserialized = self._deserialize("ExposureControlResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_feature_value.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/getFeatureValue"} # type: ignore - + get_feature_value.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/getFeatureValue"} # type: ignore - @distributed_trace_async + @overload async def get_feature_value_by_factory( self, resource_group_name: str, factory_name: str, exposure_control_request: _models.ExposureControlRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.ExposureControlResponse: """Get exposure control feature for specific factory. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. 
+ :param factory_name: The factory name. Required. :type factory_name: str - :param exposure_control_request: The exposure control request. + :param exposure_control_request: The exposure control request. Required. :type exposure_control_request: ~azure.mgmt.datafactory.models.ExposureControlRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExposureControlResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def get_feature_value_by_factory( + self, + resource_group_name: str, + factory_name: str, + exposure_control_request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ExposureControlResponse: + """Get exposure control feature for specific factory. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param exposure_control_request: The exposure control request. Required. + :type exposure_control_request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExposureControlResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def get_feature_value_by_factory( + self, + resource_group_name: str, + factory_name: str, + exposure_control_request: Union[_models.ExposureControlRequest, IO], + **kwargs: Any + ) -> _models.ExposureControlResponse: + """Get exposure control feature for specific factory. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param exposure_control_request: The exposure control request. Is either a model type or a IO + type. Required. + :type exposure_control_request: ~azure.mgmt.datafactory.models.ExposureControlRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ExposureControlResponse, or the result of cls(response) + :return: ExposureControlResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.ExposureControlResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ExposureControlResponse] - _json = self._serialize.body(exposure_control_request, 'ExposureControlRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(exposure_control_request, (IO, bytes)): + _content = exposure_control_request + else: + _json = self._serialize.body(exposure_control_request, "ExposureControlRequest") request = build_get_feature_value_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.get_feature_value_by_factory.metadata['url'], + content=_content, + template_url=self.get_feature_value_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -158,70 +279,134 @@ async def get_feature_value_by_factory( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('ExposureControlResponse', pipeline_response) + deserialized = self._deserialize("ExposureControlResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_feature_value_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue"} # type: ignore - + get_feature_value_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue"} # type: ignore - @distributed_trace_async + @overload async def 
query_feature_values_by_factory( self, resource_group_name: str, factory_name: str, exposure_control_batch_request: _models.ExposureControlBatchRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.ExposureControlBatchResponse: """Get list of exposure control features for specific factory. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str :param exposure_control_batch_request: The exposure control request for list of features. + Required. :type exposure_control_batch_request: ~azure.mgmt.datafactory.models.ExposureControlBatchRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ExposureControlBatchResponse, or the result of cls(response) + :return: ExposureControlBatchResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.ExposureControlBatchResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + async def query_feature_values_by_factory( + self, + resource_group_name: str, + factory_name: str, + exposure_control_batch_request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ExposureControlBatchResponse: + """Get list of exposure control features for specific factory. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param exposure_control_batch_request: The exposure control request for list of features. + Required. + :type exposure_control_batch_request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExposureControlBatchResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ExposureControlBatchResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def query_feature_values_by_factory( + self, + resource_group_name: str, + factory_name: str, + exposure_control_batch_request: Union[_models.ExposureControlBatchRequest, IO], + **kwargs: Any + ) -> _models.ExposureControlBatchResponse: + """Get list of exposure control features for specific factory. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param exposure_control_batch_request: The exposure control request for list of features. Is + either a model type or a IO type. Required. + :type exposure_control_batch_request: + ~azure.mgmt.datafactory.models.ExposureControlBatchRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
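A sketch of the batch variant, query_feature_values_by_factory, under the same assumptions; feature names are placeholders.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.datafactory.aio import DataFactoryManagementClient
from azure.mgmt.datafactory.models import ExposureControlBatchRequest, ExposureControlRequest


async def query_features() -> None:
    async with DefaultAzureCredential() as credential, DataFactoryManagementClient(
        credential, "<subscription-id>"
    ) as client:
        batch = ExposureControlBatchRequest(
            exposure_control_requests=[
                ExposureControlRequest(feature_name="<feature-a>", feature_type="Feature"),
                ExposureControlRequest(feature_name="<feature-b>", feature_type="Feature"),
            ]
        )
        response = await client.exposure_control.query_feature_values_by_factory(
            "<resource-group>", "<factory-name>", batch
        )
        for item in response.exposure_control_responses:
            print(item.feature_name, item.value)


asyncio.run(query_features())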
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExposureControlBatchResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ExposureControlBatchResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.ExposureControlBatchResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ExposureControlBatchResponse] - _json = self._serialize.body(exposure_control_batch_request, 'ExposureControlBatchRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(exposure_control_batch_request, (IO, bytes)): + _content = exposure_control_batch_request + else: + _json = self._serialize.body(exposure_control_batch_request, "ExposureControlBatchRequest") request = build_query_feature_values_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.query_feature_values_by_factory.metadata['url'], + content=_content, + template_url=self.query_feature_values_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -229,22 +414,20 @@ async def query_feature_values_by_factory( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('ExposureControlBatchResponse', pipeline_response) + deserialized = self._deserialize("ExposureControlBatchResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - query_feature_values_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue"} # type: ignore - + query_feature_values_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_factories_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_factories_operations.py index 01e91aa1c62..e575fae2d7b 100644 --- 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_factories_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_factories_operations.py @@ -6,10 +6,17 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest @@ -20,10 +27,22 @@ from ... import models as _models from ..._vendor import _convert_request -from ...operations._factories_operations import build_configure_factory_repo_request, build_create_or_update_request, build_delete_request, build_get_data_plane_access_request, build_get_git_hub_access_token_request, build_get_request, build_list_by_resource_group_request, build_list_request, build_update_request -T = TypeVar('T') +from ...operations._factories_operations import ( + build_configure_factory_repo_request, + build_create_or_update_request, + build_delete_request, + build_get_data_plane_access_request, + build_get_git_hub_access_token_request, + build_get_request, + build_list_by_resource_group_request, + build_list_request, + build_update_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + class FactoriesOperations: """ .. warning:: @@ -43,37 +62,31 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace - def list( - self, - **kwargs: Any - ) -> AsyncIterable[_models.FactoryListResponse]: + def list(self, **kwargs: Any) -> AsyncIterable["_models.Factory"]: """Lists factories under the specified subscription. 
:keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either FactoryListResponse or the result of cls(response) - :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.FactoryListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Factory or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.Factory] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.FactoryListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.FactoryListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_request( subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata['url'], + template_url=self.list.metadata["url"], headers=_headers, params=_params, ) @@ -81,14 +94,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -104,10 +114,8 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -117,51 +125,98 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories"} # type: ignore + list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories"} # type: ignore - @distributed_trace_async + @overload async def configure_factory_repo( self, location_id: str, factory_repo_update: _models.FactoryRepoUpdate, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.Factory: """Updates a factory's repo 
information. - :param location_id: The location identifier. + :param location_id: The location identifier. Required. :type location_id: str - :param factory_repo_update: Update factory repo request definition. + :param factory_repo_update: Update factory repo request definition. Required. :type factory_repo_update: ~azure.mgmt.datafactory.models.FactoryRepoUpdate + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) + :return: Factory or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + async def configure_factory_repo( + self, location_id: str, factory_repo_update: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.Factory: + """Updates a factory's repo information. + + :param location_id: The location identifier. Required. + :type location_id: str + :param factory_repo_update: Update factory repo request definition. Required. + :type factory_repo_update: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Factory or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.Factory + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def configure_factory_repo( + self, location_id: str, factory_repo_update: Union[_models.FactoryRepoUpdate, IO], **kwargs: Any + ) -> _models.Factory: + """Updates a factory's repo information. + + :param location_id: The location identifier. Required. + :type location_id: str + :param factory_repo_update: Update factory repo request definition. Is either a model type or a + IO type. Required. + :type factory_repo_update: ~azure.mgmt.datafactory.models.FactoryRepoUpdate or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Factory or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.Factory + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.Factory] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Factory] - _json = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(factory_repo_update, (IO, bytes)): + _content = factory_repo_update + else: + _json = self._serialize.body(factory_repo_update, "FactoryRepoUpdate") request = build_configure_factory_repo_request( - subscription_id=self._config.subscription_id, location_id=location_id, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.configure_factory_repo.metadata['url'], + content=_content, + template_url=self.configure_factory_repo.metadata["url"], headers=_headers, params=_params, ) @@ -169,60 +224,52 @@ async def configure_factory_repo( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('Factory', pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - configure_factory_repo.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo"} # type: ignore - + configure_factory_repo.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo"} # type: ignore @distributed_trace - def list_by_resource_group( - self, - resource_group_name: str, - **kwargs: Any - ) -> AsyncIterable[_models.FactoryListResponse]: + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncIterable["_models.Factory"]: """Lists factories. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. 
:type resource_group_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either FactoryListResponse or the result of cls(response) - :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.FactoryListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Factory or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.Factory] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.FactoryListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.FactoryListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_resource_group_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_resource_group.metadata['url'], + template_url=self.list_by_resource_group.metadata["url"], headers=_headers, params=_params, ) @@ -230,15 +277,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_resource_group_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -254,10 +297,8 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -267,60 +308,128 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list_by_resource_group.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories"} # type: ignore + list_by_resource_group.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories"} # type: ignore - @distributed_trace_async + @overload async def create_or_update( self, resource_group_name: str, factory_name: str, factory: _models.Factory, if_match: Optional[str] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.Factory: """Creates or updates a factory. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param factory: Factory resource definition. + :param factory: Factory resource definition. Required. :type factory: ~azure.mgmt.datafactory.models.Factory :param if_match: ETag of the factory entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. Default value is None. :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) + :return: Factory or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + factory: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Factory: + """Creates or updates a factory. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param factory: Factory resource definition. Required. + :type factory: IO + :param if_match: ETag of the factory entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. Default value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Factory or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.Factory + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + factory: Union[_models.Factory, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.Factory: + """Creates or updates a factory. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param factory: Factory resource definition. Is either a model type or a IO type. Required. + :type factory: ~azure.mgmt.datafactory.models.Factory or IO + :param if_match: ETag of the factory entity. 
Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. Default value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Factory or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.Factory + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.Factory] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Factory] - _json = self._serialize.body(factory, 'Factory') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(factory, (IO, bytes)): + _content = factory + else: + _json = self._serialize.body(factory, "Factory") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, + if_match=if_match, api_version=api_version, content_type=content_type, json=_json, - if_match=if_match, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ -328,69 +437,130 @@ async def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('Factory', pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}"} # type: ignore - + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}"} # type: ignore - @distributed_trace_async + @overload async def update( self, resource_group_name: str, factory_name: str, factory_update_parameters: _models.FactoryUpdateParameters, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.Factory: """Updates a factory. 
- :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param factory_update_parameters: The parameters for updating a factory. + :param factory_update_parameters: The parameters for updating a factory. Required. :type factory_update_parameters: ~azure.mgmt.datafactory.models.FactoryUpdateParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) + :return: Factory or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + async def update( + self, + resource_group_name: str, + factory_name: str, + factory_update_parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Factory: + """Updates a factory. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param factory_update_parameters: The parameters for updating a factory. Required. + :type factory_update_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Factory or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.Factory + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def update( + self, + resource_group_name: str, + factory_name: str, + factory_update_parameters: Union[_models.FactoryUpdateParameters, IO], + **kwargs: Any + ) -> _models.Factory: + """Updates a factory. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param factory_update_parameters: The parameters for updating a factory. Is either a model type + or a IO type. Required. + :type factory_update_parameters: ~azure.mgmt.datafactory.models.FactoryUpdateParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Factory or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.Factory + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.Factory] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Factory] - _json = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(factory_update_parameters, (IO, bytes)): + _content = factory_update_parameters + else: + _json = self._serialize.body(factory_update_parameters, "FactoryUpdateParameters") request = build_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.update.metadata['url'], + content=_content, + template_url=self.update.metadata["url"], headers=_headers, params=_params, ) @@ -398,68 +568,59 @@ async def update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('Factory', pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}"} # type: ignore - + update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}"} # type: ignore @distributed_trace_async async def get( - self, - resource_group_name: str, - factory_name: str, - if_none_match: Optional[str] = None, - **kwargs: Any + self, resource_group_name: str, factory_name: str, if_none_match: Optional[str] = None, **kwargs: Any ) -> Optional[_models.Factory]: """Gets a factory. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
:type factory_name: str :param if_none_match: ETag of the factory entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. Default value is None. :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) + :return: Factory or None or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.Factory or None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.Factory]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.Factory]] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, - api_version=api_version, + subscription_id=self._config.subscription_id, if_none_match=if_none_match, - template_url=self.get.metadata['url'], + api_version=api_version, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -467,10 +628,9 @@ async def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 304]: @@ -479,52 +639,45 @@ async def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('Factory', pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}"} # type: ignore @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, **kwargs: Any ) -> None: """Deletes a factory. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -532,10 +685,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -545,52 +697,115 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}"} # type: ignore + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}"} # type: ignore - - @distributed_trace_async + @overload async def get_git_hub_access_token( self, resource_group_name: str, factory_name: str, git_hub_access_token_request: _models.GitHubAccessTokenRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.GitHubAccessTokenResponse: """Get GitHub Access Token. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param git_hub_access_token_request: Get GitHub access token request definition. + :param git_hub_access_token_request: Get GitHub access token request definition. Required. :type git_hub_access_token_request: ~azure.mgmt.datafactory.models.GitHubAccessTokenRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GitHubAccessTokenResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.GitHubAccessTokenResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def get_git_hub_access_token( + self, + resource_group_name: str, + factory_name: str, + git_hub_access_token_request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.GitHubAccessTokenResponse: + """Get GitHub Access Token. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param git_hub_access_token_request: Get GitHub access token request definition. Required. + :type git_hub_access_token_request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: GitHubAccessTokenResponse, or the result of cls(response) + :return: GitHubAccessTokenResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.GitHubAccessTokenResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @distributed_trace_async + async def get_git_hub_access_token( + self, + resource_group_name: str, + factory_name: str, + git_hub_access_token_request: Union[_models.GitHubAccessTokenRequest, IO], + **kwargs: Any + ) -> _models.GitHubAccessTokenResponse: + """Get GitHub Access Token. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param git_hub_access_token_request: Get GitHub access token request definition. Is either a + model type or a IO type. Required. + :type git_hub_access_token_request: ~azure.mgmt.datafactory.models.GitHubAccessTokenRequest or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GitHubAccessTokenResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.GitHubAccessTokenResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.GitHubAccessTokenResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.GitHubAccessTokenResponse] - _json = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(git_hub_access_token_request, (IO, bytes)): + _content = git_hub_access_token_request + else: + _json = self._serialize.body(git_hub_access_token_request, "GitHubAccessTokenRequest") request = build_get_git_hub_access_token_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.get_git_hub_access_token.metadata['url'], + content=_content, + template_url=self.get_git_hub_access_token.metadata["url"], headers=_headers, params=_params, ) @@ -598,69 +813,126 @@ async def get_git_hub_access_token( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('GitHubAccessTokenResponse', pipeline_response) + deserialized = self._deserialize("GitHubAccessTokenResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_git_hub_access_token.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken"} # type: ignore - + get_git_hub_access_token.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken"} # type: ignore - @distributed_trace_async + @overload async def get_data_plane_access( self, resource_group_name: str, factory_name: str, policy: _models.UserAccessPolicy, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.AccessPolicyResponse: """Get Data Plane access. - :param resource_group_name: The resource group name. 
+ :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param policy: Data Plane user access policy definition. + :param policy: Data Plane user access policy definition. Required. :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AccessPolicyResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.AccessPolicyResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def get_data_plane_access( + self, + resource_group_name: str, + factory_name: str, + policy: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.AccessPolicyResponse: + """Get Data Plane access. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param policy: Data Plane user access policy definition. Required. + :type policy: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: AccessPolicyResponse, or the result of cls(response) + :return: AccessPolicyResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.AccessPolicyResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @distributed_trace_async + async def get_data_plane_access( + self, resource_group_name: str, factory_name: str, policy: Union[_models.UserAccessPolicy, IO], **kwargs: Any + ) -> _models.AccessPolicyResponse: + """Get Data Plane access. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param policy: Data Plane user access policy definition. Is either a model type or a IO type. + Required. + :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AccessPolicyResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.AccessPolicyResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.AccessPolicyResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AccessPolicyResponse] - _json = self._serialize.body(policy, 'UserAccessPolicy') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(policy, (IO, bytes)): + _content = policy + else: + _json = self._serialize.body(policy, "UserAccessPolicy") request = build_get_data_plane_access_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.get_data_plane_access.metadata['url'], + content=_content, + template_url=self.get_data_plane_access.metadata["url"], headers=_headers, params=_params, ) @@ -668,22 +940,20 @@ async def get_data_plane_access( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('AccessPolicyResponse', pipeline_response) + deserialized = self._deserialize("AccessPolicyResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_data_plane_access.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess"} # type: ignore - + get_data_plane_access.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_global_parameters_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_global_parameters_operations.py index fdd15f5d3aa..e7747e84793 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_global_parameters_operations.py +++ 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_global_parameters_operations.py @@ -6,10 +6,17 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest @@ -20,10 +27,17 @@ from ... import models as _models from ..._vendor import _convert_request -from ...operations._global_parameters_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_by_factory_request -T = TypeVar('T') +from ...operations._global_parameters_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_by_factory_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + class GlobalParametersOperations: """ .. warning:: @@ -43,46 +57,41 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any - ) -> AsyncIterable[_models.GlobalParameterListResponse]: + self, resource_group_name: str, factory_name: str, **kwargs: Any + ) -> AsyncIterable["_models.GlobalParameterResource"]: """Lists Global parameters. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either GlobalParameterListResponse or the result of + :return: An iterator like instance of either GlobalParameterResource or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.GlobalParameterListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.GlobalParameterResource] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.GlobalParameterListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.GlobalParameterListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_factory.metadata['url'], + template_url=self.list_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -90,16 +99,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -115,10 +119,8 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -128,52 +130,43 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters"} # type: ignore + 
list_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters"} # type: ignore @distributed_trace_async async def get( - self, - resource_group_name: str, - factory_name: str, - global_parameter_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, global_parameter_name: str, **kwargs: Any ) -> _models.GlobalParameterResource: """Gets a Global parameter. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param global_parameter_name: The global parameter name. + :param global_parameter_name: The global parameter name. Required. :type global_parameter_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: GlobalParameterResource, or the result of cls(response) + :return: GlobalParameterResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.GlobalParameterResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.GlobalParameterResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.GlobalParameterResource] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, global_parameter_name=global_parameter_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -181,73 +174,140 @@ async def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('GlobalParameterResource', pipeline_response) + deserialized = self._deserialize("GlobalParameterResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}"} # type: ignore + get.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}"} # type: ignore - - @distributed_trace_async + @overload async def create_or_update( self, resource_group_name: str, factory_name: str, global_parameter_name: str, default: _models.GlobalParameterResource, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.GlobalParameterResource: """Creates or updates a Global parameter. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param global_parameter_name: The global parameter name. + :param global_parameter_name: The global parameter name. Required. :type global_parameter_name: str - :param default: Global parameter resource definition. + :param default: Global parameter resource definition. Required. :type default: ~azure.mgmt.datafactory.models.GlobalParameterResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: GlobalParameterResource, or the result of cls(response) + :return: GlobalParameterResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.GlobalParameterResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + global_parameter_name: str, + default: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.GlobalParameterResource: + """Creates or updates a Global parameter. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param global_parameter_name: The global parameter name. Required. + :type global_parameter_name: str + :param default: Global parameter resource definition. Required. + :type default: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GlobalParameterResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.GlobalParameterResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + global_parameter_name: str, + default: Union[_models.GlobalParameterResource, IO], + **kwargs: Any + ) -> _models.GlobalParameterResource: + """Creates or updates a Global parameter. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param global_parameter_name: The global parameter name. Required. 
+ :type global_parameter_name: str + :param default: Global parameter resource definition. Is either a model type or a IO type. + Required. + :type default: ~azure.mgmt.datafactory.models.GlobalParameterResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GlobalParameterResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.GlobalParameterResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.GlobalParameterResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.GlobalParameterResource] - _json = self._serialize.body(default, 'GlobalParameterResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(default, (IO, bytes)): + _content = default + else: + _json = self._serialize.body(default, "GlobalParameterResource") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, global_parameter_name=global_parameter_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ -255,66 +315,57 @@ async def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('GlobalParameterResource', pipeline_response) + deserialized = self._deserialize("GlobalParameterResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}"} # type: ignore - + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}"} # type: ignore @distributed_trace_async async def delete( # pylint: 
disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - global_parameter_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, global_parameter_name: str, **kwargs: Any ) -> None: """Deletes a Global parameter. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param global_parameter_name: The global parameter name. + :param global_parameter_name: The global parameter name. Required. :type global_parameter_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, global_parameter_name=global_parameter_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -322,10 +373,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -335,5 +385,4 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}"} # type: ignore - + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_nodes_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_nodes_operations.py index 7cd263c0073..5c97dff951a 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_nodes_operations.py +++ 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_nodes_operations.py @@ -6,9 +6,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Optional, TypeVar - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest @@ -18,10 +24,17 @@ from ... import models as _models from ..._vendor import _convert_request -from ...operations._integration_runtime_nodes_operations import build_delete_request, build_get_ip_address_request, build_get_request, build_update_request -T = TypeVar('T') +from ...operations._integration_runtime_nodes_operations import ( + build_delete_request, + build_get_ip_address_request, + build_get_request, + build_update_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + class IntegrationRuntimeNodesOperations: """ .. warning:: @@ -41,51 +54,42 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace_async async def get( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - node_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, node_name: str, **kwargs: Any ) -> _models.SelfHostedIntegrationRuntimeNode: """Gets a self-hosted integration runtime node. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str - :param node_name: The integration runtime node name. + :param node_name: The integration runtime node name. Required. 
:type node_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) + :return: SelfHostedIntegrationRuntimeNode or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.SelfHostedIntegrationRuntimeNode] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.SelfHostedIntegrationRuntimeNode] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, node_name=node_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -93,70 +97,60 @@ async def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response) + deserialized = self._deserialize("SelfHostedIntegrationRuntimeNode", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}"} # type: ignore @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - node_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, node_name: str, **kwargs: Any ) -> None: """Deletes a self-hosted integration runtime node. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
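Editorial aside, not part of the generated patch: every regenerated method above builds the same error_map, so 401/404/409 responses surface as typed azure.core exceptions rather than a bare HttpResponseError. A hedged sketch of how a caller can lean on that; it assumes an already constructed async DataFactoryManagementClient and placeholder resource names:

    from azure.core.exceptions import ResourceNotFoundError
    from azure.mgmt.datafactory.aio import DataFactoryManagementClient


    async def get_node_or_none(client: DataFactoryManagementClient, rg: str, factory: str, ir: str, node: str):
        # The generated get() maps 404 responses to ResourceNotFoundError via
        # error_map, so a missing node can be handled without inspecting
        # raw status codes.
        try:
            return await client.integration_runtime_nodes.get(rg, factory, ir, node)
        except ResourceNotFoundError:
            return None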
:type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str - :param node_name: The integration runtime node name. + :param node_name: The integration runtime node name. Required. :type node_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, node_name=node_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -164,10 +158,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -177,10 +170,9 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}"} # type: ignore - + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}"} # type: ignore - @distributed_trace_async + @overload async def update( self, resource_group_name: str, @@ -188,51 +180,128 @@ async def update( integration_runtime_name: str, node_name: str, update_integration_runtime_node_request: _models.UpdateIntegrationRuntimeNodeRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.SelfHostedIntegrationRuntimeNode: """Updates a self-hosted integration runtime node. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
:type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str - :param node_name: The integration runtime node name. + :param node_name: The integration runtime node name. Required. :type node_name: str :param update_integration_runtime_node_request: The parameters for updating an integration - runtime node. + runtime node. Required. :type update_integration_runtime_node_request: ~azure.mgmt.datafactory.models.UpdateIntegrationRuntimeNodeRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SelfHostedIntegrationRuntimeNode or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + node_name: str, + update_integration_runtime_node_request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.SelfHostedIntegrationRuntimeNode: + """Updates a self-hosted integration runtime node. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param node_name: The integration runtime node name. Required. + :type node_name: str + :param update_integration_runtime_node_request: The parameters for updating an integration + runtime node. Required. + :type update_integration_runtime_node_request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) + :return: SelfHostedIntegrationRuntimeNode or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @distributed_trace_async + async def update( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + node_name: str, + update_integration_runtime_node_request: Union[_models.UpdateIntegrationRuntimeNodeRequest, IO], + **kwargs: Any + ) -> _models.SelfHostedIntegrationRuntimeNode: + """Updates a self-hosted integration runtime node. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param node_name: The integration runtime node name. Required. 
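Editorial aside, not part of the generated patch: the overload pair above means callers can pass either a typed UpdateIntegrationRuntimeNodeRequest model (serialized to JSON by the SDK) or a raw JSON body (sent as-is with content_type "application/json"). A hedged usage sketch against the public azure.mgmt.datafactory async client; the subscription, resource group, factory, runtime and node names are placeholders, and the raw-body form assumes a package release generated with these overloads:

    import asyncio
    import json

    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.datafactory.aio import DataFactoryManagementClient
    from azure.mgmt.datafactory.models import UpdateIntegrationRuntimeNodeRequest


    async def update_node() -> None:
        async with DefaultAzureCredential() as credential:
            async with DataFactoryManagementClient(credential, "<subscription-id>") as client:
                # Typed-model form: the SDK serializes the model body.
                node = await client.integration_runtime_nodes.update(
                    "my-rg", "my-factory", "my-shir", "my-node",
                    UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=4),
                )
                # Raw-body form: bytes are passed through unmodified as the request content.
                node = await client.integration_runtime_nodes.update(
                    "my-rg", "my-factory", "my-shir", "my-node",
                    json.dumps({"concurrentJobsLimit": 4}).encode("utf-8"),
                )
                print(node.node_name, node.status)


    asyncio.run(update_node())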
+ :type node_name: str + :param update_integration_runtime_node_request: The parameters for updating an integration + runtime node. Is either a model type or a IO type. Required. + :type update_integration_runtime_node_request: + ~azure.mgmt.datafactory.models.UpdateIntegrationRuntimeNodeRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SelfHostedIntegrationRuntimeNode or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.SelfHostedIntegrationRuntimeNode] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.SelfHostedIntegrationRuntimeNode] - _json = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(update_integration_runtime_node_request, (IO, bytes)): + _content = update_integration_runtime_node_request + else: + _json = self._serialize.body(update_integration_runtime_node_request, "UpdateIntegrationRuntimeNodeRequest") request = build_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, node_name=node_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.update.metadata['url'], + content=_content, + template_url=self.update.metadata["url"], headers=_headers, params=_params, ) @@ -240,70 +309,60 @@ async def update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response) + deserialized = self._deserialize("SelfHostedIntegrationRuntimeNode", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}"} # type: 
ignore - + update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}"} # type: ignore @distributed_trace_async async def get_ip_address( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - node_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, node_name: str, **kwargs: Any ) -> _models.IntegrationRuntimeNodeIpAddress: """Get the IP address of self-hosted integration runtime node. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str - :param node_name: The integration runtime node name. + :param node_name: The integration runtime node name. Required. :type node_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeNodeIpAddress, or the result of cls(response) + :return: IntegrationRuntimeNodeIpAddress or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeNodeIpAddress - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeNodeIpAddress] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeNodeIpAddress] - request = build_get_ip_address_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, node_name=node_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_ip_address.metadata['url'], + template_url=self.get_ip_address.metadata["url"], headers=_headers, params=_params, ) @@ -311,22 +370,20 @@ async def get_ip_address( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('IntegrationRuntimeNodeIpAddress', pipeline_response) + deserialized = 
self._deserialize("IntegrationRuntimeNodeIpAddress", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_ip_address.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress"} # type: ignore - + get_ip_address.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py index 5fe5c7a530b..1c424381eb7 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py @@ -6,9 +6,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Optional, TypeVar, Union, cast - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -20,10 +26,12 @@ from ... import models as _models from ..._vendor import _convert_request -from ...operations._integration_runtime_object_metadata_operations import build_get_request, build_refresh_request_initial -T = TypeVar('T') +from ...operations._integration_runtime_object_metadata_operations import build_get_request, build_refresh_request + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + class IntegrationRuntimeObjectMetadataOperations: """ .. 
warning:: @@ -43,33 +51,25 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - async def _refresh_initial( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> Optional[_models.SsisObjectMetadataStatusResponse]: - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.SsisObjectMetadataStatusResponse]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.SsisObjectMetadataStatusResponse]] - - request = build_refresh_request_initial( - subscription_id=self._config.subscription_id, + request = build_refresh_request( resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._refresh_initial.metadata['url'], + template_url=self._refresh_initial.metadata["url"], headers=_headers, params=_params, ) @@ -77,10 +77,9 @@ async def _refresh_initial( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: @@ -89,31 +88,26 @@ async def _refresh_initial( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('SsisObjectMetadataStatusResponse', pipeline_response) + deserialized = self._deserialize("SsisObjectMetadataStatusResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _refresh_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata"} # type: ignore - + _refresh_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata"} # type: ignore @distributed_trace_async async def begin_refresh( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> AsyncLROPoller[_models.SsisObjectMetadataStatusResponse]: """Refresh a SSIS integration runtime object metadata. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. 
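Editorial aside, not part of the generated patch: begin_refresh below wraps the refreshObjectMetadata call in an AsyncLROPoller, so callers await the poller and then its result. A hedged sketch using the public azure.mgmt.datafactory async client; the subscription, resource group, factory and integration runtime names are placeholders:

    import asyncio

    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.datafactory.aio import DataFactoryManagementClient


    async def refresh_ssis_metadata() -> None:
        async with DefaultAzureCredential() as credential:
            async with DataFactoryManagementClient(credential, "<subscription-id>") as client:
                # begin_refresh starts the long-running operation; result()
                # polls until completion and returns the deserialized
                # SsisObjectMetadataStatusResponse.
                poller = await client.integration_runtime_object_metadata.begin_refresh(
                    "my-rg", "my-factory", "my-ssis-ir"
                )
                status = await poller.result()
                print(status.name, status.status)


    asyncio.run(refresh_ssis_metadata())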
:type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -127,111 +121,176 @@ async def begin_refresh( the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.SsisObjectMetadataStatusResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.SsisObjectMetadataStatusResponse] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.SsisObjectMetadataStatusResponse] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: raw_result = await self._refresh_initial( # type: ignore resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize('SsisObjectMetadataStatusResponse', pipeline_response) + deserialized = self._deserialize("SsisObjectMetadataStatusResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: - polling_method = cast(AsyncPollingMethod, AsyncARMPolling( - lro_delay, - - - **kwargs - )) # type: AsyncPollingMethod - elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: polling_method = polling + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_refresh.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata"} # type: ignore + begin_refresh.metadata = 
{"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata"} # type: ignore - @distributed_trace_async + @overload async def get( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, get_metadata_request: Optional[_models.GetSsisObjectMetadataRequest] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.SsisObjectMetadataListResponse: """Get a SSIS integration runtime object metadata by specified path. The return is pageable metadata list. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :param get_metadata_request: The parameters for getting a SSIS object metadata. Default value is None. :type get_metadata_request: ~azure.mgmt.datafactory.models.GetSsisObjectMetadataRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SsisObjectMetadataListResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.SsisObjectMetadataListResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def get( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + get_metadata_request: Optional[IO] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.SsisObjectMetadataListResponse: + """Get a SSIS integration runtime object metadata by specified path. The return is pageable + metadata list. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param get_metadata_request: The parameters for getting a SSIS object metadata. Default value + is None. + :type get_metadata_request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SsisObjectMetadataListResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.SsisObjectMetadataListResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + get_metadata_request: Optional[Union[_models.GetSsisObjectMetadataRequest, IO]] = None, + **kwargs: Any + ) -> _models.SsisObjectMetadataListResponse: + """Get a SSIS integration runtime object metadata by specified path. The return is pageable + metadata list. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. 
Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param get_metadata_request: The parameters for getting a SSIS object metadata. Is either a + model type or a IO type. Default value is None. + :type get_metadata_request: ~azure.mgmt.datafactory.models.GetSsisObjectMetadataRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: SsisObjectMetadataListResponse, or the result of cls(response) + :return: SsisObjectMetadataListResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.SsisObjectMetadataListResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.SsisObjectMetadataListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.SsisObjectMetadataListResponse] - if get_metadata_request is not None: - _json = self._serialize.body(get_metadata_request, 'GetSsisObjectMetadataRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(get_metadata_request, (IO, bytes)): + _content = get_metadata_request else: - _json = None + if get_metadata_request is not None: + _json = self._serialize.body(get_metadata_request, "GetSsisObjectMetadataRequest") + else: + _json = None request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.get.metadata['url'], + content=_content, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -239,22 +298,20 @@ async def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('SsisObjectMetadataListResponse', pipeline_response) + deserialized = 
self._deserialize("SsisObjectMetadataListResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getObjectMetadata"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getObjectMetadata"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py index a1c8763c540..571d4bd686b 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py @@ -6,10 +6,17 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -22,10 +29,30 @@ from ... 
import models as _models from ..._vendor import _convert_request -from ...operations._integration_runtimes_operations import build_create_linked_integration_runtime_request, build_create_or_update_request, build_delete_request, build_get_connection_info_request, build_get_monitoring_data_request, build_get_request, build_get_status_request, build_list_auth_keys_request, build_list_by_factory_request, build_list_outbound_network_dependencies_endpoints_request, build_regenerate_auth_key_request, build_remove_links_request, build_start_request_initial, build_stop_request_initial, build_sync_credentials_request, build_update_request, build_upgrade_request -T = TypeVar('T') +from ...operations._integration_runtimes_operations import ( + build_create_linked_integration_runtime_request, + build_create_or_update_request, + build_delete_request, + build_get_connection_info_request, + build_get_monitoring_data_request, + build_get_request, + build_get_status_request, + build_list_auth_keys_request, + build_list_by_factory_request, + build_list_outbound_network_dependencies_endpoints_request, + build_regenerate_auth_key_request, + build_remove_links_request, + build_start_request, + build_stop_request, + build_sync_credentials_request, + build_update_request, + build_upgrade_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + class IntegrationRuntimesOperations: """ .. warning:: @@ -45,46 +72,41 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any - ) -> AsyncIterable[_models.IntegrationRuntimeListResponse]: + self, resource_group_name: str, factory_name: str, **kwargs: Any + ) -> AsyncIterable["_models.IntegrationRuntimeResource"]: """Lists integration runtimes. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
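Editorial aside, not part of the generated patch: the integration-runtime list_by_factory pager in this file is retyped to yield IntegrationRuntimeResource items and reuses the same nextLink handling sketched earlier. A hedged usage sketch; the subscription, resource group and factory names are placeholders:

    import asyncio

    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.datafactory.aio import DataFactoryManagementClient


    async def list_runtimes() -> None:
        async with DefaultAzureCredential() as credential:
            async with DataFactoryManagementClient(credential, "<subscription-id>") as client:
                # Paging is transparent: the AsyncItemPaged object follows each
                # nextLink and yields individual IntegrationRuntimeResource items.
                async for runtime in client.integration_runtimes.list_by_factory("my-rg", "my-factory"):
                    print(runtime.name, runtime.properties.type)


    asyncio.run(list_runtimes())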
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either IntegrationRuntimeListResponse or the result of + :return: An iterator like instance of either IntegrationRuntimeResource or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.IntegrationRuntimeListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.IntegrationRuntimeResource] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_factory.metadata['url'], + template_url=self.list_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -92,16 +114,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -117,10 +134,8 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -130,13 +145,11 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes"} 
# type: ignore + list_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes"} # type: ignore - @distributed_trace_async + @overload async def create_or_update( self, resource_group_name: str, @@ -144,51 +157,130 @@ async def create_or_update( integration_runtime_name: str, integration_runtime: _models.IntegrationRuntimeResource, if_match: Optional[str] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.IntegrationRuntimeResource: """Creates or updates an integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str - :param integration_runtime: Integration runtime resource definition. + :param integration_runtime: Integration runtime resource definition. Required. :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource :param if_match: ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. Default value is None. :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + integration_runtime: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.IntegrationRuntimeResource: + """Creates or updates an integration runtime. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param integration_runtime: Integration runtime resource definition. Required. + :type integration_runtime: IO + :param if_match: ETag of the integration runtime entity. Should only be specified for update, + for which it should match existing entity or can be * for unconditional update. Default value + is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + integration_runtime: Union[_models.IntegrationRuntimeResource, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.IntegrationRuntimeResource: + """Creates or updates an integration runtime. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param integration_runtime: Integration runtime resource definition. Is either a model type or + a IO type. Required. + :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource or IO + :param if_match: ETag of the integration runtime entity. Should only be specified for update, + for which it should match existing entity or can be * for unconditional update. Default value + is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) + :return: IntegrationRuntimeResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeResource] - _json = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(integration_runtime, (IO, bytes)): + _content = integration_runtime + else: + _json = self._serialize.body(integration_runtime, "IntegrationRuntimeResource") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + 
subscription_id=self._config.subscription_id, + if_match=if_match, api_version=api_version, content_type=content_type, json=_json, - if_match=if_match, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ -196,25 +288,23 @@ async def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}"} # type: ignore - + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}"} # type: ignore @distributed_trace_async async def get( @@ -227,41 +317,38 @@ async def get( ) -> Optional[_models.IntegrationRuntimeResource]: """Gets an integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :param if_none_match: ETag of the integration runtime entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. Default value is None. 
:type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) + :return: IntegrationRuntimeResource or None or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource or None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.IntegrationRuntimeResource]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.IntegrationRuntimeResource]] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, - api_version=api_version, + subscription_id=self._config.subscription_id, if_none_match=if_none_match, - template_url=self.get.metadata['url'], + api_version=api_version, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -269,10 +356,9 @@ async def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 304]: @@ -281,64 +367,135 @@ async def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}"} # type: ignore + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}"} # type: ignore - - @distributed_trace_async + @overload async def update( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, update_integration_runtime_request: _models.UpdateIntegrationRuntimeRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.IntegrationRuntimeResource: """Updates an integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
+ :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :param update_integration_runtime_request: The parameters for updating an integration runtime. + Required. :type update_integration_runtime_request: ~azure.mgmt.datafactory.models.UpdateIntegrationRuntimeRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + update_integration_runtime_request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.IntegrationRuntimeResource: + """Updates an integration runtime. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param update_integration_runtime_request: The parameters for updating an integration runtime. + Required. + :type update_integration_runtime_request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def update( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + update_integration_runtime_request: Union[_models.UpdateIntegrationRuntimeRequest, IO], + **kwargs: Any + ) -> _models.IntegrationRuntimeResource: + """Updates an integration runtime. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param update_integration_runtime_request: The parameters for updating an integration runtime. + Is either a model type or a IO type. Required. + :type update_integration_runtime_request: + ~azure.mgmt.datafactory.models.UpdateIntegrationRuntimeRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) + :return: IntegrationRuntimeResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeResource] - _json = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(update_integration_runtime_request, (IO, bytes)): + _content = update_integration_runtime_request + else: + _json = self._serialize.body(update_integration_runtime_request, "UpdateIntegrationRuntimeRequest") request = build_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.update.metadata['url'], + content=_content, + template_url=self.update.metadata["url"], headers=_headers, params=_params, ) @@ -346,66 +503,57 @@ async def update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}"} # type: ignore - + update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}"} # type: ignore 
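# --- Editor's illustrative sketch (not part of the generated diff) ----------------------
# The rewritten `update`/`create_or_update` hunks above accept either a typed model or a
# raw IO body, and the api-version now comes from the client configuration instead of a
# hard-coded literal. Below is a minimal, hedged usage sketch of the model overload of
# `update`; the resource names ("my-rg", "my-factory", "my-ir") and the subscription id
# are placeholders, not values taken from this patch.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.datafactory.aio import DataFactoryManagementClient
from azure.mgmt.datafactory.models import (
    IntegrationRuntimeAutoUpdate,
    UpdateIntegrationRuntimeRequest,
)


async def set_auto_update(subscription_id: str) -> None:
    # Both the async credential and the async management client are async context managers.
    async with DefaultAzureCredential() as credential:
        async with DataFactoryManagementClient(credential, subscription_id) as client:
            updated = await client.integration_runtimes.update(
                resource_group_name="my-rg",
                factory_name="my-factory",
                integration_runtime_name="my-ir",
                # Model overload: the SDK serializes this to JSON; passing an IO/bytes
                # body instead would be sent as-is with the same default content type.
                update_integration_runtime_request=UpdateIntegrationRuntimeRequest(
                    auto_update=IntegrationRuntimeAutoUpdate.ON
                ),
            )
            print(updated.name)


if __name__ == "__main__":
    asyncio.run(set_auto_update("<subscription-id>"))
# -----------------------------------------------------------------------------------------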
@distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> None: """Deletes an integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -413,10 +561,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -426,49 +573,41 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}"} # type: ignore - + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}"} # type: ignore @distributed_trace_async async def get_status( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> _models.IntegrationRuntimeStatusResponse: """Gets detailed status information for an 
integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeStatusResponse, or the result of cls(response) + :return: IntegrationRuntimeStatusResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeStatusResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeStatusResponse] - request = build_get_status_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_status.metadata['url'], + template_url=self.get_status.metadata["url"], headers=_headers, params=_params, ) @@ -476,68 +615,61 @@ async def get_status( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_status.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus"} # type: ignore - + get_status.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus"} # type: ignore @distributed_trace_async async def list_outbound_network_dependencies_endpoints( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, 
resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> _models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse: """Gets the list of outbound network dependencies for a given Azure-SSIS integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, or the result of + :return: IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop( + "cls", None + ) # type: ClsType[_models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse] - request = build_list_outbound_network_dependencies_endpoints_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_outbound_network_dependencies_endpoints.metadata['url'], + template_url=self.list_outbound_network_dependencies_endpoints.metadata["url"], headers=_headers, params=_params, ) @@ -545,67 +677,60 @@ async def list_outbound_network_dependencies_endpoints( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse', pipeline_response) + deserialized = self._deserialize( + "IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse", pipeline_response + ) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_outbound_network_dependencies_endpoints.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/outboundNetworkDependenciesEndpoints"} # type: ignore - + list_outbound_network_dependencies_endpoints.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/outboundNetworkDependenciesEndpoints"} # type: ignore @distributed_trace_async async def get_connection_info( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> _models.IntegrationRuntimeConnectionInfo: """Gets the on-premises integration runtime connection information for encrypting the on-premises data source credentials. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeConnectionInfo, or the result of cls(response) + :return: IntegrationRuntimeConnectionInfo or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeConnectionInfo - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeConnectionInfo] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeConnectionInfo] - request = build_get_connection_info_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_connection_info.metadata['url'], + template_url=self.get_connection_info.metadata["url"], headers=_headers, params=_params, ) @@ -613,75 +738,144 @@ async def get_connection_info( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('IntegrationRuntimeConnectionInfo', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeConnectionInfo", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_connection_info.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo"} # type: ignore - + get_connection_info.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo"} # type: ignore - @distributed_trace_async + @overload async def regenerate_auth_key( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, regenerate_key_parameters: _models.IntegrationRuntimeRegenerateKeyParameters, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.IntegrationRuntimeAuthKeys: """Regenerates the authentication key for an integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :param regenerate_key_parameters: The parameters for regenerating integration runtime - authentication key. + authentication key. Required. :type regenerate_key_parameters: ~azure.mgmt.datafactory.models.IntegrationRuntimeRegenerateKeyParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeAuthKeys or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def regenerate_auth_key( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + regenerate_key_parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.IntegrationRuntimeAuthKeys: + """Regenerates the authentication key for an integration runtime. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param regenerate_key_parameters: The parameters for regenerating integration runtime + authentication key. Required. + :type regenerate_key_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeAuthKeys, or the result of cls(response) + :return: IntegrationRuntimeAuthKeys or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @distributed_trace_async + async def regenerate_auth_key( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + regenerate_key_parameters: Union[_models.IntegrationRuntimeRegenerateKeyParameters, IO], + **kwargs: Any + ) -> _models.IntegrationRuntimeAuthKeys: + """Regenerates the authentication key for an integration runtime. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param regenerate_key_parameters: The parameters for regenerating integration runtime + authentication key. Is either a model type or a IO type. Required. + :type regenerate_key_parameters: + ~azure.mgmt.datafactory.models.IntegrationRuntimeRegenerateKeyParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeAuthKeys or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeAuthKeys] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeAuthKeys] - _json = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(regenerate_key_parameters, (IO, bytes)): + _content = regenerate_key_parameters + else: + _json = self._serialize.body(regenerate_key_parameters, "IntegrationRuntimeRegenerateKeyParameters") request = build_regenerate_auth_key_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, 
- template_url=self.regenerate_auth_key.metadata['url'], + content=_content, + template_url=self.regenerate_auth_key.metadata["url"], headers=_headers, params=_params, ) @@ -689,66 +883,57 @@ async def regenerate_auth_key( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeAuthKeys", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - regenerate_auth_key.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey"} # type: ignore - + regenerate_auth_key.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey"} # type: ignore @distributed_trace_async async def list_auth_keys( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> _models.IntegrationRuntimeAuthKeys: """Retrieves the authentication keys for an integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. 
:type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeAuthKeys, or the result of cls(response) + :return: IntegrationRuntimeAuthKeys or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeAuthKeys] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeAuthKeys] - request = build_list_auth_keys_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_auth_keys.metadata['url'], + template_url=self.list_auth_keys.metadata["url"], headers=_headers, params=_params, ) @@ -756,52 +941,43 @@ async def list_auth_keys( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeAuthKeys", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_auth_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys"} # type: ignore - + list_auth_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys"} # type: ignore async def _start_initial( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> Optional[_models.IntegrationRuntimeStatusResponse]: - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} 
_params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.IntegrationRuntimeStatusResponse]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.IntegrationRuntimeStatusResponse]] - - request = build_start_request_initial( - subscription_id=self._config.subscription_id, + request = build_start_request( resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._start_initial.metadata['url'], + template_url=self._start_initial.metadata["url"], headers=_headers, params=_params, ) @@ -809,10 +985,9 @@ async def _start_initial( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: @@ -821,31 +996,26 @@ async def _start_initial( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _start_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start"} # type: ignore - + _start_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start"} # type: ignore @distributed_trace_async async def begin_start( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> AsyncLROPoller[_models.IntegrationRuntimeStatusResponse]: """Starts a ManagedReserved type integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
@@ -859,85 +1029,71 @@ async def begin_start( the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeStatusResponse] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeStatusResponse] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: raw_result = await self._start_initial( # type: ignore resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: - polling_method = cast(AsyncPollingMethod, AsyncARMPolling( - lro_delay, - - - **kwargs - )) # type: AsyncPollingMethod - elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: polling_method = polling + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_start.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start"} # type: ignore + begin_start.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start"} # type: ignore async def _stop_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> None: - error_map = { - 401: 
ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - - request = build_stop_request_initial( - subscription_id=self._config.subscription_id, + request = build_stop_request( resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._stop_initial.metadata['url'], + template_url=self._stop_initial.metadata["url"], headers=_headers, params=_params, ) @@ -945,10 +1101,9 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: @@ -958,24 +1113,19 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - _stop_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop"} # type: ignore - + _stop_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop"} # type: ignore @distributed_trace_async - async def begin_stop( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + async def begin_stop( + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Stops a ManagedReserved type integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -987,100 +1137,86 @@ async def begin_stop( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: raw_result = await self._stop_initial( # type: ignore resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - if polling is True: - polling_method = cast(AsyncPollingMethod, AsyncARMPolling( - lro_delay, - - - **kwargs - )) # type: AsyncPollingMethod - elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: polling_method = polling + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_stop.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop"} # type: ignore + begin_stop.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop"} # type: ignore @distributed_trace_async async def sync_credentials( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> None: """Force the integration runtime to synchronize credentials across integration runtime nodes, and this will override the credentials across all worker nodes with those available on the dispatcher node. 
If you already have the latest credential backup file, you should manually import it (preferred) on any self-hosted integration runtime node than using this API directly. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_sync_credentials_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.sync_credentials.metadata['url'], + template_url=self.sync_credentials.metadata["url"], headers=_headers, params=_params, ) @@ -1088,10 +1224,9 @@ async def sync_credentials( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1101,50 +1236,42 @@ async def sync_credentials( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - sync_credentials.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials"} # type: ignore - + sync_credentials.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials"} # type: ignore @distributed_trace_async async def get_monitoring_data( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> _models.IntegrationRuntimeMonitoringData: """Get the integration runtime monitoring data, which includes the monitor data for all the nodes under 
this integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeMonitoringData, or the result of cls(response) + :return: IntegrationRuntimeMonitoringData or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeMonitoringData - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeMonitoringData] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeMonitoringData] - request = build_get_monitoring_data_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_monitoring_data.metadata['url'], + template_url=self.get_monitoring_data.metadata["url"], headers=_headers, params=_params, ) @@ -1152,66 +1279,57 @@ async def get_monitoring_data( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('IntegrationRuntimeMonitoringData', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeMonitoringData", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_monitoring_data.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData"} # type: ignore - + get_monitoring_data.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData"} # type: ignore @distributed_trace_async async def upgrade( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - 
factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> None: """Upgrade self-hosted integration runtime to latest version if availability. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_upgrade_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.upgrade.metadata['url'], + template_url=self.upgrade.metadata["url"], headers=_headers, params=_params, ) @@ -1219,10 +1337,9 @@ async def upgrade( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1232,59 +1349,131 @@ async def upgrade( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - upgrade.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade"} # type: ignore - + upgrade.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade"} # type: ignore - @distributed_trace_async + @overload async def remove_links( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, factory_name: str, integration_runtime_name: str, linked_integration_runtime_request: _models.LinkedIntegrationRuntimeRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> None: """Remove all linked integration runtimes under specific data 
factory in a self-hosted integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :param linked_integration_runtime_request: The data factory name for the linked integration - runtime. + runtime. Required. :type linked_integration_runtime_request: ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def remove_links( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + linked_integration_runtime_request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Remove all linked integration runtimes under specific data factory in a self-hosted integration + runtime. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param linked_integration_runtime_request: The data factory name for the linked integration + runtime. Required. + :type linked_integration_runtime_request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def remove_links( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + linked_integration_runtime_request: Union[_models.LinkedIntegrationRuntimeRequest, IO], + **kwargs: Any + ) -> None: + """Remove all linked integration runtimes under specific data factory in a self-hosted integration + runtime. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param linked_integration_runtime_request: The data factory name for the linked integration + runtime. Is either a model type or a IO type. Required. + :type linked_integration_runtime_request: + ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] - _json = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(linked_integration_runtime_request, (IO, bytes)): + _content = linked_integration_runtime_request + else: + _json = self._serialize.body(linked_integration_runtime_request, "LinkedIntegrationRuntimeRequest") request = build_remove_links_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.remove_links.metadata['url'], + content=_content, + template_url=self.remove_links.metadata["url"], headers=_headers, params=_params, ) @@ -1292,10 +1481,9 @@ async def remove_links( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1305,57 +1493,130 @@ async def remove_links( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - remove_links.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks"} # type: ignore + remove_links.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks"} # type: ignore - - @distributed_trace_async + @overload async def create_linked_integration_runtime( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, create_linked_integration_runtime_request: _models.CreateLinkedIntegrationRuntimeRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> 
_models.IntegrationRuntimeStatusResponse: """Create a linked integration runtime entry in a shared integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :param create_linked_integration_runtime_request: The linked integration runtime properties. + Required. :type create_linked_integration_runtime_request: ~azure.mgmt.datafactory.models.CreateLinkedIntegrationRuntimeRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeStatusResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_linked_integration_runtime( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + create_linked_integration_runtime_request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.IntegrationRuntimeStatusResponse: + """Create a linked integration runtime entry in a shared integration runtime. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param create_linked_integration_runtime_request: The linked integration runtime properties. + Required. + :type create_linked_integration_runtime_request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeStatusResponse, or the result of cls(response) + :return: IntegrationRuntimeStatusResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @distributed_trace_async + async def create_linked_integration_runtime( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + create_linked_integration_runtime_request: Union[_models.CreateLinkedIntegrationRuntimeRequest, IO], + **kwargs: Any + ) -> _models.IntegrationRuntimeStatusResponse: + """Create a linked integration runtime entry in a shared integration runtime. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. 
+ :type integration_runtime_name: str + :param create_linked_integration_runtime_request: The linked integration runtime properties. Is + either a model type or a IO type. Required. + :type create_linked_integration_runtime_request: + ~azure.mgmt.datafactory.models.CreateLinkedIntegrationRuntimeRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeStatusResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeStatusResponse] - - _json = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeStatusResponse] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(create_linked_integration_runtime_request, (IO, bytes)): + _content = create_linked_integration_runtime_request + else: + _json = self._serialize.body( + create_linked_integration_runtime_request, "CreateLinkedIntegrationRuntimeRequest" + ) request = build_create_linked_integration_runtime_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_linked_integration_runtime.metadata['url'], + content=_content, + template_url=self.create_linked_integration_runtime.metadata["url"], headers=_headers, params=_params, ) @@ -1363,22 +1624,20 @@ async def create_linked_integration_runtime( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_linked_integration_runtime.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime"} # type: ignore - + create_linked_integration_runtime.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_linked_services_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_linked_services_operations.py index 16f52c8fd55..7e33e494369 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_linked_services_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_linked_services_operations.py @@ -6,10 +6,17 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest @@ -20,10 +27,17 @@ from ... import models as _models from ..._vendor import _convert_request -from ...operations._linked_services_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_by_factory_request -T = TypeVar('T') +from ...operations._linked_services_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_by_factory_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + class LinkedServicesOperations: """ .. warning:: @@ -43,46 +57,41 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any - ) -> AsyncIterable[_models.LinkedServiceListResponse]: + self, resource_group_name: str, factory_name: str, **kwargs: Any + ) -> AsyncIterable["_models.LinkedServiceResource"]: """Lists linked services. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either LinkedServiceListResponse or the result of + :return: An iterator like instance of either LinkedServiceResource or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.LinkedServiceListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.LinkedServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.LinkedServiceListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.LinkedServiceListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_factory.metadata['url'], + template_url=self.list_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -90,16 +99,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -115,10 +119,8 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -128,13 +130,11 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices"} # type: ignore + 
list_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices"} # type: ignore - @distributed_trace_async + @overload async def create_or_update( self, resource_group_name: str, @@ -142,51 +142,130 @@ async def create_or_update( linked_service_name: str, linked_service: _models.LinkedServiceResource, if_match: Optional[str] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.LinkedServiceResource: """Creates or updates a linked service. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param linked_service_name: The linked service name. + :param linked_service_name: The linked service name. Required. :type linked_service_name: str - :param linked_service: Linked service resource definition. + :param linked_service: Linked service resource definition. Required. :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceResource :param if_match: ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. Default value is None. :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LinkedServiceResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.LinkedServiceResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + linked_service_name: str, + linked_service: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.LinkedServiceResource: + """Creates or updates a linked service. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param linked_service_name: The linked service name. Required. + :type linked_service_name: str + :param linked_service: Linked service resource definition. Required. + :type linked_service: IO + :param if_match: ETag of the linkedService entity. Should only be specified for update, for + which it should match existing entity or can be * for unconditional update. Default value is + None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LinkedServiceResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.LinkedServiceResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + linked_service_name: str, + linked_service: Union[_models.LinkedServiceResource, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.LinkedServiceResource: + """Creates or updates a linked service. 
+ + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param linked_service_name: The linked service name. Required. + :type linked_service_name: str + :param linked_service: Linked service resource definition. Is either a model type or a IO type. + Required. + :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceResource or IO + :param if_match: ETag of the linkedService entity. Should only be specified for update, for + which it should match existing entity or can be * for unconditional update. Default value is + None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: LinkedServiceResource, or the result of cls(response) + :return: LinkedServiceResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.LinkedServiceResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.LinkedServiceResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.LinkedServiceResource] - _json = self._serialize.body(linked_service, 'LinkedServiceResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(linked_service, (IO, bytes)): + _content = linked_service + else: + _json = self._serialize.body(linked_service, "LinkedServiceResource") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, linked_service_name=linked_service_name, + subscription_id=self._config.subscription_id, + if_match=if_match, api_version=api_version, content_type=content_type, json=_json, - if_match=if_match, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ -194,25 +273,23 @@ async def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('LinkedServiceResource', pipeline_response) + deserialized = self._deserialize("LinkedServiceResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}"} # type: ignore - + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}"} # type: ignore @distributed_trace_async async def get( @@ -225,41 +302,38 @@ async def get( ) -> Optional[_models.LinkedServiceResource]: """Gets a linked service. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param linked_service_name: The linked service name. + :param linked_service_name: The linked service name. Required. :type linked_service_name: str :param if_none_match: ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. Default value is None. :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: LinkedServiceResource, or the result of cls(response) + :return: LinkedServiceResource or None or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.LinkedServiceResource or None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.LinkedServiceResource]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.LinkedServiceResource]] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, linked_service_name=linked_service_name, - api_version=api_version, + subscription_id=self._config.subscription_id, if_none_match=if_none_match, - template_url=self.get.metadata['url'], + api_version=api_version, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -267,10 +341,9 @@ async def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 304]: @@ 
-279,56 +352,48 @@ async def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('LinkedServiceResource', pipeline_response) + deserialized = self._deserialize("LinkedServiceResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}"} # type: ignore @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - linked_service_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, linked_service_name: str, **kwargs: Any ) -> None: """Deletes a linked service. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param linked_service_name: The linked service name. + :param linked_service_name: The linked service name. Required. :type linked_service_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, linked_service_name=linked_service_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -336,10 +401,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -349,5 +413,4 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}"} # type: ignore - + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_private_endpoints_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_private_endpoints_operations.py index 35489605a7a..fa087757d4d 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_private_endpoints_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_private_endpoints_operations.py @@ -6,10 +6,17 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest @@ -20,10 +27,17 @@ from ... import models as _models from ..._vendor import _convert_request -from ...operations._managed_private_endpoints_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_by_factory_request -T = TypeVar('T') +from ...operations._managed_private_endpoints_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_by_factory_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + class ManagedPrivateEndpointsOperations: """ .. warning:: @@ -43,50 +57,44 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - managed_virtual_network_name: str, - **kwargs: Any - ) -> AsyncIterable[_models.ManagedPrivateEndpointListResponse]: + self, resource_group_name: str, factory_name: str, managed_virtual_network_name: str, **kwargs: Any + ) -> AsyncIterable["_models.ManagedPrivateEndpointResource"]: """Lists managed private endpoints. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. 
+ :param managed_virtual_network_name: Managed virtual network name. Required. :type managed_virtual_network_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ManagedPrivateEndpointListResponse or the result - of cls(response) + :return: An iterator like instance of either ManagedPrivateEndpointResource or the result of + cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.ManagedPrivateEndpointListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.ManagedPrivateEndpointListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ManagedPrivateEndpointListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, managed_virtual_network_name=managed_virtual_network_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_factory.metadata['url'], + template_url=self.list_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -94,17 +102,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - managed_virtual_network_name=managed_virtual_network_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -120,10 +122,8 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -133,13 +133,11 @@ async def get_next(next_link=None): return pipeline_response + return 
AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints"} # type: ignore + list_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints"} # type: ignore - @distributed_trace_async + @overload async def create_or_update( self, resource_group_name: str, @@ -148,54 +146,140 @@ async def create_or_update( managed_private_endpoint_name: str, managed_private_endpoint: _models.ManagedPrivateEndpointResource, if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ManagedPrivateEndpointResource: + """Creates or updates a managed private endpoint. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. Required. + :type managed_virtual_network_name: str + :param managed_private_endpoint_name: Managed private endpoint name. Required. + :type managed_private_endpoint_name: str + :param managed_private_endpoint: Managed private endpoint resource definition. Required. + :type managed_private_endpoint: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource + :param if_match: ETag of the managed private endpoint entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. Default + value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedPrivateEndpointResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + managed_virtual_network_name: str, + managed_private_endpoint_name: str, + managed_private_endpoint: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.ManagedPrivateEndpointResource: """Creates or updates a managed private endpoint. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. + :param managed_virtual_network_name: Managed virtual network name. Required. :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. + :param managed_private_endpoint_name: Managed private endpoint name. Required. :type managed_private_endpoint_name: str - :param managed_private_endpoint: Managed private endpoint resource definition. + :param managed_private_endpoint: Managed private endpoint resource definition. 
Required. + :type managed_private_endpoint: IO + :param if_match: ETag of the managed private endpoint entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. Default + value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedPrivateEndpointResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + managed_virtual_network_name: str, + managed_private_endpoint_name: str, + managed_private_endpoint: Union[_models.ManagedPrivateEndpointResource, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.ManagedPrivateEndpointResource: + """Creates or updates a managed private endpoint. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. Required. + :type managed_virtual_network_name: str + :param managed_private_endpoint_name: Managed private endpoint name. Required. + :type managed_private_endpoint_name: str + :param managed_private_endpoint: Managed private endpoint resource definition. Is either a + model type or a IO type. Required. :type managed_private_endpoint: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource + or IO :param if_match: ETag of the managed private endpoint entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. Default value is None. :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedPrivateEndpointResource, or the result of cls(response) + :return: ManagedPrivateEndpointResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.ManagedPrivateEndpointResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ManagedPrivateEndpointResource] - _json = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(managed_private_endpoint, (IO, bytes)): + _content = managed_private_endpoint + else: + _json = self._serialize.body(managed_private_endpoint, "ManagedPrivateEndpointResource") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, managed_virtual_network_name=managed_virtual_network_name, managed_private_endpoint_name=managed_private_endpoint_name, + subscription_id=self._config.subscription_id, + if_match=if_match, api_version=api_version, content_type=content_type, json=_json, - if_match=if_match, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ -203,25 +287,23 @@ async def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) + deserialized = self._deserialize("ManagedPrivateEndpointResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}"} # type: ignore - + create_or_update.metadata = 
{"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}"} # type: ignore @distributed_trace_async async def get( @@ -235,44 +317,41 @@ async def get( ) -> _models.ManagedPrivateEndpointResource: """Gets a managed private endpoint. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. + :param managed_virtual_network_name: Managed virtual network name. Required. :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. + :param managed_private_endpoint_name: Managed private endpoint name. Required. :type managed_private_endpoint_name: str :param if_none_match: ETag of the managed private endpoint entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. Default value is None. :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedPrivateEndpointResource, or the result of cls(response) + :return: ManagedPrivateEndpointResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.ManagedPrivateEndpointResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ManagedPrivateEndpointResource] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, managed_virtual_network_name=managed_virtual_network_name, managed_private_endpoint_name=managed_private_endpoint_name, - api_version=api_version, + subscription_id=self._config.subscription_id, if_none_match=if_none_match, - template_url=self.get.metadata['url'], + api_version=api_version, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -280,25 +359,23 @@ async def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, 
error_format=ARMErrorFormat) - deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) + deserialized = self._deserialize("ManagedPrivateEndpointResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}"} # type: ignore @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements @@ -311,39 +388,36 @@ async def delete( # pylint: disable=inconsistent-return-statements ) -> None: """Deletes a managed private endpoint. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. + :param managed_virtual_network_name: Managed virtual network name. Required. :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. + :param managed_private_endpoint_name: Managed private endpoint name. Required. :type managed_private_endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, managed_virtual_network_name=managed_virtual_network_name, managed_private_endpoint_name=managed_private_endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -351,10 +425,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = 
pipeline_response.http_response if response.status_code not in [200, 204]: @@ -364,5 +437,4 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}"} # type: ignore - + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_virtual_networks_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_virtual_networks_operations.py index 5a04402e033..9970f4b2728 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_virtual_networks_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_virtual_networks_operations.py @@ -6,10 +6,17 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest @@ -20,10 +27,16 @@ from ... import models as _models from ..._vendor import _convert_request -from ...operations._managed_virtual_networks_operations import build_create_or_update_request, build_get_request, build_list_by_factory_request -T = TypeVar('T') +from ...operations._managed_virtual_networks_operations import ( + build_create_or_update_request, + build_get_request, + build_list_by_factory_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + class ManagedVirtualNetworksOperations: """ .. warning:: @@ -43,46 +56,41 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any - ) -> AsyncIterable[_models.ManagedVirtualNetworkListResponse]: + self, resource_group_name: str, factory_name: str, **kwargs: Any + ) -> AsyncIterable["_models.ManagedVirtualNetworkResource"]: """Lists managed Virtual Networks. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. 
:type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ManagedVirtualNetworkListResponse or the result of + :return: An iterator like instance of either ManagedVirtualNetworkResource or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.ManagedVirtualNetworkListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.ManagedVirtualNetworkListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ManagedVirtualNetworkListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_factory.metadata['url'], + template_url=self.list_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -90,16 +98,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -115,10 +118,8 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -128,13 +129,11 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks"} # type: ignore + list_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks"} # type: ignore - @distributed_trace_async + @overload async def create_or_update( self, resource_group_name: str, @@ -142,51 +141,131 @@ async def create_or_update( managed_virtual_network_name: str, managed_virtual_network: _models.ManagedVirtualNetworkResource, if_match: Optional[str] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.ManagedVirtualNetworkResource: """Creates or updates a managed Virtual Network. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. + :param managed_virtual_network_name: Managed virtual network name. Required. :type managed_virtual_network_name: str - :param managed_virtual_network: Managed Virtual Network resource definition. + :param managed_virtual_network: Managed Virtual Network resource definition. Required. :type managed_virtual_network: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource :param if_match: ETag of the managed Virtual Network entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. Default value is None. :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedVirtualNetworkResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + managed_virtual_network_name: str, + managed_virtual_network: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ManagedVirtualNetworkResource: + """Creates or updates a managed Virtual Network. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. Required. + :type managed_virtual_network_name: str + :param managed_virtual_network: Managed Virtual Network resource definition. Required. + :type managed_virtual_network: IO + :param if_match: ETag of the managed Virtual Network entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. Default + value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedVirtualNetworkResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + managed_virtual_network_name: str, + managed_virtual_network: Union[_models.ManagedVirtualNetworkResource, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.ManagedVirtualNetworkResource: + """Creates or updates a managed Virtual Network. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. Required. + :type managed_virtual_network_name: str + :param managed_virtual_network: Managed Virtual Network resource definition. Is either a model + type or a IO type. Required. + :type managed_virtual_network: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource or + IO + :param if_match: ETag of the managed Virtual Network entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. Default + value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedVirtualNetworkResource, or the result of cls(response) + :return: ManagedVirtualNetworkResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.ManagedVirtualNetworkResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ManagedVirtualNetworkResource] - _json = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(managed_virtual_network, (IO, bytes)): + _content = managed_virtual_network + else: + _json = self._serialize.body(managed_virtual_network, "ManagedVirtualNetworkResource") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, 
factory_name=factory_name, managed_virtual_network_name=managed_virtual_network_name, + subscription_id=self._config.subscription_id, + if_match=if_match, api_version=api_version, content_type=content_type, json=_json, - if_match=if_match, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ -194,25 +273,23 @@ async def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) + deserialized = self._deserialize("ManagedVirtualNetworkResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}"} # type: ignore - + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}"} # type: ignore @distributed_trace_async async def get( @@ -225,41 +302,38 @@ async def get( ) -> _models.ManagedVirtualNetworkResource: """Gets a managed Virtual Network. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. + :param managed_virtual_network_name: Managed virtual network name. Required. :type managed_virtual_network_name: str :param if_none_match: ETag of the managed Virtual Network entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. Default value is None. 
:type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedVirtualNetworkResource, or the result of cls(response) + :return: ManagedVirtualNetworkResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.ManagedVirtualNetworkResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ManagedVirtualNetworkResource] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, managed_virtual_network_name=managed_virtual_network_name, - api_version=api_version, + subscription_id=self._config.subscription_id, if_none_match=if_none_match, - template_url=self.get.metadata['url'], + api_version=api_version, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -267,22 +341,20 @@ async def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) + deserialized = self._deserialize("ManagedVirtualNetworkResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_operations.py index dbbf7750b39..f6fd6c082b7 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_operations.py @@ -7,9 +7,16 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +from urllib.parse import parse_qs, urljoin, urlparse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest @@ -20,9 +27,11 @@ from ... import models as _models from ..._vendor import _convert_request from ...operations._operations import build_list_request -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + class Operations: """ .. warning:: @@ -42,37 +51,30 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace - def list( - self, - **kwargs: Any - ) -> AsyncIterable[_models.OperationListResponse]: + def list(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]: """Lists the available Azure Data Factory API operations. :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OperationListResponse or the result of - cls(response) - :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.OperationListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Operation or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.Operation] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.OperationListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.OperationListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_request( api_version=api_version, - template_url=self.list.metadata['url'], + template_url=self.list.metadata["url"], headers=_headers, params=_params, ) @@ -80,13 +82,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_request( - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + 
_next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -102,10 +102,8 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -115,8 +113,6 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/providers/Microsoft.DataFactory/operations"} # type: ignore + list.metadata = {"url": "/providers/Microsoft.DataFactory/operations"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_patch.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_patch.py index 0ad201a8c58..f7dd3251033 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_patch.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_patch.py @@ -10,6 +10,7 @@ __all__: List[str] = [] # Add all objects you want publicly available to users at this package level + def patch_sdk(): """Do not remove from this file. diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_runs_operations.py index fcf834e3960..14a008ca251 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_runs_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_runs_operations.py @@ -6,9 +6,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Optional, TypeVar - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest @@ -18,10 +24,16 @@ from ... import models as _models from ..._vendor import _convert_request -from ...operations._pipeline_runs_operations import build_cancel_request, build_get_request, build_query_by_factory_request -T = TypeVar('T') +from ...operations._pipeline_runs_operations import ( + build_cancel_request, + build_get_request, + build_query_by_factory_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + class PipelineRunsOperations: """ .. 
warning:: @@ -41,50 +53,112 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async + @overload async def query_by_factory( self, resource_group_name: str, factory_name: str, filter_parameters: _models.RunFilterParameters, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.PipelineRunsQueryResponse: """Query pipeline runs in the factory based on input filter conditions. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param filter_parameters: Parameters to filter the pipeline run. + :param filter_parameters: Parameters to filter the pipeline run. Required. :type filter_parameters: ~azure.mgmt.datafactory.models.RunFilterParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineRunsQueryResponse, or the result of cls(response) + :return: PipelineRunsQueryResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.PipelineRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + async def query_by_factory( + self, + resource_group_name: str, + factory_name: str, + filter_parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PipelineRunsQueryResponse: + """Query pipeline runs in the factory based on input filter conditions. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param filter_parameters: Parameters to filter the pipeline run. Required. + :type filter_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PipelineRunsQueryResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.PipelineRunsQueryResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def query_by_factory( + self, + resource_group_name: str, + factory_name: str, + filter_parameters: Union[_models.RunFilterParameters, IO], + **kwargs: Any + ) -> _models.PipelineRunsQueryResponse: + """Query pipeline runs in the factory based on input filter conditions. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param filter_parameters: Parameters to filter the pipeline run. Is either a model type or a IO + type. Required. 
+ :type filter_parameters: ~azure.mgmt.datafactory.models.RunFilterParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PipelineRunsQueryResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.PipelineRunsQueryResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.PipelineRunsQueryResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.PipelineRunsQueryResponse] - _json = self._serialize.body(filter_parameters, 'RunFilterParameters') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(filter_parameters, (IO, bytes)): + _content = filter_parameters + else: + _json = self._serialize.body(filter_parameters, "RunFilterParameters") request = build_query_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.query_by_factory.metadata['url'], + content=_content, + template_url=self.query_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -92,66 +166,55 @@ async def query_by_factory( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('PipelineRunsQueryResponse', pipeline_response) + deserialized = self._deserialize("PipelineRunsQueryResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - query_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns"} # type: ignore - + query_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns"} # type: ignore @distributed_trace_async - async def get( - self, - resource_group_name: str, - factory_name: str, - run_id: str, - **kwargs: Any - ) -> _models.PipelineRun: + async def get(self, resource_group_name: str, factory_name: str, run_id: str, 
**kwargs: Any) -> _models.PipelineRun: """Get a pipeline run by its run ID. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param run_id: The pipeline run identifier. + :param run_id: The pipeline run identifier. Required. :type run_id: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineRun, or the result of cls(response) + :return: PipelineRun or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.PipelineRun - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.PipelineRun] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PipelineRun] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, run_id=run_id, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -159,25 +222,23 @@ async def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('PipelineRun', pipeline_response) + deserialized = self._deserialize("PipelineRun", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}"} # type: ignore @distributed_trace_async async def cancel( # pylint: disable=inconsistent-return-statements @@ -190,40 +251,37 @@ async def cancel( # pylint: disable=inconsistent-return-statements ) -> None: """Cancel a pipeline run by its run ID. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
:type factory_name: str - :param run_id: The pipeline run identifier. + :param run_id: The pipeline run identifier. Required. :type run_id: str :param is_recursive: If true, cancel all the Child pipelines that are triggered by the current pipeline. Default value is None. :type is_recursive: bool :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_cancel_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, run_id=run_id, - api_version=api_version, + subscription_id=self._config.subscription_id, is_recursive=is_recursive, - template_url=self.cancel.metadata['url'], + api_version=api_version, + template_url=self.cancel.metadata["url"], headers=_headers, params=_params, ) @@ -231,10 +289,9 @@ async def cancel( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -244,5 +301,4 @@ async def cancel( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - cancel.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel"} # type: ignore - + cancel.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipelines_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipelines_operations.py index e1278f679c7..fa89f8193af 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipelines_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipelines_operations.py @@ -6,10 +6,18 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +import sys +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest @@ -20,10 +28,23 @@ from ... import models as _models from ..._vendor import _convert_request -from ...operations._pipelines_operations import build_create_or_update_request, build_create_run_request, build_delete_request, build_get_request, build_list_by_factory_request -T = TypeVar('T') +from ...operations._pipelines_operations import ( + build_create_or_update_request, + build_create_run_request, + build_delete_request, + build_get_request, + build_list_by_factory_request, +) + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + class PipelinesOperations: """ .. warning:: @@ -43,46 +64,40 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any - ) -> AsyncIterable[_models.PipelineListResponse]: + self, resource_group_name: str, factory_name: str, **kwargs: Any + ) -> AsyncIterable["_models.PipelineResource"]: """Lists pipelines. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PipelineListResponse or the result of - cls(response) + :return: An iterator like instance of either PipelineResource or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.PipelineListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.PipelineResource] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.PipelineListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PipelineListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_factory.metadata['url'], + template_url=self.list_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -90,16 +105,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -115,10 +125,8 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -128,13 +136,11 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines"} # type: ignore + list_by_factory.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines"} # type: ignore - @distributed_trace_async + @overload async def create_or_update( self, resource_group_name: str, @@ -142,50 +148,126 @@ async def create_or_update( pipeline_name: str, pipeline: _models.PipelineResource, if_match: Optional[str] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.PipelineResource: """Creates or updates a pipeline. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param pipeline_name: The pipeline name. + :param pipeline_name: The pipeline name. Required. :type pipeline_name: str - :param pipeline: Pipeline resource definition. + :param pipeline: Pipeline resource definition. Required. :type pipeline: ~azure.mgmt.datafactory.models.PipelineResource :param if_match: ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. Default value is None. :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PipelineResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.PipelineResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + pipeline_name: str, + pipeline: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PipelineResource: + """Creates or updates a pipeline. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param pipeline_name: The pipeline name. Required. + :type pipeline_name: str + :param pipeline: Pipeline resource definition. Required. + :type pipeline: IO + :param if_match: ETag of the pipeline entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. Default value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PipelineResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.PipelineResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + pipeline_name: str, + pipeline: Union[_models.PipelineResource, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.PipelineResource: + """Creates or updates a pipeline. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param pipeline_name: The pipeline name. Required. 
+ :type pipeline_name: str + :param pipeline: Pipeline resource definition. Is either a model type or a IO type. Required. + :type pipeline: ~azure.mgmt.datafactory.models.PipelineResource or IO + :param if_match: ETag of the pipeline entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. Default value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineResource, or the result of cls(response) + :return: PipelineResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.PipelineResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.PipelineResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.PipelineResource] - _json = self._serialize.body(pipeline, 'PipelineResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(pipeline, (IO, bytes)): + _content = pipeline + else: + _json = self._serialize.body(pipeline, "PipelineResource") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, pipeline_name=pipeline_name, + subscription_id=self._config.subscription_id, + if_match=if_match, api_version=api_version, content_type=content_type, json=_json, - if_match=if_match, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ -193,25 +275,23 @@ async def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('PipelineResource', pipeline_response) + deserialized = self._deserialize("PipelineResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}"} # type: ignore - + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}"} # type: ignore @distributed_trace_async async def get( @@ -224,41 +304,38 @@ async def get( ) -> Optional[_models.PipelineResource]: """Gets a pipeline. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param pipeline_name: The pipeline name. + :param pipeline_name: The pipeline name. Required. :type pipeline_name: str :param if_none_match: ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. Default value is None. :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineResource, or the result of cls(response) + :return: PipelineResource or None or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.PipelineResource or None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.PipelineResource]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.PipelineResource]] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, pipeline_name=pipeline_name, - api_version=api_version, + subscription_id=self._config.subscription_id, if_none_match=if_none_match, - template_url=self.get.metadata['url'], + api_version=api_version, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -266,10 +343,9 @@ async def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 304]: @@ -278,56 +354,48 @@ async def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('PipelineResource', pipeline_response) + deserialized = self._deserialize("PipelineResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}"} # type: ignore @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - pipeline_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, pipeline_name: str, **kwargs: Any ) -> None: """Deletes a pipeline. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param pipeline_name: The pipeline name. + :param pipeline_name: The pipeline name. Required. :type pipeline_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, pipeline_name=pipeline_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -335,10 +403,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -348,10 +415,58 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}"} # type: ignore + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}"} # type: ignore + @overload + async def create_run( + self, + resource_group_name: str, + factory_name: str, + pipeline_name: str, + 
reference_pipeline_run_id: Optional[str] = None, + is_recovery: Optional[bool] = None, + start_activity_name: Optional[str] = None, + start_from_failure: Optional[bool] = None, + parameters: Optional[Dict[str, JSON]] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CreateRunResponse: + """Creates a run of a pipeline. - @distributed_trace_async + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param pipeline_name: The pipeline name. Required. + :type pipeline_name: str + :param reference_pipeline_run_id: The pipeline run identifier. If run ID is specified the + parameters of the specified run will be used to create a new run. Default value is None. + :type reference_pipeline_run_id: str + :param is_recovery: Recovery mode flag. If recovery mode is set to true, the specified + referenced pipeline run and the new run will be grouped under the same groupId. Default value + is None. + :type is_recovery: bool + :param start_activity_name: In recovery mode, the rerun will start from this activity. If not + specified, all activities will run. Default value is None. + :type start_activity_name: str + :param start_from_failure: In recovery mode, if set to true, the rerun will start from failed + activities. The property will be used only if startActivityName is not specified. Default value + is None. + :type start_from_failure: bool + :param parameters: Parameters of the pipeline run. These parameters will be used only if the + runId is not specified. Default value is None. + :type parameters: dict[str, JSON] + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CreateRunResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.CreateRunResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload async def create_run( self, resource_group_name: str, @@ -361,16 +476,18 @@ async def create_run( is_recovery: Optional[bool] = None, start_activity_name: Optional[str] = None, start_from_failure: Optional[bool] = None, - parameters: Optional[Dict[str, Any]] = None, + parameters: Optional[IO] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.CreateRunResponse: """Creates a run of a pipeline. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param pipeline_name: The pipeline name. + :param pipeline_name: The pipeline name. Required. :type pipeline_name: str :param reference_pipeline_run_id: The pipeline run identifier. If run ID is specified the parameters of the specified run will be used to create a new run. Default value is None. @@ -388,42 +505,97 @@ async def create_run( :type start_from_failure: bool :param parameters: Parameters of the pipeline run. These parameters will be used only if the runId is not specified. Default value is None. - :type parameters: dict[str, any] + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: CreateRunResponse, or the result of cls(response) + :return: CreateRunResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.CreateRunResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @distributed_trace_async + async def create_run( + self, + resource_group_name: str, + factory_name: str, + pipeline_name: str, + reference_pipeline_run_id: Optional[str] = None, + is_recovery: Optional[bool] = None, + start_activity_name: Optional[str] = None, + start_from_failure: Optional[bool] = None, + parameters: Optional[Union[Dict[str, JSON], IO]] = None, + **kwargs: Any + ) -> _models.CreateRunResponse: + """Creates a run of a pipeline. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param pipeline_name: The pipeline name. Required. + :type pipeline_name: str + :param reference_pipeline_run_id: The pipeline run identifier. If run ID is specified the + parameters of the specified run will be used to create a new run. Default value is None. + :type reference_pipeline_run_id: str + :param is_recovery: Recovery mode flag. If recovery mode is set to true, the specified + referenced pipeline run and the new run will be grouped under the same groupId. Default value + is None. + :type is_recovery: bool + :param start_activity_name: In recovery mode, the rerun will start from this activity. If not + specified, all activities will run. Default value is None. + :type start_activity_name: str + :param start_from_failure: In recovery mode, if set to true, the rerun will start from failed + activities. The property will be used only if startActivityName is not specified. Default value + is None. + :type start_from_failure: bool + :param parameters: Parameters of the pipeline run. These parameters will be used only if the + runId is not specified. Is either a dict type or a IO type. Default value is None. + :type parameters: dict[str, JSON] or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CreateRunResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.CreateRunResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.CreateRunResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CreateRunResponse] - if parameters is not None: - _json = self._serialize.body(parameters, '{object}') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters else: - _json = None + if parameters is not None: + _json = self._serialize.body(parameters, "{object}") + else: + _json = None request = build_create_run_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, pipeline_name=pipeline_name, - api_version=api_version, - content_type=content_type, - json=_json, + subscription_id=self._config.subscription_id, reference_pipeline_run_id=reference_pipeline_run_id, is_recovery=is_recovery, start_activity_name=start_activity_name, start_from_failure=start_from_failure, - template_url=self.create_run.metadata['url'], + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_run.metadata["url"], headers=_headers, params=_params, ) @@ -431,22 +603,20 @@ async def create_run( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('CreateRunResponse', pipeline_response) + deserialized = self._deserialize("CreateRunResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_run.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun"} # type: ignore - + create_run.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_end_point_connections_operations.py 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_end_point_connections_operations.py index 60015f331bf..4636616e4ad 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_end_point_connections_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_end_point_connections_operations.py @@ -7,9 +7,16 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +from urllib.parse import parse_qs, urljoin, urlparse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest @@ -20,9 +27,11 @@ from ... import models as _models from ..._vendor import _convert_request from ...operations._private_end_point_connections_operations import build_list_by_factory_request -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + class PrivateEndPointConnectionsOperations: """ .. warning:: @@ -42,46 +51,41 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any - ) -> AsyncIterable[_models.PrivateEndpointConnectionListResponse]: + self, resource_group_name: str, factory_name: str, **kwargs: Any + ) -> AsyncIterable["_models.PrivateEndpointConnectionResource"]: """Lists Private endpoint connections. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PrivateEndpointConnectionListResponse or the - result of cls(response) + :return: An iterator like instance of either PrivateEndpointConnectionResource or the result of + cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.PrivateEndpointConnectionListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.PrivateEndpointConnectionListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnectionListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_factory.metadata['url'], + template_url=self.list_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -89,16 +93,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -114,10 +113,8 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -127,8 +124,6 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndPointConnections"} # type: ignore + list_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndPointConnections"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_endpoint_connection_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_endpoint_connection_operations.py index 5d97fb2fcf8..c99d6a3449d 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_endpoint_connection_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_endpoint_connection_operations.py @@ -6,9 +6,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Optional, TypeVar - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest @@ -18,10 +24,16 @@ from ... import models as _models from ..._vendor import _convert_request -from ...operations._private_endpoint_connection_operations import build_create_or_update_request, build_delete_request, build_get_request -T = TypeVar('T') +from ...operations._private_endpoint_connection_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + class PrivateEndpointConnectionOperations: """ .. warning:: @@ -41,8 +53,7 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async + @overload async def create_or_update( self, resource_group_name: str, @@ -50,52 +61,131 @@ async def create_or_update( private_endpoint_connection_name: str, private_endpoint_wrapper: _models.PrivateLinkConnectionApprovalRequestResource, if_match: Optional[str] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.PrivateEndpointConnectionResource: """Approves or rejects a private endpoint connection. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param private_endpoint_connection_name: The private endpoint connection name. + :param private_endpoint_connection_name: The private endpoint connection name. Required. 
:type private_endpoint_connection_name: str - :param private_endpoint_wrapper: + :param private_endpoint_wrapper: Required. :type private_endpoint_wrapper: ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequestResource :param if_match: ETag of the private endpoint connection entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. Default value is None. :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnectionResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + private_endpoint_connection_name: str, + private_endpoint_wrapper: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PrivateEndpointConnectionResource: + """Approves or rejects a private endpoint connection. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param private_endpoint_connection_name: The private endpoint connection name. Required. + :type private_endpoint_connection_name: str + :param private_endpoint_wrapper: Required. + :type private_endpoint_wrapper: IO + :param if_match: ETag of the private endpoint connection entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. Default + value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnectionResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + private_endpoint_connection_name: str, + private_endpoint_wrapper: Union[_models.PrivateLinkConnectionApprovalRequestResource, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.PrivateEndpointConnectionResource: + """Approves or rejects a private endpoint connection. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param private_endpoint_connection_name: The private endpoint connection name. Required. + :type private_endpoint_connection_name: str + :param private_endpoint_wrapper: Is either a model type or a IO type. Required. + :type private_endpoint_wrapper: + ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequestResource or IO + :param if_match: ETag of the private endpoint connection entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. Default + value is None. 
+ :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PrivateEndpointConnectionResource, or the result of cls(response) + :return: PrivateEndpointConnectionResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.PrivateEndpointConnectionResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnectionResource] - _json = self._serialize.body(private_endpoint_wrapper, 'PrivateLinkConnectionApprovalRequestResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(private_endpoint_wrapper, (IO, bytes)): + _content = private_endpoint_wrapper + else: + _json = self._serialize.body(private_endpoint_wrapper, "PrivateLinkConnectionApprovalRequestResource") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + if_match=if_match, api_version=api_version, content_type=content_type, json=_json, - if_match=if_match, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ -103,25 +193,23 @@ async def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('PrivateEndpointConnectionResource', pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnectionResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore - + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore @distributed_trace_async async def get( @@ -134,41 +222,38 @@ async def get( ) -> _models.PrivateEndpointConnectionResource: """Gets a private endpoint connection. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param private_endpoint_connection_name: The private endpoint connection name. + :param private_endpoint_connection_name: The private endpoint connection name. Required. :type private_endpoint_connection_name: str :param if_none_match: ETag of the private endpoint connection entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. Default value is None. :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PrivateEndpointConnectionResource, or the result of cls(response) + :return: PrivateEndpointConnectionResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.PrivateEndpointConnectionResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnectionResource] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, private_endpoint_connection_name=private_endpoint_connection_name, - api_version=api_version, + subscription_id=self._config.subscription_id, if_none_match=if_none_match, - template_url=self.get.metadata['url'], + api_version=api_version, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -176,66 +261,57 @@ async def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) 
- deserialized = self._deserialize('PrivateEndpointConnectionResource', pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnectionResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - private_endpoint_connection_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, private_endpoint_connection_name: str, **kwargs: Any ) -> None: """Deletes a private endpoint connection. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param private_endpoint_connection_name: The private endpoint connection name. + :param private_endpoint_connection_name: The private endpoint connection name. Required. :type private_endpoint_connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -243,10 +319,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -256,5 +331,4 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return 
cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore - + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_link_resources_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_link_resources_operations.py index 00b168541b1..4af880e6778 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_link_resources_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_link_resources_operations.py @@ -8,7 +8,13 @@ # -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Optional, TypeVar -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest @@ -19,9 +25,11 @@ from ... import models as _models from ..._vendor import _convert_request from ...operations._private_link_resources_operations import build_get_request -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + class PrivateLinkResourcesOperations: """ .. warning:: @@ -41,43 +49,36 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace_async async def get( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, **kwargs: Any ) -> _models.PrivateLinkResourcesWrapper: """Gets the private link resources. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PrivateLinkResourcesWrapper, or the result of cls(response) + :return: PrivateLinkResourcesWrapper or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.PrivateLinkResourcesWrapper - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.PrivateLinkResourcesWrapper] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateLinkResourcesWrapper] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -85,22 +86,20 @@ async def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('PrivateLinkResourcesWrapper', pipeline_response) + deserialized = self._deserialize("PrivateLinkResourcesWrapper", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateLinkResources"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateLinkResources"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_runs_operations.py index 9f9d49e3852..a54c73ae2ae 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_runs_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_runs_operations.py @@ -6,9 +6,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Optional, TypeVar - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest @@ -18,10 +24,16 @@ from ... import models as _models from ..._vendor import _convert_request -from ...operations._trigger_runs_operations import build_cancel_request, build_query_by_factory_request, build_rerun_request -T = TypeVar('T') +from ...operations._trigger_runs_operations import ( + build_cancel_request, + build_query_by_factory_request, + build_rerun_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + class TriggerRunsOperations: """ .. warning:: @@ -41,51 +53,42 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace_async async def rerun( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - run_id: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, trigger_name: str, run_id: str, **kwargs: Any ) -> None: """Rerun single trigger instance by runId. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param trigger_name: The trigger name. + :param trigger_name: The trigger name. Required. :type trigger_name: str - :param run_id: The pipeline run identifier. + :param run_id: The pipeline run identifier. Required. 
:type run_id: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_rerun_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, run_id=run_id, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.rerun.metadata['url'], + template_url=self.rerun.metadata["url"], headers=_headers, params=_params, ) @@ -93,10 +96,9 @@ async def rerun( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -106,53 +108,44 @@ async def rerun( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - rerun.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun"} # type: ignore - + rerun.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun"} # type: ignore @distributed_trace_async async def cancel( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - run_id: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, trigger_name: str, run_id: str, **kwargs: Any ) -> None: """Cancel a single trigger instance by runId. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param trigger_name: The trigger name. + :param trigger_name: The trigger name. Required. :type trigger_name: str - :param run_id: The pipeline run identifier. + :param run_id: The pipeline run identifier. Required. 
:type run_id: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_cancel_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, run_id=run_id, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.cancel.metadata['url'], + template_url=self.cancel.metadata["url"], headers=_headers, params=_params, ) @@ -160,10 +153,9 @@ async def cancel( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -173,52 +165,114 @@ async def cancel( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - cancel.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel"} # type: ignore - + cancel.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel"} # type: ignore - @distributed_trace_async + @overload async def query_by_factory( self, resource_group_name: str, factory_name: str, filter_parameters: _models.RunFilterParameters, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.TriggerRunsQueryResponse: """Query trigger runs. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param filter_parameters: Parameters to filter the pipeline run. + :param filter_parameters: Parameters to filter the pipeline run. Required. :type filter_parameters: ~azure.mgmt.datafactory.models.RunFilterParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerRunsQueryResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.TriggerRunsQueryResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def query_by_factory( + self, + resource_group_name: str, + factory_name: str, + filter_parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.TriggerRunsQueryResponse: + """Query trigger runs. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param filter_parameters: Parameters to filter the pipeline run. Required. + :type filter_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerRunsQueryResponse, or the result of cls(response) + :return: TriggerRunsQueryResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.TriggerRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @distributed_trace_async + async def query_by_factory( + self, + resource_group_name: str, + factory_name: str, + filter_parameters: Union[_models.RunFilterParameters, IO], + **kwargs: Any + ) -> _models.TriggerRunsQueryResponse: + """Query trigger runs. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param filter_parameters: Parameters to filter the pipeline run. Is either a model type or a IO + type. Required. + :type filter_parameters: ~azure.mgmt.datafactory.models.RunFilterParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerRunsQueryResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.TriggerRunsQueryResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.TriggerRunsQueryResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.TriggerRunsQueryResponse] - _json = self._serialize.body(filter_parameters, 'RunFilterParameters') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(filter_parameters, (IO, bytes)): + _content = filter_parameters + else: + _json = self._serialize.body(filter_parameters, "RunFilterParameters") request = build_query_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.query_by_factory.metadata['url'], + content=_content, + template_url=self.query_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -226,22 +280,20 @@ async def query_by_factory( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('TriggerRunsQueryResponse', pipeline_response) + deserialized = self._deserialize("TriggerRunsQueryResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - query_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns"} # type: ignore - + query_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_triggers_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_triggers_operations.py index 36b3f3a5eae..a0262b37d5b 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_triggers_operations.py +++ 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_triggers_operations.py @@ -6,10 +6,17 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -22,10 +29,23 @@ from ... import models as _models from ..._vendor import _convert_request -from ...operations._triggers_operations import build_create_or_update_request, build_delete_request, build_get_event_subscription_status_request, build_get_request, build_list_by_factory_request, build_query_by_factory_request, build_start_request_initial, build_stop_request_initial, build_subscribe_to_events_request_initial, build_unsubscribe_from_events_request_initial -T = TypeVar('T') +from ...operations._triggers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_event_subscription_status_request, + build_get_request, + build_list_by_factory_request, + build_query_by_factory_request, + build_start_request, + build_stop_request, + build_subscribe_to_events_request, + build_unsubscribe_from_events_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + class TriggersOperations: """ .. warning:: @@ -45,45 +65,39 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any - ) -> AsyncIterable[_models.TriggerListResponse]: + self, resource_group_name: str, factory_name: str, **kwargs: Any + ) -> AsyncIterable["_models.TriggerResource"]: """Lists triggers. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either TriggerListResponse or the result of cls(response) - :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.TriggerListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either TriggerResource or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.TriggerResource] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.TriggerListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.TriggerListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_factory.metadata['url'], + template_url=self.list_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -91,16 +105,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -116,10 +125,8 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -129,55 +136,116 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers"} # type: ignore + list_by_factory.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers"} # type: ignore - @distributed_trace_async + @overload async def query_by_factory( self, resource_group_name: str, factory_name: str, filter_parameters: _models.TriggerFilterParameters, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.TriggerQueryResponse: """Query triggers. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param filter_parameters: Parameters to filter the triggers. + :param filter_parameters: Parameters to filter the triggers. Required. :type filter_parameters: ~azure.mgmt.datafactory.models.TriggerFilterParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerQueryResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.TriggerQueryResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def query_by_factory( + self, + resource_group_name: str, + factory_name: str, + filter_parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.TriggerQueryResponse: + """Query triggers. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param filter_parameters: Parameters to filter the triggers. Required. + :type filter_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerQueryResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.TriggerQueryResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def query_by_factory( + self, + resource_group_name: str, + factory_name: str, + filter_parameters: Union[_models.TriggerFilterParameters, IO], + **kwargs: Any + ) -> _models.TriggerQueryResponse: + """Query triggers. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param filter_parameters: Parameters to filter the triggers. Is either a model type or a IO + type. Required. + :type filter_parameters: ~azure.mgmt.datafactory.models.TriggerFilterParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerQueryResponse, or the result of cls(response) + :return: TriggerQueryResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.TriggerQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.TriggerQueryResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.TriggerQueryResponse] - _json = self._serialize.body(filter_parameters, 'TriggerFilterParameters') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(filter_parameters, (IO, bytes)): + _content = filter_parameters + else: + _json = self._serialize.body(filter_parameters, "TriggerFilterParameters") request = build_query_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.query_by_factory.metadata['url'], + content=_content, + template_url=self.query_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -185,27 +253,25 @@ async def query_by_factory( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('TriggerQueryResponse', pipeline_response) + deserialized = self._deserialize("TriggerQueryResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - query_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers"} # type: ignore - + query_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers"} # type: ignore - @distributed_trace_async + @overload async def create_or_update( self, resource_group_name: str, @@ -213,50 +279,126 @@ async def create_or_update( trigger_name: str, trigger: 
_models.TriggerResource, if_match: Optional[str] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.TriggerResource: """Creates or updates a trigger. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param trigger_name: The trigger name. + :param trigger_name: The trigger name. Required. :type trigger_name: str - :param trigger: Trigger resource definition. + :param trigger: Trigger resource definition. Required. :type trigger: ~azure.mgmt.datafactory.models.TriggerResource :param if_match: ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. Default value is None. :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.TriggerResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + trigger: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.TriggerResource: + """Creates or updates a trigger. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param trigger_name: The trigger name. Required. + :type trigger_name: str + :param trigger: Trigger resource definition. Required. + :type trigger: IO + :param if_match: ETag of the trigger entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. Default value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerResource, or the result of cls(response) + :return: TriggerResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.TriggerResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + trigger: Union[_models.TriggerResource, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.TriggerResource: + """Creates or updates a trigger. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param trigger_name: The trigger name. Required. + :type trigger_name: str + :param trigger: Trigger resource definition. Is either a model type or a IO type. Required. 
+ :type trigger: ~azure.mgmt.datafactory.models.TriggerResource or IO + :param if_match: ETag of the trigger entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. Default value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.TriggerResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.TriggerResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.TriggerResource] - _json = self._serialize.body(trigger, 'TriggerResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(trigger, (IO, bytes)): + _content = trigger + else: + _json = self._serialize.body(trigger, "TriggerResource") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, + subscription_id=self._config.subscription_id, + if_match=if_match, api_version=api_version, content_type=content_type, json=_json, - if_match=if_match, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ -264,25 +406,23 @@ async def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('TriggerResource', pipeline_response) + deserialized = self._deserialize("TriggerResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}"} # type: ignore - + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}"} # type: ignore @distributed_trace_async async def get( @@ -295,41 +435,38 @@ async def 
get( ) -> Optional[_models.TriggerResource]: """Gets a trigger. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param trigger_name: The trigger name. + :param trigger_name: The trigger name. Required. :type trigger_name: str :param if_none_match: ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. Default value is None. :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerResource, or the result of cls(response) + :return: TriggerResource or None or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.TriggerResource or None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.TriggerResource]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.TriggerResource]] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, - api_version=api_version, + subscription_id=self._config.subscription_id, if_none_match=if_none_match, - template_url=self.get.metadata['url'], + api_version=api_version, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -337,10 +474,9 @@ async def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 304]: @@ -349,56 +485,48 @@ async def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('TriggerResource', pipeline_response) + deserialized = self._deserialize("TriggerResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}"} # type: ignore @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + self, resource_group_name: 
str, factory_name: str, trigger_name: str, **kwargs: Any ) -> None: """Deletes a trigger. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param trigger_name: The trigger name. + :param trigger_name: The trigger name. Required. :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -406,10 +534,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -419,35 +546,27 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}"} # type: ignore - + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}"} # type: ignore async def _subscribe_to_events_initial( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any ) -> Optional[_models.TriggerSubscriptionOperationStatus]: - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = 
kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.TriggerSubscriptionOperationStatus]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.TriggerSubscriptionOperationStatus]] - - request = build_subscribe_to_events_request_initial( - subscription_id=self._config.subscription_id, + request = build_subscribe_to_events_request( resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._subscribe_to_events_initial.metadata['url'], + template_url=self._subscribe_to_events_initial.metadata["url"], headers=_headers, params=_params, ) @@ -455,10 +574,9 @@ async def _subscribe_to_events_initial( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: @@ -467,31 +585,26 @@ async def _subscribe_to_events_initial( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _subscribe_to_events_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents"} # type: ignore - + _subscribe_to_events_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents"} # type: ignore @distributed_trace_async async def begin_subscribe_to_events( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any ) -> AsyncLROPoller[_models.TriggerSubscriptionOperationStatus]: """Subscribe event trigger to events. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param trigger_name: The trigger name. + :param trigger_name: The trigger name. Required. :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
@@ -505,99 +618,85 @@ async def begin_subscribe_to_events( or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.TriggerSubscriptionOperationStatus] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.TriggerSubscriptionOperationStatus] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: raw_result = await self._subscribe_to_events_initial( # type: ignore resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: - polling_method = cast(AsyncPollingMethod, AsyncARMPolling( - lro_delay, - - - **kwargs - )) # type: AsyncPollingMethod - elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: polling_method = polling + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_subscribe_to_events.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents"} # type: ignore + begin_subscribe_to_events.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents"} # type: ignore @distributed_trace_async async def get_event_subscription_status( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any ) -> _models.TriggerSubscriptionOperationStatus: """Get a trigger's 
event subscription status. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param trigger_name: The trigger name. + :param trigger_name: The trigger name. Required. :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerSubscriptionOperationStatus, or the result of cls(response) + :return: TriggerSubscriptionOperationStatus or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.TriggerSubscriptionOperationStatus] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.TriggerSubscriptionOperationStatus] - request = build_get_event_subscription_status_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_event_subscription_status.metadata['url'], + template_url=self.get_event_subscription_status.metadata["url"], headers=_headers, params=_params, ) @@ -605,52 +704,43 @@ async def get_event_subscription_status( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_event_subscription_status.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus"} # type: ignore - + get_event_subscription_status.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus"} # type: ignore async def _unsubscribe_from_events_initial( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + self, resource_group_name: str, 
factory_name: str, trigger_name: str, **kwargs: Any ) -> Optional[_models.TriggerSubscriptionOperationStatus]: - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.TriggerSubscriptionOperationStatus]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.TriggerSubscriptionOperationStatus]] - - request = build_unsubscribe_from_events_request_initial( - subscription_id=self._config.subscription_id, + request = build_unsubscribe_from_events_request( resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._unsubscribe_from_events_initial.metadata['url'], + template_url=self._unsubscribe_from_events_initial.metadata["url"], headers=_headers, params=_params, ) @@ -658,10 +748,9 @@ async def _unsubscribe_from_events_initial( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: @@ -670,31 +759,26 @@ async def _unsubscribe_from_events_initial( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _unsubscribe_from_events_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents"} # type: ignore - + _unsubscribe_from_events_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents"} # type: ignore @distributed_trace_async async def begin_unsubscribe_from_events( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any ) -> AsyncLROPoller[_models.TriggerSubscriptionOperationStatus]: """Unsubscribe event trigger from events. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param trigger_name: The trigger name. + :param trigger_name: The trigger name. Required. 
:type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -708,85 +792,71 @@ async def begin_unsubscribe_from_events( or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.TriggerSubscriptionOperationStatus] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.TriggerSubscriptionOperationStatus] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: raw_result = await self._unsubscribe_from_events_initial( # type: ignore resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: - polling_method = cast(AsyncPollingMethod, AsyncARMPolling( - lro_delay, - - - **kwargs - )) # type: AsyncPollingMethod - elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: polling_method = polling + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_unsubscribe_from_events.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents"} # type: ignore + begin_unsubscribe_from_events.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents"} # type: ignore async def _start_initial( # pylint: disable=inconsistent-return-statements - self, 
- resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any ) -> None: - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - - request = build_start_request_initial( - subscription_id=self._config.subscription_id, + request = build_start_request( resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._start_initial.metadata['url'], + template_url=self._start_initial.metadata["url"], headers=_headers, params=_params, ) @@ -794,10 +864,9 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -807,24 +876,19 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - _start_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start"} # type: ignore - + _start_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start"} # type: ignore @distributed_trace_async - async def begin_start( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + async def begin_start( + self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Starts a trigger. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param trigger_name: The trigger name. + :param trigger_name: The trigger name. Required. :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -836,83 +900,69 @@ async def begin_start( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: raw_result = await self._start_initial( # type: ignore resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - if polling is True: - polling_method = cast(AsyncPollingMethod, AsyncARMPolling( - lro_delay, - - - **kwargs - )) # type: AsyncPollingMethod - elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: polling_method = polling + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_start.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start"} # type: ignore + begin_start.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start"} # type: ignore async def _stop_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any ) -> None: - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or 
{} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - - request = build_stop_request_initial( - subscription_id=self._config.subscription_id, + request = build_stop_request( resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._stop_initial.metadata['url'], + template_url=self._stop_initial.metadata["url"], headers=_headers, params=_params, ) @@ -920,10 +970,9 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -933,24 +982,19 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - _stop_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop"} # type: ignore - + _stop_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop"} # type: ignore @distributed_trace_async - async def begin_stop( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + async def begin_stop( + self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Stops a trigger. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param trigger_name: The trigger name. + :param trigger_name: The trigger name. Required. :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -962,53 +1006,46 @@ async def begin_stop( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: raw_result = await self._stop_initial( # type: ignore resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - if polling is True: - polling_method = cast(AsyncPollingMethod, AsyncARMPolling( - lro_delay, - - - **kwargs - )) # type: AsyncPollingMethod - elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: polling_method = polling + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_stop.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop"} # type: ignore + begin_stop.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py index a1eba2e4eb4..9aaa410fc93 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py @@ -115,10 +115,12 @@ from ._models_py3 import AzureSqlSource from ._models_py3 import AzureSqlTableDataset from ._models_py3 import AzureStorageLinkedService +from ._models_py3 import AzureSynapseArtifactsLinkedService from ._models_py3 import 
AzureTableDataset from ._models_py3 import AzureTableSink from ._models_py3 import AzureTableSource from ._models_py3 import AzureTableStorageLinkedService +from ._models_py3 import BigDataPoolParametrizationReference from ._models_py3 import BinaryDataset from ._models_py3 import BinaryReadSettings from ._models_py3 import BinarySink @@ -302,6 +304,7 @@ from ._models_py3 import GoogleCloudStorageLinkedService from ._models_py3 import GoogleCloudStorageLocation from ._models_py3 import GoogleCloudStorageReadSettings +from ._models_py3 import GoogleSheetsLinkedService from ._models_py3 import GreenplumLinkedService from ._models_py3 import GreenplumSource from ._models_py3 import GreenplumTableDataset @@ -436,6 +439,7 @@ from ._models_py3 import NetezzaPartitionSettings from ._models_py3 import NetezzaSource from ._models_py3 import NetezzaTableDataset +from ._models_py3 import NotebookParameter from ._models_py3 import ODataLinkedService from ._models_py3 import ODataResourceDataset from ._models_py3 import ODataSource @@ -657,6 +661,10 @@ from ._models_py3 import SybaseLinkedService from ._models_py3 import SybaseSource from ._models_py3 import SybaseTableDataset +from ._models_py3 import SynapseNotebookActivity +from ._models_py3 import SynapseNotebookReference +from ._models_py3 import SynapseSparkJobDefinitionActivity +from ._models_py3 import SynapseSparkJobReference from ._models_py3 import TabularSource from ._models_py3 import TabularTranslator from ._models_py3 import TarGZipReadSettings @@ -717,933 +725,957 @@ from ._models_py3 import ZohoObjectDataset from ._models_py3 import ZohoSource - -from ._data_factory_management_client_enums import ( - AmazonRdsForOraclePartitionOption, - AvroCompressionCodec, - AzureFunctionActivityMethod, - AzureSearchIndexWriteBehaviorType, - BlobEventTypes, - CassandraSourceReadConsistencyLevels, - CompressionCodec, - CopyBehaviorType, - CosmosDbConnectionMode, - CosmosDbServicePrincipalCredentialType, - CredentialReferenceType, - DataFlowComputeType, - DataFlowDebugCommandType, - DataFlowReferenceType, - DatasetCompressionLevel, - DayOfWeek, - DaysOfWeek, - Db2AuthenticationType, - DependencyCondition, - DynamicsAuthenticationType, - DynamicsDeploymentType, - DynamicsSinkWriteBehavior, - EventSubscriptionStatus, - FactoryIdentityType, - FtpAuthenticationType, - GlobalParameterType, - GoogleAdWordsAuthenticationType, - GoogleBigQueryAuthenticationType, - HBaseAuthenticationType, - HDInsightActivityDebugInfoOption, - HdiNodeTypes, - HiveAuthenticationType, - HiveServerType, - HiveThriftTransportProtocol, - HttpAuthenticationType, - ImpalaAuthenticationType, - IntegrationRuntimeAuthKeyName, - IntegrationRuntimeAutoUpdate, - IntegrationRuntimeEdition, - IntegrationRuntimeEntityReferenceType, - IntegrationRuntimeInternalChannelEncryptionMode, - IntegrationRuntimeLicenseType, - IntegrationRuntimeSsisCatalogPricingTier, - IntegrationRuntimeState, - IntegrationRuntimeType, - IntegrationRuntimeUpdateResult, - JsonFormatFilePattern, - JsonWriteFilePattern, - ManagedIntegrationRuntimeNodeStatus, - ManagedVirtualNetworkReferenceType, - MongoDbAuthenticationType, - NetezzaPartitionOption, - ODataAadServicePrincipalCredentialType, - ODataAuthenticationType, - OraclePartitionOption, - OrcCompressionCodec, - ParameterType, - PhoenixAuthenticationType, - PolybaseSettingsRejectType, - PrestoAuthenticationType, - PublicNetworkAccess, - RecurrenceFrequency, - RestServiceAuthenticationType, - RunQueryFilterOperand, - RunQueryFilterOperator, - RunQueryOrder, - 
RunQueryOrderByField, - SalesforceSinkWriteBehavior, - SalesforceSourceReadBehavior, - SapCloudForCustomerSinkWriteBehavior, - SapHanaAuthenticationType, - SapHanaPartitionOption, - SapTablePartitionOption, - ScriptActivityLogDestination, - ScriptActivityParameterDirection, - ScriptActivityParameterType, - ScriptType, - SelfHostedIntegrationRuntimeNodeStatus, - ServiceNowAuthenticationType, - ServicePrincipalCredentialType, - SftpAuthenticationType, - SparkAuthenticationType, - SparkServerType, - SparkThriftTransportProtocol, - SqlAlwaysEncryptedAkvAuthType, - SqlDWWriteBehaviorEnum, - SqlPartitionOption, - SqlWriteBehaviorEnum, - SsisLogLocationType, - SsisObjectMetadataType, - SsisPackageLocationType, - StoredProcedureParameterType, - SybaseAuthenticationType, - TeamDeskAuthenticationType, - TeradataAuthenticationType, - TeradataPartitionOption, - TriggerReferenceType, - TriggerRunStatus, - TriggerRuntimeState, - TumblingWindowFrequency, - VariableType, - WebActivityMethod, - WebAuthenticationType, - WebHookActivityMethod, - ZendeskAuthenticationType, -) +from ._data_factory_management_client_enums import AmazonRdsForOraclePartitionOption +from ._data_factory_management_client_enums import AvroCompressionCodec +from ._data_factory_management_client_enums import AzureFunctionActivityMethod +from ._data_factory_management_client_enums import AzureSearchIndexWriteBehaviorType +from ._data_factory_management_client_enums import BigDataPoolReferenceType +from ._data_factory_management_client_enums import BlobEventTypes +from ._data_factory_management_client_enums import CassandraSourceReadConsistencyLevels +from ._data_factory_management_client_enums import CompressionCodec +from ._data_factory_management_client_enums import CopyBehaviorType +from ._data_factory_management_client_enums import CosmosDbConnectionMode +from ._data_factory_management_client_enums import CosmosDbServicePrincipalCredentialType +from ._data_factory_management_client_enums import CredentialReferenceType +from ._data_factory_management_client_enums import DataFlowComputeType +from ._data_factory_management_client_enums import DataFlowDebugCommandType +from ._data_factory_management_client_enums import DataFlowReferenceType +from ._data_factory_management_client_enums import DatasetCompressionLevel +from ._data_factory_management_client_enums import DatasetReferenceType +from ._data_factory_management_client_enums import DayOfWeek +from ._data_factory_management_client_enums import DaysOfWeek +from ._data_factory_management_client_enums import Db2AuthenticationType +from ._data_factory_management_client_enums import DependencyCondition +from ._data_factory_management_client_enums import DynamicsAuthenticationType +from ._data_factory_management_client_enums import DynamicsDeploymentType +from ._data_factory_management_client_enums import DynamicsSinkWriteBehavior +from ._data_factory_management_client_enums import EventSubscriptionStatus +from ._data_factory_management_client_enums import ExpressionType +from ._data_factory_management_client_enums import FactoryIdentityType +from ._data_factory_management_client_enums import FtpAuthenticationType +from ._data_factory_management_client_enums import GlobalParameterType +from ._data_factory_management_client_enums import GoogleAdWordsAuthenticationType +from ._data_factory_management_client_enums import GoogleBigQueryAuthenticationType +from ._data_factory_management_client_enums import HBaseAuthenticationType +from ._data_factory_management_client_enums import 
HDInsightActivityDebugInfoOption +from ._data_factory_management_client_enums import HdiNodeTypes +from ._data_factory_management_client_enums import HiveAuthenticationType +from ._data_factory_management_client_enums import HiveServerType +from ._data_factory_management_client_enums import HiveThriftTransportProtocol +from ._data_factory_management_client_enums import HttpAuthenticationType +from ._data_factory_management_client_enums import ImpalaAuthenticationType +from ._data_factory_management_client_enums import IntegrationRuntimeAuthKeyName +from ._data_factory_management_client_enums import IntegrationRuntimeAutoUpdate +from ._data_factory_management_client_enums import IntegrationRuntimeEdition +from ._data_factory_management_client_enums import IntegrationRuntimeEntityReferenceType +from ._data_factory_management_client_enums import IntegrationRuntimeInternalChannelEncryptionMode +from ._data_factory_management_client_enums import IntegrationRuntimeLicenseType +from ._data_factory_management_client_enums import IntegrationRuntimeReferenceType +from ._data_factory_management_client_enums import IntegrationRuntimeSsisCatalogPricingTier +from ._data_factory_management_client_enums import IntegrationRuntimeState +from ._data_factory_management_client_enums import IntegrationRuntimeType +from ._data_factory_management_client_enums import IntegrationRuntimeUpdateResult +from ._data_factory_management_client_enums import JsonFormatFilePattern +from ._data_factory_management_client_enums import JsonWriteFilePattern +from ._data_factory_management_client_enums import ManagedIntegrationRuntimeNodeStatus +from ._data_factory_management_client_enums import ManagedVirtualNetworkReferenceType +from ._data_factory_management_client_enums import MongoDbAuthenticationType +from ._data_factory_management_client_enums import NetezzaPartitionOption +from ._data_factory_management_client_enums import NotebookParameterType +from ._data_factory_management_client_enums import NotebookReferenceType +from ._data_factory_management_client_enums import ODataAadServicePrincipalCredentialType +from ._data_factory_management_client_enums import ODataAuthenticationType +from ._data_factory_management_client_enums import OraclePartitionOption +from ._data_factory_management_client_enums import OrcCompressionCodec +from ._data_factory_management_client_enums import ParameterType +from ._data_factory_management_client_enums import PhoenixAuthenticationType +from ._data_factory_management_client_enums import PipelineReferenceType +from ._data_factory_management_client_enums import PolybaseSettingsRejectType +from ._data_factory_management_client_enums import PrestoAuthenticationType +from ._data_factory_management_client_enums import PublicNetworkAccess +from ._data_factory_management_client_enums import RecurrenceFrequency +from ._data_factory_management_client_enums import RestServiceAuthenticationType +from ._data_factory_management_client_enums import RunQueryFilterOperand +from ._data_factory_management_client_enums import RunQueryFilterOperator +from ._data_factory_management_client_enums import RunQueryOrder +from ._data_factory_management_client_enums import RunQueryOrderByField +from ._data_factory_management_client_enums import SalesforceSinkWriteBehavior +from ._data_factory_management_client_enums import SalesforceSourceReadBehavior +from ._data_factory_management_client_enums import SapCloudForCustomerSinkWriteBehavior +from ._data_factory_management_client_enums import SapHanaAuthenticationType +from 
._data_factory_management_client_enums import SapHanaPartitionOption +from ._data_factory_management_client_enums import SapTablePartitionOption +from ._data_factory_management_client_enums import ScriptActivityLogDestination +from ._data_factory_management_client_enums import ScriptActivityParameterDirection +from ._data_factory_management_client_enums import ScriptActivityParameterType +from ._data_factory_management_client_enums import ScriptType +from ._data_factory_management_client_enums import SelfHostedIntegrationRuntimeNodeStatus +from ._data_factory_management_client_enums import ServiceNowAuthenticationType +from ._data_factory_management_client_enums import ServicePrincipalCredentialType +from ._data_factory_management_client_enums import SftpAuthenticationType +from ._data_factory_management_client_enums import SparkAuthenticationType +from ._data_factory_management_client_enums import SparkJobReferenceType +from ._data_factory_management_client_enums import SparkServerType +from ._data_factory_management_client_enums import SparkThriftTransportProtocol +from ._data_factory_management_client_enums import SqlAlwaysEncryptedAkvAuthType +from ._data_factory_management_client_enums import SqlDWWriteBehaviorEnum +from ._data_factory_management_client_enums import SqlPartitionOption +from ._data_factory_management_client_enums import SqlWriteBehaviorEnum +from ._data_factory_management_client_enums import SsisLogLocationType +from ._data_factory_management_client_enums import SsisObjectMetadataType +from ._data_factory_management_client_enums import SsisPackageLocationType +from ._data_factory_management_client_enums import StoredProcedureParameterType +from ._data_factory_management_client_enums import SybaseAuthenticationType +from ._data_factory_management_client_enums import TeamDeskAuthenticationType +from ._data_factory_management_client_enums import TeradataAuthenticationType +from ._data_factory_management_client_enums import TeradataPartitionOption +from ._data_factory_management_client_enums import TriggerReferenceType +from ._data_factory_management_client_enums import TriggerRunStatus +from ._data_factory_management_client_enums import TriggerRuntimeState +from ._data_factory_management_client_enums import TumblingWindowFrequency +from ._data_factory_management_client_enums import Type +from ._data_factory_management_client_enums import VariableType +from ._data_factory_management_client_enums import WebActivityMethod +from ._data_factory_management_client_enums import WebAuthenticationType +from ._data_factory_management_client_enums import WebHookActivityMethod +from ._data_factory_management_client_enums import ZendeskAuthenticationType from ._patch import __all__ as _patch_all from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import from ._patch import patch_sdk as _patch_sdk + __all__ = [ - 'AccessPolicyResponse', - 'Activity', - 'ActivityDependency', - 'ActivityPolicy', - 'ActivityRun', - 'ActivityRunsQueryResponse', - 'AddDataFlowToDebugSessionResponse', - 'AdditionalColumns', - 'AmazonMWSLinkedService', - 'AmazonMWSObjectDataset', - 'AmazonMWSSource', - 'AmazonRdsForOracleLinkedService', - 'AmazonRdsForOraclePartitionSettings', - 'AmazonRdsForOracleSource', - 'AmazonRdsForOracleTableDataset', - 'AmazonRdsForSqlServerLinkedService', - 'AmazonRdsForSqlServerSource', - 'AmazonRdsForSqlServerTableDataset', - 'AmazonRedshiftLinkedService', - 'AmazonRedshiftSource', - 'AmazonRedshiftTableDataset', - 'AmazonS3CompatibleLinkedService', - 
'AmazonS3CompatibleLocation', - 'AmazonS3CompatibleReadSettings', - 'AmazonS3Dataset', - 'AmazonS3LinkedService', - 'AmazonS3Location', - 'AmazonS3ReadSettings', - 'AppFiguresLinkedService', - 'AppendVariableActivity', - 'ArmIdWrapper', - 'AsanaLinkedService', - 'AvroDataset', - 'AvroFormat', - 'AvroSink', - 'AvroSource', - 'AvroWriteSettings', - 'AzPowerShellSetup', - 'AzureBatchLinkedService', - 'AzureBlobDataset', - 'AzureBlobFSDataset', - 'AzureBlobFSLinkedService', - 'AzureBlobFSLocation', - 'AzureBlobFSReadSettings', - 'AzureBlobFSSink', - 'AzureBlobFSSource', - 'AzureBlobFSWriteSettings', - 'AzureBlobStorageLinkedService', - 'AzureBlobStorageLocation', - 'AzureBlobStorageReadSettings', - 'AzureBlobStorageWriteSettings', - 'AzureDataExplorerCommandActivity', - 'AzureDataExplorerLinkedService', - 'AzureDataExplorerSink', - 'AzureDataExplorerSource', - 'AzureDataExplorerTableDataset', - 'AzureDataLakeAnalyticsLinkedService', - 'AzureDataLakeStoreDataset', - 'AzureDataLakeStoreLinkedService', - 'AzureDataLakeStoreLocation', - 'AzureDataLakeStoreReadSettings', - 'AzureDataLakeStoreSink', - 'AzureDataLakeStoreSource', - 'AzureDataLakeStoreWriteSettings', - 'AzureDatabricksDeltaLakeDataset', - 'AzureDatabricksDeltaLakeExportCommand', - 'AzureDatabricksDeltaLakeImportCommand', - 'AzureDatabricksDeltaLakeLinkedService', - 'AzureDatabricksDeltaLakeSink', - 'AzureDatabricksDeltaLakeSource', - 'AzureDatabricksLinkedService', - 'AzureFileStorageLinkedService', - 'AzureFileStorageLocation', - 'AzureFileStorageReadSettings', - 'AzureFileStorageWriteSettings', - 'AzureFunctionActivity', - 'AzureFunctionLinkedService', - 'AzureKeyVaultLinkedService', - 'AzureKeyVaultSecretReference', - 'AzureMLBatchExecutionActivity', - 'AzureMLExecutePipelineActivity', - 'AzureMLLinkedService', - 'AzureMLServiceLinkedService', - 'AzureMLUpdateResourceActivity', - 'AzureMLWebServiceFile', - 'AzureMariaDBLinkedService', - 'AzureMariaDBSource', - 'AzureMariaDBTableDataset', - 'AzureMySqlLinkedService', - 'AzureMySqlSink', - 'AzureMySqlSource', - 'AzureMySqlTableDataset', - 'AzurePostgreSqlLinkedService', - 'AzurePostgreSqlSink', - 'AzurePostgreSqlSource', - 'AzurePostgreSqlTableDataset', - 'AzureQueueSink', - 'AzureSearchIndexDataset', - 'AzureSearchIndexSink', - 'AzureSearchLinkedService', - 'AzureSqlDWLinkedService', - 'AzureSqlDWTableDataset', - 'AzureSqlDatabaseLinkedService', - 'AzureSqlMILinkedService', - 'AzureSqlMITableDataset', - 'AzureSqlSink', - 'AzureSqlSource', - 'AzureSqlTableDataset', - 'AzureStorageLinkedService', - 'AzureTableDataset', - 'AzureTableSink', - 'AzureTableSource', - 'AzureTableStorageLinkedService', - 'BinaryDataset', - 'BinaryReadSettings', - 'BinarySink', - 'BinarySource', - 'BlobEventsTrigger', - 'BlobSink', - 'BlobSource', - 'BlobTrigger', - 'CMKIdentityDefinition', - 'CassandraLinkedService', - 'CassandraSource', - 'CassandraTableDataset', - 'ChainingTrigger', - 'CloudError', - 'CmdkeySetup', - 'CommonDataServiceForAppsEntityDataset', - 'CommonDataServiceForAppsLinkedService', - 'CommonDataServiceForAppsSink', - 'CommonDataServiceForAppsSource', - 'ComponentSetup', - 'CompressionReadSettings', - 'ConcurLinkedService', - 'ConcurObjectDataset', - 'ConcurSource', - 'ConnectionStateProperties', - 'ControlActivity', - 'CopyActivity', - 'CopyActivityLogSettings', - 'CopySink', - 'CopySource', - 'CopyTranslator', - 'CosmosDbLinkedService', - 'CosmosDbMongoDbApiCollectionDataset', - 'CosmosDbMongoDbApiLinkedService', - 'CosmosDbMongoDbApiSink', - 'CosmosDbMongoDbApiSource', - 
'CosmosDbSqlApiCollectionDataset', - 'CosmosDbSqlApiSink', - 'CosmosDbSqlApiSource', - 'CouchbaseLinkedService', - 'CouchbaseSource', - 'CouchbaseTableDataset', - 'CreateDataFlowDebugSessionRequest', - 'CreateDataFlowDebugSessionResponse', - 'CreateLinkedIntegrationRuntimeRequest', - 'CreateRunResponse', - 'Credential', - 'CredentialReference', - 'CredentialResource', - 'CustomActivity', - 'CustomActivityReferenceObject', - 'CustomDataSourceLinkedService', - 'CustomDataset', - 'CustomEventsTrigger', - 'CustomSetupBase', - 'DWCopyCommandDefaultValue', - 'DWCopyCommandSettings', - 'DataFlow', - 'DataFlowDebugCommandPayload', - 'DataFlowDebugCommandRequest', - 'DataFlowDebugCommandResponse', - 'DataFlowDebugPackage', - 'DataFlowDebugPackageDebugSettings', - 'DataFlowDebugResource', - 'DataFlowDebugSessionInfo', - 'DataFlowFolder', - 'DataFlowListResponse', - 'DataFlowReference', - 'DataFlowResource', - 'DataFlowSink', - 'DataFlowSource', - 'DataFlowSourceSetting', - 'DataFlowStagingInfo', - 'DataLakeAnalyticsUSQLActivity', - 'DatabricksNotebookActivity', - 'DatabricksSparkJarActivity', - 'DatabricksSparkPythonActivity', - 'Dataset', - 'DatasetCompression', - 'DatasetDataElement', - 'DatasetDebugResource', - 'DatasetFolder', - 'DatasetListResponse', - 'DatasetLocation', - 'DatasetReference', - 'DatasetResource', - 'DatasetSchemaDataElement', - 'DatasetStorageFormat', - 'DataworldLinkedService', - 'Db2LinkedService', - 'Db2Source', - 'Db2TableDataset', - 'DeleteActivity', - 'DeleteDataFlowDebugSessionRequest', - 'DelimitedTextDataset', - 'DelimitedTextReadSettings', - 'DelimitedTextSink', - 'DelimitedTextSource', - 'DelimitedTextWriteSettings', - 'DependencyReference', - 'DistcpSettings', - 'DocumentDbCollectionDataset', - 'DocumentDbCollectionSink', - 'DocumentDbCollectionSource', - 'DrillLinkedService', - 'DrillSource', - 'DrillTableDataset', - 'DynamicsAXLinkedService', - 'DynamicsAXResourceDataset', - 'DynamicsAXSource', - 'DynamicsCrmEntityDataset', - 'DynamicsCrmLinkedService', - 'DynamicsCrmSink', - 'DynamicsCrmSource', - 'DynamicsEntityDataset', - 'DynamicsLinkedService', - 'DynamicsSink', - 'DynamicsSource', - 'EloquaLinkedService', - 'EloquaObjectDataset', - 'EloquaSource', - 'EncryptionConfiguration', - 'EntityReference', - 'EnvironmentVariableSetup', - 'ExcelDataset', - 'ExcelSource', - 'ExecuteDataFlowActivity', - 'ExecuteDataFlowActivityTypeProperties', - 'ExecuteDataFlowActivityTypePropertiesCompute', - 'ExecutePipelineActivity', - 'ExecutePipelineActivityPolicy', - 'ExecutePowerQueryActivityTypeProperties', - 'ExecuteSSISPackageActivity', - 'ExecuteWranglingDataflowActivity', - 'ExecutionActivity', - 'ExportSettings', - 'ExposureControlBatchRequest', - 'ExposureControlBatchResponse', - 'ExposureControlRequest', - 'ExposureControlResponse', - 'Expression', - 'Factory', - 'FactoryGitHubConfiguration', - 'FactoryIdentity', - 'FactoryListResponse', - 'FactoryRepoConfiguration', - 'FactoryRepoUpdate', - 'FactoryUpdateParameters', - 'FactoryVSTSConfiguration', - 'FailActivity', - 'FileServerLinkedService', - 'FileServerLocation', - 'FileServerReadSettings', - 'FileServerWriteSettings', - 'FileShareDataset', - 'FileSystemSink', - 'FileSystemSource', - 'FilterActivity', - 'Flowlet', - 'ForEachActivity', - 'FormatReadSettings', - 'FormatWriteSettings', - 'FtpReadSettings', - 'FtpServerLinkedService', - 'FtpServerLocation', - 'GetDataFactoryOperationStatusResponse', - 'GetMetadataActivity', - 'GetSsisObjectMetadataRequest', - 'GitHubAccessTokenRequest', - 'GitHubAccessTokenResponse', - 
'GitHubClientSecret', - 'GlobalParameterListResponse', - 'GlobalParameterResource', - 'GlobalParameterSpecification', - 'GoogleAdWordsLinkedService', - 'GoogleAdWordsObjectDataset', - 'GoogleAdWordsSource', - 'GoogleBigQueryLinkedService', - 'GoogleBigQueryObjectDataset', - 'GoogleBigQuerySource', - 'GoogleCloudStorageLinkedService', - 'GoogleCloudStorageLocation', - 'GoogleCloudStorageReadSettings', - 'GreenplumLinkedService', - 'GreenplumSource', - 'GreenplumTableDataset', - 'HBaseLinkedService', - 'HBaseObjectDataset', - 'HBaseSource', - 'HDInsightHiveActivity', - 'HDInsightLinkedService', - 'HDInsightMapReduceActivity', - 'HDInsightOnDemandLinkedService', - 'HDInsightPigActivity', - 'HDInsightSparkActivity', - 'HDInsightStreamingActivity', - 'HdfsLinkedService', - 'HdfsLocation', - 'HdfsReadSettings', - 'HdfsSource', - 'HiveLinkedService', - 'HiveObjectDataset', - 'HiveSource', - 'HttpDataset', - 'HttpLinkedService', - 'HttpReadSettings', - 'HttpServerLocation', - 'HttpSource', - 'HubspotLinkedService', - 'HubspotObjectDataset', - 'HubspotSource', - 'IfConditionActivity', - 'ImpalaLinkedService', - 'ImpalaObjectDataset', - 'ImpalaSource', - 'ImportSettings', - 'InformixLinkedService', - 'InformixSink', - 'InformixSource', - 'InformixTableDataset', - 'IntegrationRuntime', - 'IntegrationRuntimeAuthKeys', - 'IntegrationRuntimeComputeProperties', - 'IntegrationRuntimeConnectionInfo', - 'IntegrationRuntimeCustomSetupScriptProperties', - 'IntegrationRuntimeCustomerVirtualNetwork', - 'IntegrationRuntimeDataFlowProperties', - 'IntegrationRuntimeDataProxyProperties', - 'IntegrationRuntimeDebugResource', - 'IntegrationRuntimeListResponse', - 'IntegrationRuntimeMonitoringData', - 'IntegrationRuntimeNodeIpAddress', - 'IntegrationRuntimeNodeMonitoringData', - 'IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint', - 'IntegrationRuntimeOutboundNetworkDependenciesEndpoint', - 'IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails', - 'IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse', - 'IntegrationRuntimeReference', - 'IntegrationRuntimeRegenerateKeyParameters', - 'IntegrationRuntimeResource', - 'IntegrationRuntimeSsisCatalogInfo', - 'IntegrationRuntimeSsisProperties', - 'IntegrationRuntimeStatus', - 'IntegrationRuntimeStatusListResponse', - 'IntegrationRuntimeStatusResponse', - 'IntegrationRuntimeVNetProperties', - 'JiraLinkedService', - 'JiraObjectDataset', - 'JiraSource', - 'JsonDataset', - 'JsonFormat', - 'JsonReadSettings', - 'JsonSink', - 'JsonSource', - 'JsonWriteSettings', - 'LinkedIntegrationRuntime', - 'LinkedIntegrationRuntimeKeyAuthorization', - 'LinkedIntegrationRuntimeRbacAuthorization', - 'LinkedIntegrationRuntimeRequest', - 'LinkedIntegrationRuntimeType', - 'LinkedService', - 'LinkedServiceDebugResource', - 'LinkedServiceListResponse', - 'LinkedServiceReference', - 'LinkedServiceResource', - 'LogLocationSettings', - 'LogSettings', - 'LogStorageSettings', - 'LookupActivity', - 'MagentoLinkedService', - 'MagentoObjectDataset', - 'MagentoSource', - 'ManagedIdentityCredential', - 'ManagedIntegrationRuntime', - 'ManagedIntegrationRuntimeError', - 'ManagedIntegrationRuntimeNode', - 'ManagedIntegrationRuntimeOperationResult', - 'ManagedIntegrationRuntimeStatus', - 'ManagedPrivateEndpoint', - 'ManagedPrivateEndpointListResponse', - 'ManagedPrivateEndpointResource', - 'ManagedVirtualNetwork', - 'ManagedVirtualNetworkListResponse', - 'ManagedVirtualNetworkReference', - 'ManagedVirtualNetworkResource', - 'MappingDataFlow', - 'MariaDBLinkedService', - 
'MariaDBSource', - 'MariaDBTableDataset', - 'MarketoLinkedService', - 'MarketoObjectDataset', - 'MarketoSource', - 'MetadataItem', - 'MicrosoftAccessLinkedService', - 'MicrosoftAccessSink', - 'MicrosoftAccessSource', - 'MicrosoftAccessTableDataset', - 'MongoDbAtlasCollectionDataset', - 'MongoDbAtlasLinkedService', - 'MongoDbAtlasSink', - 'MongoDbAtlasSource', - 'MongoDbCollectionDataset', - 'MongoDbCursorMethodsProperties', - 'MongoDbLinkedService', - 'MongoDbSource', - 'MongoDbV2CollectionDataset', - 'MongoDbV2LinkedService', - 'MongoDbV2Sink', - 'MongoDbV2Source', - 'MultiplePipelineTrigger', - 'MySqlLinkedService', - 'MySqlSource', - 'MySqlTableDataset', - 'NetezzaLinkedService', - 'NetezzaPartitionSettings', - 'NetezzaSource', - 'NetezzaTableDataset', - 'ODataLinkedService', - 'ODataResourceDataset', - 'ODataSource', - 'OdbcLinkedService', - 'OdbcSink', - 'OdbcSource', - 'OdbcTableDataset', - 'Office365Dataset', - 'Office365LinkedService', - 'Office365Source', - 'Operation', - 'OperationDisplay', - 'OperationListResponse', - 'OperationLogSpecification', - 'OperationMetricAvailability', - 'OperationMetricDimension', - 'OperationMetricSpecification', - 'OperationServiceSpecification', - 'OracleCloudStorageLinkedService', - 'OracleCloudStorageLocation', - 'OracleCloudStorageReadSettings', - 'OracleLinkedService', - 'OraclePartitionSettings', - 'OracleServiceCloudLinkedService', - 'OracleServiceCloudObjectDataset', - 'OracleServiceCloudSource', - 'OracleSink', - 'OracleSource', - 'OracleTableDataset', - 'OrcDataset', - 'OrcFormat', - 'OrcSink', - 'OrcSource', - 'OrcWriteSettings', - 'PackageStore', - 'ParameterSpecification', - 'ParquetDataset', - 'ParquetFormat', - 'ParquetSink', - 'ParquetSource', - 'ParquetWriteSettings', - 'PaypalLinkedService', - 'PaypalObjectDataset', - 'PaypalSource', - 'PhoenixLinkedService', - 'PhoenixObjectDataset', - 'PhoenixSource', - 'PipelineElapsedTimeMetricPolicy', - 'PipelineFolder', - 'PipelineListResponse', - 'PipelinePolicy', - 'PipelineReference', - 'PipelineResource', - 'PipelineRun', - 'PipelineRunInvokedBy', - 'PipelineRunsQueryResponse', - 'PolybaseSettings', - 'PostgreSqlLinkedService', - 'PostgreSqlSource', - 'PostgreSqlTableDataset', - 'PowerQuerySink', - 'PowerQuerySinkMapping', - 'PowerQuerySource', - 'PrestoLinkedService', - 'PrestoObjectDataset', - 'PrestoSource', - 'PrivateEndpoint', - 'PrivateEndpointConnectionListResponse', - 'PrivateEndpointConnectionResource', - 'PrivateLinkConnectionApprovalRequest', - 'PrivateLinkConnectionApprovalRequestResource', - 'PrivateLinkConnectionState', - 'PrivateLinkResource', - 'PrivateLinkResourceProperties', - 'PrivateLinkResourcesWrapper', - 'PurviewConfiguration', - 'QueryDataFlowDebugSessionsResponse', - 'QuickBooksLinkedService', - 'QuickBooksObjectDataset', - 'QuickBooksSource', - 'QuickbaseLinkedService', - 'RecurrenceSchedule', - 'RecurrenceScheduleOccurrence', - 'RedirectIncompatibleRowSettings', - 'RedshiftUnloadSettings', - 'RelationalSource', - 'RelationalTableDataset', - 'RemotePrivateEndpointConnection', - 'RerunTumblingWindowTrigger', - 'Resource', - 'ResponsysLinkedService', - 'ResponsysObjectDataset', - 'ResponsysSource', - 'RestResourceDataset', - 'RestServiceLinkedService', - 'RestSink', - 'RestSource', - 'RetryPolicy', - 'RunFilterParameters', - 'RunQueryFilter', - 'RunQueryOrderBy', - 'SSISAccessCredential', - 'SSISChildPackage', - 'SSISExecutionCredential', - 'SSISExecutionParameter', - 'SSISLogLocation', - 'SSISPackageLocation', - 'SSISPropertyOverride', - 'SalesforceLinkedService', 
- 'SalesforceMarketingCloudLinkedService', - 'SalesforceMarketingCloudObjectDataset', - 'SalesforceMarketingCloudSource', - 'SalesforceObjectDataset', - 'SalesforceServiceCloudLinkedService', - 'SalesforceServiceCloudObjectDataset', - 'SalesforceServiceCloudSink', - 'SalesforceServiceCloudSource', - 'SalesforceSink', - 'SalesforceSource', - 'SapBWLinkedService', - 'SapBwCubeDataset', - 'SapBwSource', - 'SapCloudForCustomerLinkedService', - 'SapCloudForCustomerResourceDataset', - 'SapCloudForCustomerSink', - 'SapCloudForCustomerSource', - 'SapEccLinkedService', - 'SapEccResourceDataset', - 'SapEccSource', - 'SapHanaLinkedService', - 'SapHanaPartitionSettings', - 'SapHanaSource', - 'SapHanaTableDataset', - 'SapOdpLinkedService', - 'SapOdpResourceDataset', - 'SapOdpSource', - 'SapOpenHubLinkedService', - 'SapOpenHubSource', - 'SapOpenHubTableDataset', - 'SapTableLinkedService', - 'SapTablePartitionSettings', - 'SapTableResourceDataset', - 'SapTableSource', - 'ScheduleTrigger', - 'ScheduleTriggerRecurrence', - 'ScriptAction', - 'ScriptActivity', - 'ScriptActivityParameter', - 'ScriptActivityScriptBlock', - 'ScriptActivityTypePropertiesLogSettings', - 'SecretBase', - 'SecureString', - 'SelfDependencyTumblingWindowTriggerReference', - 'SelfHostedIntegrationRuntime', - 'SelfHostedIntegrationRuntimeNode', - 'SelfHostedIntegrationRuntimeStatus', - 'ServiceNowLinkedService', - 'ServiceNowObjectDataset', - 'ServiceNowSource', - 'ServicePrincipalCredential', - 'SetVariableActivity', - 'SftpLocation', - 'SftpReadSettings', - 'SftpServerLinkedService', - 'SftpWriteSettings', - 'SharePointOnlineListLinkedService', - 'SharePointOnlineListResourceDataset', - 'SharePointOnlineListSource', - 'ShopifyLinkedService', - 'ShopifyObjectDataset', - 'ShopifySource', - 'SkipErrorFile', - 'SmartsheetLinkedService', - 'SnowflakeDataset', - 'SnowflakeExportCopyCommand', - 'SnowflakeImportCopyCommand', - 'SnowflakeLinkedService', - 'SnowflakeSink', - 'SnowflakeSource', - 'SparkLinkedService', - 'SparkObjectDataset', - 'SparkSource', - 'SqlAlwaysEncryptedProperties', - 'SqlDWSink', - 'SqlDWSource', - 'SqlDWUpsertSettings', - 'SqlMISink', - 'SqlMISource', - 'SqlPartitionSettings', - 'SqlServerLinkedService', - 'SqlServerSink', - 'SqlServerSource', - 'SqlServerStoredProcedureActivity', - 'SqlServerTableDataset', - 'SqlSink', - 'SqlSource', - 'SqlUpsertSettings', - 'SquareLinkedService', - 'SquareObjectDataset', - 'SquareSource', - 'SsisEnvironment', - 'SsisEnvironmentReference', - 'SsisFolder', - 'SsisObjectMetadata', - 'SsisObjectMetadataListResponse', - 'SsisObjectMetadataStatusResponse', - 'SsisPackage', - 'SsisParameter', - 'SsisProject', - 'SsisVariable', - 'StagingSettings', - 'StoreReadSettings', - 'StoreWriteSettings', - 'StoredProcedureParameter', - 'SubResource', - 'SubResourceDebugResource', - 'SwitchActivity', - 'SwitchCase', - 'SybaseLinkedService', - 'SybaseSource', - 'SybaseTableDataset', - 'TabularSource', - 'TabularTranslator', - 'TarGZipReadSettings', - 'TarReadSettings', - 'TeamDeskLinkedService', - 'TeradataLinkedService', - 'TeradataPartitionSettings', - 'TeradataSource', - 'TeradataTableDataset', - 'TextFormat', - 'Transformation', - 'Trigger', - 'TriggerDependencyReference', - 'TriggerFilterParameters', - 'TriggerListResponse', - 'TriggerPipelineReference', - 'TriggerQueryResponse', - 'TriggerReference', - 'TriggerResource', - 'TriggerRun', - 'TriggerRunsQueryResponse', - 'TriggerSubscriptionOperationStatus', - 'TumblingWindowTrigger', - 'TumblingWindowTriggerDependencyReference', - 
'TwilioLinkedService', - 'TypeConversionSettings', - 'UntilActivity', - 'UpdateIntegrationRuntimeNodeRequest', - 'UpdateIntegrationRuntimeRequest', - 'UserAccessPolicy', - 'UserProperty', - 'ValidationActivity', - 'VariableSpecification', - 'VerticaLinkedService', - 'VerticaSource', - 'VerticaTableDataset', - 'WaitActivity', - 'WebActivity', - 'WebActivityAuthentication', - 'WebAnonymousAuthentication', - 'WebBasicAuthentication', - 'WebClientCertificateAuthentication', - 'WebHookActivity', - 'WebLinkedService', - 'WebLinkedServiceTypeProperties', - 'WebSource', - 'WebTableDataset', - 'WranglingDataFlow', - 'XeroLinkedService', - 'XeroObjectDataset', - 'XeroSource', - 'XmlDataset', - 'XmlReadSettings', - 'XmlSource', - 'ZendeskLinkedService', - 'ZipDeflateReadSettings', - 'ZohoLinkedService', - 'ZohoObjectDataset', - 'ZohoSource', - 'AmazonRdsForOraclePartitionOption', - 'AvroCompressionCodec', - 'AzureFunctionActivityMethod', - 'AzureSearchIndexWriteBehaviorType', - 'BlobEventTypes', - 'CassandraSourceReadConsistencyLevels', - 'CompressionCodec', - 'CopyBehaviorType', - 'CosmosDbConnectionMode', - 'CosmosDbServicePrincipalCredentialType', - 'CredentialReferenceType', - 'DataFlowComputeType', - 'DataFlowDebugCommandType', - 'DataFlowReferenceType', - 'DatasetCompressionLevel', - 'DayOfWeek', - 'DaysOfWeek', - 'Db2AuthenticationType', - 'DependencyCondition', - 'DynamicsAuthenticationType', - 'DynamicsDeploymentType', - 'DynamicsSinkWriteBehavior', - 'EventSubscriptionStatus', - 'FactoryIdentityType', - 'FtpAuthenticationType', - 'GlobalParameterType', - 'GoogleAdWordsAuthenticationType', - 'GoogleBigQueryAuthenticationType', - 'HBaseAuthenticationType', - 'HDInsightActivityDebugInfoOption', - 'HdiNodeTypes', - 'HiveAuthenticationType', - 'HiveServerType', - 'HiveThriftTransportProtocol', - 'HttpAuthenticationType', - 'ImpalaAuthenticationType', - 'IntegrationRuntimeAuthKeyName', - 'IntegrationRuntimeAutoUpdate', - 'IntegrationRuntimeEdition', - 'IntegrationRuntimeEntityReferenceType', - 'IntegrationRuntimeInternalChannelEncryptionMode', - 'IntegrationRuntimeLicenseType', - 'IntegrationRuntimeSsisCatalogPricingTier', - 'IntegrationRuntimeState', - 'IntegrationRuntimeType', - 'IntegrationRuntimeUpdateResult', - 'JsonFormatFilePattern', - 'JsonWriteFilePattern', - 'ManagedIntegrationRuntimeNodeStatus', - 'ManagedVirtualNetworkReferenceType', - 'MongoDbAuthenticationType', - 'NetezzaPartitionOption', - 'ODataAadServicePrincipalCredentialType', - 'ODataAuthenticationType', - 'OraclePartitionOption', - 'OrcCompressionCodec', - 'ParameterType', - 'PhoenixAuthenticationType', - 'PolybaseSettingsRejectType', - 'PrestoAuthenticationType', - 'PublicNetworkAccess', - 'RecurrenceFrequency', - 'RestServiceAuthenticationType', - 'RunQueryFilterOperand', - 'RunQueryFilterOperator', - 'RunQueryOrder', - 'RunQueryOrderByField', - 'SalesforceSinkWriteBehavior', - 'SalesforceSourceReadBehavior', - 'SapCloudForCustomerSinkWriteBehavior', - 'SapHanaAuthenticationType', - 'SapHanaPartitionOption', - 'SapTablePartitionOption', - 'ScriptActivityLogDestination', - 'ScriptActivityParameterDirection', - 'ScriptActivityParameterType', - 'ScriptType', - 'SelfHostedIntegrationRuntimeNodeStatus', - 'ServiceNowAuthenticationType', - 'ServicePrincipalCredentialType', - 'SftpAuthenticationType', - 'SparkAuthenticationType', - 'SparkServerType', - 'SparkThriftTransportProtocol', - 'SqlAlwaysEncryptedAkvAuthType', - 'SqlDWWriteBehaviorEnum', - 'SqlPartitionOption', - 'SqlWriteBehaviorEnum', - 'SsisLogLocationType', - 
'SsisObjectMetadataType', - 'SsisPackageLocationType', - 'StoredProcedureParameterType', - 'SybaseAuthenticationType', - 'TeamDeskAuthenticationType', - 'TeradataAuthenticationType', - 'TeradataPartitionOption', - 'TriggerReferenceType', - 'TriggerRunStatus', - 'TriggerRuntimeState', - 'TumblingWindowFrequency', - 'VariableType', - 'WebActivityMethod', - 'WebAuthenticationType', - 'WebHookActivityMethod', - 'ZendeskAuthenticationType', + "AccessPolicyResponse", + "Activity", + "ActivityDependency", + "ActivityPolicy", + "ActivityRun", + "ActivityRunsQueryResponse", + "AddDataFlowToDebugSessionResponse", + "AdditionalColumns", + "AmazonMWSLinkedService", + "AmazonMWSObjectDataset", + "AmazonMWSSource", + "AmazonRdsForOracleLinkedService", + "AmazonRdsForOraclePartitionSettings", + "AmazonRdsForOracleSource", + "AmazonRdsForOracleTableDataset", + "AmazonRdsForSqlServerLinkedService", + "AmazonRdsForSqlServerSource", + "AmazonRdsForSqlServerTableDataset", + "AmazonRedshiftLinkedService", + "AmazonRedshiftSource", + "AmazonRedshiftTableDataset", + "AmazonS3CompatibleLinkedService", + "AmazonS3CompatibleLocation", + "AmazonS3CompatibleReadSettings", + "AmazonS3Dataset", + "AmazonS3LinkedService", + "AmazonS3Location", + "AmazonS3ReadSettings", + "AppFiguresLinkedService", + "AppendVariableActivity", + "ArmIdWrapper", + "AsanaLinkedService", + "AvroDataset", + "AvroFormat", + "AvroSink", + "AvroSource", + "AvroWriteSettings", + "AzPowerShellSetup", + "AzureBatchLinkedService", + "AzureBlobDataset", + "AzureBlobFSDataset", + "AzureBlobFSLinkedService", + "AzureBlobFSLocation", + "AzureBlobFSReadSettings", + "AzureBlobFSSink", + "AzureBlobFSSource", + "AzureBlobFSWriteSettings", + "AzureBlobStorageLinkedService", + "AzureBlobStorageLocation", + "AzureBlobStorageReadSettings", + "AzureBlobStorageWriteSettings", + "AzureDataExplorerCommandActivity", + "AzureDataExplorerLinkedService", + "AzureDataExplorerSink", + "AzureDataExplorerSource", + "AzureDataExplorerTableDataset", + "AzureDataLakeAnalyticsLinkedService", + "AzureDataLakeStoreDataset", + "AzureDataLakeStoreLinkedService", + "AzureDataLakeStoreLocation", + "AzureDataLakeStoreReadSettings", + "AzureDataLakeStoreSink", + "AzureDataLakeStoreSource", + "AzureDataLakeStoreWriteSettings", + "AzureDatabricksDeltaLakeDataset", + "AzureDatabricksDeltaLakeExportCommand", + "AzureDatabricksDeltaLakeImportCommand", + "AzureDatabricksDeltaLakeLinkedService", + "AzureDatabricksDeltaLakeSink", + "AzureDatabricksDeltaLakeSource", + "AzureDatabricksLinkedService", + "AzureFileStorageLinkedService", + "AzureFileStorageLocation", + "AzureFileStorageReadSettings", + "AzureFileStorageWriteSettings", + "AzureFunctionActivity", + "AzureFunctionLinkedService", + "AzureKeyVaultLinkedService", + "AzureKeyVaultSecretReference", + "AzureMLBatchExecutionActivity", + "AzureMLExecutePipelineActivity", + "AzureMLLinkedService", + "AzureMLServiceLinkedService", + "AzureMLUpdateResourceActivity", + "AzureMLWebServiceFile", + "AzureMariaDBLinkedService", + "AzureMariaDBSource", + "AzureMariaDBTableDataset", + "AzureMySqlLinkedService", + "AzureMySqlSink", + "AzureMySqlSource", + "AzureMySqlTableDataset", + "AzurePostgreSqlLinkedService", + "AzurePostgreSqlSink", + "AzurePostgreSqlSource", + "AzurePostgreSqlTableDataset", + "AzureQueueSink", + "AzureSearchIndexDataset", + "AzureSearchIndexSink", + "AzureSearchLinkedService", + "AzureSqlDWLinkedService", + "AzureSqlDWTableDataset", + "AzureSqlDatabaseLinkedService", + "AzureSqlMILinkedService", + "AzureSqlMITableDataset", + 
"AzureSqlSink", + "AzureSqlSource", + "AzureSqlTableDataset", + "AzureStorageLinkedService", + "AzureSynapseArtifactsLinkedService", + "AzureTableDataset", + "AzureTableSink", + "AzureTableSource", + "AzureTableStorageLinkedService", + "BigDataPoolParametrizationReference", + "BinaryDataset", + "BinaryReadSettings", + "BinarySink", + "BinarySource", + "BlobEventsTrigger", + "BlobSink", + "BlobSource", + "BlobTrigger", + "CMKIdentityDefinition", + "CassandraLinkedService", + "CassandraSource", + "CassandraTableDataset", + "ChainingTrigger", + "CloudError", + "CmdkeySetup", + "CommonDataServiceForAppsEntityDataset", + "CommonDataServiceForAppsLinkedService", + "CommonDataServiceForAppsSink", + "CommonDataServiceForAppsSource", + "ComponentSetup", + "CompressionReadSettings", + "ConcurLinkedService", + "ConcurObjectDataset", + "ConcurSource", + "ConnectionStateProperties", + "ControlActivity", + "CopyActivity", + "CopyActivityLogSettings", + "CopySink", + "CopySource", + "CopyTranslator", + "CosmosDbLinkedService", + "CosmosDbMongoDbApiCollectionDataset", + "CosmosDbMongoDbApiLinkedService", + "CosmosDbMongoDbApiSink", + "CosmosDbMongoDbApiSource", + "CosmosDbSqlApiCollectionDataset", + "CosmosDbSqlApiSink", + "CosmosDbSqlApiSource", + "CouchbaseLinkedService", + "CouchbaseSource", + "CouchbaseTableDataset", + "CreateDataFlowDebugSessionRequest", + "CreateDataFlowDebugSessionResponse", + "CreateLinkedIntegrationRuntimeRequest", + "CreateRunResponse", + "Credential", + "CredentialReference", + "CredentialResource", + "CustomActivity", + "CustomActivityReferenceObject", + "CustomDataSourceLinkedService", + "CustomDataset", + "CustomEventsTrigger", + "CustomSetupBase", + "DWCopyCommandDefaultValue", + "DWCopyCommandSettings", + "DataFlow", + "DataFlowDebugCommandPayload", + "DataFlowDebugCommandRequest", + "DataFlowDebugCommandResponse", + "DataFlowDebugPackage", + "DataFlowDebugPackageDebugSettings", + "DataFlowDebugResource", + "DataFlowDebugSessionInfo", + "DataFlowFolder", + "DataFlowListResponse", + "DataFlowReference", + "DataFlowResource", + "DataFlowSink", + "DataFlowSource", + "DataFlowSourceSetting", + "DataFlowStagingInfo", + "DataLakeAnalyticsUSQLActivity", + "DatabricksNotebookActivity", + "DatabricksSparkJarActivity", + "DatabricksSparkPythonActivity", + "Dataset", + "DatasetCompression", + "DatasetDataElement", + "DatasetDebugResource", + "DatasetFolder", + "DatasetListResponse", + "DatasetLocation", + "DatasetReference", + "DatasetResource", + "DatasetSchemaDataElement", + "DatasetStorageFormat", + "DataworldLinkedService", + "Db2LinkedService", + "Db2Source", + "Db2TableDataset", + "DeleteActivity", + "DeleteDataFlowDebugSessionRequest", + "DelimitedTextDataset", + "DelimitedTextReadSettings", + "DelimitedTextSink", + "DelimitedTextSource", + "DelimitedTextWriteSettings", + "DependencyReference", + "DistcpSettings", + "DocumentDbCollectionDataset", + "DocumentDbCollectionSink", + "DocumentDbCollectionSource", + "DrillLinkedService", + "DrillSource", + "DrillTableDataset", + "DynamicsAXLinkedService", + "DynamicsAXResourceDataset", + "DynamicsAXSource", + "DynamicsCrmEntityDataset", + "DynamicsCrmLinkedService", + "DynamicsCrmSink", + "DynamicsCrmSource", + "DynamicsEntityDataset", + "DynamicsLinkedService", + "DynamicsSink", + "DynamicsSource", + "EloquaLinkedService", + "EloquaObjectDataset", + "EloquaSource", + "EncryptionConfiguration", + "EntityReference", + "EnvironmentVariableSetup", + "ExcelDataset", + "ExcelSource", + "ExecuteDataFlowActivity", + 
"ExecuteDataFlowActivityTypeProperties", + "ExecuteDataFlowActivityTypePropertiesCompute", + "ExecutePipelineActivity", + "ExecutePipelineActivityPolicy", + "ExecutePowerQueryActivityTypeProperties", + "ExecuteSSISPackageActivity", + "ExecuteWranglingDataflowActivity", + "ExecutionActivity", + "ExportSettings", + "ExposureControlBatchRequest", + "ExposureControlBatchResponse", + "ExposureControlRequest", + "ExposureControlResponse", + "Expression", + "Factory", + "FactoryGitHubConfiguration", + "FactoryIdentity", + "FactoryListResponse", + "FactoryRepoConfiguration", + "FactoryRepoUpdate", + "FactoryUpdateParameters", + "FactoryVSTSConfiguration", + "FailActivity", + "FileServerLinkedService", + "FileServerLocation", + "FileServerReadSettings", + "FileServerWriteSettings", + "FileShareDataset", + "FileSystemSink", + "FileSystemSource", + "FilterActivity", + "Flowlet", + "ForEachActivity", + "FormatReadSettings", + "FormatWriteSettings", + "FtpReadSettings", + "FtpServerLinkedService", + "FtpServerLocation", + "GetDataFactoryOperationStatusResponse", + "GetMetadataActivity", + "GetSsisObjectMetadataRequest", + "GitHubAccessTokenRequest", + "GitHubAccessTokenResponse", + "GitHubClientSecret", + "GlobalParameterListResponse", + "GlobalParameterResource", + "GlobalParameterSpecification", + "GoogleAdWordsLinkedService", + "GoogleAdWordsObjectDataset", + "GoogleAdWordsSource", + "GoogleBigQueryLinkedService", + "GoogleBigQueryObjectDataset", + "GoogleBigQuerySource", + "GoogleCloudStorageLinkedService", + "GoogleCloudStorageLocation", + "GoogleCloudStorageReadSettings", + "GoogleSheetsLinkedService", + "GreenplumLinkedService", + "GreenplumSource", + "GreenplumTableDataset", + "HBaseLinkedService", + "HBaseObjectDataset", + "HBaseSource", + "HDInsightHiveActivity", + "HDInsightLinkedService", + "HDInsightMapReduceActivity", + "HDInsightOnDemandLinkedService", + "HDInsightPigActivity", + "HDInsightSparkActivity", + "HDInsightStreamingActivity", + "HdfsLinkedService", + "HdfsLocation", + "HdfsReadSettings", + "HdfsSource", + "HiveLinkedService", + "HiveObjectDataset", + "HiveSource", + "HttpDataset", + "HttpLinkedService", + "HttpReadSettings", + "HttpServerLocation", + "HttpSource", + "HubspotLinkedService", + "HubspotObjectDataset", + "HubspotSource", + "IfConditionActivity", + "ImpalaLinkedService", + "ImpalaObjectDataset", + "ImpalaSource", + "ImportSettings", + "InformixLinkedService", + "InformixSink", + "InformixSource", + "InformixTableDataset", + "IntegrationRuntime", + "IntegrationRuntimeAuthKeys", + "IntegrationRuntimeComputeProperties", + "IntegrationRuntimeConnectionInfo", + "IntegrationRuntimeCustomSetupScriptProperties", + "IntegrationRuntimeCustomerVirtualNetwork", + "IntegrationRuntimeDataFlowProperties", + "IntegrationRuntimeDataProxyProperties", + "IntegrationRuntimeDebugResource", + "IntegrationRuntimeListResponse", + "IntegrationRuntimeMonitoringData", + "IntegrationRuntimeNodeIpAddress", + "IntegrationRuntimeNodeMonitoringData", + "IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint", + "IntegrationRuntimeOutboundNetworkDependenciesEndpoint", + "IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails", + "IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse", + "IntegrationRuntimeReference", + "IntegrationRuntimeRegenerateKeyParameters", + "IntegrationRuntimeResource", + "IntegrationRuntimeSsisCatalogInfo", + "IntegrationRuntimeSsisProperties", + "IntegrationRuntimeStatus", + "IntegrationRuntimeStatusListResponse", + 
"IntegrationRuntimeStatusResponse", + "IntegrationRuntimeVNetProperties", + "JiraLinkedService", + "JiraObjectDataset", + "JiraSource", + "JsonDataset", + "JsonFormat", + "JsonReadSettings", + "JsonSink", + "JsonSource", + "JsonWriteSettings", + "LinkedIntegrationRuntime", + "LinkedIntegrationRuntimeKeyAuthorization", + "LinkedIntegrationRuntimeRbacAuthorization", + "LinkedIntegrationRuntimeRequest", + "LinkedIntegrationRuntimeType", + "LinkedService", + "LinkedServiceDebugResource", + "LinkedServiceListResponse", + "LinkedServiceReference", + "LinkedServiceResource", + "LogLocationSettings", + "LogSettings", + "LogStorageSettings", + "LookupActivity", + "MagentoLinkedService", + "MagentoObjectDataset", + "MagentoSource", + "ManagedIdentityCredential", + "ManagedIntegrationRuntime", + "ManagedIntegrationRuntimeError", + "ManagedIntegrationRuntimeNode", + "ManagedIntegrationRuntimeOperationResult", + "ManagedIntegrationRuntimeStatus", + "ManagedPrivateEndpoint", + "ManagedPrivateEndpointListResponse", + "ManagedPrivateEndpointResource", + "ManagedVirtualNetwork", + "ManagedVirtualNetworkListResponse", + "ManagedVirtualNetworkReference", + "ManagedVirtualNetworkResource", + "MappingDataFlow", + "MariaDBLinkedService", + "MariaDBSource", + "MariaDBTableDataset", + "MarketoLinkedService", + "MarketoObjectDataset", + "MarketoSource", + "MetadataItem", + "MicrosoftAccessLinkedService", + "MicrosoftAccessSink", + "MicrosoftAccessSource", + "MicrosoftAccessTableDataset", + "MongoDbAtlasCollectionDataset", + "MongoDbAtlasLinkedService", + "MongoDbAtlasSink", + "MongoDbAtlasSource", + "MongoDbCollectionDataset", + "MongoDbCursorMethodsProperties", + "MongoDbLinkedService", + "MongoDbSource", + "MongoDbV2CollectionDataset", + "MongoDbV2LinkedService", + "MongoDbV2Sink", + "MongoDbV2Source", + "MultiplePipelineTrigger", + "MySqlLinkedService", + "MySqlSource", + "MySqlTableDataset", + "NetezzaLinkedService", + "NetezzaPartitionSettings", + "NetezzaSource", + "NetezzaTableDataset", + "NotebookParameter", + "ODataLinkedService", + "ODataResourceDataset", + "ODataSource", + "OdbcLinkedService", + "OdbcSink", + "OdbcSource", + "OdbcTableDataset", + "Office365Dataset", + "Office365LinkedService", + "Office365Source", + "Operation", + "OperationDisplay", + "OperationListResponse", + "OperationLogSpecification", + "OperationMetricAvailability", + "OperationMetricDimension", + "OperationMetricSpecification", + "OperationServiceSpecification", + "OracleCloudStorageLinkedService", + "OracleCloudStorageLocation", + "OracleCloudStorageReadSettings", + "OracleLinkedService", + "OraclePartitionSettings", + "OracleServiceCloudLinkedService", + "OracleServiceCloudObjectDataset", + "OracleServiceCloudSource", + "OracleSink", + "OracleSource", + "OracleTableDataset", + "OrcDataset", + "OrcFormat", + "OrcSink", + "OrcSource", + "OrcWriteSettings", + "PackageStore", + "ParameterSpecification", + "ParquetDataset", + "ParquetFormat", + "ParquetSink", + "ParquetSource", + "ParquetWriteSettings", + "PaypalLinkedService", + "PaypalObjectDataset", + "PaypalSource", + "PhoenixLinkedService", + "PhoenixObjectDataset", + "PhoenixSource", + "PipelineElapsedTimeMetricPolicy", + "PipelineFolder", + "PipelineListResponse", + "PipelinePolicy", + "PipelineReference", + "PipelineResource", + "PipelineRun", + "PipelineRunInvokedBy", + "PipelineRunsQueryResponse", + "PolybaseSettings", + "PostgreSqlLinkedService", + "PostgreSqlSource", + "PostgreSqlTableDataset", + "PowerQuerySink", + "PowerQuerySinkMapping", + "PowerQuerySource", + 
"PrestoLinkedService", + "PrestoObjectDataset", + "PrestoSource", + "PrivateEndpoint", + "PrivateEndpointConnectionListResponse", + "PrivateEndpointConnectionResource", + "PrivateLinkConnectionApprovalRequest", + "PrivateLinkConnectionApprovalRequestResource", + "PrivateLinkConnectionState", + "PrivateLinkResource", + "PrivateLinkResourceProperties", + "PrivateLinkResourcesWrapper", + "PurviewConfiguration", + "QueryDataFlowDebugSessionsResponse", + "QuickBooksLinkedService", + "QuickBooksObjectDataset", + "QuickBooksSource", + "QuickbaseLinkedService", + "RecurrenceSchedule", + "RecurrenceScheduleOccurrence", + "RedirectIncompatibleRowSettings", + "RedshiftUnloadSettings", + "RelationalSource", + "RelationalTableDataset", + "RemotePrivateEndpointConnection", + "RerunTumblingWindowTrigger", + "Resource", + "ResponsysLinkedService", + "ResponsysObjectDataset", + "ResponsysSource", + "RestResourceDataset", + "RestServiceLinkedService", + "RestSink", + "RestSource", + "RetryPolicy", + "RunFilterParameters", + "RunQueryFilter", + "RunQueryOrderBy", + "SSISAccessCredential", + "SSISChildPackage", + "SSISExecutionCredential", + "SSISExecutionParameter", + "SSISLogLocation", + "SSISPackageLocation", + "SSISPropertyOverride", + "SalesforceLinkedService", + "SalesforceMarketingCloudLinkedService", + "SalesforceMarketingCloudObjectDataset", + "SalesforceMarketingCloudSource", + "SalesforceObjectDataset", + "SalesforceServiceCloudLinkedService", + "SalesforceServiceCloudObjectDataset", + "SalesforceServiceCloudSink", + "SalesforceServiceCloudSource", + "SalesforceSink", + "SalesforceSource", + "SapBWLinkedService", + "SapBwCubeDataset", + "SapBwSource", + "SapCloudForCustomerLinkedService", + "SapCloudForCustomerResourceDataset", + "SapCloudForCustomerSink", + "SapCloudForCustomerSource", + "SapEccLinkedService", + "SapEccResourceDataset", + "SapEccSource", + "SapHanaLinkedService", + "SapHanaPartitionSettings", + "SapHanaSource", + "SapHanaTableDataset", + "SapOdpLinkedService", + "SapOdpResourceDataset", + "SapOdpSource", + "SapOpenHubLinkedService", + "SapOpenHubSource", + "SapOpenHubTableDataset", + "SapTableLinkedService", + "SapTablePartitionSettings", + "SapTableResourceDataset", + "SapTableSource", + "ScheduleTrigger", + "ScheduleTriggerRecurrence", + "ScriptAction", + "ScriptActivity", + "ScriptActivityParameter", + "ScriptActivityScriptBlock", + "ScriptActivityTypePropertiesLogSettings", + "SecretBase", + "SecureString", + "SelfDependencyTumblingWindowTriggerReference", + "SelfHostedIntegrationRuntime", + "SelfHostedIntegrationRuntimeNode", + "SelfHostedIntegrationRuntimeStatus", + "ServiceNowLinkedService", + "ServiceNowObjectDataset", + "ServiceNowSource", + "ServicePrincipalCredential", + "SetVariableActivity", + "SftpLocation", + "SftpReadSettings", + "SftpServerLinkedService", + "SftpWriteSettings", + "SharePointOnlineListLinkedService", + "SharePointOnlineListResourceDataset", + "SharePointOnlineListSource", + "ShopifyLinkedService", + "ShopifyObjectDataset", + "ShopifySource", + "SkipErrorFile", + "SmartsheetLinkedService", + "SnowflakeDataset", + "SnowflakeExportCopyCommand", + "SnowflakeImportCopyCommand", + "SnowflakeLinkedService", + "SnowflakeSink", + "SnowflakeSource", + "SparkLinkedService", + "SparkObjectDataset", + "SparkSource", + "SqlAlwaysEncryptedProperties", + "SqlDWSink", + "SqlDWSource", + "SqlDWUpsertSettings", + "SqlMISink", + "SqlMISource", + "SqlPartitionSettings", + "SqlServerLinkedService", + "SqlServerSink", + "SqlServerSource", + 
"SqlServerStoredProcedureActivity", + "SqlServerTableDataset", + "SqlSink", + "SqlSource", + "SqlUpsertSettings", + "SquareLinkedService", + "SquareObjectDataset", + "SquareSource", + "SsisEnvironment", + "SsisEnvironmentReference", + "SsisFolder", + "SsisObjectMetadata", + "SsisObjectMetadataListResponse", + "SsisObjectMetadataStatusResponse", + "SsisPackage", + "SsisParameter", + "SsisProject", + "SsisVariable", + "StagingSettings", + "StoreReadSettings", + "StoreWriteSettings", + "StoredProcedureParameter", + "SubResource", + "SubResourceDebugResource", + "SwitchActivity", + "SwitchCase", + "SybaseLinkedService", + "SybaseSource", + "SybaseTableDataset", + "SynapseNotebookActivity", + "SynapseNotebookReference", + "SynapseSparkJobDefinitionActivity", + "SynapseSparkJobReference", + "TabularSource", + "TabularTranslator", + "TarGZipReadSettings", + "TarReadSettings", + "TeamDeskLinkedService", + "TeradataLinkedService", + "TeradataPartitionSettings", + "TeradataSource", + "TeradataTableDataset", + "TextFormat", + "Transformation", + "Trigger", + "TriggerDependencyReference", + "TriggerFilterParameters", + "TriggerListResponse", + "TriggerPipelineReference", + "TriggerQueryResponse", + "TriggerReference", + "TriggerResource", + "TriggerRun", + "TriggerRunsQueryResponse", + "TriggerSubscriptionOperationStatus", + "TumblingWindowTrigger", + "TumblingWindowTriggerDependencyReference", + "TwilioLinkedService", + "TypeConversionSettings", + "UntilActivity", + "UpdateIntegrationRuntimeNodeRequest", + "UpdateIntegrationRuntimeRequest", + "UserAccessPolicy", + "UserProperty", + "ValidationActivity", + "VariableSpecification", + "VerticaLinkedService", + "VerticaSource", + "VerticaTableDataset", + "WaitActivity", + "WebActivity", + "WebActivityAuthentication", + "WebAnonymousAuthentication", + "WebBasicAuthentication", + "WebClientCertificateAuthentication", + "WebHookActivity", + "WebLinkedService", + "WebLinkedServiceTypeProperties", + "WebSource", + "WebTableDataset", + "WranglingDataFlow", + "XeroLinkedService", + "XeroObjectDataset", + "XeroSource", + "XmlDataset", + "XmlReadSettings", + "XmlSource", + "ZendeskLinkedService", + "ZipDeflateReadSettings", + "ZohoLinkedService", + "ZohoObjectDataset", + "ZohoSource", + "AmazonRdsForOraclePartitionOption", + "AvroCompressionCodec", + "AzureFunctionActivityMethod", + "AzureSearchIndexWriteBehaviorType", + "BigDataPoolReferenceType", + "BlobEventTypes", + "CassandraSourceReadConsistencyLevels", + "CompressionCodec", + "CopyBehaviorType", + "CosmosDbConnectionMode", + "CosmosDbServicePrincipalCredentialType", + "CredentialReferenceType", + "DataFlowComputeType", + "DataFlowDebugCommandType", + "DataFlowReferenceType", + "DatasetCompressionLevel", + "DatasetReferenceType", + "DayOfWeek", + "DaysOfWeek", + "Db2AuthenticationType", + "DependencyCondition", + "DynamicsAuthenticationType", + "DynamicsDeploymentType", + "DynamicsSinkWriteBehavior", + "EventSubscriptionStatus", + "ExpressionType", + "FactoryIdentityType", + "FtpAuthenticationType", + "GlobalParameterType", + "GoogleAdWordsAuthenticationType", + "GoogleBigQueryAuthenticationType", + "HBaseAuthenticationType", + "HDInsightActivityDebugInfoOption", + "HdiNodeTypes", + "HiveAuthenticationType", + "HiveServerType", + "HiveThriftTransportProtocol", + "HttpAuthenticationType", + "ImpalaAuthenticationType", + "IntegrationRuntimeAuthKeyName", + "IntegrationRuntimeAutoUpdate", + "IntegrationRuntimeEdition", + "IntegrationRuntimeEntityReferenceType", + 
"IntegrationRuntimeInternalChannelEncryptionMode", + "IntegrationRuntimeLicenseType", + "IntegrationRuntimeReferenceType", + "IntegrationRuntimeSsisCatalogPricingTier", + "IntegrationRuntimeState", + "IntegrationRuntimeType", + "IntegrationRuntimeUpdateResult", + "JsonFormatFilePattern", + "JsonWriteFilePattern", + "ManagedIntegrationRuntimeNodeStatus", + "ManagedVirtualNetworkReferenceType", + "MongoDbAuthenticationType", + "NetezzaPartitionOption", + "NotebookParameterType", + "NotebookReferenceType", + "ODataAadServicePrincipalCredentialType", + "ODataAuthenticationType", + "OraclePartitionOption", + "OrcCompressionCodec", + "ParameterType", + "PhoenixAuthenticationType", + "PipelineReferenceType", + "PolybaseSettingsRejectType", + "PrestoAuthenticationType", + "PublicNetworkAccess", + "RecurrenceFrequency", + "RestServiceAuthenticationType", + "RunQueryFilterOperand", + "RunQueryFilterOperator", + "RunQueryOrder", + "RunQueryOrderByField", + "SalesforceSinkWriteBehavior", + "SalesforceSourceReadBehavior", + "SapCloudForCustomerSinkWriteBehavior", + "SapHanaAuthenticationType", + "SapHanaPartitionOption", + "SapTablePartitionOption", + "ScriptActivityLogDestination", + "ScriptActivityParameterDirection", + "ScriptActivityParameterType", + "ScriptType", + "SelfHostedIntegrationRuntimeNodeStatus", + "ServiceNowAuthenticationType", + "ServicePrincipalCredentialType", + "SftpAuthenticationType", + "SparkAuthenticationType", + "SparkJobReferenceType", + "SparkServerType", + "SparkThriftTransportProtocol", + "SqlAlwaysEncryptedAkvAuthType", + "SqlDWWriteBehaviorEnum", + "SqlPartitionOption", + "SqlWriteBehaviorEnum", + "SsisLogLocationType", + "SsisObjectMetadataType", + "SsisPackageLocationType", + "StoredProcedureParameterType", + "SybaseAuthenticationType", + "TeamDeskAuthenticationType", + "TeradataAuthenticationType", + "TeradataPartitionOption", + "TriggerReferenceType", + "TriggerRunStatus", + "TriggerRuntimeState", + "TumblingWindowFrequency", + "Type", + "VariableType", + "WebActivityMethod", + "WebAuthenticationType", + "WebHookActivityMethod", + "ZendeskAuthenticationType", ] __all__.extend([p for p in _patch_all if p not in __all__]) -_patch_sdk() \ No newline at end of file +_patch_sdk() diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py index 5dfcf00dba9..e713f01bdf7 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py @@ -11,12 +11,15 @@ class AmazonRdsForOraclePartitionOption(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """AmazonRdsForOraclePartitionOption.""" NONE = "None" PHYSICAL_PARTITIONS_OF_TABLE = "PhysicalPartitionsOfTable" DYNAMIC_RANGE = "DynamicRange" + class AvroCompressionCodec(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """AvroCompressionCodec.""" NONE = "none" DEFLATE = "deflate" @@ -24,9 +27,9 @@ class AvroCompressionCodec(str, Enum, metaclass=CaseInsensitiveEnumMeta): XZ = "xz" BZIP2 = "bzip2" + class AzureFunctionActivityMethod(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The list of HTTP methods supported by a AzureFunctionActivity. 
- """ + """The list of HTTP methods supported by a AzureFunctionActivity.""" GET = "GET" POST = "POST" @@ -36,18 +39,27 @@ class AzureFunctionActivityMethod(str, Enum, metaclass=CaseInsensitiveEnumMeta): HEAD = "HEAD" TRACE = "TRACE" + class AzureSearchIndexWriteBehaviorType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Specify the write behavior when upserting documents into Azure Search Index. - """ + """Specify the write behavior when upserting documents into Azure Search Index.""" MERGE = "Merge" UPLOAD = "Upload" + +class BigDataPoolReferenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Big data pool reference type.""" + + BIG_DATA_POOL_REFERENCE = "BigDataPoolReference" + + class BlobEventTypes(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """BlobEventTypes.""" MICROSOFT_STORAGE_BLOB_CREATED = "Microsoft.Storage.BlobCreated" MICROSOFT_STORAGE_BLOB_DELETED = "Microsoft.Storage.BlobDeleted" + class CassandraSourceReadConsistencyLevels(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The consistency level specifies how many Cassandra servers must respond to a read request before returning data to the client application. Cassandra checks the specified number of @@ -66,9 +78,9 @@ class CassandraSourceReadConsistencyLevels(str, Enum, metaclass=CaseInsensitiveE SERIAL = "SERIAL" LOCAL_SERIAL = "LOCAL_SERIAL" + class CompressionCodec(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """All available compressionCodec values. - """ + """All available compressionCodec values.""" NONE = "none" LZO = "lzo" @@ -81,14 +93,15 @@ class CompressionCodec(str, Enum, metaclass=CaseInsensitiveEnumMeta): TAR = "tar" TAR_G_ZIP = "tarGZip" + class CopyBehaviorType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """All available types of copy behavior. - """ + """All available types of copy behavior.""" PRESERVE_HIERARCHY = "PreserveHierarchy" FLATTEN_HIERARCHY = "FlattenHierarchy" MERGE_FILES = "MergeFiles" + class CosmosDbConnectionMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The connection mode used to access CosmosDB account. Type: string (or Expression with resultType string). @@ -97,6 +110,7 @@ class CosmosDbConnectionMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): GATEWAY = "Gateway" DIRECT = "Direct" + class CosmosDbServicePrincipalCredentialType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or @@ -106,44 +120,50 @@ class CosmosDbServicePrincipalCredentialType(str, Enum, metaclass=CaseInsensitiv SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" + class CredentialReferenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Credential reference type. - """ + """Credential reference type.""" CREDENTIAL_REFERENCE = "CredentialReference" + class DataFlowComputeType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Compute type of the cluster which will execute data flow job. - """ + """Compute type of the cluster which will execute data flow job.""" GENERAL = "General" MEMORY_OPTIMIZED = "MemoryOptimized" COMPUTE_OPTIMIZED = "ComputeOptimized" + class DataFlowDebugCommandType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The command type. 
- """ + """The command type.""" EXECUTE_PREVIEW_QUERY = "executePreviewQuery" EXECUTE_STATISTICS_QUERY = "executeStatisticsQuery" EXECUTE_EXPRESSION_QUERY = "executeExpressionQuery" + class DataFlowReferenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Data flow reference type. - """ + """Data flow reference type.""" DATA_FLOW_REFERENCE = "DataFlowReference" + class DatasetCompressionLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """All available compression levels. - """ + """All available compression levels.""" OPTIMAL = "Optimal" FASTEST = "Fastest" + +class DatasetReferenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Dataset reference type.""" + + DATASET_REFERENCE = "DatasetReference" + + class DayOfWeek(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The days of the week. - """ + """The days of the week.""" SUNDAY = "Sunday" MONDAY = "Monday" @@ -153,7 +173,9 @@ class DayOfWeek(str, Enum, metaclass=CaseInsensitiveEnumMeta): FRIDAY = "Friday" SATURDAY = "Saturday" + class DaysOfWeek(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """DaysOfWeek.""" SUNDAY = "Sunday" MONDAY = "Monday" @@ -163,6 +185,7 @@ class DaysOfWeek(str, Enum, metaclass=CaseInsensitiveEnumMeta): FRIDAY = "Friday" SATURDAY = "Saturday" + class Db2AuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """AuthenticationType to be used for connection. It is mutually exclusive with connectionString property. @@ -170,37 +193,39 @@ class Db2AuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): BASIC = "Basic" + class DependencyCondition(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """DependencyCondition.""" SUCCEEDED = "Succeeded" FAILED = "Failed" SKIPPED = "Skipped" COMPLETED = "Completed" + class DynamicsAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """All available dynamicsAuthenticationType values. - """ + """All available dynamicsAuthenticationType values.""" OFFICE365 = "Office365" IFD = "Ifd" AAD_SERVICE_PRINCIPAL = "AADServicePrincipal" + class DynamicsDeploymentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """All available dynamicsDeploymentType values. - """ + """All available dynamicsDeploymentType values.""" ONLINE = "Online" ON_PREMISES_WITH_IFD = "OnPremisesWithIfd" + class DynamicsSinkWriteBehavior(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Defines values for DynamicsSinkWriteBehavior. - """ + """Defines values for DynamicsSinkWriteBehavior.""" UPSERT = "Upsert" + class EventSubscriptionStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Event Subscription Status. - """ + """Event Subscription Status.""" ENABLED = "Enabled" PROVISIONING = "Provisioning" @@ -208,24 +233,30 @@ class EventSubscriptionStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): DISABLED = "Disabled" UNKNOWN = "Unknown" + +class ExpressionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Expression type.""" + + EXPRESSION = "Expression" + + class FactoryIdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The identity type. - """ + """The identity type.""" SYSTEM_ASSIGNED = "SystemAssigned" USER_ASSIGNED = "UserAssigned" SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned" + class FtpAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The authentication type to be used to connect to the FTP server. 
- """ + """The authentication type to be used to connect to the FTP server.""" BASIC = "Basic" ANONYMOUS = "Anonymous" + class GlobalParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Global Parameter type. - """ + """Global Parameter type.""" OBJECT = "Object" STRING = "String" @@ -234,6 +265,7 @@ class GlobalParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta): BOOL = "Bool" ARRAY = "Array" + class GoogleAdWordsAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. @@ -242,6 +274,7 @@ class GoogleAdWordsAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMe SERVICE_AUTHENTICATION = "ServiceAuthentication" USER_AUTHENTICATION = "UserAuthentication" + class GoogleBigQueryAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. @@ -250,57 +283,57 @@ class GoogleBigQueryAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumM SERVICE_AUTHENTICATION = "ServiceAuthentication" USER_AUTHENTICATION = "UserAuthentication" + class HBaseAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The authentication mechanism to use to connect to the HBase server. - """ + """The authentication mechanism to use to connect to the HBase server.""" ANONYMOUS = "Anonymous" BASIC = "Basic" + class HdiNodeTypes(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """All available HdiNodeTypes values. - """ + """All available HdiNodeTypes values.""" HEADNODE = "Headnode" WORKERNODE = "Workernode" ZOOKEEPER = "Zookeeper" + class HDInsightActivityDebugInfoOption(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The HDInsightActivityDebugInfoOption settings to use. - """ + """The HDInsightActivityDebugInfoOption settings to use.""" NONE = "None" ALWAYS = "Always" FAILURE = "Failure" + class HiveAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The authentication method used to access the Hive server. - """ + """The authentication method used to access the Hive server.""" ANONYMOUS = "Anonymous" USERNAME = "Username" USERNAME_AND_PASSWORD = "UsernameAndPassword" WINDOWS_AZURE_HD_INSIGHT_SERVICE = "WindowsAzureHDInsightService" + class HiveServerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of Hive server. - """ + """The type of Hive server.""" HIVE_SERVER1 = "HiveServer1" HIVE_SERVER2 = "HiveServer2" HIVE_THRIFT_SERVER = "HiveThriftServer" + class HiveThriftTransportProtocol(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The transport protocol to use in the Thrift layer. - """ + """The transport protocol to use in the Thrift layer.""" BINARY = "Binary" SASL = "SASL" HTTP = "HTTP " + class HttpAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The authentication type to be used to connect to the HTTP server. - """ + """The authentication type to be used to connect to the HTTP server.""" BASIC = "Basic" ANONYMOUS = "Anonymous" @@ -308,42 +341,43 @@ class HttpAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): WINDOWS = "Windows" CLIENT_CERTIFICATE = "ClientCertificate" + class ImpalaAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The authentication type to use. 
- """ + """The authentication type to use.""" ANONYMOUS = "Anonymous" SASL_USERNAME = "SASLUsername" USERNAME_AND_PASSWORD = "UsernameAndPassword" + class IntegrationRuntimeAuthKeyName(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The name of the authentication key to regenerate. - """ + """The name of the authentication key to regenerate.""" AUTH_KEY1 = "authKey1" AUTH_KEY2 = "authKey2" + class IntegrationRuntimeAutoUpdate(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The state of integration runtime auto update. - """ + """The state of integration runtime auto update.""" ON = "On" OFF = "Off" + class IntegrationRuntimeEdition(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The edition for the SSIS Integration Runtime - """ + """The edition for the SSIS Integration Runtime.""" STANDARD = "Standard" ENTERPRISE = "Enterprise" + class IntegrationRuntimeEntityReferenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of this referenced entity. - """ + """The type of this referenced entity.""" INTEGRATION_RUNTIME_REFERENCE = "IntegrationRuntimeReference" LINKED_SERVICE_REFERENCE = "LinkedServiceReference" + class IntegrationRuntimeInternalChannelEncryptionMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): """It is used to set the encryption mode for node-node communication channel (when more than 2 self-hosted integration runtime nodes exist). @@ -353,16 +387,23 @@ class IntegrationRuntimeInternalChannelEncryptionMode(str, Enum, metaclass=CaseI SSL_ENCRYPTED = "SslEncrypted" NOT_ENCRYPTED = "NotEncrypted" + class IntegrationRuntimeLicenseType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """License type for bringing your own license scenario. - """ + """License type for bringing your own license scenario.""" BASE_PRICE = "BasePrice" LICENSE_INCLUDED = "LicenseIncluded" + +class IntegrationRuntimeReferenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of integration runtime.""" + + INTEGRATION_RUNTIME_REFERENCE = "IntegrationRuntimeReference" + + class IntegrationRuntimeSsisCatalogPricingTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The pricing tier for the catalog database. The valid values could be found in - https://azure.microsoft.com/en-us/pricing/details/sql-database/ + https://azure.microsoft.com/en-us/pricing/details/sql-database/. """ BASIC = "Basic" @@ -370,9 +411,9 @@ class IntegrationRuntimeSsisCatalogPricingTier(str, Enum, metaclass=CaseInsensit PREMIUM = "Premium" PREMIUM_RS = "PremiumRS" + class IntegrationRuntimeState(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The state of integration runtime. - """ + """The state of integration runtime.""" INITIAL = "Initial" STOPPED = "Stopped" @@ -385,75 +426,90 @@ class IntegrationRuntimeState(str, Enum, metaclass=CaseInsensitiveEnumMeta): OFFLINE = "Offline" ACCESS_DENIED = "AccessDenied" + class IntegrationRuntimeType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of integration runtime. - """ + """The type of integration runtime.""" MANAGED = "Managed" SELF_HOSTED = "SelfHosted" + class IntegrationRuntimeUpdateResult(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The result of the last integration runtime node update. - """ + """The result of the last integration runtime node update.""" NONE = "None" SUCCEED = "Succeed" FAIL = "Fail" + class JsonFormatFilePattern(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """JSON format file pattern. A property of JsonFormat. - """ + """JSON format file pattern. 
A property of JsonFormat.""" SET_OF_OBJECTS = "setOfObjects" ARRAY_OF_OBJECTS = "arrayOfObjects" + class JsonWriteFilePattern(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """All available filePatterns. - """ + """All available filePatterns.""" SET_OF_OBJECTS = "setOfObjects" ARRAY_OF_OBJECTS = "arrayOfObjects" + class ManagedIntegrationRuntimeNodeStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The managed integration runtime node status. - """ + """The managed integration runtime node status.""" STARTING = "Starting" AVAILABLE = "Available" RECYCLING = "Recycling" UNAVAILABLE = "Unavailable" + class ManagedVirtualNetworkReferenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Managed Virtual Network reference type. - """ + """Managed Virtual Network reference type.""" MANAGED_VIRTUAL_NETWORK_REFERENCE = "ManagedVirtualNetworkReference" + class MongoDbAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The authentication type to be used to connect to the MongoDB database. - """ + """The authentication type to be used to connect to the MongoDB database.""" BASIC = "Basic" ANONYMOUS = "Anonymous" + class NetezzaPartitionOption(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The partition mechanism that will be used for Netezza read in parallel. - """ + """The partition mechanism that will be used for Netezza read in parallel.""" NONE = "None" DATA_SLICE = "DataSlice" DYNAMIC_RANGE = "DynamicRange" + +class NotebookParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Notebook parameter type.""" + + STRING = "string" + INT = "int" + FLOAT = "float" + BOOL = "bool" + + +class NotebookReferenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Synapse notebook reference type.""" + + NOTEBOOK_REFERENCE = "NotebookReference" + + class ODataAadServicePrincipalCredentialType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Specify the credential type (key or cert) is used for service principal. - """ + """Specify the credential type (key or cert) is used for service principal.""" SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" + class ODataAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of authentication used to connect to the OData service. - """ + """Type of authentication used to connect to the OData service.""" BASIC = "Basic" ANONYMOUS = "Anonymous" @@ -461,24 +517,26 @@ class ODataAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): AAD_SERVICE_PRINCIPAL = "AadServicePrincipal" MANAGED_SERVICE_IDENTITY = "ManagedServiceIdentity" + class OraclePartitionOption(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The partition mechanism that will be used for Oracle read in parallel. - """ + """The partition mechanism that will be used for Oracle read in parallel.""" NONE = "None" PHYSICAL_PARTITIONS_OF_TABLE = "PhysicalPartitionsOfTable" DYNAMIC_RANGE = "DynamicRange" + class OrcCompressionCodec(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """OrcCompressionCodec.""" NONE = "none" ZLIB = "zlib" SNAPPY = "snappy" LZO = "lzo" + class ParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Parameter type. 
- """ + """Parameter type.""" OBJECT = "Object" STRING = "String" @@ -488,38 +546,44 @@ class ParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta): ARRAY = "Array" SECURE_STRING = "SecureString" + class PhoenixAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The authentication mechanism used to connect to the Phoenix server. - """ + """The authentication mechanism used to connect to the Phoenix server.""" ANONYMOUS = "Anonymous" USERNAME_AND_PASSWORD = "UsernameAndPassword" WINDOWS_AZURE_HD_INSIGHT_SERVICE = "WindowsAzureHDInsightService" + +class PipelineReferenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Pipeline reference type.""" + + PIPELINE_REFERENCE = "PipelineReference" + + class PolybaseSettingsRejectType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Indicates whether the RejectValue property is specified as a literal value or a percentage. - """ + """Indicates whether the RejectValue property is specified as a literal value or a percentage.""" VALUE = "value" PERCENTAGE = "percentage" + class PrestoAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The authentication mechanism used to connect to the Presto server. - """ + """The authentication mechanism used to connect to the Presto server.""" ANONYMOUS = "Anonymous" LDAP = "LDAP" + class PublicNetworkAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Whether or not public network access is allowed for the data factory. - """ + """Whether or not public network access is allowed for the data factory.""" ENABLED = "Enabled" DISABLED = "Disabled" + class RecurrenceFrequency(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Enumerates possible frequency option for the schedule trigger. - """ + """Enumerates possible frequency option for the schedule trigger.""" NOT_SPECIFIED = "NotSpecified" MINUTE = "Minute" @@ -529,9 +593,9 @@ class RecurrenceFrequency(str, Enum, metaclass=CaseInsensitiveEnumMeta): MONTH = "Month" YEAR = "Year" + class RestServiceAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of authentication used to connect to the REST service. - """ + """Type of authentication used to connect to the REST service.""" ANONYMOUS = "Anonymous" BASIC = "Basic" @@ -539,6 +603,7 @@ class RestServiceAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta MANAGED_SERVICE_IDENTITY = "ManagedServiceIdentity" O_AUTH2_CLIENT_CREDENTIAL = "OAuth2ClientCredential" + class RunQueryFilterOperand(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Parameter name to be used for filter. The allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are ActivityName, @@ -559,22 +624,23 @@ class RunQueryFilterOperand(str, Enum, metaclass=CaseInsensitiveEnumMeta): RUN_GROUP_ID = "RunGroupId" LATEST_ONLY = "LatestOnly" + class RunQueryFilterOperator(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Operator to be used for filter. - """ + """Operator to be used for filter.""" EQUALS = "Equals" NOT_EQUALS = "NotEquals" - IN_ENUM = "In" + IN = "In" NOT_IN = "NotIn" + class RunQueryOrder(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Sorting order of the parameter. - """ + """Sorting order of the parameter.""" ASC = "ASC" DESC = "DESC" + class RunQueryOrderByField(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Parameter name to be used for order by. 
The allowed parameters to order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are ActivityName, @@ -592,45 +658,45 @@ class RunQueryOrderByField(str, Enum, metaclass=CaseInsensitiveEnumMeta): TRIGGER_NAME = "TriggerName" TRIGGER_RUN_TIMESTAMP = "TriggerRunTimestamp" + class SalesforceSinkWriteBehavior(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The write behavior for the operation. Default is Insert. - """ + """The write behavior for the operation. Default is Insert.""" INSERT = "Insert" UPSERT = "Upsert" + class SalesforceSourceReadBehavior(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The read behavior for the operation. Default is Query. - """ + """The read behavior for the operation. Default is Query.""" QUERY = "Query" QUERY_ALL = "QueryAll" + class SapCloudForCustomerSinkWriteBehavior(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The write behavior for the operation. Default is 'Insert'. - """ + """The write behavior for the operation. Default is 'Insert'.""" INSERT = "Insert" UPDATE = "Update" + class SapHanaAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The authentication type to be used to connect to the SAP HANA server. - """ + """The authentication type to be used to connect to the SAP HANA server.""" BASIC = "Basic" WINDOWS = "Windows" + class SapHanaPartitionOption(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The partition mechanism that will be used for SAP HANA read in parallel. - """ + """The partition mechanism that will be used for SAP HANA read in parallel.""" NONE = "None" PHYSICAL_PARTITIONS_OF_TABLE = "PhysicalPartitionsOfTable" SAP_HANA_DYNAMIC_RANGE = "SapHanaDynamicRange" + class SapTablePartitionOption(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The partition mechanism that will be used for SAP table read in parallel. - """ + """The partition mechanism that will be used for SAP table read in parallel.""" NONE = "None" PARTITION_ON_INT = "PartitionOnInt" @@ -639,24 +705,24 @@ class SapTablePartitionOption(str, Enum, metaclass=CaseInsensitiveEnumMeta): PARTITION_ON_CALENDAR_DATE = "PartitionOnCalendarDate" PARTITION_ON_TIME = "PartitionOnTime" + class ScriptActivityLogDestination(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The destination of logs. Type: string. - """ + """The destination of logs. Type: string.""" ACTIVITY_OUTPUT = "ActivityOutput" EXTERNAL_STORE = "ExternalStore" + class ScriptActivityParameterDirection(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The direction of the parameter. - """ + """The direction of the parameter.""" INPUT = "Input" OUTPUT = "Output" INPUT_OUTPUT = "InputOutput" + class ScriptActivityParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of the parameter. - """ + """The type of the parameter.""" BOOLEAN = "Boolean" DATE_TIME = "DateTime" @@ -671,16 +737,16 @@ class ScriptActivityParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta): STRING = "String" TIMESPAN = "Timespan" + class ScriptType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of the query. Type: string. - """ + """The type of the query. Type: string.""" QUERY = "Query" NON_QUERY = "NonQuery" + class SelfHostedIntegrationRuntimeNodeStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Status of the integration runtime node. 
- """ + """Status of the integration runtime node.""" NEED_REGISTRATION = "NeedRegistration" ONLINE = "Online" @@ -690,53 +756,60 @@ class SelfHostedIntegrationRuntimeNodeStatus(str, Enum, metaclass=CaseInsensitiv INITIALIZING = "Initializing" INITIALIZE_FAILED = "InitializeFailed" + class ServiceNowAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The authentication type to use. - """ + """The authentication type to use.""" BASIC = "Basic" O_AUTH2 = "OAuth2" + class ServicePrincipalCredentialType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """All available servicePrincipalCredentialType values. - """ + """All available servicePrincipalCredentialType values.""" SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" + class SftpAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The authentication type to be used to connect to the FTP server. - """ + """The authentication type to be used to connect to the FTP server.""" BASIC = "Basic" SSH_PUBLIC_KEY = "SshPublicKey" MULTI_FACTOR = "MultiFactor" + class SparkAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The authentication method used to access the Spark server. - """ + """The authentication method used to access the Spark server.""" ANONYMOUS = "Anonymous" USERNAME = "Username" USERNAME_AND_PASSWORD = "UsernameAndPassword" WINDOWS_AZURE_HD_INSIGHT_SERVICE = "WindowsAzureHDInsightService" + +class SparkJobReferenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Synapse spark job reference type.""" + + SPARK_JOB_DEFINITION_REFERENCE = "SparkJobDefinitionReference" + + class SparkServerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of Spark server. - """ + """The type of Spark server.""" SHARK_SERVER = "SharkServer" SHARK_SERVER2 = "SharkServer2" SPARK_THRIFT_SERVER = "SparkThriftServer" + class SparkThriftTransportProtocol(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The transport protocol to use in the Thrift layer. - """ + """The transport protocol to use in the Thrift layer.""" BINARY = "Binary" SASL = "SASL" HTTP = "HTTP " + class SqlAlwaysEncryptedAkvAuthType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Sql always encrypted AKV authentication type. Type: string (or Expression with resultType string). @@ -746,56 +819,56 @@ class SqlAlwaysEncryptedAkvAuthType(str, Enum, metaclass=CaseInsensitiveEnumMeta MANAGED_IDENTITY = "ManagedIdentity" USER_ASSIGNED_MANAGED_IDENTITY = "UserAssignedManagedIdentity" + class SqlDWWriteBehaviorEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Specify the write behavior when copying data into sql dw. - """ + """Specify the write behavior when copying data into sql dw.""" INSERT = "Insert" UPSERT = "Upsert" + class SqlPartitionOption(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The partition mechanism that will be used for Sql read in parallel. - """ + """The partition mechanism that will be used for Sql read in parallel.""" NONE = "None" PHYSICAL_PARTITIONS_OF_TABLE = "PhysicalPartitionsOfTable" DYNAMIC_RANGE = "DynamicRange" + class SqlWriteBehaviorEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Specify the write behavior when copying data into sql. - """ + """Specify the write behavior when copying data into sql.""" INSERT = "Insert" UPSERT = "Upsert" STORED_PROCEDURE = "StoredProcedure" + class SsisLogLocationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of SSIS log location. 
- """ + """The type of SSIS log location.""" FILE = "File" + class SsisObjectMetadataType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of SSIS object metadata. - """ + """The type of SSIS object metadata.""" FOLDER = "Folder" PROJECT = "Project" PACKAGE = "Package" ENVIRONMENT = "Environment" + class SsisPackageLocationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of SSIS package location. - """ + """The type of SSIS package location.""" SSISDB = "SSISDB" FILE = "File" INLINE_PACKAGE = "InlinePackage" PACKAGE_STORE = "PackageStore" + class StoredProcedureParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Stored procedure parameter type. - """ + """Stored procedure parameter type.""" STRING = "String" INT = "Int" @@ -805,99 +878,105 @@ class StoredProcedureParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta) BOOLEAN = "Boolean" DATE = "Date" + class SybaseAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """AuthenticationType to be used for connection. - """ + """AuthenticationType to be used for connection.""" BASIC = "Basic" WINDOWS = "Windows" + class TeamDeskAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The authentication type to use. - """ + """The authentication type to use.""" BASIC = "Basic" TOKEN = "Token" + class TeradataAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """AuthenticationType to be used for connection. - """ + """AuthenticationType to be used for connection.""" BASIC = "Basic" WINDOWS = "Windows" + class TeradataPartitionOption(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The partition mechanism that will be used for teradata read in parallel. - """ + """The partition mechanism that will be used for teradata read in parallel.""" NONE = "None" HASH = "Hash" DYNAMIC_RANGE = "DynamicRange" + class TriggerReferenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Trigger reference type. - """ + """Trigger reference type.""" TRIGGER_REFERENCE = "TriggerReference" + class TriggerRunStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Trigger run status. - """ + """Trigger run status.""" SUCCEEDED = "Succeeded" FAILED = "Failed" INPROGRESS = "Inprogress" + class TriggerRuntimeState(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Enumerates possible state of Triggers. - """ + """Enumerates possible state of Triggers.""" STARTED = "Started" STOPPED = "Stopped" DISABLED = "Disabled" + class TumblingWindowFrequency(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Enumerates possible frequency option for the tumbling window trigger. - """ + """Enumerates possible frequency option for the tumbling window trigger.""" MINUTE = "Minute" HOUR = "Hour" MONTH = "Month" + +class Type(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Linked service reference type.""" + + LINKED_SERVICE_REFERENCE = "LinkedServiceReference" + + class VariableType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Variable type. - """ + """Variable type.""" STRING = "String" BOOL = "Bool" ARRAY = "Array" + class WebActivityMethod(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The list of HTTP methods supported by a WebActivity. - """ + """The list of HTTP methods supported by a WebActivity.""" GET = "GET" POST = "POST" PUT = "PUT" DELETE = "DELETE" + class WebAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of authentication used to connect to the web table source. 
- """ + """Type of authentication used to connect to the web table source.""" BASIC = "Basic" ANONYMOUS = "Anonymous" CLIENT_CERTIFICATE = "ClientCertificate" + class WebHookActivityMethod(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The list of HTTP methods supported by a WebHook activity. - """ + """The list of HTTP methods supported by a WebHook activity.""" POST = "POST" + class ZendeskAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The authentication type to use. - """ + """The authentication type to use.""" BASIC = "Basic" TOKEN = "Token" diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py deleted file mode 100644 index fb43215b43c..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py +++ /dev/null @@ -1,37651 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -import msrest.serialization - - -class AccessPolicyResponse(msrest.serialization.Model): - """Get Data Plane read only token response definition. - - :param policy: The user access policy. - :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy - :param access_token: Data Plane read only access token. - :type access_token: str - :param data_plane_url: Data Plane service base URL. - :type data_plane_url: str - """ - - _attribute_map = { - 'policy': {'key': 'policy', 'type': 'UserAccessPolicy'}, - 'access_token': {'key': 'accessToken', 'type': 'str'}, - 'data_plane_url': {'key': 'dataPlaneUrl', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AccessPolicyResponse, self).__init__(**kwargs) - self.policy = kwargs.get('policy', None) - self.access_token = kwargs.get('access_token', None) - self.data_plane_url = kwargs.get('data_plane_url', None) - - -class Activity(msrest.serialization.Model): - """A pipeline activity. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AppendVariableActivity, ControlActivity, ExecutePipelineActivity, ExecutionActivity, FilterActivity, ForEachActivity, IfConditionActivity, SetVariableActivity, SwitchActivity, UntilActivity, ValidationActivity, WaitActivity, WebHookActivity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. 
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - } - - _subtype_map = { - 'type': {'AppendVariable': 'AppendVariableActivity', 'Container': 'ControlActivity', 'ExecutePipeline': 'ExecutePipelineActivity', 'Execution': 'ExecutionActivity', 'Filter': 'FilterActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'SetVariable': 'SetVariableActivity', 'Switch': 'SwitchActivity', 'Until': 'UntilActivity', 'Validation': 'ValidationActivity', 'Wait': 'WaitActivity', 'WebHook': 'WebHookActivity'} - } - - def __init__( - self, - **kwargs - ): - super(Activity, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.name = kwargs['name'] - self.type = 'Activity' # type: str - self.description = kwargs.get('description', None) - self.depends_on = kwargs.get('depends_on', None) - self.user_properties = kwargs.get('user_properties', None) - - -class ActivityDependency(msrest.serialization.Model): - """Activity dependency information. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param activity: Required. Activity name. - :type activity: str - :param dependency_conditions: Required. Match-Condition for the dependency. - :type dependency_conditions: list[str or ~azure.mgmt.datafactory.models.DependencyCondition] - """ - - _validation = { - 'activity': {'required': True}, - 'dependency_conditions': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'activity': {'key': 'activity', 'type': 'str'}, - 'dependency_conditions': {'key': 'dependencyConditions', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - super(ActivityDependency, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.activity = kwargs['activity'] - self.dependency_conditions = kwargs['dependency_conditions'] - - -class ActivityPolicy(msrest.serialization.Model): - """Execution policy for an activity. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param timeout: Specifies the timeout for the activity to run. The default timeout is 7 days. - Type: string (or Expression with resultType string), pattern: - ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: object - :param retry: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with - resultType integer), minimum: 0. - :type retry: object - :param retry_interval_in_seconds: Interval between each retry attempt (in seconds). The default - is 30 sec. - :type retry_interval_in_seconds: int - :param secure_input: When set to true, Input from activity is considered as secure and will not - be logged to monitoring. 
- :type secure_input: bool - :param secure_output: When set to true, Output from activity is considered as secure and will - not be logged to monitoring. - :type secure_output: bool - """ - - _validation = { - 'retry_interval_in_seconds': {'maximum': 86400, 'minimum': 30}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'timeout': {'key': 'timeout', 'type': 'object'}, - 'retry': {'key': 'retry', 'type': 'object'}, - 'retry_interval_in_seconds': {'key': 'retryIntervalInSeconds', 'type': 'int'}, - 'secure_input': {'key': 'secureInput', 'type': 'bool'}, - 'secure_output': {'key': 'secureOutput', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - super(ActivityPolicy, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.timeout = kwargs.get('timeout', None) - self.retry = kwargs.get('retry', None) - self.retry_interval_in_seconds = kwargs.get('retry_interval_in_seconds', None) - self.secure_input = kwargs.get('secure_input', None) - self.secure_output = kwargs.get('secure_output', None) - - -class ActivityRun(msrest.serialization.Model): - """Information about an activity run in a pipeline. - - Variables are only populated by the server, and will be ignored when sending a request. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :ivar pipeline_name: The name of the pipeline. - :vartype pipeline_name: str - :ivar pipeline_run_id: The id of the pipeline run. - :vartype pipeline_run_id: str - :ivar activity_name: The name of the activity. - :vartype activity_name: str - :ivar activity_type: The type of the activity. - :vartype activity_type: str - :ivar activity_run_id: The id of the activity run. - :vartype activity_run_id: str - :ivar linked_service_name: The name of the compute linked service. - :vartype linked_service_name: str - :ivar status: The status of the activity run. - :vartype status: str - :ivar activity_run_start: The start time of the activity run in 'ISO 8601' format. - :vartype activity_run_start: ~datetime.datetime - :ivar activity_run_end: The end time of the activity run in 'ISO 8601' format. - :vartype activity_run_end: ~datetime.datetime - :ivar duration_in_ms: The duration of the activity run. - :vartype duration_in_ms: int - :ivar input: The input for the activity. - :vartype input: object - :ivar output: The output for the activity. - :vartype output: object - :ivar error: The error if any from the activity run. 
- :vartype error: object - """ - - _validation = { - 'pipeline_name': {'readonly': True}, - 'pipeline_run_id': {'readonly': True}, - 'activity_name': {'readonly': True}, - 'activity_type': {'readonly': True}, - 'activity_run_id': {'readonly': True}, - 'linked_service_name': {'readonly': True}, - 'status': {'readonly': True}, - 'activity_run_start': {'readonly': True}, - 'activity_run_end': {'readonly': True}, - 'duration_in_ms': {'readonly': True}, - 'input': {'readonly': True}, - 'output': {'readonly': True}, - 'error': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, - 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, - 'activity_name': {'key': 'activityName', 'type': 'str'}, - 'activity_type': {'key': 'activityType', 'type': 'str'}, - 'activity_run_id': {'key': 'activityRunId', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'activity_run_start': {'key': 'activityRunStart', 'type': 'iso-8601'}, - 'activity_run_end': {'key': 'activityRunEnd', 'type': 'iso-8601'}, - 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, - 'input': {'key': 'input', 'type': 'object'}, - 'output': {'key': 'output', 'type': 'object'}, - 'error': {'key': 'error', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ActivityRun, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.pipeline_name = None - self.pipeline_run_id = None - self.activity_name = None - self.activity_type = None - self.activity_run_id = None - self.linked_service_name = None - self.status = None - self.activity_run_start = None - self.activity_run_end = None - self.duration_in_ms = None - self.input = None - self.output = None - self.error = None - - -class ActivityRunsQueryResponse(msrest.serialization.Model): - """A list activity runs. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of activity runs. - :type value: list[~azure.mgmt.datafactory.models.ActivityRun] - :param continuation_token: The continuation token for getting the next page of results, if any - remaining results exist, null otherwise. - :type continuation_token: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[ActivityRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ActivityRunsQueryResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.continuation_token = kwargs.get('continuation_token', None) - - -class AddDataFlowToDebugSessionResponse(msrest.serialization.Model): - """Response body structure for starting data flow debug session. - - :param job_version: The ID of data flow debug job version. - :type job_version: str - """ - - _attribute_map = { - 'job_version': {'key': 'jobVersion', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AddDataFlowToDebugSessionResponse, self).__init__(**kwargs) - self.job_version = kwargs.get('job_version', None) - - -class AdditionalColumns(msrest.serialization.Model): - """Specify the column name and value of additional columns. - - :param name: Additional column name. Type: string (or Expression with resultType string). - :type name: object - :param value: Additional column value. 
Type: string (or Expression with resultType string). - :type value: object - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'object'}, - 'value': {'key': 'value', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AdditionalColumns, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.value = kwargs.get('value', None) - - -class LinkedService(msrest.serialization.Model): - """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonMwsLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AmazonS3CompatibleLinkedService, AzureBatchLinkedService, AzureBlobFsLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureDatabricksDeltaLakeLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMlLinkedService, AzureMlServiceLinkedService, AzureMariaDbLinkedService, AzureMySqlLinkedService, AzurePostgreSqlLinkedService, AzureSearchLinkedService, AzureSqlDwLinkedService, AzureSqlDatabaseLinkedService, AzureSqlMiLinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDbLinkedService, CosmosDbMongoDbApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAxLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HdInsightLinkedService, HdInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDbLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDbLinkedService, MongoDbAtlasLinkedService, MongoDbV2LinkedService, MySqlLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleCloudStorageLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSqlLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBwLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SqlServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. 
- :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - } - - _subtype_map = { - 'type': {'AmazonMWS': 'AmazonMwsLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AmazonS3Compatible': 'AmazonS3CompatibleLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFsLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMlLinkedService', 'AzureMLService': 'AzureMlServiceLinkedService', 'AzureMariaDB': 'AzureMariaDbLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSqlDwLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'AzureSqlMI': 'AzureSqlMiLinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAxLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HdInsightLinkedService', 'HDInsightOnDemand': 'HdInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDbLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 
'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDbLinkedService', 'MongoDbAtlas': 'MongoDbAtlasLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MySql': 'MySqlLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleCloudStorage': 'OracleCloudStorageLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBwLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SqlServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} - } - - def __init__( - self, - **kwargs - ): - super(LinkedService, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'LinkedService' # type: str - self.connect_via = kwargs.get('connect_via', None) - self.description = kwargs.get('description', None) - self.parameters = kwargs.get('parameters', None) - self.annotations = kwargs.get('annotations', None) - - -class AmazonMwsLinkedService(LinkedService): - """Amazon Marketplace Web Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param endpoint: Required. The endpoint of the Amazon MWS server, (i.e. - mws.amazonservices.com). - :type endpoint: object - :param marketplace_id: Required. The Amazon Marketplace ID you want to retrieve data from. To - retrieve data from multiple Marketplace IDs, separate them with a comma (,). (i.e. - A2EUQ1WTGCTBG2). - :type marketplace_id: object - :param seller_id: Required. The Amazon seller ID. - :type seller_id: object - :param mws_auth_token: The Amazon MWS authentication token. 
- :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase - :param access_key_id: Required. The access key id used to access data. - :type access_key_id: object - :param secret_key: The secret key used to access data. - :type secret_key: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'marketplace_id': {'required': True}, - 'seller_id': {'required': True}, - 'access_key_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'marketplace_id': {'key': 'typeProperties.marketplaceID', 'type': 'object'}, - 'seller_id': {'key': 'typeProperties.sellerID', 'type': 'object'}, - 'mws_auth_token': {'key': 'typeProperties.mwsAuthToken', 'type': 'SecretBase'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_key': {'key': 'typeProperties.secretKey', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AmazonMwsLinkedService, self).__init__(**kwargs) - self.type = 'AmazonMWS' # type: str - self.endpoint = kwargs['endpoint'] - self.marketplace_id = kwargs['marketplace_id'] - self.seller_id = kwargs['seller_id'] - self.mws_auth_token = kwargs.get('mws_auth_token', None) - self.access_key_id = kwargs['access_key_id'] - self.secret_key = kwargs.get('secret_key', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class Dataset(msrest.serialization.Model): - """The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. - - You probably want to use the sub-classes and not this class directly. 
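The `_subtype_map` on `LinkedService` above is what drives polymorphic deserialization: msrest reads the `type` discriminator from the payload and instantiates the matching subclass. A minimal sketch, assuming these vendored, track-1 style classes behave like the upstream `azure.mgmt.datafactory.models` package; endpoint and ID values are placeholders:

from azure.mgmt.datafactory.models import AmazonMwsLinkedService, LinkedService

# "type" is the discriminator listed in LinkedService._subtype_map; the
# typeProperties.* nesting mirrors the _attribute_map keys shown above.
payload = {
    "type": "AmazonMWS",
    "typeProperties": {
        "endpoint": "mws.amazonservices.com",   # placeholder
        "marketplaceID": "A2EUQ1WTGCTBG2",      # placeholder
        "sellerID": "example-seller",           # placeholder
        "accessKeyId": "AKIA-EXAMPLE",          # placeholder
    },
}

linked_service = LinkedService.deserialize(payload)
assert isinstance(linked_service, AmazonMwsLinkedService)
print(linked_service.endpoint, linked_service.marketplace_id)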
Known - sub-classes are: AmazonMwsObjectDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFsDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureDatabricksDeltaLakeDataset, AzureMariaDbTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDwTableDataset, AzureSqlMiTableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAxResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDbTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbAtlasCollectionDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - } - - _subtype_map = { - 'type': {'AmazonMWSObject': 'AmazonMwsObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFsDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureDatabricksDeltaLakeDataset': 'AzureDatabricksDeltaLakeDataset', 'AzureMariaDBTable': 'AzureMariaDbTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDwTableDataset', 'AzureSqlMITable': 'AzureSqlMiTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAxResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDbTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbAtlasCollection': 'MongoDbAtlasCollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 
'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} - } - - def __init__( - self, - **kwargs - ): - super(Dataset, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'Dataset' # type: str - self.description = kwargs.get('description', None) - self.structure = kwargs.get('structure', None) - self.schema = kwargs.get('schema', None) - self.linked_service_name = kwargs['linked_service_name'] - self.parameters = kwargs.get('parameters', None) - self.annotations = kwargs.get('annotations', None) - self.folder = kwargs.get('folder', None) - - -class AmazonMwsObjectDataset(Dataset): - """Amazon Marketplace Web Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AmazonMwsObjectDataset, self).__init__(**kwargs) - self.type = 'AmazonMWSObject' # type: str - self.table_name = kwargs.get('table_name', None) - - -class CopySource(msrest.serialization.Model): - """A copy activity source. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSource, AzureBlobFsSource, AzureDataExplorerSource, AzureDataLakeStoreSource, AzureDatabricksDeltaLakeSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDbMongoDbApiSource, CosmosDbSqlApiSource, DelimitedTextSource, DocumentDbCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDbAtlasSource, MongoDbSource, MongoDbV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). 
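As a usage sketch for the kwargs-based constructors above (names are placeholders, and this assumes the track-1 style `LinkedServiceReference` that accepts `reference_name` directly): a dataset only needs its required `linked_service_name` plus whatever typeProperties it maps, here `tableName`.

from azure.mgmt.datafactory.models import AmazonMwsObjectDataset, LinkedServiceReference

# linked_service_name is required (see _validation above); table_name is
# serialized under typeProperties.tableName per the _attribute_map.
mws_dataset = AmazonMwsObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name="AmazonMwsLinkedService1"),
    table_name="Orders",
)
print(mws_dataset.serialize())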
- :type disable_metrics_collection: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - } - - _subtype_map = { - 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFsSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'AzureDatabricksDeltaLakeSource': 'AzureDatabricksDeltaLakeSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbAtlasSource': 'MongoDbAtlasSource', 'MongoDbSource': 'MongoDbSource', 'MongoDbV2Source': 'MongoDbV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} - } - - def __init__( - self, - **kwargs - ): - super(CopySource, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'CopySource' # type: str - self.source_retry_count = kwargs.get('source_retry_count', None) - self.source_retry_wait = kwargs.get('source_retry_wait', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None) - - -class TabularSource(CopySource): - """Copy activity sources of tabular type. - - You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonMwsSource, AmazonRedshiftSource, AzureMariaDbSource, AzureMySqlSource, AzurePostgreSqlSource, AzureSqlSource, AzureTableSource, CassandraSource, ConcurSource, CouchbaseSource, Db2Source, DrillSource, DynamicsAxSource, EloquaSource, GoogleAdWordsSource, GoogleBigQuerySource, GreenplumSource, HBaseSource, HiveSource, HubspotSource, ImpalaSource, InformixSource, JiraSource, MagentoSource, MariaDbSource, MarketoSource, MySqlSource, NetezzaSource, OdbcSource, OracleServiceCloudSource, PaypalSource, PhoenixSource, PostgreSqlSource, PrestoSource, QuickBooksSource, ResponsysSource, SalesforceMarketingCloudSource, SalesforceSource, SapBwSource, SapCloudForCustomerSource, SapEccSource, SapHanaSource, SapOpenHubSource, SapTableSource, ServiceNowSource, ShopifySource, SparkSource, SqlDwSource, SqlMiSource, SqlServerSource, SqlSource, SquareSource, SybaseSource, TeradataSource, VerticaSource, XeroSource, ZohoSource. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - _subtype_map = { - 'type': {'AmazonMWSSource': 'AmazonMwsSource', 'AmazonRedshiftSource': 'AmazonRedshiftSource', 'AzureMariaDBSource': 'AzureMariaDbSource', 'AzureMySqlSource': 'AzureMySqlSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AzureSqlSource': 'AzureSqlSource', 'AzureTableSource': 'AzureTableSource', 'CassandraSource': 'CassandraSource', 'ConcurSource': 'ConcurSource', 'CouchbaseSource': 'CouchbaseSource', 'Db2Source': 'Db2Source', 'DrillSource': 'DrillSource', 'DynamicsAXSource': 'DynamicsAxSource', 'EloquaSource': 'EloquaSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'GreenplumSource': 'GreenplumSource', 'HBaseSource': 'HBaseSource', 'HiveSource': 'HiveSource', 'HubspotSource': 'HubspotSource', 'ImpalaSource': 'ImpalaSource', 'InformixSource': 'InformixSource', 'JiraSource': 'JiraSource', 'MagentoSource': 'MagentoSource', 'MariaDBSource': 'MariaDbSource', 'MarketoSource': 'MarketoSource', 'MySqlSource': 'MySqlSource', 'NetezzaSource': 'NetezzaSource', 'OdbcSource': 'OdbcSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'PaypalSource': 'PaypalSource', 'PhoenixSource': 'PhoenixSource', 'PostgreSqlSource': 'PostgreSqlSource', 'PrestoSource': 'PrestoSource', 'QuickBooksSource': 'QuickBooksSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SapEccSource': 'SapEccSource', 'SapHanaSource': 'SapHanaSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapTableSource': 'SapTableSource', 'ServiceNowSource': 'ServiceNowSource', 'ShopifySource': 'ShopifySource', 'SparkSource': 'SparkSource', 'SqlDWSource': 'SqlDwSource', 'SqlMISource': 'SqlMiSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'SquareSource': 'SquareSource', 'SybaseSource': 'SybaseSource', 'TeradataSource': 'TeradataSource', 'VerticaSource': 'VerticaSource', 'XeroSource': 'XeroSource', 'ZohoSource': 'ZohoSource'} - } - - def __init__( - self, - **kwargs - ): - super(TabularSource, self).__init__(**kwargs) - self.type = 'TabularSource' # type: str - self.query_timeout = kwargs.get('query_timeout', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class AmazonMwsSource(TabularSource): - """A copy activity Amazon Marketplace Web Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. 
Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AmazonMwsSource, self).__init__(**kwargs) - self.type = 'AmazonMWSSource' # type: str - self.query = kwargs.get('query', None) - - -class AmazonRedshiftLinkedService(LinkedService): - """Linked service for Amazon Redshift. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param server: Required. The name of the Amazon Redshift server. Type: string (or Expression - with resultType string). - :type server: object - :param username: The username of the Amazon Redshift source. Type: string (or Expression with - resultType string). - :type username: object - :param password: The password of the Amazon Redshift source. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param database: Required. The database name of the Amazon Redshift source. Type: string (or - Expression with resultType string). 
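Tying the `TabularSource` fields above together, a hedged sketch of an Amazon MWS copy source: `query_timeout` and `source_retry_wait` follow the timespan pattern documented above, and `additional_columns` is passed through as a list of name/value objects. All values here are illustrative only.

from azure.mgmt.datafactory.models import AmazonMwsSource

mws_source = AmazonMwsSource(
    query="SELECT * FROM Orders",   # pushed down to the MWS connector
    query_timeout="02:00:00",       # timespan pattern from the docstring
    source_retry_count=3,
    source_retry_wait="00:00:30",
    additional_columns=[{"name": "ingested_from", "value": "amazon-mws"}],
)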
- :type database: object - :param port: The TCP port number that the Amazon Redshift server uses to listen for client - connections. The default value is 5439. Type: integer (or Expression with resultType integer). - :type port: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AmazonRedshiftLinkedService, self).__init__(**kwargs) - self.type = 'AmazonRedshift' # type: str - self.server = kwargs['server'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.database = kwargs['database'] - self.port = kwargs.get('port', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class AmazonRedshiftSource(TabularSource): - """A copy activity source for Amazon Redshift Source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: Database query. Type: string (or Expression with resultType string). 
- :type query: object - :param redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when - copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be - unloaded into S3 first and then copied into the targeted sink from the interim S3. - :type redshift_unload_settings: ~azure.mgmt.datafactory.models.RedshiftUnloadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(AmazonRedshiftSource, self).__init__(**kwargs) - self.type = 'AmazonRedshiftSource' # type: str - self.query = kwargs.get('query', None) - self.redshift_unload_settings = kwargs.get('redshift_unload_settings', None) - - -class AmazonRedshiftTableDataset(Dataset): - """The Amazon Redshift table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The Amazon Redshift table name. Type: string (or Expression with resultType - string). - :type table: object - :param schema_type_properties_schema: The Amazon Redshift schema name. Type: string (or - Expression with resultType string). 
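A sketch of the UNLOAD path described for `redshift_unload_settings` above: data is staged to an interim S3 bucket, referenced through its own linked service, before being copied to the sink. The `RedshiftUnloadSettings` usage and the reference name are assumptions for illustration.

from azure.mgmt.datafactory.models import (
    AmazonRedshiftSource,
    LinkedServiceReference,
    RedshiftUnloadSettings,
)

redshift_source = AmazonRedshiftSource(
    query="SELECT * FROM public.orders",
    redshift_unload_settings=RedshiftUnloadSettings(
        s3_linked_service_name=LinkedServiceReference(reference_name="InterimS3"),  # placeholder
        bucket_name="adf-redshift-staging",                                          # placeholder
    ),
)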
- :type schema_type_properties_schema: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AmazonRedshiftTableDataset, self).__init__(**kwargs) - self.type = 'AmazonRedshiftTable' # type: str - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - - -class AmazonS3CompatibleLinkedService(LinkedService): - """Linked service for Amazon S3 Compatible. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param access_key_id: The access key identifier of the Amazon S3 Compatible Identity and Access - Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: object - :param secret_access_key: The secret access key of the Amazon S3 Compatible Identity and Access - Management (IAM) user. - :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_url: This value specifies the endpoint to access with the Amazon S3 Compatible - Connector. This is an optional property; change it only if you want to try a different service - endpoint or want to switch between https and http. Type: string (or Expression with resultType - string). - :type service_url: object - :param force_path_style: If true, use S3 path-style access instead of virtual hosted-style - access. Default value is false. Type: boolean (or Expression with resultType boolean). - :type force_path_style: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, - 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, - 'force_path_style': {'key': 'typeProperties.forcePathStyle', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AmazonS3CompatibleLinkedService, self).__init__(**kwargs) - self.type = 'AmazonS3Compatible' # type: str - self.access_key_id = kwargs.get('access_key_id', None) - self.secret_access_key = kwargs.get('secret_access_key', None) - self.service_url = kwargs.get('service_url', None) - self.force_path_style = kwargs.get('force_path_style', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class DatasetLocation(msrest.serialization.Model): - """Dataset location. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonS3CompatibleLocation, AmazonS3Location, AzureBlobFsLocation, AzureBlobStorageLocation, AzureDataLakeStoreLocation, AzureFileStorageLocation, FileServerLocation, FtpServerLocation, GoogleCloudStorageLocation, HdfsLocation, HttpServerLocation, OracleCloudStorageLocation, SftpLocation. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType - string). 
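A hedged construction example for the S3-compatible linked service above, pointing the connector at a non-AWS endpoint (for example MinIO) with path-style addressing; the URL and credentials are placeholders, and `SecureString` is the usual `SecretBase` subclass for inline secrets.

from azure.mgmt.datafactory.models import AmazonS3CompatibleLinkedService, SecureString

s3_compatible_ls = AmazonS3CompatibleLinkedService(
    access_key_id="AKIA-EXAMPLE",                             # placeholder
    secret_access_key=SecureString(value="example-secret"),   # placeholder
    service_url="https://minio.internal.example.com:9000",    # placeholder endpoint
    force_path_style=True,                                     # path-style instead of virtual-hosted
)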
- :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - _subtype_map = { - 'type': {'AmazonS3CompatibleLocation': 'AmazonS3CompatibleLocation', 'AmazonS3Location': 'AmazonS3Location', 'AzureBlobFSLocation': 'AzureBlobFsLocation', 'AzureBlobStorageLocation': 'AzureBlobStorageLocation', 'AzureDataLakeStoreLocation': 'AzureDataLakeStoreLocation', 'AzureFileStorageLocation': 'AzureFileStorageLocation', 'FileServerLocation': 'FileServerLocation', 'FtpServerLocation': 'FtpServerLocation', 'GoogleCloudStorageLocation': 'GoogleCloudStorageLocation', 'HdfsLocation': 'HdfsLocation', 'HttpServerLocation': 'HttpServerLocation', 'OracleCloudStorageLocation': 'OracleCloudStorageLocation', 'SftpLocation': 'SftpLocation'} - } - - def __init__( - self, - **kwargs - ): - super(DatasetLocation, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'DatasetLocation' # type: str - self.folder_path = kwargs.get('folder_path', None) - self.file_name = kwargs.get('file_name', None) - - -class AmazonS3CompatibleLocation(DatasetLocation): - """The location of Amazon S3 Compatible dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType - string). - :type file_name: object - :param bucket_name: Specify the bucketName of Amazon S3 Compatible. Type: string (or Expression - with resultType string). - :type bucket_name: object - :param version: Specify the version of Amazon S3 Compatible. Type: string (or Expression with - resultType string). - :type version: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'bucket_name': {'key': 'bucketName', 'type': 'object'}, - 'version': {'key': 'version', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AmazonS3CompatibleLocation, self).__init__(**kwargs) - self.type = 'AmazonS3CompatibleLocation' # type: str - self.bucket_name = kwargs.get('bucket_name', None) - self.version = kwargs.get('version', None) - - -class StoreReadSettings(msrest.serialization.Model): - """Connector read setting. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonS3CompatibleReadSettings, AmazonS3ReadSettings, AzureBlobFsReadSettings, AzureBlobStorageReadSettings, AzureDataLakeStoreReadSettings, AzureFileStorageReadSettings, FileServerReadSettings, FtpReadSettings, GoogleCloudStorageReadSettings, HdfsReadSettings, HttpReadSettings, OracleCloudStorageReadSettings, SftpReadSettings. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - } - - _subtype_map = { - 'type': {'AmazonS3CompatibleReadSettings': 'AmazonS3CompatibleReadSettings', 'AmazonS3ReadSettings': 'AmazonS3ReadSettings', 'AzureBlobFSReadSettings': 'AzureBlobFsReadSettings', 'AzureBlobStorageReadSettings': 'AzureBlobStorageReadSettings', 'AzureDataLakeStoreReadSettings': 'AzureDataLakeStoreReadSettings', 'AzureFileStorageReadSettings': 'AzureFileStorageReadSettings', 'FileServerReadSettings': 'FileServerReadSettings', 'FtpReadSettings': 'FtpReadSettings', 'GoogleCloudStorageReadSettings': 'GoogleCloudStorageReadSettings', 'HdfsReadSettings': 'HdfsReadSettings', 'HttpReadSettings': 'HttpReadSettings', 'OracleCloudStorageReadSettings': 'OracleCloudStorageReadSettings', 'SftpReadSettings': 'SftpReadSettings'} - } - - def __init__( - self, - **kwargs - ): - super(StoreReadSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'StoreReadSettings' # type: str - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None) - - -class AmazonS3CompatibleReadSettings(StoreReadSettings): - """Amazon S3 Compatible read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param recursive: If true, files under the folder path will be read recursively. Default is - true. Type: boolean (or Expression with resultType boolean). - :type recursive: object - :param wildcard_folder_path: Amazon S3 Compatible wildcardFolderPath. Type: string (or - Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Amazon S3 Compatible wildcardFileName. Type: string (or Expression - with resultType string). 
- :type wildcard_file_name: object - :param prefix: The prefix filter for the S3 Compatible object name. Type: string (or Expression - with resultType string). - :type prefix: object - :param file_list_path: Point to a text file that lists each file (relative path to the path - configured in the dataset) that you want to copy. Type: string (or Expression with resultType - string). - :type file_list_path: object - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: - string (or Expression with resultType string). - :type partition_root_path: object - :param delete_files_after_completion: Indicates whether the source files need to be deleted - after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object - :param modified_datetime_start: The start of file's modified datetime. Type: string (or - Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression - with resultType string). - :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'prefix': {'key': 'prefix', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AmazonS3CompatibleReadSettings, self).__init__(**kwargs) - self.type = 'AmazonS3CompatibleReadSettings' # type: str - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.prefix = kwargs.get('prefix', None) - self.file_list_path = kwargs.get('file_list_path', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.partition_root_path = kwargs.get('partition_root_path', None) - self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - - -class AmazonS3Dataset(Dataset): - """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. 
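A sketch of the read settings above in use: recursive wildcard matching under a folder, narrowed by a modified-datetime window. The prefix, wildcard, and file_list_path options are alternative ways of selecting objects, so only the wildcard style is set here; paths and dates are illustrative.

from azure.mgmt.datafactory.models import AmazonS3CompatibleReadSettings

read_settings = AmazonS3CompatibleReadSettings(
    recursive=True,
    wildcard_folder_path="landing/2022/*",
    wildcard_file_name="*.csv",
    modified_datetime_start="2022-10-01T00:00:00Z",
    modified_datetime_end="2022-10-18T00:00:00Z",
)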
- :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param bucket_name: Required. The name of the Amazon S3 bucket. Type: string (or Expression - with resultType string). - :type bucket_name: object - :param key: The key of the Amazon S3 object. Type: string (or Expression with resultType - string). - :type key: object - :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with - resultType string). - :type prefix: object - :param version: The version for the S3 object. Type: string (or Expression with resultType - string). - :type version: object - :param modified_datetime_start: The start of S3 object's modified datetime. Type: string (or - Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of S3 object's modified datetime. Type: string (or - Expression with resultType string). - :type modified_datetime_end: object - :param format: The format of files. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the Amazon S3 object. 
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'bucket_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'bucket_name': {'key': 'typeProperties.bucketName', 'type': 'object'}, - 'key': {'key': 'typeProperties.key', 'type': 'object'}, - 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, - 'version': {'key': 'typeProperties.version', 'type': 'object'}, - 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__( - self, - **kwargs - ): - super(AmazonS3Dataset, self).__init__(**kwargs) - self.type = 'AmazonS3Object' # type: str - self.bucket_name = kwargs['bucket_name'] - self.key = kwargs.get('key', None) - self.prefix = kwargs.get('prefix', None) - self.version = kwargs.get('version', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - self.format = kwargs.get('format', None) - self.compression = kwargs.get('compression', None) - - -class AmazonS3LinkedService(LinkedService): - """Linked service for Amazon S3. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param authentication_type: The authentication type of S3. Allowed value: AccessKey (default) - or TemporarySecurityCredentials. Type: string (or Expression with resultType string). - :type authentication_type: object - :param access_key_id: The access key identifier of the Amazon S3 Identity and Access Management - (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: object - :param secret_access_key: The secret access key of the Amazon S3 Identity and Access Management - (IAM) user. - :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_url: This value specifies the endpoint to access with the S3 Connector. 
This is - an optional property; change it only if you want to try a different service endpoint or want to - switch between https and http. Type: string (or Expression with resultType string). - :type service_url: object - :param session_token: The session token for the S3 temporary security credential. - :type session_token: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, - 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, - 'session_token': {'key': 'typeProperties.sessionToken', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AmazonS3LinkedService, self).__init__(**kwargs) - self.type = 'AmazonS3' # type: str - self.authentication_type = kwargs.get('authentication_type', None) - self.access_key_id = kwargs.get('access_key_id', None) - self.secret_access_key = kwargs.get('secret_access_key', None) - self.service_url = kwargs.get('service_url', None) - self.session_token = kwargs.get('session_token', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class AmazonS3Location(DatasetLocation): - """The location of amazon S3 dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType - string). - :type file_name: object - :param bucket_name: Specify the bucketName of amazon S3. Type: string (or Expression with - resultType string). - :type bucket_name: object - :param version: Specify the version of amazon S3. Type: string (or Expression with resultType - string). 
- :type version: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'bucket_name': {'key': 'bucketName', 'type': 'object'}, - 'version': {'key': 'version', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AmazonS3Location, self).__init__(**kwargs) - self.type = 'AmazonS3Location' # type: str - self.bucket_name = kwargs.get('bucket_name', None) - self.version = kwargs.get('version', None) - - -class AmazonS3ReadSettings(StoreReadSettings): - """Amazon S3 read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param recursive: If true, files under the folder path will be read recursively. Default is - true. Type: boolean (or Expression with resultType boolean). - :type recursive: object - :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or Expression with - resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or Expression with - resultType string). - :type wildcard_file_name: object - :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with - resultType string). - :type prefix: object - :param file_list_path: Point to a text file that lists each file (relative path to the path - configured in the dataset) that you want to copy. Type: string (or Expression with resultType - string). - :type file_list_path: object - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: - string (or Expression with resultType string). - :type partition_root_path: object - :param delete_files_after_completion: Indicates whether the source files need to be deleted - after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object - :param modified_datetime_start: The start of file's modified datetime. Type: string (or - Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression - with resultType string). 
- :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'prefix': {'key': 'prefix', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AmazonS3ReadSettings, self).__init__(**kwargs) - self.type = 'AmazonS3ReadSettings' # type: str - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.prefix = kwargs.get('prefix', None) - self.file_list_path = kwargs.get('file_list_path', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.partition_root_path = kwargs.get('partition_root_path', None) - self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - - -class AppendVariableActivity(Activity): - """Append value for a Variable of type Array. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param variable_name: Name of the variable whose value needs to be appended to. - :type variable_name: str - :param value: Value to be appended. Could be a static value or Expression. 
- :type value: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'value': {'key': 'typeProperties.value', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AppendVariableActivity, self).__init__(**kwargs) - self.type = 'AppendVariable' # type: str - self.variable_name = kwargs.get('variable_name', None) - self.value = kwargs.get('value', None) - - -class ArmIdWrapper(msrest.serialization.Model): - """A wrapper for an ARM resource id. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: - :vartype id: str - """ - - _validation = { - 'id': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ArmIdWrapper, self).__init__(**kwargs) - self.id = None - - -class AvroDataset(Dataset): - """Avro dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the avro storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param avro_compression_codec: The data avroCompressionCodec. Type: string (or Expression with - resultType string). 
- :type avro_compression_codec: object - :param avro_compression_level: - :type avro_compression_level: int - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'avro_compression_level': {'maximum': 9, 'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'object'}, - 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(AvroDataset, self).__init__(**kwargs) - self.type = 'Avro' # type: str - self.location = kwargs.get('location', None) - self.avro_compression_codec = kwargs.get('avro_compression_codec', None) - self.avro_compression_level = kwargs.get('avro_compression_level', None) - - -class DatasetStorageFormat(msrest.serialization.Model): - """The format definition of a storage. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroFormat, JsonFormat, OrcFormat, ParquetFormat, TextFormat. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage format.Constant filled by server. - :type type: str - :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - } - - _subtype_map = { - 'type': {'AvroFormat': 'AvroFormat', 'JsonFormat': 'JsonFormat', 'OrcFormat': 'OrcFormat', 'ParquetFormat': 'ParquetFormat', 'TextFormat': 'TextFormat'} - } - - def __init__( - self, - **kwargs - ): - super(DatasetStorageFormat, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'DatasetStorageFormat' # type: str - self.serializer = kwargs.get('serializer', None) - self.deserializer = kwargs.get('deserializer', None) - - -class AvroFormat(DatasetStorageFormat): - """The data stored in Avro format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage format.Constant filled by server. - :type type: str - :param serializer: Serializer. 
Type: string (or Expression with resultType string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AvroFormat, self).__init__(**kwargs) - self.type = 'AvroFormat' # type: str - - -class CopySink(msrest.serialization.Model): - """A copy activity sink. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, MongoDbAtlasSink, MongoDbV2Sink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - } - - _subtype_map = { - 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'MongoDbAtlasSink': 'MongoDbAtlasSink', 'MongoDbV2Sink': 'MongoDbV2Sink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} - } - - def __init__( - self, - **kwargs - ): - super(CopySink, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'CopySink' # type: str - self.write_batch_size = kwargs.get('write_batch_size', None) - self.write_batch_timeout = kwargs.get('write_batch_timeout', None) - self.sink_retry_count = kwargs.get('sink_retry_count', None) - self.sink_retry_wait = kwargs.get('sink_retry_wait', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None) - - -class AvroSink(CopySink): - """A copy activity Avro sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param store_settings: Avro store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: Avro format settings. - :type format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(AvroSink, self).__init__(**kwargs) - self.type = 'AvroSink' # type: str - self.store_settings = kwargs.get('store_settings', None) - self.format_settings = kwargs.get('format_settings', None) - - -class AvroSource(CopySource): - """A copy activity Avro source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param store_settings: Avro store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AvroSource, self).__init__(**kwargs) - self.type = 'AvroSource' # type: str - self.store_settings = kwargs.get('store_settings', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class FormatWriteSettings(msrest.serialization.Model): - """Format write settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings, OrcWriteSettings, ParquetWriteSettings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings', 'OrcWriteSettings': 'OrcWriteSettings', 'ParquetWriteSettings': 'ParquetWriteSettings'} - } - - def __init__( - self, - **kwargs - ): - super(FormatWriteSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'FormatWriteSettings' # type: str - - -class AvroWriteSettings(FormatWriteSettings): - """Avro write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param record_name: Top level record name in write result, which is required in AVRO spec. - :type record_name: str - :param record_namespace: Record namespace in the write result. - :type record_namespace: str - :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the - specified count. Type: integer (or Expression with resultType integer). - :type max_rows_per_file: object - :param file_name_prefix: Specifies the file name pattern - :code:``_:code:``.:code:`` when copy from non-file - based store without partitionOptions. Type: string (or Expression with resultType string). 
- :type file_name_prefix: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'record_name': {'key': 'recordName', 'type': 'str'}, - 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, - 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, - 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AvroWriteSettings, self).__init__(**kwargs) - self.type = 'AvroWriteSettings' # type: str - self.record_name = kwargs.get('record_name', None) - self.record_namespace = kwargs.get('record_namespace', None) - self.max_rows_per_file = kwargs.get('max_rows_per_file', None) - self.file_name_prefix = kwargs.get('file_name_prefix', None) - - -class CustomSetupBase(msrest.serialization.Model): - """The base definition of the custom setup. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzPowerShellSetup, CmdkeySetup, ComponentSetup, EnvironmentVariableSetup. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. The type of custom setup.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'AzPowerShellSetup': 'AzPowerShellSetup', 'CmdkeySetup': 'CmdkeySetup', 'ComponentSetup': 'ComponentSetup', 'EnvironmentVariableSetup': 'EnvironmentVariableSetup'} - } - - def __init__( - self, - **kwargs - ): - super(CustomSetupBase, self).__init__(**kwargs) - self.type = None # type: Optional[str] - - -class AzPowerShellSetup(CustomSetupBase): - """The express custom setup of installing Azure PowerShell. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. The type of custom setup.Constant filled by server. - :type type: str - :param version: Required. The required version of Azure PowerShell to install. - :type version: str - """ - - _validation = { - 'type': {'required': True}, - 'version': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'version': {'key': 'typeProperties.version', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AzPowerShellSetup, self).__init__(**kwargs) - self.type = 'AzPowerShellSetup' # type: str - self.version = kwargs['version'] - - -class AzureBatchLinkedService(LinkedService): - """Azure Batch linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param account_name: Required. The Azure Batch account name. Type: string (or Expression with - resultType string). 
- :type account_name: object - :param access_key: The Azure Batch account access key. - :type access_key: ~azure.mgmt.datafactory.models.SecretBase - :param batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType - string). - :type batch_uri: object - :param pool_name: Required. The Azure Batch pool name. Type: string (or Expression with - resultType string). - :type pool_name: object - :param linked_service_name: Required. The Azure Storage linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference - """ - - _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'batch_uri': {'required': True}, - 'pool_name': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, - 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, - 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureBatchLinkedService, self).__init__(**kwargs) - self.type = 'AzureBatch' # type: str - self.account_name = kwargs['account_name'] - self.access_key = kwargs.get('access_key', None) - self.batch_uri = kwargs['batch_uri'] - self.pool_name = kwargs['pool_name'] - self.linked_service_name = kwargs['linked_service_name'] - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.credential = kwargs.get('credential', None) - - -class AzureBlobDataset(Dataset): - """The Azure Blob storage. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. 
Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param folder_path: The path of the Azure Blob storage. Type: string (or Expression with - resultType string). - :type folder_path: object - :param table_root_location: The root of blob path. Type: string (or Expression with resultType - string). - :type table_root_location: object - :param file_name: The name of the Azure Blob. Type: string (or Expression with resultType - string). - :type file_name: object - :param modified_datetime_start: The start of Azure Blob's modified datetime. Type: string (or - Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of Azure Blob's modified datetime. Type: string (or - Expression with resultType string). - :type modified_datetime_end: object - :param format: The format of the Azure Blob storage. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the blob storage. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureBlobDataset, self).__init__(**kwargs) - self.type = 'AzureBlob' # type: str - self.folder_path = kwargs.get('folder_path', None) - self.table_root_location = kwargs.get('table_root_location', None) - self.file_name = kwargs.get('file_name', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - self.format = kwargs.get('format', None) - self.compression = kwargs.get('compression', None) - - -class AzureBlobFsDataset(Dataset): - """The Azure Data Lake Storage Gen2 storage. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. Type: string (or - Expression with resultType string). - :type folder_path: object - :param file_name: The name of the Azure Data Lake Storage Gen2. Type: string (or Expression - with resultType string). - :type file_name: object - :param format: The format of the Azure Data Lake Storage Gen2 storage. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the blob storage. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureBlobFsDataset, self).__init__(**kwargs) - self.type = 'AzureBlobFSFile' # type: str - self.folder_path = kwargs.get('folder_path', None) - self.file_name = kwargs.get('file_name', None) - self.format = kwargs.get('format', None) - self.compression = kwargs.get('compression', None) - - -class AzureBlobFsLinkedService(LinkedService): - """Azure Data Lake Storage Gen2 linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. 
Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or - Expression with resultType string). - :type url: object - :param account_key: Account key for the Azure Data Lake Storage Gen2 service. Type: string (or - Expression with resultType string). - :type account_key: object - :param service_principal_id: The ID of the application used to authenticate against the Azure - Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to authenticate against the Azure - Data Lake Storage Gen2 account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed - values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data - factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param credential: The credential reference containing authentication information. 
- :type credential: ~azure.mgmt.datafactory.models.CredentialReference - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureBlobFsLinkedService, self).__init__(**kwargs) - self.type = 'AzureBlobFS' # type: str - self.url = kwargs['url'] - self.account_key = kwargs.get('account_key', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.azure_cloud_type = kwargs.get('azure_cloud_type', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.credential = kwargs.get('credential', None) - - -class AzureBlobFsLocation(DatasetLocation): - """The location of azure blobFS dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType - string). - :type file_name: object - :param file_system: Specify the fileSystem of azure blobFS. Type: string (or Expression with - resultType string). - :type file_system: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'file_system': {'key': 'fileSystem', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureBlobFsLocation, self).__init__(**kwargs) - self.type = 'AzureBlobFSLocation' # type: str - self.file_system = kwargs.get('file_system', None) - - -class AzureBlobFsReadSettings(StoreReadSettings): - """Azure blobFS read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. 
The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param recursive: If true, files under the folder path will be read recursively. Default is - true. Type: boolean (or Expression with resultType boolean). - :type recursive: object - :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string (or Expression with - resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or Expression with - resultType string). - :type wildcard_file_name: object - :param file_list_path: Point to a text file that lists each file (relative path to the path - configured in the dataset) that you want to copy. Type: string (or Expression with resultType - string). - :type file_list_path: object - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: - string (or Expression with resultType string). - :type partition_root_path: object - :param delete_files_after_completion: Indicates whether the source files need to be deleted - after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object - :param modified_datetime_start: The start of file's modified datetime. Type: string (or - Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression - with resultType string). 
- :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureBlobFsReadSettings, self).__init__(**kwargs) - self.type = 'AzureBlobFSReadSettings' # type: str - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.file_list_path = kwargs.get('file_list_path', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.partition_root_path = kwargs.get('partition_root_path', None) - self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - - -class AzureBlobFsSink(CopySink): - """A copy activity Azure Data Lake Storage Gen2 sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param metadata: Specify the custom metadata to be added to sink data. 
Type: array of objects - (or Expression with resultType array of objects). - :type metadata: list[~azure.mgmt.datafactory.models.MetadataItem] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureBlobFsSink, self).__init__(**kwargs) - self.type = 'AzureBlobFSSink' # type: str - self.copy_behavior = kwargs.get('copy_behavior', None) - self.metadata = kwargs.get('metadata', None) - - -class AzureBlobFsSource(CopySource): - """A copy activity Azure BlobFS source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType - boolean). - :type treat_empty_as_null: object - :param skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or - Expression with resultType integer). - :type skip_header_line_count: object - :param recursive: If true, files under the folder path will be read recursively. Default is - true. Type: boolean (or Expression with resultType boolean). 
- :type recursive: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, - 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureBlobFsSource, self).__init__(**kwargs) - self.type = 'AzureBlobFSSource' # type: str - self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) - self.skip_header_line_count = kwargs.get('skip_header_line_count', None) - self.recursive = kwargs.get('recursive', None) - - -class StoreWriteSettings(msrest.serialization.Model): - """Connector write settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureBlobFsWriteSettings, AzureBlobStorageWriteSettings, AzureDataLakeStoreWriteSettings, AzureFileStorageWriteSettings, FileServerWriteSettings, SftpWriteSettings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param copy_behavior: The type of copy behavior for copy sink. 
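The two ADLS Gen2 copy halves removed above pair naturally in a copy activity: AzureBlobFsSource controls how blobs are read and AzureBlobFsSink how they are written. A hedged sketch with placeholder values; "FlattenHierarchy" is shown only as an illustrative copyBehavior string.

from azure.mgmt.datafactory.models import AzureBlobFsSink, AzureBlobFsSource

# Read side: recurse through folders and drop one header line per blob.
source = AzureBlobFsSource(
    recursive=True,
    skip_header_line_count=1,
    treat_empty_as_null=True,
)

# Write side: collapse the source folder structure into a single target folder.
sink = AzureBlobFsSink(copy_behavior="FlattenHierarchy")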
- :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - _subtype_map = { - 'type': {'AzureBlobFSWriteSettings': 'AzureBlobFsWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureFileStorageWriteSettings': 'AzureFileStorageWriteSettings', 'FileServerWriteSettings': 'FileServerWriteSettings', 'SftpWriteSettings': 'SftpWriteSettings'} - } - - def __init__( - self, - **kwargs - ): - super(StoreWriteSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'StoreWriteSettings' # type: str - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None) - self.copy_behavior = kwargs.get('copy_behavior', None) - - -class AzureBlobFsWriteSettings(StoreWriteSettings): - """Azure blobFS write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer - (or Expression with resultType integer). - :type block_size_in_mb: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureBlobFsWriteSettings, self).__init__(**kwargs) - self.type = 'AzureBlobFSWriteSettings' # type: str - self.block_size_in_mb = kwargs.get('block_size_in_mb', None) - - -class AzureBlobStorageLinkedService(LinkedService): - """The azure blob storage linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. 
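The _subtype_map on StoreWriteSettings is what lets msrest turn a JSON payload whose type discriminator is "AzureBlobFSWriteSettings" into the AzureBlobFsWriteSettings subclass defined just above. A rough sketch of that round trip, assuming msrest's Model.deserialize helper resolves subtypes here as it does in other generated SDKs; the payload values are placeholders.

from azure.mgmt.datafactory.models import StoreWriteSettings

# The "type" key selects the concrete subclass through _subtype_map.
payload = {
    "type": "AzureBlobFSWriteSettings",
    "copyBehavior": "PreserveHierarchy",
    "blockSizeInMB": 8,
}
settings = StoreWriteSettings.deserialize(payload)
# settings is expected to be an AzureBlobFsWriteSettings instance at this point.
print(type(settings).__name__)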
- :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: The connection string. It is mutually exclusive with sasUri, - serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually exclusive with - connectionString, serviceEndpoint property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is - mutually exclusive with connectionString, sasUri property. - :type service_endpoint: str - :param service_principal_id: The ID of the service principal used to authenticate against Azure - SQL Data Warehouse. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Azure SQL Data Warehouse. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed - values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data - factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object - :param account_kind: Specify the kind of your storage account. Allowed values are: Storage - (general purpose v1), StorageV2 (general purpose v2), BlobStorage, or BlockBlobStorage. Type: - string (or Expression with resultType string). - :type account_kind: str - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: str - :param credential: The credential reference containing authentication information. 
- :type credential: ~azure.mgmt.datafactory.models.CredentialReference - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, - 'account_kind': {'key': 'typeProperties.accountKind', 'type': 'str'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureBlobStorageLinkedService, self).__init__(**kwargs) - self.type = 'AzureBlobStorage' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.sas_uri = kwargs.get('sas_uri', None) - self.sas_token = kwargs.get('sas_token', None) - self.service_endpoint = kwargs.get('service_endpoint', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.azure_cloud_type = kwargs.get('azure_cloud_type', None) - self.account_kind = kwargs.get('account_kind', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.credential = kwargs.get('credential', None) - - -class AzureBlobStorageLocation(DatasetLocation): - """The location of azure blob dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType - string). - :type file_name: object - :param container: Specify the container of azure blob. Type: string (or Expression with - resultType string). 
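The docstring above notes that connectionString, sasUri and serviceEndpoint are mutually exclusive, so a caller picks exactly one. A minimal sketch using the service-endpoint plus service-principal combination; every identifier below is a placeholder.

from azure.mgmt.datafactory.models import AzureBlobStorageLinkedService, SecureString

blob_ls = AzureBlobStorageLinkedService(
    service_endpoint="https://examplestorage.blob.core.windows.net",
    service_principal_id="00000000-0000-0000-0000-000000000000",
    service_principal_key=SecureString(value="<placeholder-secret>"),
    tenant="contoso.onmicrosoft.com",
    account_kind="StorageV2",
)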
- :type container: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'container': {'key': 'container', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureBlobStorageLocation, self).__init__(**kwargs) - self.type = 'AzureBlobStorageLocation' # type: str - self.container = kwargs.get('container', None) - - -class AzureBlobStorageReadSettings(StoreReadSettings): - """Azure blob read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param recursive: If true, files under the folder path will be read recursively. Default is - true. Type: boolean (or Expression with resultType boolean). - :type recursive: object - :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string (or Expression with - resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or Expression with - resultType string). - :type wildcard_file_name: object - :param prefix: The prefix filter for the Azure Blob name. Type: string (or Expression with - resultType string). - :type prefix: object - :param file_list_path: Point to a text file that lists each file (relative path to the path - configured in the dataset) that you want to copy. Type: string (or Expression with resultType - string). - :type file_list_path: object - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: - string (or Expression with resultType string). - :type partition_root_path: object - :param delete_files_after_completion: Indicates whether the source files need to be deleted - after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object - :param modified_datetime_start: The start of file's modified datetime. Type: string (or - Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression - with resultType string). 
- :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'prefix': {'key': 'prefix', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureBlobStorageReadSettings, self).__init__(**kwargs) - self.type = 'AzureBlobStorageReadSettings' # type: str - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.prefix = kwargs.get('prefix', None) - self.file_list_path = kwargs.get('file_list_path', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.partition_root_path = kwargs.get('partition_root_path', None) - self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - - -class AzureBlobStorageWriteSettings(StoreWriteSettings): - """Azure blob write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer - (or Expression with resultType integer). 
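Worth noting while skimming: AzureBlobStorageLocation describes where a dataset lives, while AzureBlobStorageReadSettings describes how a copy source enumerates blobs at run time, and only the latter carries the prefix filter. A small illustrative sketch with placeholder names.

from azure.mgmt.datafactory.models import (
    AzureBlobStorageLocation,
    AzureBlobStorageReadSettings,
)

# Dataset-level location: container/folder/file of the blobs.
location = AzureBlobStorageLocation(
    container="landing",
    folder_path="sales/2022",
    file_name="orders.csv",
)

# Activity-level read settings: prefix filtering instead of wildcards.
read_settings = AzureBlobStorageReadSettings(
    recursive=False,
    prefix="sales/2022/orders-",
    delete_files_after_completion=False,
)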
- :type block_size_in_mb: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureBlobStorageWriteSettings, self).__init__(**kwargs) - self.type = 'AzureBlobStorageWriteSettings' # type: str - self.block_size_in_mb = kwargs.get('block_size_in_mb', None) - - -class AzureDatabricksDeltaLakeDataset(Dataset): - """Azure Databricks Delta Lake dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table: The name of delta table. Type: string (or Expression with resultType string). - :type table: object - :param database: The database name of delta table. Type: string (or Expression with resultType - string). 
- :type database: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDatabricksDeltaLakeDataset, self).__init__(**kwargs) - self.type = 'AzureDatabricksDeltaLakeDataset' # type: str - self.table = kwargs.get('table', None) - self.database = kwargs.get('database', None) - - -class ExportSettings(msrest.serialization.Model): - """Export command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureDatabricksDeltaLakeExportCommand, SnowflakeExportCopyCommand. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The export setting type.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'AzureDatabricksDeltaLakeExportCommand': 'AzureDatabricksDeltaLakeExportCommand', 'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} - } - - def __init__( - self, - **kwargs - ): - super(ExportSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'ExportSettings' # type: str - - -class AzureDatabricksDeltaLakeExportCommand(ExportSettings): - """Azure Databricks Delta Lake export command settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The export setting type.Constant filled by server. - :type type: str - :param date_format: Specify the date format for the csv in Azure Databricks Delta Lake Copy. - Type: string (or Expression with resultType string). - :type date_format: object - :param timestamp_format: Specify the timestamp format for the csv in Azure Databricks Delta - Lake Copy. Type: string (or Expression with resultType string). 
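AzureDatabricksDeltaLakeDataset only requires the linked service reference besides the type discriminator; database and table are optional expressions. A hedged sketch in which LinkedServiceReference is assumed to accept reference_name as elsewhere in this SDK, with placeholder names throughout.

from azure.mgmt.datafactory.models import (
    AzureDatabricksDeltaLakeDataset,
    LinkedServiceReference,
)

delta_dataset = AzureDatabricksDeltaLakeDataset(
    # reference_name is assumed to be the reference's only required argument.
    linked_service_name=LinkedServiceReference(reference_name="AzureDatabricksDeltaLakeLS"),
    database="analytics",
    table="orders_delta",
)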
- :type timestamp_format: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'date_format': {'key': 'dateFormat', 'type': 'object'}, - 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDatabricksDeltaLakeExportCommand, self).__init__(**kwargs) - self.type = 'AzureDatabricksDeltaLakeExportCommand' # type: str - self.date_format = kwargs.get('date_format', None) - self.timestamp_format = kwargs.get('timestamp_format', None) - - -class ImportSettings(msrest.serialization.Model): - """Import command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureDatabricksDeltaLakeImportCommand, SnowflakeImportCopyCommand. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The import setting type.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'AzureDatabricksDeltaLakeImportCommand': 'AzureDatabricksDeltaLakeImportCommand', 'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} - } - - def __init__( - self, - **kwargs - ): - super(ImportSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'ImportSettings' # type: str - - -class AzureDatabricksDeltaLakeImportCommand(ImportSettings): - """Azure Databricks Delta Lake import command settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The import setting type.Constant filled by server. - :type type: str - :param date_format: Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: - string (or Expression with resultType string). - :type date_format: object - :param timestamp_format: Specify the timestamp format for csv in Azure Databricks Delta Lake - Copy. Type: string (or Expression with resultType string). - :type timestamp_format: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'date_format': {'key': 'dateFormat', 'type': 'object'}, - 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDatabricksDeltaLakeImportCommand, self).__init__(**kwargs) - self.type = 'AzureDatabricksDeltaLakeImportCommand' # type: str - self.date_format = kwargs.get('date_format', None) - self.timestamp_format = kwargs.get('timestamp_format', None) - - -class AzureDatabricksDeltaLakeLinkedService(LinkedService): - """Azure Databricks Delta Lake linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. 
- :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks - deployment. Type: string (or Expression with resultType string). - :type domain: object - :param access_token: Access token for databricks REST API. Refer to - https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param cluster_id: The id of an existing interactive cluster that will be used for all runs of - this job. Type: string (or Expression with resultType string). - :type cluster_id: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'domain': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'cluster_id': {'key': 'typeProperties.clusterId', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDatabricksDeltaLakeLinkedService, self).__init__(**kwargs) - self.type = 'AzureDatabricksDeltaLake' # type: str - self.domain = kwargs['domain'] - self.access_token = kwargs.get('access_token', None) - self.cluster_id = kwargs.get('cluster_id', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class AzureDatabricksDeltaLakeSink(CopySink): - """A copy activity Azure Databricks Delta Lake sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). 
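For the linked service removed above, domain is the only required type property. A minimal sketch with a placeholder workspace URL and a personal access token wrapped in SecureString.

from azure.mgmt.datafactory.models import (
    AzureDatabricksDeltaLakeLinkedService,
    SecureString,
)

delta_ls = AzureDatabricksDeltaLakeLinkedService(
    domain="https://adb-1234567890123456.7.azuredatabricks.net",  # placeholder workspace URL
    access_token=SecureString(value="<placeholder-pat>"),
    cluster_id="0000-000000-example000",                          # placeholder interactive cluster
)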
- :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType - string). - :type pre_copy_script: object - :param import_settings: Azure Databricks Delta Lake import settings. - :type import_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeImportCommand - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDatabricksDeltaLakeSink, self).__init__(**kwargs) - self.type = 'AzureDatabricksDeltaLakeSink' # type: str - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.import_settings = kwargs.get('import_settings', None) - - -class AzureDatabricksDeltaLakeSource(CopySource): - """A copy activity Azure Databricks Delta Lake source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with - resultType string). - :type query: object - :param export_settings: Azure Databricks Delta Lake export settings. 
- :type export_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeExportCommand - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDatabricksDeltaLakeSource, self).__init__(**kwargs) - self.type = 'AzureDatabricksDeltaLakeSource' # type: str - self.query = kwargs.get('query', None) - self.export_settings = kwargs.get('export_settings', None) - - -class AzureDatabricksLinkedService(LinkedService): - """Azure Databricks linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks - deployment. Type: string (or Expression with resultType string). - :type domain: object - :param access_token: Access token for databricks REST API. Refer to - https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression - with resultType string). - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param authentication: Required to specify MSI, if using Workspace resource id for databricks - REST API. Type: string (or Expression with resultType string). - :type authentication: object - :param workspace_resource_id: Workspace resource id for databricks REST API. Type: string (or - Expression with resultType string). - :type workspace_resource_id: object - :param existing_cluster_id: The id of an existing interactive cluster that will be used for all - runs of this activity. Type: string (or Expression with resultType string). - :type existing_cluster_id: object - :param instance_pool_id: The id of an existing instance pool that will be used for all runs of - this activity. Type: string (or Expression with resultType string). - :type instance_pool_id: object - :param new_cluster_version: If not using an existing interactive cluster, this specifies the - Spark version of a new job cluster or instance pool nodes created for each run of this - activity. Required if instancePoolId is specified. Type: string (or Expression with resultType - string). 
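The Delta Lake source and sink above plug the export/import command settings into a copy activity. A hedged pairing sketch; the SQL strings are placeholders, and both command objects carry only the csv date/timestamp formats documented earlier in this hunk.

from azure.mgmt.datafactory.models import (
    AzureDatabricksDeltaLakeExportCommand,
    AzureDatabricksDeltaLakeImportCommand,
    AzureDatabricksDeltaLakeSink,
    AzureDatabricksDeltaLakeSource,
)

source = AzureDatabricksDeltaLakeSource(
    query="SELECT * FROM analytics.orders_delta",                 # placeholder SQL
    export_settings=AzureDatabricksDeltaLakeExportCommand(date_format="yyyy-MM-dd"),
)
sink = AzureDatabricksDeltaLakeSink(
    pre_copy_script="TRUNCATE TABLE analytics.orders_delta",      # placeholder SQL
    import_settings=AzureDatabricksDeltaLakeImportCommand(timestamp_format="yyyy-MM-dd HH:mm:ss"),
)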
- :type new_cluster_version: object - :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies - the number of worker nodes to use for the new job cluster or instance pool. For new job - clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto- - scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can - only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is - specified. Type: string (or Expression with resultType string). - :type new_cluster_num_of_worker: object - :param new_cluster_node_type: The node type of the new job cluster. This property is required - if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is - specified, this property is ignored. Type: string (or Expression with resultType string). - :type new_cluster_node_type: object - :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value - pairs. - :type new_cluster_spark_conf: dict[str, object] - :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment - variables key-value pairs. - :type new_cluster_spark_env_vars: dict[str, object] - :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored - in instance pool configurations. - :type new_cluster_custom_tags: dict[str, object] - :param new_cluster_log_destination: Specify a location to deliver Spark driver, worker, and - event logs. Type: string (or Expression with resultType string). - :type new_cluster_log_destination: object - :param new_cluster_driver_node_type: The driver node type for the new job cluster. This - property is ignored in instance pool configurations. Type: string (or Expression with - resultType string). - :type new_cluster_driver_node_type: object - :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: - array of strings (or Expression with resultType array of strings). - :type new_cluster_init_scripts: object - :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This - property is now ignored, and takes the default elastic disk behavior in Databricks (elastic - disks are always enabled). Type: boolean (or Expression with resultType boolean). - :type new_cluster_enable_elastic_disk: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param policy_id: The policy id for limiting the ability to configure clusters based on a user - defined set of rules. Type: string (or Expression with resultType string). - :type policy_id: object - :param credential: The credential reference containing authentication information. 
- :type credential: ~azure.mgmt.datafactory.models.CredentialReference - """ - - _validation = { - 'type': {'required': True}, - 'domain': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, - 'workspace_resource_id': {'key': 'typeProperties.workspaceResourceId', 'type': 'object'}, - 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, - 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, - 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, - 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, - 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, - 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, - 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, - 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, - 'new_cluster_log_destination': {'key': 'typeProperties.newClusterLogDestination', 'type': 'object'}, - 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, - 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, - 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'policy_id': {'key': 'typeProperties.policyId', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDatabricksLinkedService, self).__init__(**kwargs) - self.type = 'AzureDatabricks' # type: str - self.domain = kwargs['domain'] - self.access_token = kwargs.get('access_token', None) - self.authentication = kwargs.get('authentication', None) - self.workspace_resource_id = kwargs.get('workspace_resource_id', None) - self.existing_cluster_id = kwargs.get('existing_cluster_id', None) - self.instance_pool_id = kwargs.get('instance_pool_id', None) - self.new_cluster_version = kwargs.get('new_cluster_version', None) - self.new_cluster_num_of_worker = kwargs.get('new_cluster_num_of_worker', None) - self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) - self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) - self.new_cluster_spark_env_vars = kwargs.get('new_cluster_spark_env_vars', None) - self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) - self.new_cluster_log_destination = kwargs.get('new_cluster_log_destination', None) - self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) - self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) - self.new_cluster_enable_elastic_disk = 
kwargs.get('new_cluster_enable_elastic_disk', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.policy_id = kwargs.get('policy_id', None) - self.credential = kwargs.get('credential', None) - - -class ExecutionActivity(Activity): - """Base class for all execution activities. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMlBatchExecutionActivity, AzureMlExecutePipelineActivity, AzureMlUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUsqlActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSsisPackageActivity, GetMetadataActivity, HdInsightHiveActivity, HdInsightMapReduceActivity, HdInsightPigActivity, HdInsightSparkActivity, HdInsightStreamingActivity, LookupActivity, SqlServerStoredProcedureActivity, WebActivity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. 
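The AzureDatabricksLinkedService docstring above spells out the new-cluster cross-dependencies: once newClusterVersion is set, newClusterNumOfWorker and newClusterNodeType are required as well. A sketch of a job-cluster configuration under those rules; every value is a placeholder.

from azure.mgmt.datafactory.models import AzureDatabricksLinkedService, SecureString

dbx_ls = AzureDatabricksLinkedService(
    domain="https://adb-1234567890123456.7.azuredatabricks.net",  # placeholder workspace URL
    access_token=SecureString(value="<placeholder-pat>"),
    new_cluster_version="9.1.x-scala2.12",
    new_cluster_num_of_worker="1:4",            # autoscale from 1 to 4 workers
    new_cluster_node_type="Standard_DS3_v2",
    new_cluster_spark_conf={"spark.speculation": "true"},
)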
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - } - - _subtype_map = { - 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMlBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMlExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMlUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUsqlActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSsisPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HdInsightHiveActivity', 'HDInsightMapReduce': 'HdInsightMapReduceActivity', 'HDInsightPig': 'HdInsightPigActivity', 'HDInsightSpark': 'HdInsightSparkActivity', 'HDInsightStreaming': 'HdInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} - } - - def __init__( - self, - **kwargs - ): - super(ExecutionActivity, self).__init__(**kwargs) - self.type = 'Execution' # type: str - self.linked_service_name = kwargs.get('linked_service_name', None) - self.policy = kwargs.get('policy', None) - - -class AzureDataExplorerCommandActivity(ExecutionActivity): - """Azure Data Explorer command activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param command: Required. A control command, according to the Azure Data Explorer command - syntax. Type: string (or Expression with resultType string). - :type command: object - :param command_timeout: Control command timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). 
- :type command_timeout: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'command': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDataExplorerCommandActivity, self).__init__(**kwargs) - self.type = 'AzureDataExplorerCommand' # type: str - self.command = kwargs['command'] - self.command_timeout = kwargs.get('command_timeout', None) - - -class AzureDataExplorerLinkedService(LinkedService): - """Azure Data Explorer (Kusto) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL - will be in the format https://:code:``.:code:``.kusto.windows.net. - Type: string (or Expression with resultType string). - :type endpoint: object - :param service_principal_id: The ID of the service principal used to authenticate against Azure - Data Explorer. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Kusto. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param database: Required. Database name for connection. Type: string (or Expression with - resultType string). - :type database: object - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param credential: The credential reference containing authentication information. 
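AzureDataExplorerCommandActivity requires an activity name and a control command; the timeout follows the timespan pattern quoted above. A minimal sketch, again assuming LinkedServiceReference(reference_name=...) and using placeholder names.

from azure.mgmt.datafactory.models import (
    AzureDataExplorerCommandActivity,
    LinkedServiceReference,
)

purge_activity = AzureDataExplorerCommandActivity(
    name="DropStagingTable",
    command=".drop table StagingOrders ifexists",   # placeholder Kusto control command
    command_timeout="00:20:00",
    linked_service_name=LinkedServiceReference(reference_name="AzureDataExplorerLS"),
)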
- :type credential: ~azure.mgmt.datafactory.models.CredentialReference - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDataExplorerLinkedService, self).__init__(**kwargs) - self.type = 'AzureDataExplorer' # type: str - self.endpoint = kwargs['endpoint'] - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.database = kwargs['database'] - self.tenant = kwargs.get('tenant', None) - self.credential = kwargs.get('credential', None) - - -class AzureDataExplorerSink(CopySink): - """A copy activity Azure Data Explorer sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the - target Kusto table. Type: string. - :type ingestion_mapping_name: object - :param ingestion_mapping_as_json: An explicit column mapping description provided in a json - format. Type: string. - :type ingestion_mapping_as_json: object - :param flush_immediately: If set to true, any aggregation will be skipped. Default is false. - Type: boolean. 
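Both endpoint and database are required on the Kusto linked service removed above. A minimal sketch with placeholder cluster, database and service-principal values.

from azure.mgmt.datafactory.models import AzureDataExplorerLinkedService, SecureString

adx_ls = AzureDataExplorerLinkedService(
    endpoint="https://examplecluster.westus2.kusto.windows.net",  # engine endpoint (placeholder)
    database="telemetry",
    service_principal_id="00000000-0000-0000-0000-000000000000",
    service_principal_key=SecureString(value="<placeholder-secret>"),
    tenant="contoso.onmicrosoft.com",
)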
- :type flush_immediately: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, - 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, - 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDataExplorerSink, self).__init__(**kwargs) - self.type = 'AzureDataExplorerSink' # type: str - self.ingestion_mapping_name = kwargs.get('ingestion_mapping_name', None) - self.ingestion_mapping_as_json = kwargs.get('ingestion_mapping_as_json', None) - self.flush_immediately = kwargs.get('flush_immediately', None) - - -class AzureDataExplorerSource(CopySource): - """A copy activity Azure Data Explorer (Kusto) source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type: - string (or Expression with resultType string). - :type query: object - :param no_truncation: The name of the Boolean option that controls whether truncation is - applied to result-sets that go beyond a certain row-count limit. - :type no_truncation: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - 'query': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDataExplorerSource, self).__init__(**kwargs) - self.type = 'AzureDataExplorerSource' # type: str - self.query = kwargs['query'] - self.no_truncation = kwargs.get('no_truncation', None) - self.query_timeout = kwargs.get('query_timeout', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class AzureDataExplorerTableDataset(Dataset): - """The Azure Data Explorer (Kusto) dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table: The table name of the Azure Data Explorer database. Type: string (or Expression - with resultType string). 
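# Illustrative sketch of the copy source/sink pair defined above: AzureDataExplorerSource
# requires a KQL `query`, while every AzureDataExplorerSink property is optional. The query
# text, mapping name, and timeout value are placeholder assumptions; the timeout string
# follows the documented ((\d+).)?(\d\d):(...) pattern.
from azure.mgmt.datafactory.models import AzureDataExplorerSink, AzureDataExplorerSource

adx_source = AzureDataExplorerSource(
    query="StormEvents | where StartTime > ago(1d) | take 100",
    query_timeout="00:10:00",
)
adx_sink = AzureDataExplorerSink(
    ingestion_mapping_name="storm_events_csv_mapping",  # pre-created mapping on the Kusto table
    flush_immediately=True,
)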
- :type table: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDataExplorerTableDataset, self).__init__(**kwargs) - self.type = 'AzureDataExplorerTable' # type: str - self.table = kwargs.get('table', None) - - -class AzureDataLakeAnalyticsLinkedService(LinkedService): - """Azure Data Lake Analytics linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param account_name: Required. The Azure Data Lake Analytics account name. Type: string (or - Expression with resultType string). - :type account_name: object - :param service_principal_id: The ID of the application used to authenticate against the Azure - Data Lake Analytics account. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to authenticate against the Azure - Data Lake Analytics account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. The name or ID of the tenant to which the service principal belongs. - Type: string (or Expression with resultType string). - :type tenant: object - :param subscription_id: Data Lake Analytics account subscription ID (if different from Data - Factory account). Type: string (or Expression with resultType string). - :type subscription_id: object - :param resource_group_name: Data Lake Analytics account resource group name (if different from - Data Factory account). Type: string (or Expression with resultType string). - :type resource_group_name: object - :param data_lake_analytics_uri: Azure Data Lake Analytics URI Type: string (or Expression with - resultType string). - :type data_lake_analytics_uri: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'tenant': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDataLakeAnalyticsLinkedService, self).__init__(**kwargs) - self.type = 'AzureDataLakeAnalytics' # type: str - self.account_name = kwargs['account_name'] - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs['tenant'] - self.subscription_id = kwargs.get('subscription_id', None) - self.resource_group_name = kwargs.get('resource_group_name', None) - self.data_lake_analytics_uri = kwargs.get('data_lake_analytics_uri', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class AzureDataLakeStoreDataset(Dataset): - """Azure Data Lake Store dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param folder_path: Path to the folder in the Azure Data Lake Store. Type: string (or - Expression with resultType string). - :type folder_path: object - :param file_name: The name of the file in the Azure Data Lake Store. 
Type: string (or - Expression with resultType string). - :type file_name: object - :param format: The format of the Data Lake Store. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the item(s) in the Azure Data Lake - Store. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDataLakeStoreDataset, self).__init__(**kwargs) - self.type = 'AzureDataLakeStoreFile' # type: str - self.folder_path = kwargs.get('folder_path', None) - self.file_name = kwargs.get('file_name', None) - self.format = kwargs.get('format', None) - self.compression = kwargs.get('compression', None) - - -class AzureDataLakeStoreLinkedService(LinkedService): - """Azure Data Lake Store linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param data_lake_store_uri: Required. Data Lake Store service URI. Type: string (or Expression - with resultType string). - :type data_lake_store_uri: object - :param service_principal_id: The ID of the application used to authenticate against the Azure - Data Lake Store account. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to authenticate against the Azure - Data Lake Store account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed - values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. 
Default value is the data - factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object - :param account_name: Data Lake Store account name. Type: string (or Expression with resultType - string). - :type account_name: object - :param subscription_id: Data Lake Store account subscription ID (if different from Data Factory - account). Type: string (or Expression with resultType string). - :type subscription_id: object - :param resource_group_name: Data Lake Store account resource group name (if different from Data - Factory account). Type: string (or Expression with resultType string). - :type resource_group_name: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference - """ - - _validation = { - 'type': {'required': True}, - 'data_lake_store_uri': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDataLakeStoreLinkedService, self).__init__(**kwargs) - self.type = 'AzureDataLakeStore' # type: str - self.data_lake_store_uri = kwargs['data_lake_store_uri'] - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.azure_cloud_type = kwargs.get('azure_cloud_type', None) - self.account_name = kwargs.get('account_name', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.resource_group_name = kwargs.get('resource_group_name', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.credential = kwargs.get('credential', None) - - -class AzureDataLakeStoreLocation(DatasetLocation): - """The location of azure data lake store dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. 
- :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType - string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDataLakeStoreLocation, self).__init__(**kwargs) - self.type = 'AzureDataLakeStoreLocation' # type: str - - -class AzureDataLakeStoreReadSettings(StoreReadSettings): - """Azure data lake store read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param recursive: If true, files under the folder path will be read recursively. Default is - true. Type: boolean (or Expression with resultType boolean). - :type recursive: object - :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or Expression with - resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: ADLS wildcardFileName. Type: string (or Expression with resultType - string). - :type wildcard_file_name: object - :param file_list_path: Point to a text file that lists each file (relative path to the path - configured in the dataset) that you want to copy. Type: string (or Expression with resultType - string). - :type file_list_path: object - :param list_after: Lists files after the value (exclusive) based on file/folder names’ - lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders - under the folderPath. Type: string (or Expression with resultType string). - :type list_after: object - :param list_before: Lists files before the value (inclusive) based on file/folder names’ - lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders - under the folderPath. Type: string (or Expression with resultType string). - :type list_before: object - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: - string (or Expression with resultType string). - :type partition_root_path: object - :param delete_files_after_completion: Indicates whether the source files need to be deleted - after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). 
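# Illustrative sketch combining the Azure Data Lake Store linked service and location models
# above: `data_lake_store_uri` is the only required type property on the linked service, and
# the location's folder/file fields are optional. The URI, tenant, GUID, folder, and file
# names here are placeholder assumptions.
from azure.mgmt.datafactory.models import (
    AzureDataLakeStoreLinkedService,
    AzureDataLakeStoreLocation,
)

adls_linked_service = AzureDataLakeStoreLinkedService(
    data_lake_store_uri="adl://myadls.azuredatalakestore.net/",
    tenant="contoso.onmicrosoft.com",                              # optional
    service_principal_id="00000000-0000-0000-0000-000000000000",  # optional
)
adls_location = AzureDataLakeStoreLocation(
    folder_path="raw/events",
    file_name="2022-10-18.json",
)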
- :type delete_files_after_completion: object - :param modified_datetime_start: The start of file's modified datetime. Type: string (or - Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression - with resultType string). - :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'list_after': {'key': 'listAfter', 'type': 'object'}, - 'list_before': {'key': 'listBefore', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDataLakeStoreReadSettings, self).__init__(**kwargs) - self.type = 'AzureDataLakeStoreReadSettings' # type: str - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.file_list_path = kwargs.get('file_list_path', None) - self.list_after = kwargs.get('list_after', None) - self.list_before = kwargs.get('list_before', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.partition_root_path = kwargs.get('partition_root_path', None) - self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - - -class AzureDataLakeStoreSink(CopySink): - """A copy activity Azure Data Lake Store sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param enable_adls_single_file_parallel: Single File Parallel. - :type enable_adls_single_file_parallel: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDataLakeStoreSink, self).__init__(**kwargs) - self.type = 'AzureDataLakeStoreSink' # type: str - self.copy_behavior = kwargs.get('copy_behavior', None) - self.enable_adls_single_file_parallel = kwargs.get('enable_adls_single_file_parallel', None) - - -class AzureDataLakeStoreSource(CopySource): - """A copy activity Azure Data Lake source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param recursive: If true, files under the folder path will be read recursively. Default is - true. Type: boolean (or Expression with resultType boolean). 
- :type recursive: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDataLakeStoreSource, self).__init__(**kwargs) - self.type = 'AzureDataLakeStoreSource' # type: str - self.recursive = kwargs.get('recursive', None) - - -class AzureDataLakeStoreWriteSettings(StoreWriteSettings): - """Azure data lake store write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param expiry_date_time: Specifies the expiry time of the written files. The time is applied to - the UTC time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. Type: integer - (or Expression with resultType integer). - :type expiry_date_time: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'expiry_date_time': {'key': 'expiryDateTime', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDataLakeStoreWriteSettings, self).__init__(**kwargs) - self.type = 'AzureDataLakeStoreWriteSettings' # type: str - self.expiry_date_time = kwargs.get('expiry_date_time', None) - - -class AzureFileStorageLinkedService(LinkedService): - """Azure File Storage linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Host name of the server. Type: string (or Expression with resultType string). - :type host: object - :param user_id: User ID to logon the server. Type: string (or Expression with resultType - string). - :type user_id: object - :param password: Password to logon the server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param connection_string: The connection string. It is mutually exclusive with sasUri property. - Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure File resource. It is mutually exclusive with - connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param file_share: The azure file share name. It is required when auth with - accountKey/sasToken. Type: string (or Expression with resultType string). - :type file_share: object - :param snapshot: The azure file share snapshot version. Type: string (or Expression with - resultType string). - :type snapshot: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'file_share': {'key': 'typeProperties.fileShare', 'type': 'object'}, - 'snapshot': {'key': 'typeProperties.snapshot', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureFileStorageLinkedService, self).__init__(**kwargs) - self.type = 'AzureFileStorage' # type: str - self.host = kwargs.get('host', None) - self.user_id = kwargs.get('user_id', None) - self.password = kwargs.get('password', None) - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.sas_uri = kwargs.get('sas_uri', None) - self.sas_token = kwargs.get('sas_token', None) - self.file_share = kwargs.get('file_share', None) - self.snapshot = kwargs.get('snapshot', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class AzureFileStorageLocation(DatasetLocation): - """The location of file server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType - string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureFileStorageLocation, self).__init__(**kwargs) - self.type = 'AzureFileStorageLocation' # type: str - - -class AzureFileStorageReadSettings(StoreReadSettings): - """Azure File Storage read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. 
- :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param recursive: If true, files under the folder path will be read recursively. Default is - true. Type: boolean (or Expression with resultType boolean). - :type recursive: object - :param wildcard_folder_path: Azure File Storage wildcardFolderPath. Type: string (or Expression - with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression - with resultType string). - :type wildcard_file_name: object - :param prefix: The prefix filter for the Azure File name starting from root path. Type: string - (or Expression with resultType string). - :type prefix: object - :param file_list_path: Point to a text file that lists each file (relative path to the path - configured in the dataset) that you want to copy. Type: string (or Expression with resultType - string). - :type file_list_path: object - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: - string (or Expression with resultType string). - :type partition_root_path: object - :param delete_files_after_completion: Indicates whether the source files need to be deleted - after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object - :param modified_datetime_start: The start of file's modified datetime. Type: string (or - Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression - with resultType string). 
- :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'prefix': {'key': 'prefix', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureFileStorageReadSettings, self).__init__(**kwargs) - self.type = 'AzureFileStorageReadSettings' # type: str - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.prefix = kwargs.get('prefix', None) - self.file_list_path = kwargs.get('file_list_path', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.partition_root_path = kwargs.get('partition_root_path', None) - self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - - -class AzureFileStorageWriteSettings(StoreWriteSettings): - """Azure File Storage write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param copy_behavior: The type of copy behavior for copy sink. 
- :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureFileStorageWriteSettings, self).__init__(**kwargs) - self.type = 'AzureFileStorageWriteSettings' # type: str - - -class AzureFunctionActivity(ExecutionActivity): - """Azure Function activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param method: Required. Rest API method for target endpoint. Possible values include: "GET", - "POST", "PUT", "DELETE", "OPTIONS", "HEAD", "TRACE". - :type method: str or ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod - :param function_name: Required. Name of the Function that the Azure Function Activity will - call. Type: string (or Expression with resultType string). - :type function_name: object - :param headers: Represents the headers that will be sent to the request. For example, to set - the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": - "application/json" }. Type: string (or Expression with resultType string). - :type headers: object - :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT - method, not allowed for GET method Type: string (or Expression with resultType string). 
- :type body: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'method': {'required': True}, - 'function_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'method': {'key': 'typeProperties.method', 'type': 'str'}, - 'function_name': {'key': 'typeProperties.functionName', 'type': 'object'}, - 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, - 'body': {'key': 'typeProperties.body', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureFunctionActivity, self).__init__(**kwargs) - self.type = 'AzureFunctionActivity' # type: str - self.method = kwargs['method'] - self.function_name = kwargs['function_name'] - self.headers = kwargs.get('headers', None) - self.body = kwargs.get('body', None) - - -class AzureFunctionLinkedService(LinkedService): - """Azure Function linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param function_app_url: Required. The endpoint of the Azure Function App. URL will be in the - format https://:code:``.azurewebsites.net. - :type function_app_url: object - :param function_key: Function or Host key for Azure Function App. - :type function_key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference - :param resource_id: Allowed token audiences for azure function. - :type resource_id: object - :param authentication: Type of authentication (Required to specify MSI) used to connect to - AzureFunction. Type: string (or Expression with resultType string). 
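# Illustrative sketch of the Azure Function activity defined above: `name`, `method`, and
# `function_name` are marked required in its validation map, and `body` applies to POST/PUT
# calls. The activity name, function name, and payload are placeholder assumptions.
from azure.mgmt.datafactory.models import AzureFunctionActivity

call_function = AzureFunctionActivity(
    name="NotifyOnCompletion",
    method="POST",
    function_name="HttpTrigger1",
    body='{"status": "pipeline finished"}',
)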
- :type authentication: object - """ - - _validation = { - 'type': {'required': True}, - 'function_app_url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, - 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, - 'resource_id': {'key': 'typeProperties.resourceId', 'type': 'object'}, - 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureFunctionLinkedService, self).__init__(**kwargs) - self.type = 'AzureFunction' # type: str - self.function_app_url = kwargs['function_app_url'] - self.function_key = kwargs.get('function_key', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.credential = kwargs.get('credential', None) - self.resource_id = kwargs.get('resource_id', None) - self.authentication = kwargs.get('authentication', None) - - -class AzureKeyVaultLinkedService(LinkedService): - """Azure Key Vault linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param base_url: Required. The base URL of the Azure Key Vault. e.g. - https://myakv.vault.azure.net Type: string (or Expression with resultType string). - :type base_url: object - :param credential: The credential reference containing authentication information. 
- :type credential: ~azure.mgmt.datafactory.models.CredentialReference - """ - - _validation = { - 'type': {'required': True}, - 'base_url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureKeyVaultLinkedService, self).__init__(**kwargs) - self.type = 'AzureKeyVault' # type: str - self.base_url = kwargs['base_url'] - self.credential = kwargs.get('credential', None) - - -class SecretBase(msrest.serialization.Model): - """The base definition of a secret type. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureKeyVaultSecretReference, SecureString. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Type of the secret.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference', 'SecureString': 'SecureString'} - } - - def __init__( - self, - **kwargs - ): - super(SecretBase, self).__init__(**kwargs) - self.type = None # type: Optional[str] - - -class AzureKeyVaultSecretReference(SecretBase): - """Azure Key Vault secret reference. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Type of the secret.Constant filled by server. - :type type: str - :param store: Required. The Azure Key Vault linked service reference. - :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param secret_name: Required. The name of the secret in Azure Key Vault. Type: string (or - Expression with resultType string). - :type secret_name: object - :param secret_version: The version of the secret in Azure Key Vault. The default value is the - latest version of the secret. Type: string (or Expression with resultType string). - :type secret_version: object - """ - - _validation = { - 'type': {'required': True}, - 'store': {'required': True}, - 'secret_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, - 'secret_name': {'key': 'secretName', 'type': 'object'}, - 'secret_version': {'key': 'secretVersion', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureKeyVaultSecretReference, self).__init__(**kwargs) - self.type = 'AzureKeyVaultSecret' # type: str - self.store = kwargs['store'] - self.secret_name = kwargs['secret_name'] - self.secret_version = kwargs.get('secret_version', None) - - -class AzureMariaDbLinkedService(LinkedService): - """Azure Database for MariaDB linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. 
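# Illustrative sketch of referencing a Key Vault secret from the models above:
# AzureKeyVaultLinkedService needs only `base_url`, and AzureKeyVaultSecretReference (a
# SecretBase subtype) requires `store` and `secret_name`. LinkedServiceReference is assumed
# to be importable from the same models module (depending on the SDK version it may also
# take type="LinkedServiceReference"); the vault URL, linked-service name, and secret name
# are placeholders.
from azure.mgmt.datafactory.models import (
    AzureKeyVaultLinkedService,
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
)

key_vault_ls = AzureKeyVaultLinkedService(base_url="https://myakv.vault.azure.net")
sp_key_secret = AzureKeyVaultSecretReference(
    store=LinkedServiceReference(reference_name="AzureKeyVaultLinkedService1"),
    secret_name="adls-service-principal-key",
)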
Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMariaDbLinkedService, self).__init__(**kwargs) - self.type = 'AzureMariaDB' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class AzureMariaDbSource(TabularSource): - """A copy activity Azure MariaDB source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMariaDbSource, self).__init__(**kwargs) - self.type = 'AzureMariaDBSource' # type: str - self.query = kwargs.get('query', None) - - -class AzureMariaDbTableDataset(Dataset): - """Azure Database for MariaDB dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMariaDbTableDataset, self).__init__(**kwargs) - self.type = 'AzureMariaDBTable' # type: str - self.table_name = kwargs.get('table_name', None) - - -class AzureMlBatchExecutionActivity(ExecutionActivity): - """Azure ML Batch Execution activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param global_parameters: Key,Value pairs to be passed to the Azure ML Batch Execution Service - endpoint. Keys must match the names of web service parameters defined in the published Azure ML - web service. Values will be passed in the GlobalParameters property of the Azure ML batch - execution request. - :type global_parameters: dict[str, object] - :param web_service_outputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web - Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This - information will be passed in the WebServiceOutputs property of the Azure ML batch execution - request. - :type web_service_outputs: dict[str, ~azure.mgmt.datafactory.models.AzureMlWebServiceFile] - :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web - Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations.. This - information will be passed in the WebServiceInputs property of the Azure ML batch execution - request. 
- :type web_service_inputs: dict[str, ~azure.mgmt.datafactory.models.AzureMlWebServiceFile] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'}, - 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMlWebServiceFile}'}, - 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMlWebServiceFile}'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMlBatchExecutionActivity, self).__init__(**kwargs) - self.type = 'AzureMLBatchExecution' # type: str - self.global_parameters = kwargs.get('global_parameters', None) - self.web_service_outputs = kwargs.get('web_service_outputs', None) - self.web_service_inputs = kwargs.get('web_service_inputs', None) - - -class AzureMlExecutePipelineActivity(ExecutionActivity): - """Azure ML Execute Pipeline activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param ml_pipeline_id: ID of the published Azure ML pipeline. Type: string (or Expression with - resultType string). - :type ml_pipeline_id: object - :param ml_pipeline_endpoint_id: ID of the published Azure ML pipeline endpoint. Type: string - (or Expression with resultType string). - :type ml_pipeline_endpoint_id: object - :param version: Version of the published Azure ML pipeline endpoint. Type: string (or - Expression with resultType string). - :type version: object - :param experiment_name: Run history experiment name of the pipeline run. This information will - be passed in the ExperimentName property of the published pipeline execution request. Type: - string (or Expression with resultType string). - :type experiment_name: object - :param ml_pipeline_parameters: Key,Value pairs to be passed to the published Azure ML pipeline - endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. - Values will be passed in the ParameterAssignments property of the published pipeline execution - request. Type: object with key value pairs (or Expression with resultType object). 
- :type ml_pipeline_parameters: object - :param data_path_assignments: Dictionary used for changing data path assignments without - retraining. Values will be passed in the dataPathAssignments property of the published pipeline - execution request. Type: object with key value pairs (or Expression with resultType object). - :type data_path_assignments: object - :param ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be - passed in the ParentRunId property of the published pipeline execution request. Type: string - (or Expression with resultType string). - :type ml_parent_run_id: object - :param continue_on_step_failure: Whether to continue execution of other steps in the - PipelineRun if a step fails. This information will be passed in the continueOnStepFailure - property of the published pipeline execution request. Type: boolean (or Expression with - resultType boolean). - :type continue_on_step_failure: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'ml_pipeline_id': {'key': 'typeProperties.mlPipelineId', 'type': 'object'}, - 'ml_pipeline_endpoint_id': {'key': 'typeProperties.mlPipelineEndpointId', 'type': 'object'}, - 'version': {'key': 'typeProperties.version', 'type': 'object'}, - 'experiment_name': {'key': 'typeProperties.experimentName', 'type': 'object'}, - 'ml_pipeline_parameters': {'key': 'typeProperties.mlPipelineParameters', 'type': 'object'}, - 'data_path_assignments': {'key': 'typeProperties.dataPathAssignments', 'type': 'object'}, - 'ml_parent_run_id': {'key': 'typeProperties.mlParentRunId', 'type': 'object'}, - 'continue_on_step_failure': {'key': 'typeProperties.continueOnStepFailure', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMlExecutePipelineActivity, self).__init__(**kwargs) - self.type = 'AzureMLExecutePipeline' # type: str - self.ml_pipeline_id = kwargs.get('ml_pipeline_id', None) - self.ml_pipeline_endpoint_id = kwargs.get('ml_pipeline_endpoint_id', None) - self.version = kwargs.get('version', None) - self.experiment_name = kwargs.get('experiment_name', None) - self.ml_pipeline_parameters = kwargs.get('ml_pipeline_parameters', None) - self.data_path_assignments = kwargs.get('data_path_assignments', None) - self.ml_parent_run_id = kwargs.get('ml_parent_run_id', None) - self.continue_on_step_failure = kwargs.get('continue_on_step_failure', None) - - -class AzureMlLinkedService(LinkedService): - """Azure ML Studio Web Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service - endpoint. Type: string (or Expression with resultType string). - :type ml_endpoint: object - :param api_key: Required. The API key for accessing the Azure ML model endpoint. - :type api_key: ~azure.mgmt.datafactory.models.SecretBase - :param update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web - Service endpoint. Type: string (or Expression with resultType string). - :type update_resource_endpoint: object - :param service_principal_id: The ID of the service principal used to authenticate against the - ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression - with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against the - ARM-based updateResourceEndpoint of an Azure ML Studio web service. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param authentication: Type of authentication (Required to specify MSI) used to connect to - AzureML. Type: string (or Expression with resultType string). 
- :type authentication: object - """ - - _validation = { - 'type': {'required': True}, - 'ml_endpoint': {'required': True}, - 'api_key': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, - 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, - 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMlLinkedService, self).__init__(**kwargs) - self.type = 'AzureML' # type: str - self.ml_endpoint = kwargs['ml_endpoint'] - self.api_key = kwargs['api_key'] - self.update_resource_endpoint = kwargs.get('update_resource_endpoint', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.authentication = kwargs.get('authentication', None) - - -class AzureMlServiceLinkedService(LinkedService): - """Azure ML Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param subscription_id: Required. Azure ML Service workspace subscription ID. Type: string (or - Expression with resultType string). - :type subscription_id: object - :param resource_group_name: Required. Azure ML Service workspace resource group name. Type: - string (or Expression with resultType string). - :type resource_group_name: object - :param ml_workspace_name: Required. Azure ML Service workspace name. Type: string (or - Expression with resultType string). - :type ml_workspace_name: object - :param service_principal_id: The ID of the service principal used to authenticate against the - endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType - string). 
- :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against the - endpoint of a published Azure ML Service pipeline. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'subscription_id': {'required': True}, - 'resource_group_name': {'required': True}, - 'ml_workspace_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'ml_workspace_name': {'key': 'typeProperties.mlWorkspaceName', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMlServiceLinkedService, self).__init__(**kwargs) - self.type = 'AzureMLService' # type: str - self.subscription_id = kwargs['subscription_id'] - self.resource_group_name = kwargs['resource_group_name'] - self.ml_workspace_name = kwargs['ml_workspace_name'] - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class AzureMlUpdateResourceActivity(ExecutionActivity): - """Azure ML Update Resource management activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param trained_model_name: Required. 
Name of the Trained Model module in the Web Service - experiment to be updated. Type: string (or Expression with resultType string). - :type trained_model_name: object - :param trained_model_linked_service_name: Required. Name of Azure Storage linked service - holding the .ilearner file that will be uploaded by the update operation. - :type trained_model_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param trained_model_file_path: Required. The relative file path in trainedModelLinkedService - to represent the .ilearner file that will be uploaded by the update operation. Type: string - (or Expression with resultType string). - :type trained_model_file_path: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'trained_model_name': {'required': True}, - 'trained_model_linked_service_name': {'required': True}, - 'trained_model_file_path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'}, - 'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMlUpdateResourceActivity, self).__init__(**kwargs) - self.type = 'AzureMLUpdateResource' # type: str - self.trained_model_name = kwargs['trained_model_name'] - self.trained_model_linked_service_name = kwargs['trained_model_linked_service_name'] - self.trained_model_file_path = kwargs['trained_model_file_path'] - - -class AzureMlWebServiceFile(msrest.serialization.Model): - """Azure ML WebService Input/Output file. - - All required parameters must be populated in order to send to Azure. - - :param file_path: Required. The relative file path, including container name, in the Azure Blob - Storage specified by the LinkedService. Type: string (or Expression with resultType string). - :type file_path: object - :param linked_service_name: Required. Reference to an Azure Storage LinkedService, where Azure - ML WebService Input/Output file located. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - """ - - _validation = { - 'file_path': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'file_path': {'key': 'filePath', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMlWebServiceFile, self).__init__(**kwargs) - self.file_path = kwargs['file_path'] - self.linked_service_name = kwargs['linked_service_name'] - - -class AzureMySqlLinkedService(LinkedService): - """Azure MySQL database linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. 
- :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMySqlLinkedService, self).__init__(**kwargs) - self.type = 'AzureMySql' # type: str - self.connection_string = kwargs['connection_string'] - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class AzureMySqlSink(CopySink): - """A copy activity Azure MySql sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param pre_copy_script: A query to execute before starting the copy. Type: string (or - Expression with resultType string). - :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMySqlSink, self).__init__(**kwargs) - self.type = 'AzureMySqlSink' # type: str - self.pre_copy_script = kwargs.get('pre_copy_script', None) - - -class AzureMySqlSource(TabularSource): - """A copy activity Azure MySQL source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: Database query. Type: string (or Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMySqlSource, self).__init__(**kwargs) - self.type = 'AzureMySqlSource' # type: str - self.query = kwargs.get('query', None) - - -class AzureMySqlTableDataset(Dataset): - """The Azure MySQL database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The Azure MySQL database table name. Type: string (or Expression with - resultType string). - :type table_name: object - :param table: The name of Azure MySQL database table. Type: string (or Expression with - resultType string). 
- :type table: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMySqlTableDataset, self).__init__(**kwargs) - self.type = 'AzureMySqlTable' # type: str - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - - -class AzurePostgreSqlLinkedService(LinkedService): - """Azure PostgreSQL linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzurePostgreSqlLinkedService, self).__init__(**kwargs) - self.type = 'AzurePostgreSql' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class AzurePostgreSqlSink(CopySink): - """A copy activity Azure PostgreSQL sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param pre_copy_script: A query to execute before starting the copy. Type: string (or - Expression with resultType string). 
- :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzurePostgreSqlSink, self).__init__(**kwargs) - self.type = 'AzurePostgreSqlSink' # type: str - self.pre_copy_script = kwargs.get('pre_copy_script', None) - - -class AzurePostgreSqlSource(TabularSource): - """A copy activity Azure PostgreSQL source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzurePostgreSqlSource, self).__init__(**kwargs) - self.type = 'AzurePostgreSqlSource' # type: str - self.query = kwargs.get('query', None) - - -class AzurePostgreSqlTableDataset(Dataset): - """Azure PostgreSQL dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name of the Azure PostgreSQL database which includes both schema - and table. Type: string (or Expression with resultType string). - :type table_name: object - :param table: The table name of the Azure PostgreSQL database. Type: string (or Expression with - resultType string). - :type table: object - :param schema_type_properties_schema: The schema name of the Azure PostgreSQL database. Type: - string (or Expression with resultType string). 
- :type schema_type_properties_schema: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzurePostgreSqlTableDataset, self).__init__(**kwargs) - self.type = 'AzurePostgreSqlTable' # type: str - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - - -class AzureQueueSink(CopySink): - """A copy activity Azure Queue sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureQueueSink, self).__init__(**kwargs) - self.type = 'AzureQueueSink' # type: str - - -class AzureSearchIndexDataset(Dataset): - """The Azure Search Index. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param index_name: Required. The name of the Azure Search Index. Type: string (or Expression - with resultType string). - :type index_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'index_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureSearchIndexDataset, self).__init__(**kwargs) - self.type = 'AzureSearchIndex' # type: str - self.index_name = kwargs['index_name'] - - -class AzureSearchIndexSink(CopySink): - """A copy activity Azure Search Index sink. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param write_behavior: Specify the write behavior when upserting documents into Azure Search - Index. Possible values include: "Merge", "Upload". - :type write_behavior: str or ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureSearchIndexSink, self).__init__(**kwargs) - self.type = 'AzureSearchIndexSink' # type: str - self.write_behavior = kwargs.get('write_behavior', None) - - -class AzureSearchLinkedService(LinkedService): - """Linked service for Windows Azure Search Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. URL for Azure Search service. Type: string (or Expression with resultType - string). - :type url: object - :param key: Admin Key for Azure Search service. 
- :type key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'key': {'key': 'typeProperties.key', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureSearchLinkedService, self).__init__(**kwargs) - self.type = 'AzureSearch' # type: str - self.url = kwargs['url'] - self.key = kwargs.get('key', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class AzureSqlDatabaseLinkedService(LinkedService): - """Microsoft Azure SQL Database linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to authenticate against Azure - SQL Database. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Azure SQL Database. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed - values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data - factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. 
Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param always_encrypted_settings: Sql always encrypted properties. - :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureSqlDatabaseLinkedService, self).__init__(**kwargs) - self.type = 'AzureSqlDatabase' # type: str - self.connection_string = kwargs['connection_string'] - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.azure_cloud_type = kwargs.get('azure_cloud_type', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.always_encrypted_settings = kwargs.get('always_encrypted_settings', None) - self.credential = kwargs.get('credential', None) - - -class AzureSqlDwLinkedService(LinkedService): - """Azure SQL Data Warehouse linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. 
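# --- Illustrative usage sketch (aside, not part of the patch hunk above) ---------
# Shows how the kwargs-based AzureSqlDatabaseLinkedService defined earlier might be
# constructed. The description text and connection string are hypothetical, and
# SecureString is assumed to be the sibling model used for inline secret values.
from azure.mgmt.datafactory.models import AzureSqlDatabaseLinkedService, SecureString

sql_db_ls = AzureSqlDatabaseLinkedService(
    connection_string=SecureString(
        value="Server=tcp:myserver.database.windows.net;Database=mydb;"
    ),
    description="Example Azure SQL Database linked service",
)
# The constructor fills the constant, so sql_db_ls.type == 'AzureSqlDatabase'.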
- :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to authenticate against Azure - SQL Data Warehouse. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Azure SQL Data Warehouse. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed - values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data - factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureSqlDwLinkedService, self).__init__(**kwargs) - self.type = 'AzureSqlDW' # type: str - self.connection_string = kwargs['connection_string'] - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.azure_cloud_type = kwargs.get('azure_cloud_type', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.credential = kwargs.get('credential', None) - - -class AzureSqlDwTableDataset(Dataset): - """The Azure SQL Data Warehouse dataset. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The schema name of the Azure SQL Data Warehouse. Type: - string (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of the Azure SQL Data Warehouse. Type: string (or Expression with - resultType string). - :type table: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureSqlDwTableDataset, self).__init__(**kwargs) - self.type = 'AzureSqlDWTable' # type: str - self.table_name = kwargs.get('table_name', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - self.table = kwargs.get('table', None) - - -class AzureSqlMiLinkedService(LinkedService): - """Azure SQL Managed Instance linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
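# --- Illustrative usage sketch (aside, not part of the patch hunk above) ---------
# A minimal AzureSqlDwTableDataset as defined earlier. LinkedServiceReference with a
# reference_name keyword is assumed from this SDK's model set; the linked-service,
# schema, and table names are hypothetical.
from azure.mgmt.datafactory.models import AzureSqlDwTableDataset, LinkedServiceReference

dw_dataset = AzureSqlDwTableDataset(
    linked_service_name=LinkedServiceReference(reference_name="AzureSqlDwLinkedService"),
    schema_type_properties_schema="dbo",  # serialized as typeProperties.schema
    table="FactSales",                    # serialized as typeProperties.table
)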
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to authenticate against Azure - SQL Managed Instance. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Azure SQL Managed Instance. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed - values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data - factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param always_encrypted_settings: Sql always encrypted properties. - :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties - :param credential: The credential reference containing authentication information. 
- :type credential: ~azure.mgmt.datafactory.models.CredentialReference - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureSqlMiLinkedService, self).__init__(**kwargs) - self.type = 'AzureSqlMI' # type: str - self.connection_string = kwargs['connection_string'] - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.azure_cloud_type = kwargs.get('azure_cloud_type', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.always_encrypted_settings = kwargs.get('always_encrypted_settings', None) - self.credential = kwargs.get('credential', None) - - -class AzureSqlMiTableDataset(Dataset): - """The Azure SQL Managed Instance dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. 
Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The schema name of the Azure SQL Managed Instance. Type: - string (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of the Azure SQL Managed Instance dataset. Type: string (or - Expression with resultType string). - :type table: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureSqlMiTableDataset, self).__init__(**kwargs) - self.type = 'AzureSqlMITable' # type: str - self.table_name = kwargs.get('table_name', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - self.table = kwargs.get('table', None) - - -class AzureSqlSink(CopySink): - """A copy activity Azure SQL sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or - Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with - resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType - string). 
- :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the - table type. Type: string (or Expression with resultType string). - :type stored_procedure_table_type_parameter_name: object - :param table_option: The option to handle sink table, such as autoCreate. For now only - 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :type table_option: object - :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or - Expression with resultType boolean). - :type sql_writer_use_table_lock: object - :param write_behavior: Write behavior when copying data into Azure SQL. Type: - SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). - :type write_behavior: object - :param upsert_settings: SQL upsert settings. - :type upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, - 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureSqlSink, self).__init__(**kwargs) - self.type = 'AzureSqlSink' # type: str - self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) - self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) - self.table_option = kwargs.get('table_option', None) - self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None) - self.write_behavior = kwargs.get('write_behavior', None) - self.upsert_settings = kwargs.get('upsert_settings', None) - - -class AzureSqlSource(TabularSource): - """A copy activity Azure SQL source. - - All required parameters must be populated in order to send to Azure. 
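# --- Illustrative usage sketch (aside, not part of the patch hunk above) ---------
# A simple AzureSqlSink as it could appear inside a copy activity, using only keyword
# arguments listed in the class above; the SQL text is hypothetical.
from azure.mgmt.datafactory.models import AzureSqlSink

sql_sink = AzureSqlSink(
    pre_copy_script="TRUNCATE TABLE dbo.StagingOrders",  # serialized as preCopyScript
    table_option="autoCreate",   # per the docstring, the only supported value for now
    sql_writer_use_table_lock=True,
)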
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database - source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression - with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored procedure parameters. - Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: object - :param partition_option: The partition mechanism that will be used for Sql read in parallel. - Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: object - :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
- :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureSqlSource, self).__init__(**kwargs) - self.type = 'AzureSqlSource' # type: str - self.sql_reader_query = kwargs.get('sql_reader_query', None) - self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.produce_additional_types = kwargs.get('produce_additional_types', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) - - -class AzureSqlTableDataset(Dataset): - """The Azure SQL Server database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The schema name of the Azure SQL database. Type: string - (or Expression with resultType string). 
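# --- Illustrative usage sketch (aside, not part of the patch hunk above) ---------
# An AzureSqlSource reading with an explicit query and parallel partitioning. The
# query text is hypothetical; plain strings are passed because these properties are
# typed as object (string or Expression) in the model above.
from azure.mgmt.datafactory.models import AzureSqlSource

sql_source = AzureSqlSource(
    sql_reader_query="SELECT OrderId, Amount FROM dbo.Orders",
    partition_option="PhysicalPartitionsOfTable",
    query_timeout="02:00:00",  # matches the timespan pattern in the docstring
)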
- :type schema_type_properties_schema: object - :param table: The table name of the Azure SQL database. Type: string (or Expression with - resultType string). - :type table: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureSqlTableDataset, self).__init__(**kwargs) - self.type = 'AzureSqlTable' # type: str - self.table_name = kwargs.get('table_name', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - self.table = kwargs.get('table', None) - - -class AzureStorageLinkedService(LinkedService): - """The storage account linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: The connection string. It is mutually exclusive with sasUri property. - Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with - connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureStorageLinkedService, self).__init__(**kwargs) - self.type = 'AzureStorage' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.sas_uri = kwargs.get('sas_uri', None) - self.sas_token = kwargs.get('sas_token', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class AzureTableDataset(Dataset): - """The Azure Table storage dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: Required. The table name of the Azure Table storage. Type: string (or - Expression with resultType string). 
- :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'table_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureTableDataset, self).__init__(**kwargs) - self.type = 'AzureTable' # type: str - self.table_name = kwargs['table_name'] - - -class AzureTableSink(CopySink): - """A copy activity Azure Table sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param azure_table_default_partition_key_value: Azure Table default partition key value. Type: - string (or Expression with resultType string). - :type azure_table_default_partition_key_value: object - :param azure_table_partition_key_name: Azure Table partition key name. Type: string (or - Expression with resultType string). - :type azure_table_partition_key_name: object - :param azure_table_row_key_name: Azure Table row key name. Type: string (or Expression with - resultType string). - :type azure_table_row_key_name: object - :param azure_table_insert_type: Azure Table insert type. Type: string (or Expression with - resultType string). 
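# --- Illustrative usage sketch (aside, not part of the patch hunk above) ---------
# AzureTableDataset requires table_name. The linked-service name is hypothetical and
# LinkedServiceReference(reference_name=...) is assumed from this SDK's model set.
from azure.mgmt.datafactory.models import AzureTableDataset, LinkedServiceReference

table_dataset = AzureTableDataset(
    linked_service_name=LinkedServiceReference(reference_name="AzureTableStorageLinkedService"),
    table_name="telemetryevents",
)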
- :type azure_table_insert_type: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, - 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, - 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, - 'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureTableSink, self).__init__(**kwargs) - self.type = 'AzureTableSink' # type: str - self.azure_table_default_partition_key_value = kwargs.get('azure_table_default_partition_key_value', None) - self.azure_table_partition_key_name = kwargs.get('azure_table_partition_key_name', None) - self.azure_table_row_key_name = kwargs.get('azure_table_row_key_name', None) - self.azure_table_insert_type = kwargs.get('azure_table_insert_type', None) - - -class AzureTableSource(TabularSource): - """A copy activity Azure Table source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param azure_table_source_query: Azure Table source query. Type: string (or Expression with - resultType string). - :type azure_table_source_query: object - :param azure_table_source_ignore_table_not_found: Azure Table source ignore table not found. - Type: boolean (or Expression with resultType boolean). 
- :type azure_table_source_ignore_table_not_found: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, - 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureTableSource, self).__init__(**kwargs) - self.type = 'AzureTableSource' # type: str - self.azure_table_source_query = kwargs.get('azure_table_source_query', None) - self.azure_table_source_ignore_table_not_found = kwargs.get('azure_table_source_ignore_table_not_found', None) - - -class AzureTableStorageLinkedService(LinkedService): - """The azure table storage linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: The connection string. It is mutually exclusive with sasUri property. - Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with - connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureTableStorageLinkedService, self).__init__(**kwargs) - self.type = 'AzureTableStorage' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.sas_uri = kwargs.get('sas_uri', None) - self.sas_token = kwargs.get('sas_token', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class BinaryDataset(Dataset): - """Binary dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the Binary storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param compression: The data compression method used for the binary dataset. 
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__( - self, - **kwargs - ): - super(BinaryDataset, self).__init__(**kwargs) - self.type = 'Binary' # type: str - self.location = kwargs.get('location', None) - self.compression = kwargs.get('compression', None) - - -class FormatReadSettings(msrest.serialization.Model): - """Format read settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BinaryReadSettings, DelimitedTextReadSettings, JsonReadSettings, XmlReadSettings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'BinaryReadSettings': 'BinaryReadSettings', 'DelimitedTextReadSettings': 'DelimitedTextReadSettings', 'JsonReadSettings': 'JsonReadSettings', 'XmlReadSettings': 'XmlReadSettings'} - } - - def __init__( - self, - **kwargs - ): - super(FormatReadSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'FormatReadSettings' # type: str - - -class BinaryReadSettings(FormatReadSettings): - """Binary read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param compression_properties: Compression settings. - :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(BinaryReadSettings, self).__init__(**kwargs) - self.type = 'BinaryReadSettings' # type: str - self.compression_properties = kwargs.get('compression_properties', None) - - -class BinarySink(CopySink): - """A copy activity Binary sink. - - All required parameters must be populated in order to send to Azure. 
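# --- Illustrative usage sketch (aside, not part of the patch hunk above) ---------
# A BinaryDataset pointing at a blob location, paired with BinaryReadSettings for the
# read side. AzureBlobStorageLocation and ZipDeflateReadSettings are assumed sibling
# models in this SDK; the linked-service, container, and file names are hypothetical.
from azure.mgmt.datafactory.models import (
    AzureBlobStorageLocation,
    BinaryDataset,
    BinaryReadSettings,
    LinkedServiceReference,
    ZipDeflateReadSettings,
)

binary_dataset = BinaryDataset(
    linked_service_name=LinkedServiceReference(reference_name="BlobStorageLinkedService"),
    location=AzureBlobStorageLocation(container="inbound", file_name="payload.zip"),
)
read_settings = BinaryReadSettings(
    compression_properties=ZipDeflateReadSettings()  # decompress the zip while reading
)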
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param store_settings: Binary store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(BinarySink, self).__init__(**kwargs) - self.type = 'BinarySink' # type: str - self.store_settings = kwargs.get('store_settings', None) - - -class BinarySource(CopySource): - """A copy activity Binary source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param store_settings: Binary store settings. 
- :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param format_settings: Binary format settings. - :type format_settings: ~azure.mgmt.datafactory.models.BinaryReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'BinaryReadSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(BinarySource, self).__init__(**kwargs) - self.type = 'BinarySource' # type: str - self.store_settings = kwargs.get('store_settings', None) - self.format_settings = kwargs.get('format_settings', None) - - -class Trigger(msrest.serialization.Model): - """Azure data factory nested object which contains information about creating pipeline run. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ChainingTrigger, MultiplePipelineTrigger, RerunTumblingWindowTrigger, TumblingWindowTrigger. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] - """ - - _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - } - - _subtype_map = { - 'type': {'ChainingTrigger': 'ChainingTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger', 'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger'} - } - - def __init__( - self, - **kwargs - ): - super(Trigger, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'Trigger' # type: str - self.description = kwargs.get('description', None) - self.runtime_state = None - self.annotations = kwargs.get('annotations', None) - - -class MultiplePipelineTrigger(Trigger): - """Base class for all triggers that support one to many model for trigger to pipeline. - - You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: BlobEventsTrigger, BlobTrigger, CustomEventsTrigger, ScheduleTrigger. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] - :param pipelines: Pipelines that need to be started. - :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - """ - - _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - } - - _subtype_map = { - 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'CustomEventsTrigger': 'CustomEventsTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} - } - - def __init__( - self, - **kwargs - ): - super(MultiplePipelineTrigger, self).__init__(**kwargs) - self.type = 'MultiplePipelineTrigger' # type: str - self.pipelines = kwargs.get('pipelines', None) - - -class BlobEventsTrigger(MultiplePipelineTrigger): - """Trigger that runs every time a Blob event occurs. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] - :param pipelines: Pipelines that need to be started. - :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param blob_path_begins_with: The blob path must begin with the pattern provided for trigger to - fire. For example, '/records/blobs/december/' will only fire the trigger for blobs in the - december folder under the records container. At least one of these must be provided: - blobPathBeginsWith, blobPathEndsWith. - :type blob_path_begins_with: str - :param blob_path_ends_with: The blob path must end with the pattern provided for trigger to - fire. 
For example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a - december folder. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. - :type blob_path_ends_with: str - :param ignore_empty_blobs: If set to true, blobs with zero bytes will be ignored. - :type ignore_empty_blobs: bool - :param events: Required. The type of events that cause this trigger to fire. - :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] - :param scope: Required. The ARM resource ID of the Storage Account. - :type scope: str - """ - - _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, - 'events': {'required': True}, - 'scope': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, - 'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'}, - 'ignore_empty_blobs': {'key': 'typeProperties.ignoreEmptyBlobs', 'type': 'bool'}, - 'events': {'key': 'typeProperties.events', 'type': '[str]'}, - 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(BlobEventsTrigger, self).__init__(**kwargs) - self.type = 'BlobEventsTrigger' # type: str - self.blob_path_begins_with = kwargs.get('blob_path_begins_with', None) - self.blob_path_ends_with = kwargs.get('blob_path_ends_with', None) - self.ignore_empty_blobs = kwargs.get('ignore_empty_blobs', None) - self.events = kwargs['events'] - self.scope = kwargs['scope'] - - -class BlobSink(CopySink): - """A copy activity Azure Blob sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression - with resultType boolean). 
- :type blob_writer_overwrite_files: object - :param blob_writer_date_time_format: Blob writer date time format. Type: string (or Expression - with resultType string). - :type blob_writer_date_time_format: object - :param blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with - resultType boolean). - :type blob_writer_add_header: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects - (or Expression with resultType array of objects). - :type metadata: list[~azure.mgmt.datafactory.models.MetadataItem] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, - 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, - 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'}, - } - - def __init__( - self, - **kwargs - ): - super(BlobSink, self).__init__(**kwargs) - self.type = 'BlobSink' # type: str - self.blob_writer_overwrite_files = kwargs.get('blob_writer_overwrite_files', None) - self.blob_writer_date_time_format = kwargs.get('blob_writer_date_time_format', None) - self.blob_writer_add_header = kwargs.get('blob_writer_add_header', None) - self.copy_behavior = kwargs.get('copy_behavior', None) - self.metadata = kwargs.get('metadata', None) - - -class BlobSource(CopySource): - """A copy activity Azure Blob source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType - boolean). - :type treat_empty_as_null: object - :param skip_header_line_count: Number of header lines to skip from each blob. 
Type: integer (or - Expression with resultType integer). - :type skip_header_line_count: object - :param recursive: If true, files under the folder path will be read recursively. Default is - true. Type: boolean (or Expression with resultType boolean). - :type recursive: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, - 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(BlobSource, self).__init__(**kwargs) - self.type = 'BlobSource' # type: str - self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) - self.skip_header_line_count = kwargs.get('skip_header_line_count', None) - self.recursive = kwargs.get('recursive', None) - - -class BlobTrigger(MultiplePipelineTrigger): - """Trigger that runs every time the selected Blob container changes. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] - :param pipelines: Pipelines that need to be started. - :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param folder_path: Required. The path of the container/folder that will trigger the pipeline. - :type folder_path: str - :param max_concurrency: Required. The max number of parallel files to handle when it is - triggered. - :type max_concurrency: int - :param linked_service: Required. The Azure Storage linked service reference. 
- :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - """ - - _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, - 'folder_path': {'required': True}, - 'max_concurrency': {'required': True}, - 'linked_service': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, - 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, - } - - def __init__( - self, - **kwargs - ): - super(BlobTrigger, self).__init__(**kwargs) - self.type = 'BlobTrigger' # type: str - self.folder_path = kwargs['folder_path'] - self.max_concurrency = kwargs['max_concurrency'] - self.linked_service = kwargs['linked_service'] - - -class CassandraLinkedService(LinkedService): - """Linked service for Cassandra data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. Host name for connection. Type: string (or Expression with resultType - string). - :type host: object - :param authentication_type: AuthenticationType to be used for connection. Type: string (or - Expression with resultType string). - :type authentication_type: object - :param port: The port for the connection. Type: integer (or Expression with resultType - integer). - :type port: object - :param username: Username for authentication. Type: string (or Expression with resultType - string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(CassandraLinkedService, self).__init__(**kwargs) - self.type = 'Cassandra' # type: str - self.host = kwargs['host'] - self.authentication_type = kwargs.get('authentication_type', None) - self.port = kwargs.get('port', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class CassandraSource(TabularSource): - """A copy activity source for a Cassandra database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language - (CQL) command. Type: string (or Expression with resultType string). - :type query: object - :param consistency_level: The consistency level specifies how many Cassandra servers must - respond to a read request before returning data to the client application. Cassandra checks the - specified number of Cassandra servers for data to satisfy the read request. Must be one of - cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. 
- Possible values include: "ALL", "EACH_QUORUM", "QUORUM", "LOCAL_QUORUM", "ONE", "TWO", "THREE", - "LOCAL_ONE", "SERIAL", "LOCAL_SERIAL". - :type consistency_level: str or - ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(CassandraSource, self).__init__(**kwargs) - self.type = 'CassandraSource' # type: str - self.query = kwargs.get('query', None) - self.consistency_level = kwargs.get('consistency_level', None) - - -class CassandraTableDataset(Dataset): - """The Cassandra database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name of the Cassandra database. Type: string (or Expression with - resultType string). - :type table_name: object - :param keyspace: The keyspace of the Cassandra database. Type: string (or Expression with - resultType string). 
- :type keyspace: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(CassandraTableDataset, self).__init__(**kwargs) - self.type = 'CassandraTable' # type: str - self.table_name = kwargs.get('table_name', None) - self.keyspace = kwargs.get('keyspace', None) - - -class ChainingTrigger(Trigger): - """Trigger that allows the referenced pipeline to depend on other pipeline runs based on runDimension Name/Value pairs. Upstream pipelines should declare the same runDimension Name and their runs should have the values for those runDimensions. The referenced pipeline run would be triggered if the values for the runDimension match for all upstream pipeline runs. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] - :param pipeline: Required. Pipeline for which runs are created when all upstream pipelines - complete successfully. - :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference - :param depends_on: Required. Upstream Pipelines. - :type depends_on: list[~azure.mgmt.datafactory.models.PipelineReference] - :param run_dimension: Required. Run Dimension property that needs to be emitted by upstream - pipelines. 
- :type run_dimension: str - """ - - _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, - 'pipeline': {'required': True}, - 'depends_on': {'required': True}, - 'run_dimension': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, - 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[PipelineReference]'}, - 'run_dimension': {'key': 'typeProperties.runDimension', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ChainingTrigger, self).__init__(**kwargs) - self.type = 'ChainingTrigger' # type: str - self.pipeline = kwargs['pipeline'] - self.depends_on = kwargs['depends_on'] - self.run_dimension = kwargs['run_dimension'] - - -class CloudError(msrest.serialization.Model): - """The object that defines the structure of an Azure Data Factory error response. - - All required parameters must be populated in order to send to Azure. - - :param code: Required. Error code. - :type code: str - :param message: Required. Error message. - :type message: str - :param target: Property name/path in request associated with error. - :type target: str - :param details: Array with additional error details. - :type details: list[~azure.mgmt.datafactory.models.CloudError] - """ - - _validation = { - 'code': {'required': True}, - 'message': {'required': True}, - } - - _attribute_map = { - 'code': {'key': 'error.code', 'type': 'str'}, - 'message': {'key': 'error.message', 'type': 'str'}, - 'target': {'key': 'error.target', 'type': 'str'}, - 'details': {'key': 'error.details', 'type': '[CloudError]'}, - } - - def __init__( - self, - **kwargs - ): - super(CloudError, self).__init__(**kwargs) - self.code = kwargs['code'] - self.message = kwargs['message'] - self.target = kwargs.get('target', None) - self.details = kwargs.get('details', None) - - -class CmdkeySetup(CustomSetupBase): - """The custom setup of running cmdkey commands. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. The type of custom setup.Constant filled by server. - :type type: str - :param target_name: Required. The server name of data source access. - :type target_name: object - :param user_name: Required. The user name of data source access. - :type user_name: object - :param password: Required. The password of data source access. - :type password: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'type': {'required': True}, - 'target_name': {'required': True}, - 'user_name': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'target_name': {'key': 'typeProperties.targetName', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - } - - def __init__( - self, - **kwargs - ): - super(CmdkeySetup, self).__init__(**kwargs) - self.type = 'CmdkeySetup' # type: str - self.target_name = kwargs['target_name'] - self.user_name = kwargs['user_name'] - self.password = kwargs['password'] - - -class CmkIdentityDefinition(msrest.serialization.Model): - """Managed Identity used for CMK. 
- - :param user_assigned_identity: The resource id of the user assigned identity to authenticate to - customer's key vault. - :type user_assigned_identity: str - """ - - _attribute_map = { - 'user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(CmkIdentityDefinition, self).__init__(**kwargs) - self.user_assigned_identity = kwargs.get('user_assigned_identity', None) - - -class CommonDataServiceForAppsEntityDataset(Dataset): - """The Common Data Service for Apps entity dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param entity_name: The logical name of the entity. Type: string (or Expression with resultType - string). - :type entity_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(CommonDataServiceForAppsEntityDataset, self).__init__(**kwargs) - self.type = 'CommonDataServiceForAppsEntity' # type: str - self.entity_name = kwargs.get('entity_name', None) - - -class CommonDataServiceForAppsLinkedService(LinkedService): - """Common Data Service for Apps linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. 
- :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param deployment_type: Required. The deployment type of the Common Data Service for Apps - instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common - Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType - string). - :type deployment_type: object - :param host_name: The host name of the on-premises Common Data Service for Apps server. The - property is required for on-prem and not allowed for online. Type: string (or Expression with - resultType string). - :type host_name: object - :param port: The port of on-premises Common Data Service for Apps server. The property is - required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression - with resultType integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Common Data Service for Apps server. The property - is required for on-line and not allowed for on-prem. Type: string (or Expression with - resultType string). - :type service_uri: object - :param organization_name: The organization name of the Common Data Service for Apps instance. - The property is required for on-prem and required for online when there are more than one - Common Data Service for Apps instances associated with the user. Type: string (or Expression - with resultType string). - :type organization_name: object - :param authentication_type: Required. The authentication type to connect to Common Data Service - for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. - 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). - :type authentication_type: object - :param username: User name to access the Common Data Service for Apps instance. Type: string - (or Expression with resultType string). - :type username: object - :param password: Password to access the Common Data Service for Apps instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_credential_type: The service principal credential type to use in - Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). - :type service_principal_credential_type: object - :param service_principal_credential: The credential of the service principal object in Azure - Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', - servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If - servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only - be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, - 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(CommonDataServiceForAppsLinkedService, self).__init__(**kwargs) - self.type = 'CommonDataServiceForApps' # type: str - self.deployment_type = kwargs['deployment_type'] - self.host_name = kwargs.get('host_name', None) - self.port = kwargs.get('port', None) - self.service_uri = kwargs.get('service_uri', None) - self.organization_name = kwargs.get('organization_name', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_credential_type = kwargs.get('service_principal_credential_type', None) - self.service_principal_credential = kwargs.get('service_principal_credential', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class CommonDataServiceForAppsSink(CopySink): - """A copy activity Common Data Service for Apps sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). 
- :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param write_behavior: Required. The write behavior for the operation. Possible values include: - "Upsert". - :type write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior - :param ignore_null_values: The flag indicating whether to ignore null values from input dataset - (except key fields) during write operation. Default is false. Type: boolean (or Expression with - resultType boolean). - :type ignore_null_values: object - :param alternate_key_name: The logical name of the alternate key which will be used when - upserting records. Type: string (or Expression with resultType string). - :type alternate_key_name: object - """ - - _validation = { - 'type': {'required': True}, - 'write_behavior': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(CommonDataServiceForAppsSink, self).__init__(**kwargs) - self.type = 'CommonDataServiceForAppsSink' # type: str - self.write_behavior = kwargs['write_behavior'] - self.ignore_null_values = kwargs.get('ignore_null_values', None) - self.alternate_key_name = kwargs.get('alternate_key_name', None) - - -class CommonDataServiceForAppsSource(CopySource): - """A copy activity Common Data Service for Apps source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query: FetchXML is a proprietary query language that is used in Microsoft Common Data - Service for Apps (online & on-premises). Type: string (or Expression with resultType string). - :type query: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(CommonDataServiceForAppsSource, self).__init__(**kwargs) - self.type = 'CommonDataServiceForAppsSource' # type: str - self.query = kwargs.get('query', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class ComponentSetup(CustomSetupBase): - """The custom setup of installing 3rd party components. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. The type of custom setup.Constant filled by server. - :type type: str - :param component_name: Required. The name of the 3rd party component. - :type component_name: str - :param license_key: The license key to activate the component. - :type license_key: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'type': {'required': True}, - 'component_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'component_name': {'key': 'typeProperties.componentName', 'type': 'str'}, - 'license_key': {'key': 'typeProperties.licenseKey', 'type': 'SecretBase'}, - } - - def __init__( - self, - **kwargs - ): - super(ComponentSetup, self).__init__(**kwargs) - self.type = 'ComponentSetup' # type: str - self.component_name = kwargs['component_name'] - self.license_key = kwargs.get('license_key', None) - - -class CompressionReadSettings(msrest.serialization.Model): - """Compression read settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: TarGZipReadSettings, TarReadSettings, ZipDeflateReadSettings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The Compression setting type.Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'TarGZipReadSettings': 'TarGZipReadSettings', 'TarReadSettings': 'TarReadSettings', 'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} - } - - def __init__( - self, - **kwargs - ): - super(CompressionReadSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'CompressionReadSettings' # type: str - - -class ConcurLinkedService(LinkedService): - """Concur Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_properties: Properties used to connect to Concur. It is mutually exclusive - with any other properties in the linked service. Type: object. - :type connection_properties: object - :param client_id: Required. Application client_id supplied by Concur App Management. - :type client_id: object - :param username: Required. The user name that you use to access Concur Service. - :type username: object - :param password: The password corresponding to the user name that you provided in the username - field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'client_id': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ConcurLinkedService, self).__init__(**kwargs) - self.type = 'Concur' # type: str - self.connection_properties = kwargs.get('connection_properties', None) - self.client_id = kwargs['client_id'] - self.username = kwargs['username'] - self.password = kwargs.get('password', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class ConcurObjectDataset(Dataset): - """Concur Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ConcurObjectDataset, self).__init__(**kwargs) - self.type = 'ConcurObject' # type: str - self.table_name = kwargs.get('table_name', None) - - -class ConcurSource(TabularSource): - """A copy activity Concur Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ConcurSource, self).__init__(**kwargs) - self.type = 'ConcurSource' # type: str - self.query = kwargs.get('query', None) - - -class ConnectionStateProperties(msrest.serialization.Model): - """The connection state of a managed private endpoint. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar actions_required: The actions required on the managed private endpoint. - :vartype actions_required: str - :ivar description: The managed private endpoint description. - :vartype description: str - :ivar status: The approval status. - :vartype status: str - """ - - _validation = { - 'actions_required': {'readonly': True}, - 'description': {'readonly': True}, - 'status': {'readonly': True}, - } - - _attribute_map = { - 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ConnectionStateProperties, self).__init__(**kwargs) - self.actions_required = None - self.description = None - self.status = None - - -class ControlActivity(Activity): - """Base class for all control activities like IfCondition, ForEach , Until. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - } - - def __init__( - self, - **kwargs - ): - super(ControlActivity, self).__init__(**kwargs) - self.type = 'Container' # type: str - - -class CopyActivity(ExecutionActivity): - """Copy activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. 
- :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param inputs: List of inputs for the activity. - :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] - :param outputs: List of outputs for the activity. - :type outputs: list[~azure.mgmt.datafactory.models.DatasetReference] - :param source: Required. Copy activity source. - :type source: ~azure.mgmt.datafactory.models.CopySource - :param sink: Required. Copy activity sink. - :type sink: ~azure.mgmt.datafactory.models.CopySink - :param translator: Copy activity translator. If not specified, tabular translator is used. - :type translator: object - :param enable_staging: Specifies whether to copy data via an interim staging. Default value is - false. Type: boolean (or Expression with resultType boolean). - :type enable_staging: object - :param staging_settings: Specifies interim staging settings when EnableStaging is true. - :type staging_settings: ~azure.mgmt.datafactory.models.StagingSettings - :param parallel_copies: Maximum number of concurrent sessions opened on the source or sink to - avoid overloading the data store. Type: integer (or Expression with resultType integer), - minimum: 0. - :type parallel_copies: object - :param data_integration_units: Maximum number of data integration units that can be used to - perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. - :type data_integration_units: object - :param enable_skip_incompatible_row: Whether to skip incompatible row. Default value is false. - Type: boolean (or Expression with resultType boolean). - :type enable_skip_incompatible_row: object - :param redirect_incompatible_row_settings: Redirect incompatible row settings when - EnableSkipIncompatibleRow is true. - :type redirect_incompatible_row_settings: - ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings - :param log_storage_settings: (Deprecated. Please use LogSettings) Log storage settings customer - need to provide when enabling session log. - :type log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings - :param log_settings: Log settings customer needs provide when enabling log. - :type log_settings: ~azure.mgmt.datafactory.models.LogSettings - :param preserve_rules: Preserve Rules. - :type preserve_rules: list[object] - :param preserve: Preserve rules. - :type preserve: list[object] - :param validate_data_consistency: Whether to enable Data Consistency validation. Type: boolean - (or Expression with resultType boolean). - :type validate_data_consistency: object - :param skip_error_file: Specify the fault tolerance for data consistency. 
- :type skip_error_file: ~azure.mgmt.datafactory.models.SkipErrorFile - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'source': {'required': True}, - 'sink': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, - 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, - 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, - 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, - 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, - 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, - 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, - 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, - 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, - 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, - 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, - 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, - 'log_settings': {'key': 'typeProperties.logSettings', 'type': 'LogSettings'}, - 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, - 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, - 'validate_data_consistency': {'key': 'typeProperties.validateDataConsistency', 'type': 'object'}, - 'skip_error_file': {'key': 'typeProperties.skipErrorFile', 'type': 'SkipErrorFile'}, - } - - def __init__( - self, - **kwargs - ): - super(CopyActivity, self).__init__(**kwargs) - self.type = 'Copy' # type: str - self.inputs = kwargs.get('inputs', None) - self.outputs = kwargs.get('outputs', None) - self.source = kwargs['source'] - self.sink = kwargs['sink'] - self.translator = kwargs.get('translator', None) - self.enable_staging = kwargs.get('enable_staging', None) - self.staging_settings = kwargs.get('staging_settings', None) - self.parallel_copies = kwargs.get('parallel_copies', None) - self.data_integration_units = kwargs.get('data_integration_units', None) - self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) - self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) - self.log_storage_settings = kwargs.get('log_storage_settings', None) - self.log_settings = kwargs.get('log_settings', None) - self.preserve_rules = kwargs.get('preserve_rules', None) - self.preserve = kwargs.get('preserve', None) - self.validate_data_consistency = kwargs.get('validate_data_consistency', None) - self.skip_error_file = kwargs.get('skip_error_file', None) - - -class CopyActivityLogSettings(msrest.serialization.Model): - """Settings for copy activity log. - - :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or - Expression with resultType string). 
- :type log_level: object - :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or - Expression with resultType boolean). - :type enable_reliable_logging: object - """ - - _attribute_map = { - 'log_level': {'key': 'logLevel', 'type': 'object'}, - 'enable_reliable_logging': {'key': 'enableReliableLogging', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(CopyActivityLogSettings, self).__init__(**kwargs) - self.log_level = kwargs.get('log_level', None) - self.enable_reliable_logging = kwargs.get('enable_reliable_logging', None) - - -class CopyTranslator(msrest.serialization.Model): - """A copy activity translator. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: TabularTranslator. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy translator type.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'TabularTranslator': 'TabularTranslator'} - } - - def __init__( - self, - **kwargs - ): - super(CopyTranslator, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'CopyTranslator' # type: str - - -class CosmosDbLinkedService(LinkedService): - """Microsoft Azure Cosmos Database (CosmosDB) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param account_endpoint: The endpoint of the Azure CosmosDB account. Type: string (or - Expression with resultType string). - :type account_endpoint: object - :param database: The name of the database. Type: string (or Expression with resultType string). - :type database: object - :param account_key: The account key of the Azure CosmosDB account. Type: SecureString or - AzureKeyVaultSecretReference. - :type account_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_credential_type: The service principal credential type to use in - Server-To-Server authentication. 
'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.mgmt.datafactory.models.CosmosDbServicePrincipalCredentialType - :param service_principal_credential: The credential of the service principal object in Azure - Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', - servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If - servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only - be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed - values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data - factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object - :param connection_mode: The connection mode used to access CosmosDB account. Type: string (or - Expression with resultType string). Possible values include: "Gateway", "Direct". - :type connection_mode: str or ~azure.mgmt.datafactory.models.CosmosDbConnectionMode - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_endpoint': {'key': 'typeProperties.accountEndpoint', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, - 'connection_mode': {'key': 'typeProperties.connectionMode', 'type': 'str'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(CosmosDbLinkedService, self).__init__(**kwargs) - self.type = 'CosmosDb' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.account_endpoint = kwargs.get('account_endpoint', None) - self.database = kwargs.get('database', None) - self.account_key = kwargs.get('account_key', None) 
- self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_credential_type = kwargs.get('service_principal_credential_type', None) - self.service_principal_credential = kwargs.get('service_principal_credential', None) - self.tenant = kwargs.get('tenant', None) - self.azure_cloud_type = kwargs.get('azure_cloud_type', None) - self.connection_mode = kwargs.get('connection_mode', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class CosmosDbMongoDbApiCollectionDataset(Dataset): - """The CosmosDB (MongoDB API) database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param collection: Required. The collection name of the CosmosDB (MongoDB API) database. Type: - string (or Expression with resultType string). - :type collection: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(CosmosDbMongoDbApiCollectionDataset, self).__init__(**kwargs) - self.type = 'CosmosDbMongoDbApiCollection' # type: str - self.collection = kwargs['collection'] - - -class CosmosDbMongoDbApiLinkedService(LinkedService): - """Linked service for CosmosDB (MongoDB API) data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. 
- :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param is_server_version_above32: Whether the CosmosDB (MongoDB API) server version is higher - than 3.2. The default value is false. Type: boolean (or Expression with resultType boolean). - :type is_server_version_above32: object - :param connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param database: Required. The name of the CosmosDB (MongoDB API) database that you want to - access. Type: string (or Expression with resultType string). - :type database: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'is_server_version_above32': {'key': 'typeProperties.isServerVersionAbove32', 'type': 'object'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs) - self.type = 'CosmosDbMongoDbApi' # type: str - self.is_server_version_above32 = kwargs.get('is_server_version_above32', None) - self.connection_string = kwargs['connection_string'] - self.database = kwargs['database'] - - -class CosmosDbMongoDbApiSink(CopySink): - """A copy activity sink for a CosmosDB (MongoDB API) database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param write_behavior: Specifies whether the document with same key to be overwritten (upsert) - rather than throw exception (insert). The default value is "insert". Type: string (or - Expression with resultType string). Type: string (or Expression with resultType string). - :type write_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(CosmosDbMongoDbApiSink, self).__init__(**kwargs) - self.type = 'CosmosDbMongoDbApiSink' # type: str - self.write_behavior = kwargs.get('write_behavior', None) - - -class CosmosDbMongoDbApiSource(CopySource): - """A copy activity source for a CosmosDB (MongoDB API) database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param filter: Specifies selection filter using query operators. To return all documents in a - collection, omit this parameter or pass an empty document ({}). Type: string (or Expression - with resultType string). - :type filter: object - :param cursor_methods: Cursor methods for Mongodb query. - :type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties - :param batch_size: Specifies the number of documents to return in each batch of the response - from MongoDB instance. In most cases, modifying the batch size will not affect the user or the - application. This property's main purpose is to avoid hit the limitation of response size. - Type: integer (or Expression with resultType integer). - :type batch_size: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'filter': {'key': 'filter', 'type': 'object'}, - 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, - 'batch_size': {'key': 'batchSize', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(CosmosDbMongoDbApiSource, self).__init__(**kwargs) - self.type = 'CosmosDbMongoDbApiSource' # type: str - self.filter = kwargs.get('filter', None) - self.cursor_methods = kwargs.get('cursor_methods', None) - self.batch_size = kwargs.get('batch_size', None) - self.query_timeout = kwargs.get('query_timeout', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class CosmosDbSqlApiCollectionDataset(Dataset): - """Microsoft Azure CosmosDB (SQL API) Collection dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or - Expression with resultType string). 
- :type collection_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(CosmosDbSqlApiCollectionDataset, self).__init__(**kwargs) - self.type = 'CosmosDbSqlApiCollection' # type: str - self.collection_name = kwargs['collection_name'] - - -class CosmosDbSqlApiSink(CopySink): - """A copy activity Azure CosmosDB (SQL API) Collection sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or - Expression with resultType string). Allowed values: insert and upsert. 
- :type write_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(CosmosDbSqlApiSink, self).__init__(**kwargs) - self.type = 'CosmosDbSqlApiSink' # type: str - self.write_behavior = kwargs.get('write_behavior', None) - - -class CosmosDbSqlApiSource(CopySource): - """A copy activity Azure CosmosDB (SQL API) Collection source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query: SQL API query. Type: string (or Expression with resultType string). - :type query: object - :param page_size: Page size of the result. Type: integer (or Expression with resultType - integer). - :type page_size: object - :param preferred_regions: Preferred regions. Type: array of strings (or Expression with - resultType array of strings). - :type preferred_regions: object - :param detect_datetime: Whether detect primitive values as datetime values. Type: boolean (or - Expression with resultType boolean). - :type detect_datetime: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'page_size': {'key': 'pageSize', 'type': 'object'}, - 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, - 'detect_datetime': {'key': 'detectDatetime', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(CosmosDbSqlApiSource, self).__init__(**kwargs) - self.type = 'CosmosDbSqlApiSource' # type: str - self.query = kwargs.get('query', None) - self.page_size = kwargs.get('page_size', None) - self.preferred_regions = kwargs.get('preferred_regions', None) - self.detect_datetime = kwargs.get('detect_datetime', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class CouchbaseLinkedService(LinkedService): - """Couchbase server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param cred_string: The Azure key vault secret reference of credString in connection string. - :type cred_string: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(CouchbaseLinkedService, self).__init__(**kwargs) - self.type = 'Couchbase' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.cred_string = kwargs.get('cred_string', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class CouchbaseSource(TabularSource): - """A copy activity Couchbase server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(CouchbaseSource, self).__init__(**kwargs) - self.type = 'CouchbaseSource' # type: str - self.query = kwargs.get('query', None) - - -class CouchbaseTableDataset(Dataset): - """Couchbase server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(CouchbaseTableDataset, self).__init__(**kwargs) - self.type = 'CouchbaseTable' # type: str - self.table_name = kwargs.get('table_name', None) - - -class CreateDataFlowDebugSessionRequest(msrest.serialization.Model): - """Request body structure for creating data flow debug session. - - :param compute_type: Compute type of the cluster. 
The value will be overwritten by the same - setting in integration runtime if provided. - :type compute_type: str - :param core_count: Core count of the cluster. The value will be overwritten by the same setting - in integration runtime if provided. - :type core_count: int - :param time_to_live: Time to live setting of the cluster in minutes. - :type time_to_live: int - :param integration_runtime: Set to use integration runtime setting for data flow debug session. - :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeDebugResource - """ - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'core_count': {'key': 'coreCount', 'type': 'int'}, - 'time_to_live': {'key': 'timeToLive', 'type': 'int'}, - 'integration_runtime': {'key': 'integrationRuntime', 'type': 'IntegrationRuntimeDebugResource'}, - } - - def __init__( - self, - **kwargs - ): - super(CreateDataFlowDebugSessionRequest, self).__init__(**kwargs) - self.compute_type = kwargs.get('compute_type', None) - self.core_count = kwargs.get('core_count', None) - self.time_to_live = kwargs.get('time_to_live', None) - self.integration_runtime = kwargs.get('integration_runtime', None) - - -class CreateDataFlowDebugSessionResponse(msrest.serialization.Model): - """Response body structure for creating data flow debug session. - - :param status: The state of the debug session. - :type status: str - :param session_id: The ID of data flow debug session. - :type session_id: str - """ - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'session_id': {'key': 'sessionId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(CreateDataFlowDebugSessionResponse, self).__init__(**kwargs) - self.status = kwargs.get('status', None) - self.session_id = kwargs.get('session_id', None) - - -class CreateLinkedIntegrationRuntimeRequest(msrest.serialization.Model): - """The linked integration runtime information. - - :param name: The name of the linked integration runtime. - :type name: str - :param subscription_id: The ID of the subscription that the linked integration runtime belongs - to. - :type subscription_id: str - :param data_factory_name: The name of the data factory that the linked integration runtime - belongs to. - :type data_factory_name: str - :param data_factory_location: The location of the data factory that the linked integration - runtime belongs to. - :type data_factory_location: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(CreateLinkedIntegrationRuntimeRequest, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.data_factory_name = kwargs.get('data_factory_name', None) - self.data_factory_location = kwargs.get('data_factory_location', None) - - -class CreateRunResponse(msrest.serialization.Model): - """Response body with a run identifier. - - All required parameters must be populated in order to send to Azure. - - :param run_id: Required. Identifier of a run. 
- :type run_id: str - """ - - _validation = { - 'run_id': {'required': True}, - } - - _attribute_map = { - 'run_id': {'key': 'runId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(CreateRunResponse, self).__init__(**kwargs) - self.run_id = kwargs['run_id'] - - -class Credential(msrest.serialization.Model): - """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ManagedIdentityCredential, ServicePrincipalCredential. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of credential.Constant filled by server. - :type type: str - :param description: Credential description. - :type description: str - :param annotations: List of tags that can be used for describing the Credential. - :type annotations: list[object] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - } - - _subtype_map = { - 'type': {'ManagedIdentity': 'ManagedIdentityCredential', 'ServicePrincipal': 'ServicePrincipalCredential'} - } - - def __init__( - self, - **kwargs - ): - super(Credential, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'Credential' # type: str - self.description = kwargs.get('description', None) - self.annotations = kwargs.get('annotations', None) - - -class CredentialReference(msrest.serialization.Model): - """Credential reference type. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :ivar type: Required. Credential reference type. Default value: "CredentialReference". - :vartype type: str - :param reference_name: Required. Reference credential name. - :type reference_name: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - } - - type = "CredentialReference" - - def __init__( - self, - **kwargs - ): - super(CredentialReference, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.reference_name = kwargs['reference_name'] - - -class SubResource(msrest.serialization.Model): - """Azure Data Factory nested resource, which belongs to a factory. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. 
- :vartype etag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SubResource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.etag = None - - -class CredentialResource(SubResource): - """Credential resource type. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of credentials. - :type properties: ~azure.mgmt.datafactory.models.Credential - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Credential'}, - } - - def __init__( - self, - **kwargs - ): - super(CredentialResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class CustomActivity(ExecutionActivity): - """Custom activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param command: Required. Command for custom activity Type: string (or Expression with - resultType string). - :type command: object - :param resource_linked_service: Resource linked service reference. - :type resource_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param folder_path: Folder path for resource files Type: string (or Expression with resultType - string). - :type folder_path: object - :param reference_objects: Reference objects. - :type reference_objects: ~azure.mgmt.datafactory.models.CustomActivityReferenceObject - :param extended_properties: User defined property bag. There is no restriction on the keys or - values that can be used. The user specified custom activity has the full responsibility to - consume and interpret the content defined. 
- :type extended_properties: dict[str, object] - :param retention_time_in_days: The retention time for the files submitted for custom activity. - Type: double (or Expression with resultType double). - :type retention_time_in_days: object - :param auto_user_specification: Elevation level and scope for the user, default is nonadmin - task. Type: string (or Expression with resultType double). - :type auto_user_specification: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'command': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, - 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, - 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, - 'auto_user_specification': {'key': 'typeProperties.autoUserSpecification', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(CustomActivity, self).__init__(**kwargs) - self.type = 'Custom' # type: str - self.command = kwargs['command'] - self.resource_linked_service = kwargs.get('resource_linked_service', None) - self.folder_path = kwargs.get('folder_path', None) - self.reference_objects = kwargs.get('reference_objects', None) - self.extended_properties = kwargs.get('extended_properties', None) - self.retention_time_in_days = kwargs.get('retention_time_in_days', None) - self.auto_user_specification = kwargs.get('auto_user_specification', None) - - -class CustomActivityReferenceObject(msrest.serialization.Model): - """Reference objects for custom activity. - - :param linked_services: Linked service references. - :type linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param datasets: Dataset references. - :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] - """ - - _attribute_map = { - 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'}, - 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'}, - } - - def __init__( - self, - **kwargs - ): - super(CustomActivityReferenceObject, self).__init__(**kwargs) - self.linked_services = kwargs.get('linked_services', None) - self.datasets = kwargs.get('datasets', None) - - -class CustomDataset(Dataset): - """The custom dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type_properties: Custom dataset properties. - :type type_properties: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type_properties': {'key': 'typeProperties', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(CustomDataset, self).__init__(**kwargs) - self.type = 'CustomDataset' # type: str - self.type_properties = kwargs.get('type_properties', None) - - -class CustomDataSourceLinkedService(LinkedService): - """Custom linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param type_properties: Required. Custom linked service properties. 
- :type type_properties: object - """ - - _validation = { - 'type': {'required': True}, - 'type_properties': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type_properties': {'key': 'typeProperties', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(CustomDataSourceLinkedService, self).__init__(**kwargs) - self.type = 'CustomDataSource' # type: str - self.type_properties = kwargs['type_properties'] - - -class CustomEventsTrigger(MultiplePipelineTrigger): - """Trigger that runs every time a custom event is received. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] - :param pipelines: Pipelines that need to be started. - :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param subject_begins_with: The event subject must begin with the pattern provided for trigger - to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. - :type subject_begins_with: str - :param subject_ends_with: The event subject must end with the pattern provided for trigger to - fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. - :type subject_ends_with: str - :param events: Required. The list of event types that cause this trigger to fire. - :type events: list[object] - :param scope: Required. The ARM resource ID of the Azure Event Grid Topic. 
- :type scope: str - """ - - _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, - 'events': {'required': True}, - 'scope': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - 'subject_begins_with': {'key': 'typeProperties.subjectBeginsWith', 'type': 'str'}, - 'subject_ends_with': {'key': 'typeProperties.subjectEndsWith', 'type': 'str'}, - 'events': {'key': 'typeProperties.events', 'type': '[object]'}, - 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(CustomEventsTrigger, self).__init__(**kwargs) - self.type = 'CustomEventsTrigger' # type: str - self.subject_begins_with = kwargs.get('subject_begins_with', None) - self.subject_ends_with = kwargs.get('subject_ends_with', None) - self.events = kwargs['events'] - self.scope = kwargs['scope'] - - -class DatabricksNotebookActivity(ExecutionActivity): - """DatabricksNotebook activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param notebook_path: Required. The absolute path of the notebook to be run in the Databricks - Workspace. This path must begin with a slash. Type: string (or Expression with resultType - string). - :type notebook_path: object - :param base_parameters: Base parameters to be used for each run of this job.If the notebook - takes a parameter that is not specified, the default value from the notebook will be used. - :type base_parameters: dict[str, object] - :param libraries: A list of libraries to be installed on the cluster that will execute the job. 
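For orientation, a minimal sketch of constructing the kwargs-based CustomEventsTrigger model above. The import path and the TriggerPipelineReference/PipelineReference helpers are assumed to live in the same azure.mgmt.datafactory.models package; all names and IDs below are placeholders.

from azure.mgmt.datafactory.models import (
    CustomEventsTrigger,
    TriggerPipelineReference,
    PipelineReference,
)

# Fire the referenced pipeline whenever a matching custom event arrives on the
# Event Grid topic identified by `scope`. `events` and `scope` are required;
# at least one of subject_begins_with / subject_ends_with should be supplied.
trigger = CustomEventsTrigger(
    description="Run ingestion on custom events",
    subject_begins_with="/files/incoming/",
    events=["Contoso.Ingestion.FileDropped"],
    scope=(
        "/subscriptions/<subscription-id>/resourceGroups/<rg>/"
        "providers/Microsoft.EventGrid/topics/<topic-name>"
    ),
    pipelines=[
        TriggerPipelineReference(
            pipeline_reference=PipelineReference(reference_name="IngestPipeline"),
            parameters={"sourceFolder": "incoming"},
        )
    ],
)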
- :type libraries: list[dict[str, object]] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'notebook_path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'}, - 'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, - } - - def __init__( - self, - **kwargs - ): - super(DatabricksNotebookActivity, self).__init__(**kwargs) - self.type = 'DatabricksNotebook' # type: str - self.notebook_path = kwargs['notebook_path'] - self.base_parameters = kwargs.get('base_parameters', None) - self.libraries = kwargs.get('libraries', None) - - -class DatabricksSparkJarActivity(ExecutionActivity): - """DatabricksSparkJar activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param main_class_name: Required. The full name of the class containing the main method to be - executed. This class must be contained in a JAR provided as a library. Type: string (or - Expression with resultType string). - :type main_class_name: object - :param parameters: Parameters that will be passed to the main method. - :type parameters: list[object] - :param libraries: A list of libraries to be installed on the cluster that will execute the job. 
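A similar sketch for the DatabricksNotebookActivity model above, assuming a Databricks linked service named "AzureDatabricks1" already exists and that LinkedServiceReference is importable from the same models package.

from azure.mgmt.datafactory.models import (
    DatabricksNotebookActivity,
    LinkedServiceReference,
)

# `name` and `notebook_path` are required; base parameters not supplied here
# fall back to the defaults defined in the notebook itself.
activity = DatabricksNotebookActivity(
    name="RunDailyNotebook",
    notebook_path="/Shared/daily-load",
    base_parameters={"run_date": "2022-10-18"},
    libraries=[{"pypi": {"package": "simplejson"}}],
    linked_service_name=LinkedServiceReference(reference_name="AzureDatabricks1"),
)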
- :type libraries: list[dict[str, object]] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'main_class_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, - } - - def __init__( - self, - **kwargs - ): - super(DatabricksSparkJarActivity, self).__init__(**kwargs) - self.type = 'DatabricksSparkJar' # type: str - self.main_class_name = kwargs['main_class_name'] - self.parameters = kwargs.get('parameters', None) - self.libraries = kwargs.get('libraries', None) - - -class DatabricksSparkPythonActivity(ExecutionActivity): - """DatabricksSparkPython activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param python_file: Required. The URI of the Python file to be executed. DBFS paths are - supported. Type: string (or Expression with resultType string). - :type python_file: object - :param parameters: Command line parameters that will be passed to the Python file. - :type parameters: list[object] - :param libraries: A list of libraries to be installed on the cluster that will execute the job. 
- :type libraries: list[dict[str, object]] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'python_file': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, - } - - def __init__( - self, - **kwargs - ): - super(DatabricksSparkPythonActivity, self).__init__(**kwargs) - self.type = 'DatabricksSparkPython' # type: str - self.python_file = kwargs['python_file'] - self.parameters = kwargs.get('parameters', None) - self.libraries = kwargs.get('libraries', None) - - -class DataFlow(msrest.serialization.Model): - """Azure Data Factory nested object which contains a flow with data movements and transformations. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: MappingDataFlow. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Type of data flow.Constant filled by server. - :type type: str - :param description: The description of the data flow. - :type description: str - :param annotations: List of tags that can be used for describing the data flow. - :type annotations: list[object] - :param folder: The folder that this data flow is in. If not specified, Data flow will appear at - the root level. - :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, - } - - _subtype_map = { - 'type': {'MappingDataFlow': 'MappingDataFlow'} - } - - def __init__( - self, - **kwargs - ): - super(DataFlow, self).__init__(**kwargs) - self.type = None # type: Optional[str] - self.description = kwargs.get('description', None) - self.annotations = kwargs.get('annotations', None) - self.folder = kwargs.get('folder', None) - - -class DataFlowDebugCommandPayload(msrest.serialization.Model): - """Structure of command payload. - - All required parameters must be populated in order to send to Azure. - - :param stream_name: Required. The stream name which is used for preview. - :type stream_name: str - :param row_limits: Row limits for preview response. - :type row_limits: int - :param columns: Array of column names. - :type columns: list[str] - :param expression: The expression which is used for preview. 
- :type expression: str - """ - - _validation = { - 'stream_name': {'required': True}, - } - - _attribute_map = { - 'stream_name': {'key': 'streamName', 'type': 'str'}, - 'row_limits': {'key': 'rowLimits', 'type': 'int'}, - 'columns': {'key': 'columns', 'type': '[str]'}, - 'expression': {'key': 'expression', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DataFlowDebugCommandPayload, self).__init__(**kwargs) - self.stream_name = kwargs['stream_name'] - self.row_limits = kwargs.get('row_limits', None) - self.columns = kwargs.get('columns', None) - self.expression = kwargs.get('expression', None) - - -class DataFlowDebugCommandRequest(msrest.serialization.Model): - """Request body structure for data flow debug command. - - :param session_id: The ID of data flow debug session. - :type session_id: str - :param command: The command type. Possible values include: "executePreviewQuery", - "executeStatisticsQuery", "executeExpressionQuery". - :type command: str or ~azure.mgmt.datafactory.models.DataFlowDebugCommandType - :param command_payload: The command payload object. - :type command_payload: ~azure.mgmt.datafactory.models.DataFlowDebugCommandPayload - """ - - _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'command': {'key': 'command', 'type': 'str'}, - 'command_payload': {'key': 'commandPayload', 'type': 'DataFlowDebugCommandPayload'}, - } - - def __init__( - self, - **kwargs - ): - super(DataFlowDebugCommandRequest, self).__init__(**kwargs) - self.session_id = kwargs.get('session_id', None) - self.command = kwargs.get('command', None) - self.command_payload = kwargs.get('command_payload', None) - - -class DataFlowDebugCommandResponse(msrest.serialization.Model): - """Response body structure of data flow result for data preview, statistics or expression preview. - - :param status: The run status of data preview, statistics or expression preview. - :type status: str - :param data: The result data of data preview, statistics or expression preview. - :type data: str - """ - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'data': {'key': 'data', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DataFlowDebugCommandResponse, self).__init__(**kwargs) - self.status = kwargs.get('status', None) - self.data = kwargs.get('data', None) - - -class DataFlowDebugPackage(msrest.serialization.Model): - """Request body structure for starting data flow debug session. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow: Data flow instance. - :type data_flow: ~azure.mgmt.datafactory.models.DataFlowDebugResource - :param datasets: List of datasets. - :type datasets: list[~azure.mgmt.datafactory.models.DatasetDebugResource] - :param linked_services: List of linked services. - :type linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceDebugResource] - :param staging: Staging info for debug session. - :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo - :param debug_settings: Data flow debug settings. 
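To show how the debug-command models above fit together, a minimal sketch; the session ID value is a hypothetical placeholder.

from azure.mgmt.datafactory.models import (
    DataFlowDebugCommandPayload,
    DataFlowDebugCommandRequest,
)

# Preview the first 100 rows produced by the "source1" stream of an active
# debug session. `stream_name` is the only required payload field.
payload = DataFlowDebugCommandPayload(stream_name="source1", row_limits=100)
request = DataFlowDebugCommandRequest(
    session_id="00000000-0000-0000-0000-000000000000",
    command="executePreviewQuery",
    command_payload=payload,
)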
- :type debug_settings: ~azure.mgmt.datafactory.models.DataFlowDebugPackageDebugSettings - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow': {'key': 'dataFlow', 'type': 'DataFlowDebugResource'}, - 'datasets': {'key': 'datasets', 'type': '[DatasetDebugResource]'}, - 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceDebugResource]'}, - 'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'}, - 'debug_settings': {'key': 'debugSettings', 'type': 'DataFlowDebugPackageDebugSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(DataFlowDebugPackage, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.session_id = kwargs.get('session_id', None) - self.data_flow = kwargs.get('data_flow', None) - self.datasets = kwargs.get('datasets', None) - self.linked_services = kwargs.get('linked_services', None) - self.staging = kwargs.get('staging', None) - self.debug_settings = kwargs.get('debug_settings', None) - - -class DataFlowDebugPackageDebugSettings(msrest.serialization.Model): - """Data flow debug settings. - - :param source_settings: Source setting for data flow debug. - :type source_settings: list[~azure.mgmt.datafactory.models.DataFlowSourceSetting] - :param parameters: Data flow parameters. - :type parameters: dict[str, object] - :param dataset_parameters: Parameters for dataset. - :type dataset_parameters: object - """ - - _attribute_map = { - 'source_settings': {'key': 'sourceSettings', 'type': '[DataFlowSourceSetting]'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DataFlowDebugPackageDebugSettings, self).__init__(**kwargs) - self.source_settings = kwargs.get('source_settings', None) - self.parameters = kwargs.get('parameters', None) - self.dataset_parameters = kwargs.get('dataset_parameters', None) - - -class SubResourceDebugResource(msrest.serialization.Model): - """Azure Data Factory nested debug resource. - - :param name: The resource name. - :type name: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SubResourceDebugResource, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - - -class DataFlowDebugResource(SubResourceDebugResource): - """Data flow debug resource. - - All required parameters must be populated in order to send to Azure. - - :param name: The resource name. - :type name: str - :param properties: Required. Data flow properties. - :type properties: ~azure.mgmt.datafactory.models.DataFlow - """ - - _validation = { - 'properties': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'DataFlow'}, - } - - def __init__( - self, - **kwargs - ): - super(DataFlowDebugResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class DataFlowDebugSessionInfo(msrest.serialization.Model): - """Data flow debug session info. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param data_flow_name: The name of the data flow. - :type data_flow_name: str - :param compute_type: Compute type of the cluster. 
- :type compute_type: str - :param core_count: Core count of the cluster. - :type core_count: int - :param node_count: Node count of the cluster. (deprecated property). - :type node_count: int - :param integration_runtime_name: Attached integration runtime name of data flow debug session. - :type integration_runtime_name: str - :param session_id: The ID of data flow debug session. - :type session_id: str - :param start_time: Start time of data flow debug session. - :type start_time: str - :param time_to_live_in_minutes: Compute type of the cluster. - :type time_to_live_in_minutes: int - :param last_activity_time: Last activity time of data flow debug session. - :type last_activity_time: str - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'core_count': {'key': 'coreCount', 'type': 'int'}, - 'node_count': {'key': 'nodeCount', 'type': 'int'}, - 'integration_runtime_name': {'key': 'integrationRuntimeName', 'type': 'str'}, - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'time_to_live_in_minutes': {'key': 'timeToLiveInMinutes', 'type': 'int'}, - 'last_activity_time': {'key': 'lastActivityTime', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DataFlowDebugSessionInfo, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.data_flow_name = kwargs.get('data_flow_name', None) - self.compute_type = kwargs.get('compute_type', None) - self.core_count = kwargs.get('core_count', None) - self.node_count = kwargs.get('node_count', None) - self.integration_runtime_name = kwargs.get('integration_runtime_name', None) - self.session_id = kwargs.get('session_id', None) - self.start_time = kwargs.get('start_time', None) - self.time_to_live_in_minutes = kwargs.get('time_to_live_in_minutes', None) - self.last_activity_time = kwargs.get('last_activity_time', None) - - -class DataFlowFolder(msrest.serialization.Model): - """The folder that this data flow is in. If not specified, Data flow will appear at the root level. - - :param name: The name of the folder that this data flow is in. - :type name: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DataFlowFolder, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - - -class DataFlowListResponse(msrest.serialization.Model): - """A list of data flow resources. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of data flows. - :type value: list[~azure.mgmt.datafactory.models.DataFlowResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[DataFlowResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DataFlowListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) - - -class DataFlowReference(msrest.serialization.Model): - """Data flow reference type. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :ivar type: Required. Data flow reference type. Default value: "DataFlowReference". - :vartype type: str - :param reference_name: Required. Reference data flow name. - :type reference_name: str - :param dataset_parameters: Reference data flow parameters from dataset. - :type dataset_parameters: object - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, - } - - type = "DataFlowReference" - - def __init__( - self, - **kwargs - ): - super(DataFlowReference, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.reference_name = kwargs['reference_name'] - self.dataset_parameters = kwargs.get('dataset_parameters', None) - - -class DataFlowResource(SubResource): - """Data flow resource type. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Data flow properties. - :type properties: ~azure.mgmt.datafactory.models.DataFlow - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'DataFlow'}, - } - - def __init__( - self, - **kwargs - ): - super(DataFlowResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class Transformation(msrest.serialization.Model): - """A data flow transformation. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. - :type description: str - """ - - _validation = { - 'name': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Transformation, self).__init__(**kwargs) - self.name = kwargs['name'] - self.description = kwargs.get('description', None) - - -class DataFlowSink(Transformation): - """Transformation for data flow sink. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. - :type description: str - :param dataset: Dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param linked_service: Linked service reference. 
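The reference models above carry a class-level constant for `type`, so only the reference name has to be supplied. A short sketch of DataFlowReference, with a hypothetical data flow name:

from azure.mgmt.datafactory.models import DataFlowReference

# `type` is the constant "DataFlowReference" and is filled in automatically;
# `reference_name` is the only required argument.
ref = DataFlowReference(
    reference_name="CleanOrdersDataFlow",
    dataset_parameters={"path": "orders/2022/10"},
)
assert ref.type == "DataFlowReference"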
- :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param schema_linked_service: Schema linked service reference. - :type schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - """ - - _validation = { - 'name': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, - 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, - 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, - } - - def __init__( - self, - **kwargs - ): - super(DataFlowSink, self).__init__(**kwargs) - self.dataset = kwargs.get('dataset', None) - self.linked_service = kwargs.get('linked_service', None) - self.schema_linked_service = kwargs.get('schema_linked_service', None) - - -class DataFlowSource(Transformation): - """Transformation for data flow source. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. - :type description: str - :param dataset: Dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param linked_service: Linked service reference. - :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param schema_linked_service: Schema linked service reference. - :type schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - """ - - _validation = { - 'name': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, - 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, - 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, - } - - def __init__( - self, - **kwargs - ): - super(DataFlowSource, self).__init__(**kwargs) - self.dataset = kwargs.get('dataset', None) - self.linked_service = kwargs.get('linked_service', None) - self.schema_linked_service = kwargs.get('schema_linked_service', None) - - -class DataFlowSourceSetting(msrest.serialization.Model): - """Definition of data flow source setting for debug. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param source_name: The data flow source name. - :type source_name: str - :param row_limit: Defines the row limit of data flow source in debug. - :type row_limit: int - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_name': {'key': 'sourceName', 'type': 'str'}, - 'row_limit': {'key': 'rowLimit', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(DataFlowSourceSetting, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.source_name = kwargs.get('source_name', None) - self.row_limit = kwargs.get('row_limit', None) - - -class DataFlowStagingInfo(msrest.serialization.Model): - """Staging info for execute data flow activity. - - :param linked_service: Staging linked service reference. - :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType - string). 
- :type folder_path: object - """ - - _attribute_map = { - 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DataFlowStagingInfo, self).__init__(**kwargs) - self.linked_service = kwargs.get('linked_service', None) - self.folder_path = kwargs.get('folder_path', None) - - -class DataLakeAnalyticsUsqlActivity(ExecutionActivity): - """Data Lake Analytics U-SQL activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param script_path: Required. Case-sensitive path to folder that contains the U-SQL script. - Type: string (or Expression with resultType string). - :type script_path: object - :param script_linked_service: Required. Script linked service reference. - :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param degree_of_parallelism: The maximum number of nodes simultaneously used to run the job. - Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. - :type degree_of_parallelism: object - :param priority: Determines which jobs out of all that are queued should be selected to run - first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or - Expression with resultType integer), minimum: 1. - :type priority: object - :param parameters: Parameters for U-SQL job request. - :type parameters: dict[str, object] - :param runtime_version: Runtime version of the U-SQL engine to use. Type: string (or Expression - with resultType string). - :type runtime_version: object - :param compilation_mode: Compilation mode of U-SQL. Must be one of these values : Semantic, - Full and SingleBox. Type: string (or Expression with resultType string). 
- :type compilation_mode: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'script_path': {'required': True}, - 'script_linked_service': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'}, - 'priority': {'key': 'typeProperties.priority', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, - 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'}, - 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DataLakeAnalyticsUsqlActivity, self).__init__(**kwargs) - self.type = 'DataLakeAnalyticsU-SQL' # type: str - self.script_path = kwargs['script_path'] - self.script_linked_service = kwargs['script_linked_service'] - self.degree_of_parallelism = kwargs.get('degree_of_parallelism', None) - self.priority = kwargs.get('priority', None) - self.parameters = kwargs.get('parameters', None) - self.runtime_version = kwargs.get('runtime_version', None) - self.compilation_mode = kwargs.get('compilation_mode', None) - - -class DatasetCompression(msrest.serialization.Model): - """The compression method used on a dataset. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetTarCompression, DatasetTarGZipCompression, DatasetZipDeflateCompression. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression. Type: string (or Expression with resultType - string).Constant filled by server. - :type type: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'object'}, - } - - _subtype_map = { - 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'Tar': 'DatasetTarCompression', 'TarGZip': 'DatasetTarGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} - } - - def __init__( - self, - **kwargs - ): - super(DatasetCompression, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'DatasetCompression' # type: str - - -class DatasetBZip2Compression(DatasetCompression): - """The BZip2 compression method used on a dataset. - - All required parameters must be populated in order to send to Azure. 
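A sketch for the DataLakeAnalyticsUsqlActivity model above; the script path and linked service name are placeholders.

from azure.mgmt.datafactory.models import (
    DataLakeAnalyticsUsqlActivity,
    LinkedServiceReference,
)

# `script_path` and `script_linked_service` are required in addition to the
# activity name; parallelism and priority default to 1 and 1000 respectively.
usql = DataLakeAnalyticsUsqlActivity(
    name="AggregateClicks",
    script_path="scripts/aggregate_clicks.usql",
    script_linked_service=LinkedServiceReference(reference_name="AdlsScriptStore"),
    degree_of_parallelism=4,
    priority=100,
    parameters={"inputPath": "/raw/clicks", "outputPath": "/curated/clicks"},
)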
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression. Type: string (or Expression with resultType - string).Constant filled by server. - :type type: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DatasetBZip2Compression, self).__init__(**kwargs) - self.type = 'BZip2' # type: str - - -class DatasetDataElement(msrest.serialization.Model): - """Columns that define the structure of the dataset. - - :param name: Name of the column. Type: string (or Expression with resultType string). - :type name: object - :param type: Type of the column. Type: string (or Expression with resultType string). - :type type: object - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DatasetDataElement, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.type = kwargs.get('type', None) - - -class DatasetDebugResource(SubResourceDebugResource): - """Dataset debug resource. - - All required parameters must be populated in order to send to Azure. - - :param name: The resource name. - :type name: str - :param properties: Required. Dataset properties. - :type properties: ~azure.mgmt.datafactory.models.Dataset - """ - - _validation = { - 'properties': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Dataset'}, - } - - def __init__( - self, - **kwargs - ): - super(DatasetDebugResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class DatasetDeflateCompression(DatasetCompression): - """The Deflate compression method used on a dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression. Type: string (or Expression with resultType - string).Constant filled by server. - :type type: object - :param level: The Deflate compression level. - :type level: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'object'}, - 'level': {'key': 'level', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DatasetDeflateCompression, self).__init__(**kwargs) - self.type = 'Deflate' # type: str - self.level = kwargs.get('level', None) - - -class DatasetFolder(msrest.serialization.Model): - """The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - - :param name: The name of the folder that this Dataset is in. - :type name: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DatasetFolder, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - - -class DatasetGZipCompression(DatasetCompression): - """The GZip compression method used on a dataset. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression. Type: string (or Expression with resultType - string).Constant filled by server. - :type type: object - :param level: The GZip compression level. - :type level: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'object'}, - 'level': {'key': 'level', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DatasetGZipCompression, self).__init__(**kwargs) - self.type = 'GZip' # type: str - self.level = kwargs.get('level', None) - - -class DatasetListResponse(msrest.serialization.Model): - """A list of dataset resources. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of datasets. - :type value: list[~azure.mgmt.datafactory.models.DatasetResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[DatasetResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DatasetListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) - - -class DatasetReference(msrest.serialization.Model): - """Dataset reference type. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Dataset reference type. Default value: "DatasetReference". - :vartype type: str - :param reference_name: Required. Reference dataset name. - :type reference_name: str - :param parameters: Arguments for dataset. - :type parameters: dict[str, object] - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - type = "DatasetReference" - - def __init__( - self, - **kwargs - ): - super(DatasetReference, self).__init__(**kwargs) - self.reference_name = kwargs['reference_name'] - self.parameters = kwargs.get('parameters', None) - - -class DatasetResource(SubResource): - """Dataset resource type. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Dataset properties. 
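Like DataFlowReference, the DatasetReference model above fills `type` with a constant; a minimal sketch with a hypothetical dataset name:

from azure.mgmt.datafactory.models import DatasetReference

ref = DatasetReference(
    reference_name="RawOrdersCsv",
    parameters={"folder": "orders", "fileName": "2022-10-18.csv"},
)
assert ref.type == "DatasetReference"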
- :type properties: ~azure.mgmt.datafactory.models.Dataset - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Dataset'}, - } - - def __init__( - self, - **kwargs - ): - super(DatasetResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class DatasetSchemaDataElement(msrest.serialization.Model): - """Columns that define the physical type schema of the dataset. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Name of the schema column. Type: string (or Expression with resultType string). - :type name: object - :param type: Type of the schema column. Type: string (or Expression with resultType string). - :type type: object - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DatasetSchemaDataElement, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.name = kwargs.get('name', None) - self.type = kwargs.get('type', None) - - -class DatasetTarCompression(DatasetCompression): - """The Tar archive method used on a dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression. Type: string (or Expression with resultType - string).Constant filled by server. - :type type: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DatasetTarCompression, self).__init__(**kwargs) - self.type = 'Tar' # type: str - - -class DatasetTarGZipCompression(DatasetCompression): - """The TarGZip compression method used on a dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression. Type: string (or Expression with resultType - string).Constant filled by server. - :type type: object - :param level: The TarGZip compression level. - :type level: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'object'}, - 'level': {'key': 'level', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DatasetTarGZipCompression, self).__init__(**kwargs) - self.type = 'TarGZip' # type: str - self.level = kwargs.get('level', None) - - -class DatasetZipDeflateCompression(DatasetCompression): - """The ZipDeflate compression method used on a dataset. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression. Type: string (or Expression with resultType - string).Constant filled by server. - :type type: object - :param level: The ZipDeflate compression level. - :type level: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'object'}, - 'level': {'key': 'level', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DatasetZipDeflateCompression, self).__init__(**kwargs) - self.type = 'ZipDeflate' # type: str - self.level = kwargs.get('level', None) - - -class Db2LinkedService(LinkedService): - """Linked service for DB2 data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: The connection string. It is mutually exclusive with server, - database, authenticationType, userName, packageCollection and certificateCommonName property. - Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param server: Server name for connection. It is mutually exclusive with connectionString - property. Type: string (or Expression with resultType string). - :type server: object - :param database: Database name for connection. It is mutually exclusive with connectionString - property. Type: string (or Expression with resultType string). - :type database: object - :param authentication_type: AuthenticationType to be used for connection. It is mutually - exclusive with connectionString property. Possible values include: "Basic". - :type authentication_type: str or ~azure.mgmt.datafactory.models.Db2AuthenticationType - :param username: Username for authentication. It is mutually exclusive with connectionString - property. Type: string (or Expression with resultType string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param package_collection: Under where packages are created when querying database. It is - mutually exclusive with connectionString property. Type: string (or Expression with resultType - string). - :type package_collection: object - :param certificate_common_name: Certificate Common Name when TLS is enabled. It is mutually - exclusive with connectionString property. Type: string (or Expression with resultType string). - :type certificate_common_name: object - :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are - encrypted using the integration runtime credential manager. It is mutually exclusive with - connectionString property. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'package_collection': {'key': 'typeProperties.packageCollection', 'type': 'object'}, - 'certificate_common_name': {'key': 'typeProperties.certificateCommonName', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(Db2LinkedService, self).__init__(**kwargs) - self.type = 'Db2' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.server = kwargs.get('server', None) - self.database = kwargs.get('database', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.package_collection = kwargs.get('package_collection', None) - self.certificate_common_name = kwargs.get('certificate_common_name', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class Db2Source(TabularSource): - """A copy activity source for Db2 databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. 
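A sketch for the Db2LinkedService model above. Per its docstring, connection_string is mutually exclusive with the individual server/database/credential properties, so only one style should be used; SecureString is assumed to come from the same models package, and all values below are placeholders.

from azure.mgmt.datafactory.models import Db2LinkedService, SecureString

# Property-based form: server, database and credentials are given separately
# instead of a single connection string.
db2 = Db2LinkedService(
    server="db2.internal.contoso.com",
    database="SAMPLE",
    authentication_type="Basic",
    username="etl_user",
    password=SecureString(value="<placeholder-password>"),
    certificate_common_name="db2.internal.contoso.com",
)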
Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: Database query. Type: string (or Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(Db2Source, self).__init__(**kwargs) - self.type = 'Db2Source' # type: str - self.query = kwargs.get('query', None) - - -class Db2TableDataset(Dataset): - """The Db2 table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The Db2 schema name. Type: string (or Expression with - resultType string). - :type schema_type_properties_schema: object - :param table: The Db2 table name. Type: string (or Expression with resultType string). 
- :type table: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(Db2TableDataset, self).__init__(**kwargs) - self.type = 'Db2Table' # type: str - self.table_name = kwargs.get('table_name', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - self.table = kwargs.get('table', None) - - -class DeleteActivity(ExecutionActivity): - """Delete activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param recursive: If true, files or sub-folders under current folder path will be deleted - recursively. Default is false. Type: boolean (or Expression with resultType boolean). - :type recursive: object - :param max_concurrent_connections: The max concurrent connections to connect data source at the - same time. - :type max_concurrent_connections: int - :param enable_logging: Whether to record detailed logs of delete-activity execution. Default - value is false. Type: boolean (or Expression with resultType boolean). - :type enable_logging: object - :param log_storage_settings: Log storage settings customer need to provide when enableLogging - is true. - :type log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings - :param dataset: Required. Delete activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param store_settings: Delete activity store settings. 
- :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'max_concurrent_connections': {'minimum': 1}, - 'dataset': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'recursive': {'key': 'typeProperties.recursive', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'}, - 'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'}, - 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - 'store_settings': {'key': 'typeProperties.storeSettings', 'type': 'StoreReadSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(DeleteActivity, self).__init__(**kwargs) - self.type = 'Delete' # type: str - self.recursive = kwargs.get('recursive', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - self.enable_logging = kwargs.get('enable_logging', None) - self.log_storage_settings = kwargs.get('log_storage_settings', None) - self.dataset = kwargs['dataset'] - self.store_settings = kwargs.get('store_settings', None) - - -class DeleteDataFlowDebugSessionRequest(msrest.serialization.Model): - """Request body structure for deleting data flow debug session. - - :param session_id: The ID of data flow debug session. - :type session_id: str - """ - - _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DeleteDataFlowDebugSessionRequest, self).__init__(**kwargs) - self.session_id = kwargs.get('session_id', None) - - -class DelimitedTextDataset(Dataset): - """Delimited text dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the delimited text storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param column_delimiter: The column delimiter. Type: string (or Expression with resultType - string). - :type column_delimiter: object - :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string). - :type row_delimiter: object - :param encoding_name: The code page name of the preferred encoding. If missing, the default value - is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in - the following link to set supported values: - https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with - resultType string). - :type encoding_name: object - :param compression_codec: The data compressionCodec. Type: string (or Expression with - resultType string). - :type compression_codec: object - :param compression_level: The data compression method used for DelimitedText. - :type compression_level: object - :param quote_char: The quote character. Type: string (or Expression with resultType string). - :type quote_char: object - :param escape_char: The escape character. Type: string (or Expression with resultType string). - :type escape_char: object - :param first_row_as_header: When used as input, treat the first row of data as headers. When - used as output, write the headers into the output as the first row of data. The default value is - false. Type: boolean (or Expression with resultType boolean). - :type first_row_as_header: object - :param null_value: The null value string. Type: string (or Expression with resultType string). 
- :type null_value: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, - 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, - 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, - 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, - 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, - 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, - 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, - 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DelimitedTextDataset, self).__init__(**kwargs) - self.type = 'DelimitedText' # type: str - self.location = kwargs.get('location', None) - self.column_delimiter = kwargs.get('column_delimiter', None) - self.row_delimiter = kwargs.get('row_delimiter', None) - self.encoding_name = kwargs.get('encoding_name', None) - self.compression_codec = kwargs.get('compression_codec', None) - self.compression_level = kwargs.get('compression_level', None) - self.quote_char = kwargs.get('quote_char', None) - self.escape_char = kwargs.get('escape_char', None) - self.first_row_as_header = kwargs.get('first_row_as_header', None) - self.null_value = kwargs.get('null_value', None) - - -class DelimitedTextReadSettings(FormatReadSettings): - """Delimited text read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param skip_line_count: Indicates the number of non-empty rows to skip when reading data from - input files. Type: integer (or Expression with resultType integer). - :type skip_line_count: object - :param compression_properties: Compression settings. 
- :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, - 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(DelimitedTextReadSettings, self).__init__(**kwargs) - self.type = 'DelimitedTextReadSettings' # type: str - self.skip_line_count = kwargs.get('skip_line_count', None) - self.compression_properties = kwargs.get('compression_properties', None) - - -class DelimitedTextSink(CopySink): - """A copy activity DelimitedText sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param store_settings: DelimitedText store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: DelimitedText format settings. 
- :type format_settings: ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(DelimitedTextSink, self).__init__(**kwargs) - self.type = 'DelimitedTextSink' # type: str - self.store_settings = kwargs.get('store_settings', None) - self.format_settings = kwargs.get('format_settings', None) - - -class DelimitedTextSource(CopySource): - """A copy activity DelimitedText source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param store_settings: DelimitedText store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param format_settings: DelimitedText format settings. - :type format_settings: ~azure.mgmt.datafactory.models.DelimitedTextReadSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DelimitedTextSource, self).__init__(**kwargs) - self.type = 'DelimitedTextSource' # type: str - self.store_settings = kwargs.get('store_settings', None) - self.format_settings = kwargs.get('format_settings', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class DelimitedTextWriteSettings(FormatWriteSettings): - """Delimited text write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param quote_all_text: Indicates whether string values should always be enclosed with quotes. - Type: boolean (or Expression with resultType boolean). - :type quote_all_text: object - :param file_extension: Required. The file extension used to create the files. Type: string (or - Expression with resultType string). - :type file_extension: object - :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the - specified count. Type: integer (or Expression with resultType integer). - :type max_rows_per_file: object - :param file_name_prefix: Specifies the file name pattern - :code:``_:code:``.:code:`` when copy from non-file - based store without partitionOptions. Type: string (or Expression with resultType string). - :type file_name_prefix: object - """ - - _validation = { - 'type': {'required': True}, - 'file_extension': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, - 'file_extension': {'key': 'fileExtension', 'type': 'object'}, - 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, - 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DelimitedTextWriteSettings, self).__init__(**kwargs) - self.type = 'DelimitedTextWriteSettings' # type: str - self.quote_all_text = kwargs.get('quote_all_text', None) - self.file_extension = kwargs['file_extension'] - self.max_rows_per_file = kwargs.get('max_rows_per_file', None) - self.file_name_prefix = kwargs.get('file_name_prefix', None) - - -class DependencyReference(msrest.serialization.Model): - """Referenced dependency. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SelfDependencyTumblingWindowTriggerReference, TriggerDependencyReference. - - All required parameters must be populated in order to send to Azure. 
- - :param type: Required. The type of dependency reference.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'} - } - - def __init__( - self, - **kwargs - ): - super(DependencyReference, self).__init__(**kwargs) - self.type = None # type: Optional[str] - - -class DistcpSettings(msrest.serialization.Model): - """Distcp settings. - - All required parameters must be populated in order to send to Azure. - - :param resource_manager_endpoint: Required. Specifies the Yarn ResourceManager endpoint. Type: - string (or Expression with resultType string). - :type resource_manager_endpoint: object - :param temp_script_path: Required. Specifies an existing folder path which will be used to - store temp Distcp command script. The script file is generated by ADF and will be removed after - Copy job finished. Type: string (or Expression with resultType string). - :type temp_script_path: object - :param distcp_options: Specifies the Distcp options. Type: string (or Expression with - resultType string). - :type distcp_options: object - """ - - _validation = { - 'resource_manager_endpoint': {'required': True}, - 'temp_script_path': {'required': True}, - } - - _attribute_map = { - 'resource_manager_endpoint': {'key': 'resourceManagerEndpoint', 'type': 'object'}, - 'temp_script_path': {'key': 'tempScriptPath', 'type': 'object'}, - 'distcp_options': {'key': 'distcpOptions', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DistcpSettings, self).__init__(**kwargs) - self.resource_manager_endpoint = kwargs['resource_manager_endpoint'] - self.temp_script_path = kwargs['temp_script_path'] - self.distcp_options = kwargs.get('distcp_options', None) - - -class DocumentDbCollectionDataset(Dataset): - """Microsoft Azure Document Database Collection dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param collection_name: Required. Document Database collection name. Type: string (or - Expression with resultType string). 
- :type collection_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DocumentDbCollectionDataset, self).__init__(**kwargs) - self.type = 'DocumentDbCollection' # type: str - self.collection_name = kwargs['collection_name'] - - -class DocumentDbCollectionSink(CopySink): - """A copy activity Document Database Collection sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or - Expression with resultType string). - :type nesting_separator: object - :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or - Expression with resultType string). Allowed values: insert and upsert. 
- :type write_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DocumentDbCollectionSink, self).__init__(**kwargs) - self.type = 'DocumentDbCollectionSink' # type: str - self.nesting_separator = kwargs.get('nesting_separator', None) - self.write_behavior = kwargs.get('write_behavior', None) - - -class DocumentDbCollectionSource(CopySource): - """A copy activity Document Database Collection source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query: Documents query. Type: string (or Expression with resultType string). - :type query: object - :param nesting_separator: Nested properties separator. Type: string (or Expression with - resultType string). - :type nesting_separator: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DocumentDbCollectionSource, self).__init__(**kwargs) - self.type = 'DocumentDbCollectionSource' # type: str - self.query = kwargs.get('query', None) - self.nesting_separator = kwargs.get('nesting_separator', None) - self.query_timeout = kwargs.get('query_timeout', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class DrillLinkedService(LinkedService): - """Drill server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DrillLinkedService, self).__init__(**kwargs) - self.type = 'Drill' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class DrillSource(TabularSource): - """A copy activity Drill server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DrillSource, self).__init__(**kwargs) - self.type = 'DrillSource' # type: str - self.query = kwargs.get('query', None) - - -class DrillTableDataset(Dataset): - """Drill server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of the Drill. Type: string (or Expression with resultType string). - :type table: object - :param schema_type_properties_schema: The schema name of the Drill. Type: string (or Expression - with resultType string). 
- :type schema_type_properties_schema: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DrillTableDataset, self).__init__(**kwargs) - self.type = 'DrillTable' # type: str - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - - -class DwCopyCommandDefaultValue(msrest.serialization.Model): - """Default value. - - :param column_name: Column name. Type: object (or Expression with resultType string). - :type column_name: object - :param default_value: The default value of the column. Type: object (or Expression with - resultType string). - :type default_value: object - """ - - _attribute_map = { - 'column_name': {'key': 'columnName', 'type': 'object'}, - 'default_value': {'key': 'defaultValue', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DwCopyCommandDefaultValue, self).__init__(**kwargs) - self.column_name = kwargs.get('column_name', None) - self.default_value = kwargs.get('default_value', None) - - -class DwCopyCommandSettings(msrest.serialization.Model): - """DW Copy Command settings. - - :param default_values: Specifies the default values for each target column in SQL DW. The - default values in the property overwrite the DEFAULT constraint set in the DB, and identity - column cannot have a default value. Type: array of objects (or Expression with resultType array - of objects). - :type default_values: list[~azure.mgmt.datafactory.models.DwCopyCommandDefaultValue] - :param additional_options: Additional options directly passed to SQL DW in Copy Command. Type: - key value pairs (value should be string type) (or Expression with resultType object). Example: - "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" }. - :type additional_options: dict[str, str] - """ - - _attribute_map = { - 'default_values': {'key': 'defaultValues', 'type': '[DwCopyCommandDefaultValue]'}, - 'additional_options': {'key': 'additionalOptions', 'type': '{str}'}, - } - - def __init__( - self, - **kwargs - ): - super(DwCopyCommandSettings, self).__init__(**kwargs) - self.default_values = kwargs.get('default_values', None) - self.additional_options = kwargs.get('additional_options', None) - - -class DynamicsAxLinkedService(LinkedService): - """Dynamics AX linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. 
Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData - endpoint. - :type url: object - :param service_principal_id: Required. Specify the application's client ID. Type: string (or - Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. Specify the application's key. Mark this field as a - SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key - Vault. Type: string (or Expression with resultType string). - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. Specify the tenant information (domain name or tenant ID) under which - your application resides. Retrieve it by hovering the mouse in the top-right corner of the - Azure portal. Type: string (or Expression with resultType string). - :type tenant: object - :param aad_resource_id: Required. Specify the resource you are requesting authorization. Type: - string (or Expression with resultType string). - :type aad_resource_id: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'tenant': {'required': True}, - 'aad_resource_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DynamicsAxLinkedService, self).__init__(**kwargs) - self.type = 'DynamicsAX' # type: str - self.url = kwargs['url'] - self.service_principal_id = kwargs['service_principal_id'] - self.service_principal_key = kwargs['service_principal_key'] - self.tenant = kwargs['tenant'] - self.aad_resource_id = kwargs['aad_resource_id'] - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class DynamicsAxResourceDataset(Dataset): - """The path of the Dynamics AX OData entity. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression - with resultType string). - :type path: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DynamicsAxResourceDataset, self).__init__(**kwargs) - self.type = 'DynamicsAXResource' # type: str - self.path = kwargs['path'] - - -class DynamicsAxSource(TabularSource): - """A copy activity Dynamics AX source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). - :type query: object - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout - to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string - (or Expression with resultType string), pattern: - ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DynamicsAxSource, self).__init__(**kwargs) - self.type = 'DynamicsAXSource' # type: str - self.query = kwargs.get('query', None) - self.http_request_timeout = kwargs.get('http_request_timeout', None) - - -class DynamicsCrmEntityDataset(Dataset): - """The Dynamics CRM entity dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param entity_name: The logical name of the entity. Type: string (or Expression with resultType - string). 
- :type entity_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DynamicsCrmEntityDataset, self).__init__(**kwargs) - self.type = 'DynamicsCrmEntity' # type: str - self.entity_name = kwargs.get('entity_name', None) - - -class DynamicsCrmLinkedService(LinkedService): - """Dynamics CRM linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' - for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: - string (or Expression with resultType string). - :type deployment_type: object - :param host_name: The host name of the on-premises Dynamics CRM server. The property is - required for on-prem and not allowed for online. Type: string (or Expression with resultType - string). - :type host_name: object - :param port: The port of on-premises Dynamics CRM server. The property is required for on-prem - and not allowed for online. Default is 443. Type: integer (or Expression with resultType - integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Dynamics CRM server. The property is required for - on-line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: object - :param organization_name: The organization name of the Dynamics CRM instance. The property is - required for on-prem and required for online when there are more than one Dynamics CRM - instances associated with the user. Type: string (or Expression with resultType string). - :type organization_name: object - :param authentication_type: Required. The authentication type to connect to Dynamics CRM - server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, - 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). - :type authentication_type: object - :param username: User name to access the Dynamics CRM instance. 
Type: string (or Expression - with resultType string). - :type username: object - :param password: Password to access the Dynamics CRM instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_credential_type: The service principal credential type to use in - Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). - :type service_principal_credential_type: object - :param service_principal_credential: The credential of the service principal object in Azure - Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', - servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If - servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only - be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, - 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DynamicsCrmLinkedService, self).__init__(**kwargs) - self.type = 'DynamicsCrm' # type: str - self.deployment_type = kwargs['deployment_type'] - self.host_name = kwargs.get('host_name', None) - self.port = kwargs.get('port', None) - self.service_uri = kwargs.get('service_uri', None) - self.organization_name = kwargs.get('organization_name', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', 
None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_credential_type = kwargs.get('service_principal_credential_type', None) - self.service_principal_credential = kwargs.get('service_principal_credential', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class DynamicsCrmSink(CopySink): - """A copy activity Dynamics CRM sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param write_behavior: Required. The write behavior for the operation. Possible values include: - "Upsert". - :type write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior - :param ignore_null_values: The flag indicating whether to ignore null values from input dataset - (except key fields) during write operation. Default is false. Type: boolean (or Expression with - resultType boolean). - :type ignore_null_values: object - :param alternate_key_name: The logical name of the alternate key which will be used when - upserting records. Type: string (or Expression with resultType string). 
- :type alternate_key_name: object - """ - - _validation = { - 'type': {'required': True}, - 'write_behavior': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DynamicsCrmSink, self).__init__(**kwargs) - self.type = 'DynamicsCrmSink' # type: str - self.write_behavior = kwargs['write_behavior'] - self.ignore_null_values = kwargs.get('ignore_null_values', None) - self.alternate_key_name = kwargs.get('alternate_key_name', None) - - -class DynamicsCrmSource(CopySource): - """A copy activity Dynamics CRM source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM - (online & on-premises). Type: string (or Expression with resultType string). - :type query: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DynamicsCrmSource, self).__init__(**kwargs) - self.type = 'DynamicsCrmSource' # type: str - self.query = kwargs.get('query', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class DynamicsEntityDataset(Dataset): - """The Dynamics entity dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param entity_name: The logical name of the entity. Type: string (or Expression with resultType - string). - :type entity_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DynamicsEntityDataset, self).__init__(**kwargs) - self.type = 'DynamicsEntity' # type: str - self.entity_name = kwargs.get('entity_name', None) - - -class DynamicsLinkedService(LinkedService): - """Dynamics linked service. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for - Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or - Expression with resultType string). - :type deployment_type: object - :param host_name: The host name of the on-premises Dynamics server. The property is required - for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :type host_name: object - :param port: The port of on-premises Dynamics server. The property is required for on-prem and - not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), - minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Dynamics server. The property is required for on- - line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: object - :param organization_name: The organization name of the Dynamics instance. The property is - required for on-prem and required for online when there are more than one Dynamics instances - associated with the user. Type: string (or Expression with resultType string). - :type organization_name: object - :param authentication_type: Required. The authentication type to connect to Dynamics server. - 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' - for Server-To-Server authentication in online scenario. Type: string (or Expression with - resultType string). - :type authentication_type: object - :param username: User name to access the Dynamics instance. Type: string (or Expression with - resultType string). - :type username: object - :param password: Password to access the Dynamics instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_credential_type: The service principal credential type to use in - Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). - :type service_principal_credential_type: str - :param service_principal_credential: The credential of the service principal object in Azure - Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', - servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If - servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only - be AzureKeyVaultSecretReference. 
- :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DynamicsLinkedService, self).__init__(**kwargs) - self.type = 'Dynamics' # type: str - self.deployment_type = kwargs['deployment_type'] - self.host_name = kwargs.get('host_name', None) - self.port = kwargs.get('port', None) - self.service_uri = kwargs.get('service_uri', None) - self.organization_name = kwargs.get('organization_name', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_credential_type = kwargs.get('service_principal_credential_type', None) - self.service_principal_credential = kwargs.get('service_principal_credential', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class DynamicsSink(CopySink): - """A copy activity Dynamics sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. 
Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param write_behavior: Required. The write behavior for the operation. Possible values include: - "Upsert". - :type write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior - :param ignore_null_values: The flag indicating whether ignore null values from input dataset - (except key fields) during write operation. Default is false. Type: boolean (or Expression with - resultType boolean). - :type ignore_null_values: object - :param alternate_key_name: The logical name of the alternate key which will be used when - upserting records. Type: string (or Expression with resultType string). - :type alternate_key_name: object - """ - - _validation = { - 'type': {'required': True}, - 'write_behavior': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DynamicsSink, self).__init__(**kwargs) - self.type = 'DynamicsSink' # type: str - self.write_behavior = kwargs['write_behavior'] - self.ignore_null_values = kwargs.get('ignore_null_values', None) - self.alternate_key_name = kwargs.get('alternate_key_name', None) - - -class DynamicsSource(CopySource): - """A copy activity Dynamics source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics - (online & on-premises). Type: string (or Expression with resultType string). - :type query: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DynamicsSource, self).__init__(**kwargs) - self.type = 'DynamicsSource' # type: str - self.query = kwargs.get('query', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class EloquaLinkedService(LinkedService): - """Eloqua server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param endpoint: Required. The endpoint of the Eloqua server. (i.e. eloqua.example.com). - :type endpoint: object - :param username: Required. The site name and user name of your Eloqua account in the form: - sitename/username. (i.e. Eloqua/Alice). - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(EloquaLinkedService, self).__init__(**kwargs) - self.type = 'Eloqua' # type: str - self.endpoint = kwargs['endpoint'] - self.username = kwargs['username'] - self.password = kwargs.get('password', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class EloquaObjectDataset(Dataset): - """Eloqua server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(EloquaObjectDataset, self).__init__(**kwargs) - self.type = 'EloquaObject' # type: str - self.table_name = kwargs.get('table_name', None) - - -class EloquaSource(TabularSource): - """A copy activity Eloqua server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(EloquaSource, self).__init__(**kwargs) - self.type = 'EloquaSource' # type: str - self.query = kwargs.get('query', None) - - -class EncryptionConfiguration(msrest.serialization.Model): - """Definition of CMK for the factory. - - All required parameters must be populated in order to send to Azure. - - :param key_name: Required. The name of the key in Azure Key Vault to use as Customer Managed - Key. - :type key_name: str - :param vault_base_url: Required. The url of the Azure Key Vault used for CMK. - :type vault_base_url: str - :param key_version: The version of the key used for CMK. If not provided, latest version will - be used. - :type key_version: str - :param identity: User assigned identity to use to authenticate to customer's key vault. If not - provided Managed Service Identity will be used. - :type identity: ~azure.mgmt.datafactory.models.CmkIdentityDefinition - """ - - _validation = { - 'key_name': {'required': True}, - 'vault_base_url': {'required': True}, - } - - _attribute_map = { - 'key_name': {'key': 'keyName', 'type': 'str'}, - 'vault_base_url': {'key': 'vaultBaseUrl', 'type': 'str'}, - 'key_version': {'key': 'keyVersion', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'CmkIdentityDefinition'}, - } - - def __init__( - self, - **kwargs - ): - super(EncryptionConfiguration, self).__init__(**kwargs) - self.key_name = kwargs['key_name'] - self.vault_base_url = kwargs['vault_base_url'] - self.key_version = kwargs.get('key_version', None) - self.identity = kwargs.get('identity', None) - - -class EntityReference(msrest.serialization.Model): - """The entity reference. - - :param type: The type of this referenced entity. Possible values include: - "IntegrationRuntimeReference", "LinkedServiceReference". - :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType - :param reference_name: The name of this referenced entity. - :type reference_name: str - """ - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(EntityReference, self).__init__(**kwargs) - self.type = kwargs.get('type', None) - self.reference_name = kwargs.get('reference_name', None) - - -class EnvironmentVariableSetup(CustomSetupBase): - """The custom setup of setting environment variable. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. The type of custom setup.Constant filled by server. - :type type: str - :param variable_name: Required. The name of the environment variable. - :type variable_name: str - :param variable_value: Required. The value of the environment variable. 
- :type variable_value: str - """ - - _validation = { - 'type': {'required': True}, - 'variable_name': {'required': True}, - 'variable_value': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'variable_value': {'key': 'typeProperties.variableValue', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(EnvironmentVariableSetup, self).__init__(**kwargs) - self.type = 'EnvironmentVariableSetup' # type: str - self.variable_name = kwargs['variable_name'] - self.variable_value = kwargs['variable_value'] - - -class ExcelDataset(Dataset): - """Excel dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the excel storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param sheet_name: The sheet name of excel file. Type: string (or Expression with resultType - string). - :type sheet_name: object - :param sheet_index: The sheet index of excel file and default value is 0. Type: integer (or - Expression with resultType integer). - :type sheet_index: object - :param range: The partial data of one sheet. Type: string (or Expression with resultType - string). - :type range: object - :param first_row_as_header: When used as input, treat the first row of data as headers. When - used as output,write the headers into the output as the first row of data. The default value is - false. Type: boolean (or Expression with resultType boolean). - :type first_row_as_header: object - :param compression: The data compression method used for the json dataset. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - :param null_value: The null value string. Type: string (or Expression with resultType string). 
- :type null_value: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'sheet_name': {'key': 'typeProperties.sheetName', 'type': 'object'}, - 'sheet_index': {'key': 'typeProperties.sheetIndex', 'type': 'object'}, - 'range': {'key': 'typeProperties.range', 'type': 'object'}, - 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ExcelDataset, self).__init__(**kwargs) - self.type = 'Excel' # type: str - self.location = kwargs.get('location', None) - self.sheet_name = kwargs.get('sheet_name', None) - self.sheet_index = kwargs.get('sheet_index', None) - self.range = kwargs.get('range', None) - self.first_row_as_header = kwargs.get('first_row_as_header', None) - self.compression = kwargs.get('compression', None) - self.null_value = kwargs.get('null_value', None) - - -class ExcelSource(CopySource): - """A copy activity excel source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param store_settings: Excel store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ExcelSource, self).__init__(**kwargs) - self.type = 'ExcelSource' # type: str - self.store_settings = kwargs.get('store_settings', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class ExecuteDataFlowActivity(ExecutionActivity): - """Execute data flow activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param data_flow: Required. Data flow reference. - :type data_flow: ~azure.mgmt.datafactory.models.DataFlowReference - :param staging: Staging info for execute data flow activity. - :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo - :param integration_runtime: The integration runtime reference. - :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param compute: Compute properties for data flow activity. - :type compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute - :param trace_level: Trace level setting used for data flow monitoring output. Supported values - are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). - :type trace_level: object - :param continue_on_error: Continue on error setting used for data flow execution. Enables - processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). - :type continue_on_error: object - :param run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with - the same save order to be processed concurrently. Type: boolean (or Expression with resultType - boolean). 
- :type run_concurrently: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'data_flow': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'data_flow': {'key': 'typeProperties.dataFlow', 'type': 'DataFlowReference'}, - 'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'}, - 'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'}, - 'compute': {'key': 'typeProperties.compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'}, - 'trace_level': {'key': 'typeProperties.traceLevel', 'type': 'object'}, - 'continue_on_error': {'key': 'typeProperties.continueOnError', 'type': 'object'}, - 'run_concurrently': {'key': 'typeProperties.runConcurrently', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ExecuteDataFlowActivity, self).__init__(**kwargs) - self.type = 'ExecuteDataFlow' # type: str - self.data_flow = kwargs['data_flow'] - self.staging = kwargs.get('staging', None) - self.integration_runtime = kwargs.get('integration_runtime', None) - self.compute = kwargs.get('compute', None) - self.trace_level = kwargs.get('trace_level', None) - self.continue_on_error = kwargs.get('continue_on_error', None) - self.run_concurrently = kwargs.get('run_concurrently', None) - - -class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): - """Compute properties for data flow activity. - - :param compute_type: Compute type of the cluster which will execute data flow job. Possible - values include: 'General', 'MemoryOptimized', 'ComputeOptimized'. Type: string (or Expression - with resultType string). - :type compute_type: object - :param core_count: Core count of the cluster which will execute data flow job. Supported values - are: 8, 16, 32, 48, 80, 144 and 272. Type: integer (or Expression with resultType integer). - :type core_count: object - """ - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'object'}, - 'core_count': {'key': 'coreCount', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ExecuteDataFlowActivityTypePropertiesCompute, self).__init__(**kwargs) - self.compute_type = kwargs.get('compute_type', None) - self.core_count = kwargs.get('core_count', None) - - -class ExecutePipelineActivity(Activity): - """Execute pipeline activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. 
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param pipeline: Required. Pipeline reference. - :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference - :param parameters: Pipeline parameters. - :type parameters: dict[str, object] - :param wait_on_completion: Defines whether activity execution will wait for the dependent - pipeline execution to finish. Default is false. - :type wait_on_completion: bool - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'pipeline': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, - 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - super(ExecutePipelineActivity, self).__init__(**kwargs) - self.type = 'ExecutePipeline' # type: str - self.pipeline = kwargs['pipeline'] - self.parameters = kwargs.get('parameters', None) - self.wait_on_completion = kwargs.get('wait_on_completion', None) - - -class ExecuteSsisPackageActivity(ExecutionActivity): - """Execute SSIS package activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param package_location: Required. SSIS package location. - :type package_location: ~azure.mgmt.datafactory.models.SsisPackageLocation - :param runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or - "x64". Type: string (or Expression with resultType string). - :type runtime: object - :param logging_level: The logging level of SSIS package execution. Type: string (or Expression - with resultType string). - :type logging_level: object - :param environment_path: The environment path to execute the SSIS package. Type: string (or - Expression with resultType string). - :type environment_path: object - :param execution_credential: The package execution credential. - :type execution_credential: ~azure.mgmt.datafactory.models.SsisExecutionCredential - :param connect_via: Required. The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param project_parameters: The project level parameters to execute the SSIS package. 
- :type project_parameters: dict[str, ~azure.mgmt.datafactory.models.SsisExecutionParameter] - :param package_parameters: The package level parameters to execute the SSIS package. - :type package_parameters: dict[str, ~azure.mgmt.datafactory.models.SsisExecutionParameter] - :param project_connection_managers: The project level connection managers to execute the SSIS - package. - :type project_connection_managers: dict[str, object] - :param package_connection_managers: The package level connection managers to execute the SSIS - package. - :type package_connection_managers: dict[str, object] - :param property_overrides: The property overrides to execute the SSIS package. - :type property_overrides: dict[str, ~azure.mgmt.datafactory.models.SsisPropertyOverride] - :param log_location: SSIS package execution log location. - :type log_location: ~azure.mgmt.datafactory.models.SsisLogLocation - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'package_location': {'required': True}, - 'connect_via': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SsisPackageLocation'}, - 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'}, - 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'}, - 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'}, - 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SsisExecutionCredential'}, - 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, - 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SsisExecutionParameter}'}, - 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SsisExecutionParameter}'}, - 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{object}'}, - 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{object}'}, - 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SsisPropertyOverride}'}, - 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SsisLogLocation'}, - } - - def __init__( - self, - **kwargs - ): - super(ExecuteSsisPackageActivity, self).__init__(**kwargs) - self.type = 'ExecuteSSISPackage' # type: str - self.package_location = kwargs['package_location'] - self.runtime = kwargs.get('runtime', None) - self.logging_level = kwargs.get('logging_level', None) - self.environment_path = kwargs.get('environment_path', None) - self.execution_credential = kwargs.get('execution_credential', None) - self.connect_via = kwargs['connect_via'] - self.project_parameters = kwargs.get('project_parameters', None) - self.package_parameters = kwargs.get('package_parameters', None) - self.project_connection_managers = kwargs.get('project_connection_managers', None) - self.package_connection_managers = kwargs.get('package_connection_managers', None) - self.property_overrides = 
kwargs.get('property_overrides', None) - self.log_location = kwargs.get('log_location', None) - - -class ExposureControlBatchRequest(msrest.serialization.Model): - """A list of exposure control features. - - All required parameters must be populated in order to send to Azure. - - :param exposure_control_requests: Required. List of exposure control features. - :type exposure_control_requests: list[~azure.mgmt.datafactory.models.ExposureControlRequest] - """ - - _validation = { - 'exposure_control_requests': {'required': True}, - } - - _attribute_map = { - 'exposure_control_requests': {'key': 'exposureControlRequests', 'type': '[ExposureControlRequest]'}, - } - - def __init__( - self, - **kwargs - ): - super(ExposureControlBatchRequest, self).__init__(**kwargs) - self.exposure_control_requests = kwargs['exposure_control_requests'] - - -class ExposureControlBatchResponse(msrest.serialization.Model): - """A list of exposure control feature values. - - All required parameters must be populated in order to send to Azure. - - :param exposure_control_responses: Required. List of exposure control feature values. - :type exposure_control_responses: list[~azure.mgmt.datafactory.models.ExposureControlResponse] - """ - - _validation = { - 'exposure_control_responses': {'required': True}, - } - - _attribute_map = { - 'exposure_control_responses': {'key': 'exposureControlResponses', 'type': '[ExposureControlResponse]'}, - } - - def __init__( - self, - **kwargs - ): - super(ExposureControlBatchResponse, self).__init__(**kwargs) - self.exposure_control_responses = kwargs['exposure_control_responses'] - - -class ExposureControlRequest(msrest.serialization.Model): - """The exposure control request. - - :param feature_name: The feature name. - :type feature_name: str - :param feature_type: The feature type. - :type feature_type: str - """ - - _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'feature_type': {'key': 'featureType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ExposureControlRequest, self).__init__(**kwargs) - self.feature_name = kwargs.get('feature_name', None) - self.feature_type = kwargs.get('feature_type', None) - - -class ExposureControlResponse(msrest.serialization.Model): - """The exposure control response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar feature_name: The feature name. - :vartype feature_name: str - :ivar value: The feature value. - :vartype value: str - """ - - _validation = { - 'feature_name': {'readonly': True}, - 'value': {'readonly': True}, - } - - _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ExposureControlResponse, self).__init__(**kwargs) - self.feature_name = None - self.value = None - - -class Expression(msrest.serialization.Model): - """Azure Data Factory expression definition. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Expression type. Default value: "Expression". - :vartype type: str - :param value: Required. Expression value. 
- :type value: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - type = "Expression" - - def __init__( - self, - **kwargs - ): - super(Expression, self).__init__(**kwargs) - self.value = kwargs['value'] - - -class Resource(msrest.serialization.Model): - """Azure Data Factory top-level resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :param location: The resource location. - :type location: str - :param tags: A set of tags. The resource tags. - :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. - :vartype e_tag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.location = kwargs.get('location', None) - self.tags = kwargs.get('tags', None) - self.e_tag = None - - -class Factory(Resource): - """Factory resource type. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :param location: The resource location. - :type location: str - :param tags: A set of tags. The resource tags. - :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. - :vartype e_tag: str - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param identity: Managed service identity of the factory. - :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity - :ivar provisioning_state: Factory provisioning state, example Succeeded. - :vartype provisioning_state: str - :ivar create_time: Time the factory was created in ISO8601 format. - :vartype create_time: ~datetime.datetime - :ivar version: Version of the factory. - :vartype version: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration - :param global_parameters: List of parameters for factory. - :type global_parameters: dict[str, ~azure.mgmt.datafactory.models.GlobalParameterSpecification] - :param encryption: Properties to enable Customer Managed Key for the factory. - :type encryption: ~azure.mgmt.datafactory.models.EncryptionConfiguration - :param public_network_access: Whether or not public network access is allowed for the data - factory. Possible values include: "Enabled", "Disabled". 
- :type public_network_access: str or ~azure.mgmt.datafactory.models.PublicNetworkAccess - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'create_time': {'readonly': True}, - 'version': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, - 'additional_properties': {'key': '', 'type': '{object}'}, - 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, - 'version': {'key': 'properties.version', 'type': 'str'}, - 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, - 'global_parameters': {'key': 'properties.globalParameters', 'type': '{GlobalParameterSpecification}'}, - 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionConfiguration'}, - 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Factory, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.identity = kwargs.get('identity', None) - self.provisioning_state = None - self.create_time = None - self.version = None - self.repo_configuration = kwargs.get('repo_configuration', None) - self.global_parameters = kwargs.get('global_parameters', None) - self.encryption = kwargs.get('encryption', None) - self.public_network_access = kwargs.get('public_network_access', None) - - -class FactoryRepoConfiguration(msrest.serialization.Model): - """Factory's git repo information. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: FactoryGitHubConfiguration, FactoryVstsConfiguration. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Type of repo configuration.Constant filled by server. - :type type: str - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. 
- :type last_commit_id: str - """ - - _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'FactoryGitHubConfiguration': 'FactoryGitHubConfiguration', 'FactoryVSTSConfiguration': 'FactoryVstsConfiguration'} - } - - def __init__( - self, - **kwargs - ): - super(FactoryRepoConfiguration, self).__init__(**kwargs) - self.type = None # type: Optional[str] - self.account_name = kwargs['account_name'] - self.repository_name = kwargs['repository_name'] - self.collaboration_branch = kwargs['collaboration_branch'] - self.root_folder = kwargs['root_folder'] - self.last_commit_id = kwargs.get('last_commit_id', None) - - -class FactoryGitHubConfiguration(FactoryRepoConfiguration): - """Factory's GitHub repo information. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Type of repo configuration.Constant filled by server. - :type type: str - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param host_name: GitHub Enterprise host name. For example: https://github.mydomain.com. - :type host_name: str - """ - - _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'host_name': {'key': 'hostName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(FactoryGitHubConfiguration, self).__init__(**kwargs) - self.type = 'FactoryGitHubConfiguration' # type: str - self.host_name = kwargs.get('host_name', None) - - -class FactoryIdentity(msrest.serialization.Model): - """Identity properties of the factory resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. The identity type. Possible values include: "SystemAssigned", - "UserAssigned", "SystemAssigned,UserAssigned". - :type type: str or ~azure.mgmt.datafactory.models.FactoryIdentityType - :ivar principal_id: The principal id of the identity. - :vartype principal_id: str - :ivar tenant_id: The client tenant id of the identity. - :vartype tenant_id: str - :param user_assigned_identities: List of user assigned identities for the factory. 
- :type user_assigned_identities: dict[str, object] - """ - - _validation = { - 'type': {'required': True}, - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{object}'}, - } - - def __init__( - self, - **kwargs - ): - super(FactoryIdentity, self).__init__(**kwargs) - self.type = kwargs['type'] - self.principal_id = None - self.tenant_id = None - self.user_assigned_identities = kwargs.get('user_assigned_identities', None) - - -class FactoryListResponse(msrest.serialization.Model): - """A list of factory resources. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of factories. - :type value: list[~azure.mgmt.datafactory.models.Factory] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[Factory]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(FactoryListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) - - -class FactoryRepoUpdate(msrest.serialization.Model): - """Factory's git repo information. - - :param factory_resource_id: The factory resource id. - :type factory_resource_id: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration - """ - - _attribute_map = { - 'factory_resource_id': {'key': 'factoryResourceId', 'type': 'str'}, - 'repo_configuration': {'key': 'repoConfiguration', 'type': 'FactoryRepoConfiguration'}, - } - - def __init__( - self, - **kwargs - ): - super(FactoryRepoUpdate, self).__init__(**kwargs) - self.factory_resource_id = kwargs.get('factory_resource_id', None) - self.repo_configuration = kwargs.get('repo_configuration', None) - - -class FactoryUpdateParameters(msrest.serialization.Model): - """Parameters for updating a factory resource. - - :param tags: A set of tags. The resource tags. - :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity - """ - - _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, - } - - def __init__( - self, - **kwargs - ): - super(FactoryUpdateParameters, self).__init__(**kwargs) - self.tags = kwargs.get('tags', None) - self.identity = kwargs.get('identity', None) - - -class FactoryVstsConfiguration(FactoryRepoConfiguration): - """Factory's VSTS repo information. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Type of repo configuration.Constant filled by server. - :type type: str - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param project_name: Required. 
VSTS project name. - :type project_name: str - :param tenant_id: VSTS tenant id. - :type tenant_id: str - """ - - _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, - 'project_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'project_name': {'key': 'projectName', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(FactoryVstsConfiguration, self).__init__(**kwargs) - self.type = 'FactoryVSTSConfiguration' # type: str - self.project_name = kwargs['project_name'] - self.tenant_id = kwargs.get('tenant_id', None) - - -class FileServerLinkedService(LinkedService): - """File system linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. Host name of the server. Type: string (or Expression with resultType - string). - :type host: object - :param user_id: User ID to logon the server. Type: string (or Expression with resultType - string). - :type user_id: object - :param password: Password to logon the server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(FileServerLinkedService, self).__init__(**kwargs) - self.type = 'FileServer' # type: str - self.host = kwargs['host'] - self.user_id = kwargs.get('user_id', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class FileServerLocation(DatasetLocation): - """The location of file server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType - string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(FileServerLocation, self).__init__(**kwargs) - self.type = 'FileServerLocation' # type: str - - -class FileServerReadSettings(StoreReadSettings): - """File server read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param recursive: If true, files under the folder path will be read recursively. Default is - true. Type: boolean (or Expression with resultType boolean). - :type recursive: object - :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string (or Expression with - resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: FileServer wildcardFileName. 
Type: string (or Expression with - resultType string). - :type wildcard_file_name: object - :param file_list_path: Point to a text file that lists each file (relative path to the path - configured in the dataset) that you want to copy. Type: string (or Expression with resultType - string). - :type file_list_path: object - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: - string (or Expression with resultType string). - :type partition_root_path: object - :param delete_files_after_completion: Indicates whether the source files need to be deleted - after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object - :param modified_datetime_start: The start of file's modified datetime. Type: string (or - Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression - with resultType string). - :type modified_datetime_end: object - :param file_filter: Specify a filter to be used to select a subset of files in the folderPath - rather than all files. Type: string (or Expression with resultType string). - :type file_filter: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - 'file_filter': {'key': 'fileFilter', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(FileServerReadSettings, self).__init__(**kwargs) - self.type = 'FileServerReadSettings' # type: str - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.file_list_path = kwargs.get('file_list_path', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.partition_root_path = kwargs.get('partition_root_path', None) - self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - self.file_filter = kwargs.get('file_filter', None) - - -class FileServerWriteSettings(StoreWriteSettings): - """File server write settings. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(FileServerWriteSettings, self).__init__(**kwargs) - self.type = 'FileServerWriteSettings' # type: str - - -class FileShareDataset(Dataset): - """An on-premises file system dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param folder_path: The path of the on-premises file system. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: The name of the on-premises file system. Type: string (or Expression with - resultType string). - :type file_name: object - :param modified_datetime_start: The start of file's modified datetime. Type: string (or - Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression - with resultType string). - :type modified_datetime_end: object - :param format: The format of the files. 
- :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param file_filter: Specify a filter to be used to select a subset of files in the folderPath - rather than all files. Type: string (or Expression with resultType string). - :type file_filter: object - :param compression: The data compression method used for the file system. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__( - self, - **kwargs - ): - super(FileShareDataset, self).__init__(**kwargs) - self.type = 'FileShare' # type: str - self.folder_path = kwargs.get('folder_path', None) - self.file_name = kwargs.get('file_name', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - self.format = kwargs.get('format', None) - self.file_filter = kwargs.get('file_filter', None) - self.compression = kwargs.get('compression', None) - - -class FileSystemSink(CopySink): - """A copy activity file system sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(FileSystemSink, self).__init__(**kwargs) - self.type = 'FileSystemSink' # type: str - self.copy_behavior = kwargs.get('copy_behavior', None) - - -class FileSystemSource(CopySource): - """A copy activity file system source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param recursive: If true, files under the folder path will be read recursively. Default is - true. Type: boolean (or Expression with resultType boolean). - :type recursive: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(FileSystemSource, self).__init__(**kwargs) - self.type = 'FileSystemSource' # type: str - self.recursive = kwargs.get('recursive', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class FilterActivity(Activity): - """Filter and return results from input array based on the conditions. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param items: Required. Input array on which filter should be applied. - :type items: ~azure.mgmt.datafactory.models.Expression - :param condition: Required. Condition to be used for filtering the input. - :type condition: ~azure.mgmt.datafactory.models.Expression - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'items': {'required': True}, - 'condition': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, - 'condition': {'key': 'typeProperties.condition', 'type': 'Expression'}, - } - - def __init__( - self, - **kwargs - ): - super(FilterActivity, self).__init__(**kwargs) - self.type = 'Filter' # type: str - self.items = kwargs['items'] - self.condition = kwargs['condition'] - - -class ForEachActivity(Activity): - """This activity is used for iterating over a collection and execute given activities. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. 
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param is_sequential: Should the loop be executed in sequence or in parallel (max 50). - :type is_sequential: bool - :param batch_count: Batch count to be used for controlling the number of parallel execution - (when isSequential is set to false). - :type batch_count: int - :param items: Required. Collection to iterate. - :type items: ~azure.mgmt.datafactory.models.Expression - :param activities: Required. List of activities to execute . - :type activities: list[~azure.mgmt.datafactory.models.Activity] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'batch_count': {'maximum': 50}, - 'items': {'required': True}, - 'activities': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'}, - 'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'}, - 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, - 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, - } - - def __init__( - self, - **kwargs - ): - super(ForEachActivity, self).__init__(**kwargs) - self.type = 'ForEach' # type: str - self.is_sequential = kwargs.get('is_sequential', None) - self.batch_count = kwargs.get('batch_count', None) - self.items = kwargs['items'] - self.activities = kwargs['activities'] - - -class FtpReadSettings(StoreReadSettings): - """Ftp read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param recursive: If true, files under the folder path will be read recursively. Default is - true. Type: boolean (or Expression with resultType boolean). - :type recursive: object - :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or Expression with - resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType - string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: - string (or Expression with resultType string). 
- :type partition_root_path: object - :param delete_files_after_completion: Indicates whether the source files need to be deleted - after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object - :param file_list_path: Point to a text file that lists each file (relative path to the path - configured in the dataset) that you want to copy. Type: string (or Expression with resultType - string). - :type file_list_path: object - :param use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. - :type use_binary_transfer: bool - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - super(FtpReadSettings, self).__init__(**kwargs) - self.type = 'FtpReadSettings' # type: str - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.partition_root_path = kwargs.get('partition_root_path', None) - self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) - self.file_list_path = kwargs.get('file_list_path', None) - self.use_binary_transfer = kwargs.get('use_binary_transfer', None) - - -class FtpServerLinkedService(LinkedService): - """A FTP server Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. Host name of the FTP server. Type: string (or Expression with resultType - string). - :type host: object - :param port: The TCP port number that the FTP server uses to listen for client connections. - Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. 
- :type port: object - :param authentication_type: The authentication type to be used to connect to the FTP server. - Possible values include: "Basic", "Anonymous". - :type authentication_type: str or ~azure.mgmt.datafactory.models.FtpAuthenticationType - :param user_name: Username to logon the FTP server. Type: string (or Expression with resultType - string). - :type user_name: object - :param password: Password to logon the FTP server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param enable_ssl: If true, connect to the FTP server over SSL/TLS channel. Default value is - true. Type: boolean (or Expression with resultType boolean). - :type enable_ssl: object - :param enable_server_certificate_validation: If true, validate the FTP server SSL certificate - when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with - resultType boolean). - :type enable_server_certificate_validation: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(FtpServerLinkedService, self).__init__(**kwargs) - self.type = 'FtpServer' # type: str - self.host = kwargs['host'] - self.port = kwargs.get('port', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) - - -class FtpServerLocation(DatasetLocation): - """The location of ftp server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. 
Type: string (or Expression with resultType - string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(FtpServerLocation, self).__init__(**kwargs) - self.type = 'FtpServerLocation' # type: str - - -class GetDataFactoryOperationStatusResponse(msrest.serialization.Model): - """Response body structure for get data factory operation status. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param status: Status of the operation. - :type status: str - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'status': {'key': 'status', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(GetDataFactoryOperationStatusResponse, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.status = kwargs.get('status', None) - - -class GetMetadataActivity(ExecutionActivity): - """Activity to get metadata of dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param dataset: Required. GetMetadata activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param field_list: Fields of metadata to get from dataset. - :type field_list: list[object] - :param store_settings: GetMetadata activity store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param format_settings: GetMetadata activity format settings. 
- :type format_settings: ~azure.mgmt.datafactory.models.FormatReadSettings - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'dataset': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, - 'store_settings': {'key': 'typeProperties.storeSettings', 'type': 'StoreReadSettings'}, - 'format_settings': {'key': 'typeProperties.formatSettings', 'type': 'FormatReadSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(GetMetadataActivity, self).__init__(**kwargs) - self.type = 'GetMetadata' # type: str - self.dataset = kwargs['dataset'] - self.field_list = kwargs.get('field_list', None) - self.store_settings = kwargs.get('store_settings', None) - self.format_settings = kwargs.get('format_settings', None) - - -class GetSsisObjectMetadataRequest(msrest.serialization.Model): - """The request payload of get SSIS object metadata. - - :param metadata_path: Metadata path. - :type metadata_path: str - """ - - _attribute_map = { - 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) - self.metadata_path = kwargs.get('metadata_path', None) - - -class GitHubAccessTokenRequest(msrest.serialization.Model): - """Get GitHub access token request definition. - - All required parameters must be populated in order to send to Azure. - - :param git_hub_access_code: Required. GitHub access code. - :type git_hub_access_code: str - :param git_hub_client_id: GitHub application client ID. - :type git_hub_client_id: str - :param git_hub_client_secret: GitHub bring your own app client secret information. - :type git_hub_client_secret: ~azure.mgmt.datafactory.models.GitHubClientSecret - :param git_hub_access_token_base_url: Required. GitHub access token base URL. - :type git_hub_access_token_base_url: str - """ - - _validation = { - 'git_hub_access_code': {'required': True}, - 'git_hub_access_token_base_url': {'required': True}, - } - - _attribute_map = { - 'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'}, - 'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'}, - 'git_hub_client_secret': {'key': 'gitHubClientSecret', 'type': 'GitHubClientSecret'}, - 'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(GitHubAccessTokenRequest, self).__init__(**kwargs) - self.git_hub_access_code = kwargs['git_hub_access_code'] - self.git_hub_client_id = kwargs.get('git_hub_client_id', None) - self.git_hub_client_secret = kwargs.get('git_hub_client_secret', None) - self.git_hub_access_token_base_url = kwargs['git_hub_access_token_base_url'] - - -class GitHubAccessTokenResponse(msrest.serialization.Model): - """Get GitHub access token response definition. - - :param git_hub_access_token: GitHub access token. 
- :type git_hub_access_token: str - """ - - _attribute_map = { - 'git_hub_access_token': {'key': 'gitHubAccessToken', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(GitHubAccessTokenResponse, self).__init__(**kwargs) - self.git_hub_access_token = kwargs.get('git_hub_access_token', None) - - -class GitHubClientSecret(msrest.serialization.Model): - """Client secret information for factory's bring your own app repository configuration. - - :param byoa_secret_akv_url: Bring your own app client secret AKV URL. - :type byoa_secret_akv_url: str - :param byoa_secret_name: Bring your own app client secret name in AKV. - :type byoa_secret_name: str - """ - - _attribute_map = { - 'byoa_secret_akv_url': {'key': 'byoaSecretAkvUrl', 'type': 'str'}, - 'byoa_secret_name': {'key': 'byoaSecretName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(GitHubClientSecret, self).__init__(**kwargs) - self.byoa_secret_akv_url = kwargs.get('byoa_secret_akv_url', None) - self.byoa_secret_name = kwargs.get('byoa_secret_name', None) - - -class GlobalParameterSpecification(msrest.serialization.Model): - """Definition of a single parameter for an entity. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Global Parameter type. Possible values include: "Object", "String", - "Int", "Float", "Bool", "Array". - :type type: str or ~azure.mgmt.datafactory.models.GlobalParameterType - :param value: Required. Value of parameter. - :type value: object - """ - - _validation = { - 'type': {'required': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(GlobalParameterSpecification, self).__init__(**kwargs) - self.type = kwargs['type'] - self.value = kwargs['value'] - - -class GoogleAdWordsLinkedService(LinkedService): - """Google AdWords service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param client_customer_id: Required. The Client customer ID of the AdWords account that you - want to fetch report data for. - :type client_customer_id: object - :param developer_token: Required. The developer token associated with the manager account that - you use to grant access to the AdWords API. - :type developer_token: ~azure.mgmt.datafactory.models.SecretBase - :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for - authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values - include: "ServiceAuthentication", "UserAuthentication". 
- :type authentication_type: str or - ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType - :param refresh_token: The refresh token obtained from Google for authorizing access to AdWords - for UserAuthentication. - :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase - :param client_id: The client id of the google application used to acquire the refresh token. - Type: string (or Expression with resultType string). - :type client_id: object - :param client_secret: The client secret of the google application used to acquire the refresh - token. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param email: The service account email ID that is used for ServiceAuthentication and can only - be used on self-hosted IR. - :type email: object - :param key_file_path: The full path to the .p12 key file that is used to authenticate the - service account email address and can only be used on self-hosted IR. - :type key_file_path: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'client_customer_id': {'required': True}, - 'developer_token': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, - 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'email': {'key': 'typeProperties.email', 'type': 'object'}, - 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(GoogleAdWordsLinkedService, self).__init__(**kwargs) - self.type = 'GoogleAdWords' # type: str - self.client_customer_id = kwargs['client_customer_id'] - self.developer_token = kwargs['developer_token'] - self.authentication_type = kwargs['authentication_type'] - 
self.refresh_token = kwargs.get('refresh_token', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.email = kwargs.get('email', None) - self.key_file_path = kwargs.get('key_file_path', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class GoogleAdWordsObjectDataset(Dataset): - """Google AdWords service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(GoogleAdWordsObjectDataset, self).__init__(**kwargs) - self.type = 'GoogleAdWordsObject' # type: str - self.table_name = kwargs.get('table_name', None) - - -class GoogleAdWordsSource(TabularSource): - """A copy activity Google AdWords service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(GoogleAdWordsSource, self).__init__(**kwargs) - self.type = 'GoogleAdWordsSource' # type: str - self.query = kwargs.get('query', None) - - -class GoogleBigQueryLinkedService(LinkedService): - """Google BigQuery service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param project: Required. The default BigQuery project to query against. - :type project: object - :param additional_projects: A comma-separated list of public BigQuery projects to access. - :type additional_projects: object - :param request_google_drive_scope: Whether to request access to Google Drive. Allowing Google - Drive access enables support for federated tables that combine BigQuery data with data from - Google Drive. The default value is false. - :type request_google_drive_scope: object - :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for - authentication. ServiceAuthentication can only be used on self-hosted IR. 
Possible values - include: "ServiceAuthentication", "UserAuthentication". - :type authentication_type: str or - ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType - :param refresh_token: The refresh token obtained from Google for authorizing access to BigQuery - for UserAuthentication. - :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase - :param client_id: The client id of the google application used to acquire the refresh token. - Type: string (or Expression with resultType string). - :type client_id: object - :param client_secret: The client secret of the google application used to acquire the refresh - token. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param email: The service account email ID that is used for ServiceAuthentication and can only - be used on self-hosted IR. - :type email: object - :param key_file_path: The full path to the .p12 key file that is used to authenticate the - service account email address and can only be used on self-hosted IR. - :type key_file_path: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'project': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'project': {'key': 'typeProperties.project', 'type': 'object'}, - 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'}, - 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'email': {'key': 'typeProperties.email', 'type': 'object'}, - 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(GoogleBigQueryLinkedService, self).__init__(**kwargs) - self.type = 'GoogleBigQuery' # type: str - self.project = kwargs['project'] - self.additional_projects = kwargs.get('additional_projects', None) - self.request_google_drive_scope = kwargs.get('request_google_drive_scope', None) - self.authentication_type = kwargs['authentication_type'] - self.refresh_token = kwargs.get('refresh_token', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.email = kwargs.get('email', None) - self.key_file_path = kwargs.get('key_file_path', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class GoogleBigQueryObjectDataset(Dataset): - """Google BigQuery service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using database + table - properties instead. - :type table_name: object - :param table: The table name of the Google BigQuery. Type: string (or Expression with - resultType string). - :type table: object - :param dataset: The database name of the Google BigQuery. Type: string (or Expression with - resultType string). - :type dataset: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(GoogleBigQueryObjectDataset, self).__init__(**kwargs) - self.type = 'GoogleBigQueryObject' # type: str - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.dataset = kwargs.get('dataset', None) - - -class GoogleBigQuerySource(TabularSource): - """A copy activity Google BigQuery service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(GoogleBigQuerySource, self).__init__(**kwargs) - self.type = 'GoogleBigQuerySource' # type: str - self.query = kwargs.get('query', None) - - -class GoogleCloudStorageLinkedService(LinkedService): - """Linked service for Google Cloud Storage. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param access_key_id: The access key identifier of the Google Cloud Storage Identity and Access - Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: object - :param secret_access_key: The secret access key of the Google Cloud Storage Identity and Access - Management (IAM) user. - :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_url: This value specifies the endpoint to access with the Google Cloud Storage - Connector. This is an optional property; change it only if you want to try a different service - endpoint or want to switch between https and http. Type: string (or Expression with resultType - string). - :type service_url: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, - 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(GoogleCloudStorageLinkedService, self).__init__(**kwargs) - self.type = 'GoogleCloudStorage' # type: str - self.access_key_id = kwargs.get('access_key_id', None) - self.secret_access_key = kwargs.get('secret_access_key', None) - self.service_url = kwargs.get('service_url', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class GoogleCloudStorageLocation(DatasetLocation): - """The location of Google Cloud Storage dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType - string). - :type file_name: object - :param bucket_name: Specify the bucketName of Google Cloud Storage. Type: string (or Expression - with resultType string). - :type bucket_name: object - :param version: Specify the version of Google Cloud Storage. Type: string (or Expression with - resultType string). - :type version: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'bucket_name': {'key': 'bucketName', 'type': 'object'}, - 'version': {'key': 'version', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(GoogleCloudStorageLocation, self).__init__(**kwargs) - self.type = 'GoogleCloudStorageLocation' # type: str - self.bucket_name = kwargs.get('bucket_name', None) - self.version = kwargs.get('version', None) - - -class GoogleCloudStorageReadSettings(StoreReadSettings): - """Google Cloud Storage read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param recursive: If true, files under the folder path will be read recursively. Default is - true. Type: boolean (or Expression with resultType boolean). - :type recursive: object - :param wildcard_folder_path: Google Cloud Storage wildcardFolderPath. Type: string (or - Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Google Cloud Storage wildcardFileName. Type: string (or Expression - with resultType string). - :type wildcard_file_name: object - :param prefix: The prefix filter for the Google Cloud Storage object name. Type: string (or - Expression with resultType string). - :type prefix: object - :param file_list_path: Point to a text file that lists each file (relative path to the path - configured in the dataset) that you want to copy. Type: string (or Expression with resultType - string). - :type file_list_path: object - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: - string (or Expression with resultType string). - :type partition_root_path: object - :param delete_files_after_completion: Indicates whether the source files need to be deleted - after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object - :param modified_datetime_start: The start of file's modified datetime. Type: string (or - Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression - with resultType string). 
- :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'prefix': {'key': 'prefix', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(GoogleCloudStorageReadSettings, self).__init__(**kwargs) - self.type = 'GoogleCloudStorageReadSettings' # type: str - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.prefix = kwargs.get('prefix', None) - self.file_list_path = kwargs.get('file_list_path', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.partition_root_path = kwargs.get('partition_root_path', None) - self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - - -class GreenplumLinkedService(LinkedService): - """Greenplum Database linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(GreenplumLinkedService, self).__init__(**kwargs) - self.type = 'Greenplum' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class GreenplumSource(TabularSource): - """A copy activity Greenplum Database source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(GreenplumSource, self).__init__(**kwargs) - self.type = 'GreenplumSource' # type: str - self.query = kwargs.get('query', None) - - -class GreenplumTableDataset(Dataset): - """Greenplum Database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of Greenplum. Type: string (or Expression with resultType string). - :type table: object - :param schema_type_properties_schema: The schema name of Greenplum. Type: string (or Expression - with resultType string). 
- :type schema_type_properties_schema: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(GreenplumTableDataset, self).__init__(**kwargs) - self.type = 'GreenplumTable' # type: str - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - - -class HBaseLinkedService(LinkedService): - """HBase server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The IP address or host name of the HBase server. (i.e. 192.168.222.160). - :type host: object - :param port: The TCP port that the HBase instance uses to listen for client connections. The - default value is 9090. - :type port: object - :param http_path: The partial URL corresponding to the HBase server. (i.e. - /gateway/sandbox/hbase/version). - :type http_path: object - :param authentication_type: Required. The authentication mechanism to use to connect to the - HBase server. Possible values include: "Anonymous", "Basic". - :type authentication_type: str or ~azure.mgmt.datafactory.models.HBaseAuthenticationType - :param username: The user name used to connect to the HBase instance. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. 
- :type trusted_cert_path: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(HBaseLinkedService, self).__init__(**kwargs) - self.type = 'HBase' # type: str - self.host = kwargs['host'] - self.port = kwargs.get('port', None) - self.http_path = kwargs.get('http_path', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class HBaseObjectDataset(Dataset): - """HBase server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. 
- :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(HBaseObjectDataset, self).__init__(**kwargs) - self.type = 'HBaseObject' # type: str - self.table_name = kwargs.get('table_name', None) - - -class HBaseSource(TabularSource): - """A copy activity HBase server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(HBaseSource, self).__init__(**kwargs) - self.type = 'HBaseSource' # type: str - self.query = kwargs.get('query', None) - - -class HdfsLinkedService(LinkedService): - """Hadoop Distributed File System (HDFS) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. The URL of the HDFS service endpoint, e.g. - http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). - :type url: object - :param authentication_type: Type of authentication used to connect to the HDFS. Possible values - are: Anonymous and Windows. Type: string (or Expression with resultType string). - :type authentication_type: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param user_name: User name for Windows authentication. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password for Windows authentication. 
- :type password: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - } - - def __init__( - self, - **kwargs - ): - super(HdfsLinkedService, self).__init__(**kwargs) - self.type = 'Hdfs' # type: str - self.url = kwargs['url'] - self.authentication_type = kwargs.get('authentication_type', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - - -class HdfsLocation(DatasetLocation): - """The location of HDFS. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType - string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(HdfsLocation, self).__init__(**kwargs) - self.type = 'HdfsLocation' # type: str - - -class HdfsReadSettings(StoreReadSettings): - """HDFS read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param recursive: If true, files under the folder path will be read recursively. Default is - true. Type: boolean (or Expression with resultType boolean). - :type recursive: object - :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or Expression with - resultType string). 
- :type wildcard_folder_path: object - :param wildcard_file_name: HDFS wildcardFileName. Type: string (or Expression with resultType - string). - :type wildcard_file_name: object - :param file_list_path: Point to a text file that lists each file (relative path to the path - configured in the dataset) that you want to copy. Type: string (or Expression with resultType - string). - :type file_list_path: object - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: - string (or Expression with resultType string). - :type partition_root_path: object - :param modified_datetime_start: The start of file's modified datetime. Type: string (or - Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression - with resultType string). - :type modified_datetime_end: object - :param distcp_settings: Specifies Distcp-related settings. - :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings - :param delete_files_after_completion: Indicates whether the source files need to be deleted - after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(HdfsReadSettings, self).__init__(**kwargs) - self.type = 'HdfsReadSettings' # type: str - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.file_list_path = kwargs.get('file_list_path', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.partition_root_path = kwargs.get('partition_root_path', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - self.distcp_settings = kwargs.get('distcp_settings', None) - self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) - - -class HdfsSource(CopySource): - """A copy activity HDFS source. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param recursive: If true, files under the folder path will be read recursively. Default is - true. Type: boolean (or Expression with resultType boolean). - :type recursive: object - :param distcp_settings: Specifies Distcp-related settings. - :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(HdfsSource, self).__init__(**kwargs) - self.type = 'HdfsSource' # type: str - self.recursive = kwargs.get('recursive', None) - self.distcp_settings = kwargs.get('distcp_settings', None) - - -class HdInsightHiveActivity(ExecutionActivity): - """HDInsight Hive activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. 
Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.mgmt.datafactory.models.HdInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with resultType string). - :type script_path: object - :param script_linked_service: Script linked service reference. - :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param defines: Allows user to specify defines for Hive job request. - :type defines: dict[str, object] - :param variables: User specified arguments under hivevar namespace. - :type variables: list[object] - :param query_timeout: Query timeout value (in minutes). Effective when the HDInsight cluster - is with ESP (Enterprise Security Package). - :type query_timeout: int - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - 'variables': {'key': 'typeProperties.variables', 'type': '[object]'}, - 'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(HdInsightHiveActivity, self).__init__(**kwargs) - self.type = 'HDInsightHive' # type: str - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.script_path = kwargs.get('script_path', None) - self.script_linked_service = kwargs.get('script_linked_service', None) - self.defines = kwargs.get('defines', None) - self.variables = kwargs.get('variables', None) - self.query_timeout = kwargs.get('query_timeout', None) - - -class HdInsightLinkedService(LinkedService): - """HDInsight linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. 
- :type annotations: list[object] - :param cluster_uri: Required. HDInsight cluster URI. Type: string (or Expression with - resultType string). - :type cluster_uri: object - :param user_name: HDInsight cluster user name. Type: string (or Expression with resultType - string). - :type user_name: object - :param password: HDInsight cluster password. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param linked_service_name: The Azure Storage linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param hcatalog_linked_service_name: A reference to the Azure SQL linked service that points to - the HCatalog database. - :type hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security - Package). Type: Boolean. - :type is_esp_enabled: object - :param file_system: Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. - Type: string (or Expression with resultType string). - :type file_system: object - """ - - _validation = { - 'type': {'required': True}, - 'cluster_uri': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, - 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(HdInsightLinkedService, self).__init__(**kwargs) - self.type = 'HDInsight' # type: str - self.cluster_uri = kwargs['cluster_uri'] - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.is_esp_enabled = kwargs.get('is_esp_enabled', None) - self.file_system = kwargs.get('file_system', None) - - -class HdInsightMapReduceActivity(ExecutionActivity): - """HDInsight MapReduce activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. 
- :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.mgmt.datafactory.models.HdInsightActivityDebugInfoOption - :param class_name: Required. Class name. Type: string (or Expression with resultType string). - :type class_name: object - :param jar_file_path: Required. Jar path. Type: string (or Expression with resultType string). - :type jar_file_path: object - :param jar_linked_service: Jar linked service reference. - :type jar_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param jar_libs: Jar libs. - :type jar_libs: list[object] - :param defines: Allows user to specify defines for the MapReduce job request. - :type defines: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'class_name': {'required': True}, - 'jar_file_path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'class_name': {'key': 'typeProperties.className', 'type': 'object'}, - 'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'}, - 'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'}, - 'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - } - - def __init__( - self, - **kwargs - ): - super(HdInsightMapReduceActivity, self).__init__(**kwargs) - self.type = 'HDInsightMapReduce' # type: str - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.class_name = kwargs['class_name'] - self.jar_file_path = kwargs['jar_file_path'] - self.jar_linked_service = kwargs.get('jar_linked_service', None) - self.jar_libs = kwargs.get('jar_libs', None) - 
self.defines = kwargs.get('defines', None) - - -class HdInsightOnDemandLinkedService(LinkedService): - """HDInsight ondemand linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: 4. - Type: string (or Expression with resultType string). - :type cluster_size: object - :param time_to_live: Required. The allowed idle time for the on-demand HDInsight cluster. - Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity - run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string - (or Expression with resultType string). - :type time_to_live: object - :param version: Required. Version of the HDInsight cluster.  Type: string (or Expression with - resultType string). - :type version: object - :param linked_service_name: Required. Azure Storage linked service to be used by the on-demand - cluster for storing and processing data. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param host_subscription_id: Required. The customer’s subscription to host the cluster. Type: - string (or Expression with resultType string). - :type host_subscription_id: object - :param service_principal_id: The service principal id for the hostSubscriptionId. Type: string - (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key for the service principal id. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. The Tenant id/name to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param cluster_resource_group: Required. The resource group where the cluster belongs. Type: - string (or Expression with resultType string). - :type cluster_resource_group: object - :param cluster_name_prefix: The prefix of cluster name, postfix will be distinct with - timestamp. Type: string (or Expression with resultType string). - :type cluster_name_prefix: object - :param cluster_user_name: The username to access the cluster. Type: string (or Expression with - resultType string). - :type cluster_user_name: object - :param cluster_password: The password to access the cluster. - :type cluster_password: ~azure.mgmt.datafactory.models.SecretBase - :param cluster_ssh_user_name: The username to SSH remotely connect to cluster’s node (for - Linux). Type: string (or Expression with resultType string). - :type cluster_ssh_user_name: object - :param cluster_ssh_password: The password to SSH remotely connect cluster’s node (for Linux). 
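For the HdInsightMapReduceActivity model that ends above, _validation marks name, class_name and jar_file_path as required. A minimal sketch with placeholder values, assuming the docstring import path:

    from azure.mgmt.datafactory.models import HdInsightMapReduceActivity

    mapreduce_activity = HdInsightMapReduceActivity(
        name="RunWordCount",                      # required
        class_name="org.example.WordCount",       # required
        jar_file_path="jars/wordcount.jar",       # required
        arguments=["wasbs://input/", "wasbs://output/"],
    )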
- :type cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase - :param additional_linked_service_names: Specifies additional storage accounts for the HDInsight - linked service so that the Data Factory service can register them on your behalf. - :type additional_linked_service_names: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param hcatalog_linked_service_name: The name of Azure SQL linked service that point to the - HCatalog database. The on-demand HDInsight cluster is created by using the Azure SQL database - as the metastore. - :type hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param cluster_type: The cluster type. Type: string (or Expression with resultType string). - :type cluster_type: object - :param spark_version: The version of spark if the cluster type is 'spark'. Type: string (or - Expression with resultType string). - :type spark_version: object - :param core_configuration: Specifies the core configuration parameters (as in core-site.xml) - for the HDInsight cluster to be created. - :type core_configuration: object - :param h_base_configuration: Specifies the HBase configuration parameters (hbase-site.xml) for - the HDInsight cluster. - :type h_base_configuration: object - :param hdfs_configuration: Specifies the HDFS configuration parameters (hdfs-site.xml) for the - HDInsight cluster. - :type hdfs_configuration: object - :param hive_configuration: Specifies the hive configuration parameters (hive-site.xml) for the - HDInsight cluster. - :type hive_configuration: object - :param map_reduce_configuration: Specifies the MapReduce configuration parameters (mapred- - site.xml) for the HDInsight cluster. - :type map_reduce_configuration: object - :param oozie_configuration: Specifies the Oozie configuration parameters (oozie-site.xml) for - the HDInsight cluster. - :type oozie_configuration: object - :param storm_configuration: Specifies the Storm configuration parameters (storm-site.xml) for - the HDInsight cluster. - :type storm_configuration: object - :param yarn_configuration: Specifies the Yarn configuration parameters (yarn-site.xml) for the - HDInsight cluster. - :type yarn_configuration: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param head_node_size: Specifies the size of the head node for the HDInsight cluster. - :type head_node_size: object - :param data_node_size: Specifies the size of the data node for the HDInsight cluster. - :type data_node_size: object - :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for the HDInsight - cluster. - :type zookeeper_node_size: object - :param script_actions: Custom script actions to run on HDI ondemand cluster once it's up. - Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize- - cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen- - us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. - :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] - :param virtual_network_id: The ARM resource ID for the vNet to which the cluster should be - joined after creation. Type: string (or Expression with resultType string). - :type virtual_network_id: object - :param subnet_name: The ARM resource ID for the subnet in the vNet. 
If virtualNetworkId was - specified, then this property is required. Type: string (or Expression with resultType string). - :type subnet_name: object - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference - """ - - _validation = { - 'type': {'required': True}, - 'cluster_size': {'required': True}, - 'time_to_live': {'required': True}, - 'version': {'required': True}, - 'linked_service_name': {'required': True}, - 'host_subscription_id': {'required': True}, - 'tenant': {'required': True}, - 'cluster_resource_group': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'}, - 'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'}, - 'version': {'key': 'typeProperties.version', 'type': 'object'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'}, - 'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'}, - 'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'}, - 'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'}, - 'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'}, - 'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'}, - 'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'}, - 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'}, - 'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'}, - 'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'}, - 'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'}, - 'hdfs_configuration': {'key': 'typeProperties.hdfsConfiguration', 'type': 'object'}, - 'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'}, - 'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'}, - 'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'}, - 'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'}, - 'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'}, - 'data_node_size': {'key': 
'typeProperties.dataNodeSize', 'type': 'object'}, - 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, - 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, - 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, - 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, - } - - def __init__( - self, - **kwargs - ): - super(HdInsightOnDemandLinkedService, self).__init__(**kwargs) - self.type = 'HDInsightOnDemand' # type: str - self.cluster_size = kwargs['cluster_size'] - self.time_to_live = kwargs['time_to_live'] - self.version = kwargs['version'] - self.linked_service_name = kwargs['linked_service_name'] - self.host_subscription_id = kwargs['host_subscription_id'] - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs['tenant'] - self.cluster_resource_group = kwargs['cluster_resource_group'] - self.cluster_name_prefix = kwargs.get('cluster_name_prefix', None) - self.cluster_user_name = kwargs.get('cluster_user_name', None) - self.cluster_password = kwargs.get('cluster_password', None) - self.cluster_ssh_user_name = kwargs.get('cluster_ssh_user_name', None) - self.cluster_ssh_password = kwargs.get('cluster_ssh_password', None) - self.additional_linked_service_names = kwargs.get('additional_linked_service_names', None) - self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) - self.cluster_type = kwargs.get('cluster_type', None) - self.spark_version = kwargs.get('spark_version', None) - self.core_configuration = kwargs.get('core_configuration', None) - self.h_base_configuration = kwargs.get('h_base_configuration', None) - self.hdfs_configuration = kwargs.get('hdfs_configuration', None) - self.hive_configuration = kwargs.get('hive_configuration', None) - self.map_reduce_configuration = kwargs.get('map_reduce_configuration', None) - self.oozie_configuration = kwargs.get('oozie_configuration', None) - self.storm_configuration = kwargs.get('storm_configuration', None) - self.yarn_configuration = kwargs.get('yarn_configuration', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.head_node_size = kwargs.get('head_node_size', None) - self.data_node_size = kwargs.get('data_node_size', None) - self.zookeeper_node_size = kwargs.get('zookeeper_node_size', None) - self.script_actions = kwargs.get('script_actions', None) - self.virtual_network_id = kwargs.get('virtual_network_id', None) - self.subnet_name = kwargs.get('subnet_name', None) - self.credential = kwargs.get('credential', None) - - -class HdInsightPigActivity(ExecutionActivity): - """HDInsight Pig activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. 
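The HdInsightOnDemandLinkedService model above has seven required properties (cluster_size, time_to_live, version, linked_service_name, host_subscription_id, tenant and cluster_resource_group). A sketch with placeholder values, assuming the docstring import path and that LinkedServiceReference is available:

    from azure.mgmt.datafactory.models import (
        HdInsightOnDemandLinkedService,
        LinkedServiceReference,
    )

    on_demand_ls = HdInsightOnDemandLinkedService(
        cluster_size=4,                            # suggested value per the docstring
        time_to_live="00:05:00",                   # minimum idle time is 5 minutes
        version="4.0",
        linked_service_name=LinkedServiceReference(reference_name="PrimaryBlobStorage"),
        host_subscription_id="<subscription-id>",
        tenant="<tenant-id>",
        cluster_resource_group="my-resource-group",
        cluster_type="spark",                      # optional
    )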
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. Type: array (or Expression - with resultType array). - :type arguments: object - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.mgmt.datafactory.models.HdInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with resultType string). - :type script_path: object - :param script_linked_service: Script linked service reference. - :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param defines: Allows user to specify defines for Pig job request. - :type defines: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': 'object'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - } - - def __init__( - self, - **kwargs - ): - super(HdInsightPigActivity, self).__init__(**kwargs) - self.type = 'HDInsightPig' # type: str - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.script_path = kwargs.get('script_path', None) - self.script_linked_service = kwargs.get('script_linked_service', None) - self.defines = kwargs.get('defines', None) - - -class HdInsightSparkActivity(ExecutionActivity): - """HDInsight Spark activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. 
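The HdInsightPigActivity model above mirrors the Hive activity, except that its arguments property is typed as a single object (an array expression) rather than a list. A short placeholder sketch, assuming the docstring import path:

    from azure.mgmt.datafactory.models import HdInsightPigActivity

    pig_activity = HdInsightPigActivity(
        name="RunPigScript",                       # required
        script_path="scripts/clean.pig",
        defines={"input": "wasbs://raw/"},
    )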
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param root_path: Required. The root path in 'sparkJobLinkedService' for all the job’s files. - Type: string (or Expression with resultType string). - :type root_path: object - :param entry_file_path: Required. The relative path to the root folder of the code/package to - be executed. Type: string (or Expression with resultType string). - :type entry_file_path: object - :param arguments: The user-specified arguments to HDInsightSparkActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.mgmt.datafactory.models.HdInsightActivityDebugInfoOption - :param spark_job_linked_service: The storage linked service for uploading the entry file and - dependencies, and for receiving logs. - :type spark_job_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param class_name: The application's Java/Spark main class. - :type class_name: str - :param proxy_user: The user to impersonate that will execute the job. Type: string (or - Expression with resultType string). - :type proxy_user: object - :param spark_config: Spark configuration property. - :type spark_config: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'root_path': {'required': True}, - 'entry_file_path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'}, - 'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'}, - 'class_name': {'key': 'typeProperties.className', 'type': 'str'}, - 'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'}, - 'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'}, - } - - def __init__( - self, - **kwargs - ): - super(HdInsightSparkActivity, self).__init__(**kwargs) - self.type = 'HDInsightSpark' # type: str - self.root_path = kwargs['root_path'] - self.entry_file_path = kwargs['entry_file_path'] - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.spark_job_linked_service = kwargs.get('spark_job_linked_service', None) - self.class_name = kwargs.get('class_name', None) - self.proxy_user = kwargs.get('proxy_user', None) - self.spark_config = kwargs.get('spark_config', None) - - -class HdInsightStreamingActivity(ExecutionActivity): - """HDInsight streaming activity type. 
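For the HdInsightSparkActivity model above, name, root_path and entry_file_path are required. A placeholder sketch, assuming the docstring import path and that LinkedServiceReference is available:

    from azure.mgmt.datafactory.models import HdInsightSparkActivity, LinkedServiceReference

    spark_activity = HdInsightSparkActivity(
        name="RunSparkJob",                        # required
        root_path="adfspark",                      # required: root folder in sparkJobLinkedService
        entry_file_path="pyFiles/main.py",         # required: path relative to the root folder
        spark_job_linked_service=LinkedServiceReference(reference_name="JobStorage"),
        arguments=["--date", "2022-10-18"],
    )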
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.mgmt.datafactory.models.HdInsightActivityDebugInfoOption - :param mapper: Required. Mapper executable name. Type: string (or Expression with resultType - string). - :type mapper: object - :param reducer: Required. Reducer executable name. Type: string (or Expression with resultType - string). - :type reducer: object - :param input: Required. Input blob path. Type: string (or Expression with resultType string). - :type input: object - :param output: Required. Output blob path. Type: string (or Expression with resultType string). - :type output: object - :param file_paths: Required. Paths to streaming job files. Can be directories. - :type file_paths: list[object] - :param file_linked_service: Linked service reference where the files are located. - :type file_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param combiner: Combiner executable name. Type: string (or Expression with resultType string). - :type combiner: object - :param command_environment: Command line environment values. - :type command_environment: list[object] - :param defines: Allows user to specify defines for streaming job request. 
- :type defines: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'mapper': {'required': True}, - 'reducer': {'required': True}, - 'input': {'required': True}, - 'output': {'required': True}, - 'file_paths': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, - 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, - 'input': {'key': 'typeProperties.input', 'type': 'object'}, - 'output': {'key': 'typeProperties.output', 'type': 'object'}, - 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, - 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, - 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, - 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - } - - def __init__( - self, - **kwargs - ): - super(HdInsightStreamingActivity, self).__init__(**kwargs) - self.type = 'HDInsightStreaming' # type: str - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.mapper = kwargs['mapper'] - self.reducer = kwargs['reducer'] - self.input = kwargs['input'] - self.output = kwargs['output'] - self.file_paths = kwargs['file_paths'] - self.file_linked_service = kwargs.get('file_linked_service', None) - self.combiner = kwargs.get('combiner', None) - self.command_environment = kwargs.get('command_environment', None) - self.defines = kwargs.get('defines', None) - - -class HiveLinkedService(LinkedService): - """Hive Server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. IP address or host name of the Hive server, separated by ';' for - multiple hosts (only when serviceDiscoveryMode is enable). 
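The HdInsightStreamingActivity model above requires name, mapper, reducer, input, output and file_paths. A placeholder sketch, assuming the docstring import path:

    from azure.mgmt.datafactory.models import HdInsightStreamingActivity

    streaming_activity = HdInsightStreamingActivity(
        name="RunStreamingJob",                    # required
        mapper="cat.exe",                          # required
        reducer="wc.exe",                          # required
        input="wasbs://input/davinci.txt",         # required
        output="wasbs://output/wordcount.txt",     # required
        file_paths=["apps/cat.exe", "apps/wc.exe"],  # required
    )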
- :type host: object - :param port: The TCP port that the Hive server uses to listen for client connections. - :type port: object - :param server_type: The type of Hive server. Possible values include: "HiveServer1", - "HiveServer2", "HiveThriftServer". - :type server_type: str or ~azure.mgmt.datafactory.models.HiveServerType - :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible - values include: "Binary", "SASL", "HTTP ". - :type thrift_transport_protocol: str or - ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol - :param authentication_type: Required. The authentication method used to access the Hive server. - Possible values include: "Anonymous", "Username", "UsernameAndPassword", - "WindowsAzureHDInsightService". - :type authentication_type: str or ~azure.mgmt.datafactory.models.HiveAuthenticationType - :param service_discovery_mode: true to indicate using the ZooKeeper service, false not. - :type service_discovery_mode: object - :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are - added. - :type zoo_keeper_name_space: object - :param use_native_query: Specifies whether the driver uses native HiveQL queries,or converts - them into an equivalent form in HiveQL. - :type use_native_query: object - :param username: The user name that you use to access Hive Server. - :type username: object - :param password: The password corresponding to the user name that you provided in the Username - field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param http_path: The partial URL corresponding to the Hive server. - :type http_path: object - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, - 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, - 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, - 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(HiveLinkedService, self).__init__(**kwargs) - self.type = 'Hive' # type: str - self.host = kwargs['host'] - self.port = kwargs.get('port', None) - self.server_type = kwargs.get('server_type', None) - self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None) - self.authentication_type = kwargs['authentication_type'] - self.service_discovery_mode = kwargs.get('service_discovery_mode', None) - self.zoo_keeper_name_space = kwargs.get('zoo_keeper_name_space', None) - self.use_native_query = kwargs.get('use_native_query', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.http_path = kwargs.get('http_path', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class HiveObjectDataset(Dataset): - """Hive Server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. 
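A hedged example of the HiveLinkedService model above; host and authentication_type are required, SecretString is assumed to be available as a SecretBase implementation, and all values are placeholders:

    from azure.mgmt.datafactory.models import HiveLinkedService, SecretString

    hive_ls = HiveLinkedService(
        host="hiveserver.contoso.com",             # required
        authentication_type="UsernameAndPassword", # required
        port=10000,
        username="hiveuser",
        password=SecretString(value="<password>"),
        enable_ssl=True,
    )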
- :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of the Hive. Type: string (or Expression with resultType string). - :type table: object - :param schema_type_properties_schema: The schema name of the Hive. Type: string (or Expression - with resultType string). - :type schema_type_properties_schema: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(HiveObjectDataset, self).__init__(**kwargs) - self.type = 'HiveObject' # type: str - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - - -class HiveSource(TabularSource): - """A copy activity Hive Server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. 
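For the HiveObjectDataset model above, only linked_service_name is required beyond the server-filled type. A placeholder sketch, assuming the docstring import path:

    from azure.mgmt.datafactory.models import HiveObjectDataset, LinkedServiceReference

    hive_dataset = HiveObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name="HiveServerLS"),  # required
        schema_type_properties_schema="default",   # maps to typeProperties.schema
        table="web_logs",
    )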
Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(HiveSource, self).__init__(**kwargs) - self.type = 'HiveSource' # type: str - self.query = kwargs.get('query', None) - - -class HttpDataset(Dataset): - """A file in an HTTP web server. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param relative_url: The relative URL based on the URL in the HttpLinkedService refers to an - HTTP file Type: string (or Expression with resultType string). - :type relative_url: object - :param request_method: The HTTP method for the HTTP request. Type: string (or Expression with - resultType string). - :type request_method: object - :param request_body: The body for the HTTP request. Type: string (or Expression with resultType - string). 
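The HiveSource model above adds only a query on top of the TabularSource properties. A placeholder sketch for the source side of a copy activity, assuming the docstring import path:

    from azure.mgmt.datafactory.models import HiveSource

    hive_source = HiveSource(
        query="SELECT * FROM default.web_logs",    # optional
        query_timeout="02:00:00",                  # inherited from TabularSource
    )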
- :type request_body: object - :param additional_headers: The headers for the HTTP Request. e.g. request-header- - name-1:request-header-value-1 - ... - request-header-name-n:request-header-value-n Type: string (or Expression with resultType - string). - :type additional_headers: object - :param format: The format of files. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used on files. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, - 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, - 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__( - self, - **kwargs - ): - super(HttpDataset, self).__init__(**kwargs) - self.type = 'HttpFile' # type: str - self.relative_url = kwargs.get('relative_url', None) - self.request_method = kwargs.get('request_method', None) - self.request_body = kwargs.get('request_body', None) - self.additional_headers = kwargs.get('additional_headers', None) - self.format = kwargs.get('format', None) - self.compression = kwargs.get('compression', None) - - -class HttpLinkedService(LinkedService): - """Linked service for an HTTP source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: - string (or Expression with resultType string). - :type url: object - :param authentication_type: The authentication type to be used to connect to the HTTP server. - Possible values include: "Basic", "Anonymous", "Digest", "Windows", "ClientCertificate". - :type authentication_type: str or ~azure.mgmt.datafactory.models.HttpAuthenticationType - :param user_name: User name for Basic, Digest, or Windows authentication. 
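For the HttpDataset model above, linked_service_name is the only required reference; relative_url and request_method describe the file to fetch. Placeholder values, assuming the docstring import path:

    from azure.mgmt.datafactory.models import HttpDataset, LinkedServiceReference

    http_dataset = HttpDataset(
        linked_service_name=LinkedServiceReference(reference_name="HttpServerLS"),  # required
        relative_url="downloads/sample.csv",
        request_method="GET",
    )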
Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password for Basic, Digest, Windows, or ClientCertificate with - EmbeddedCertData authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param auth_headers: The additional HTTP headers in the request to RESTful API used for - authorization. Type: object (or Expression with resultType object). - :type auth_headers: object - :param embedded_cert_data: Base64 encoded certificate data for ClientCertificate - authentication. For on-premises copy with ClientCertificate authentication, either - CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression - with resultType string). - :type embedded_cert_data: object - :param cert_thumbprint: Thumbprint of certificate for ClientCertificate authentication. Only - valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either - CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression - with resultType string). - :type cert_thumbprint: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param enable_server_certificate_validation: If true, validate the HTTPS server SSL - certificate. Default value is true. Type: boolean (or Expression with resultType boolean). - :type enable_server_certificate_validation: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'auth_headers': {'key': 'typeProperties.authHeaders', 'type': 'object'}, - 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'}, - 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(HttpLinkedService, self).__init__(**kwargs) - self.type = 'HttpServer' # type: str - self.url = kwargs['url'] - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.auth_headers = kwargs.get('auth_headers', None) - self.embedded_cert_data = kwargs.get('embedded_cert_data', None) - self.cert_thumbprint = kwargs.get('cert_thumbprint', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) - - -class 
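A hedged example of the HttpLinkedService model above with Basic authentication; url is the only required property, SecretString is assumed to be available, and all values are placeholders:

    from azure.mgmt.datafactory.models import HttpLinkedService, SecretString

    http_ls = HttpLinkedService(
        url="https://www.contoso.com/files",       # required
        authentication_type="Basic",               # Basic, Anonymous, Digest, Windows or ClientCertificate
        user_name="reader",
        password=SecretString(value="<password>"),
        enable_server_certificate_validation=True,
    )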
HttpReadSettings(StoreReadSettings): - """Sftp read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: - string (or Expression with resultType string). - :type request_method: object - :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: - string (or Expression with resultType string). - :type request_body: object - :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: - string (or Expression with resultType string). - :type additional_headers: object - :param request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP - server. - :type request_timeout: object - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: - string (or Expression with resultType string). - :type partition_root_path: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'request_method': {'key': 'requestMethod', 'type': 'object'}, - 'request_body': {'key': 'requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, - 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(HttpReadSettings, self).__init__(**kwargs) - self.type = 'HttpReadSettings' # type: str - self.request_method = kwargs.get('request_method', None) - self.request_body = kwargs.get('request_body', None) - self.additional_headers = kwargs.get('additional_headers', None) - self.request_timeout = kwargs.get('request_timeout', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.partition_root_path = kwargs.get('partition_root_path', None) - - -class HttpServerLocation(DatasetLocation): - """The location of http server. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location.Constant filled by server. 
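Despite its docstring, the HttpReadSettings model above configures HTTP (not SFTP) reads for a copy source. A placeholder sketch, assuming the docstring import path:

    from azure.mgmt.datafactory.models import HttpReadSettings

    read_settings = HttpReadSettings(
        request_method="GET",                      # the default is GET per the docstring
        additional_headers="Accept: text/csv",
        request_timeout="00:02:00",
    )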
- :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType - string). - :type file_name: object - :param relative_url: Specify the relativeUrl of http server. Type: string (or Expression with - resultType string). - :type relative_url: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(HttpServerLocation, self).__init__(**kwargs) - self.type = 'HttpServerLocation' # type: str - self.relative_url = kwargs.get('relative_url', None) - - -class HttpSource(CopySource): - """A copy activity source for an HTTP file. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response from - HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string - (or Expression with resultType string), pattern: - ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(HttpSource, self).__init__(**kwargs) - self.type = 'HttpSource' # type: str - self.http_request_timeout = kwargs.get('http_request_timeout', None) - - -class HubspotLinkedService(LinkedService): - """Hubspot Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. 
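The two small HTTP models above are typically paired in a copy pipeline: HttpServerLocation identifies the file location inside a dataset, while HttpSource tunes the read side of the copy activity. Placeholder values, assuming the docstring import path:

    from azure.mgmt.datafactory.models import HttpServerLocation, HttpSource

    location = HttpServerLocation(relative_url="downloads/sample.csv")
    source = HttpSource(http_request_timeout="00:05:00")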
- :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param client_id: Required. The client ID associated with your Hubspot application. - :type client_id: object - :param client_secret: The client secret associated with your Hubspot application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param access_token: The access token obtained when initially authenticating your OAuth - integration. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param refresh_token: The refresh token obtained when initially authenticating your OAuth - integration. - :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(HubspotLinkedService, self).__init__(**kwargs) - self.type = 'Hubspot' # type: str - self.client_id = kwargs['client_id'] - self.client_secret = kwargs.get('client_secret', None) - self.access_token = kwargs.get('access_token', None) - self.refresh_token = kwargs.get('refresh_token', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class HubspotObjectDataset(Dataset): - """Hubspot Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(HubspotObjectDataset, self).__init__(**kwargs) - self.type = 'HubspotObject' # type: str - self.table_name = kwargs.get('table_name', None) - - -class HubspotSource(TabularSource): - """A copy activity Hubspot Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(HubspotSource, self).__init__(**kwargs) - self.type = 'HubspotSource' # type: str - self.query = kwargs.get('query', None) - - -class IfConditionActivity(Activity): - """This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the expression. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param expression: Required. An expression that would evaluate to Boolean. This is used to - determine the block of activities (ifTrueActivities or ifFalseActivities) that will be - executed. - :type expression: ~azure.mgmt.datafactory.models.Expression - :param if_true_activities: List of activities to execute if expression is evaluated to true. - This is an optional property and if not provided, the activity will exit without any action. - :type if_true_activities: list[~azure.mgmt.datafactory.models.Activity] - :param if_false_activities: List of activities to execute if expression is evaluated to false. - This is an optional property and if not provided, the activity will exit without any action. 
- :type if_false_activities: list[~azure.mgmt.datafactory.models.Activity] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'expression': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, - 'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'}, - 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'}, - } - - def __init__( - self, - **kwargs - ): - super(IfConditionActivity, self).__init__(**kwargs) - self.type = 'IfCondition' # type: str - self.expression = kwargs['expression'] - self.if_true_activities = kwargs.get('if_true_activities', None) - self.if_false_activities = kwargs.get('if_false_activities', None) - - -class ImpalaLinkedService(LinkedService): - """Impala server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The IP address or host name of the Impala server. (i.e. - 192.168.222.160). - :type host: object - :param port: The TCP port that the Impala server uses to listen for client connections. The - default value is 21050. - :type port: object - :param authentication_type: Required. The authentication type to use. Possible values include: - "Anonymous", "SASLUsername", "UsernameAndPassword". - :type authentication_type: str or ~azure.mgmt.datafactory.models.ImpalaAuthenticationType - :param username: The user name used to access the Impala server. The default value is anonymous - when using SASLUsername. - :type username: object - :param password: The password corresponding to the user name when using UsernameAndPassword. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. 
- :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ImpalaLinkedService, self).__init__(**kwargs) - self.type = 'Impala' # type: str - self.host = kwargs['host'] - self.port = kwargs.get('port', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class ImpalaObjectDataset(Dataset): - """Impala server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. 
- :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of the Impala. Type: string (or Expression with resultType - string). - :type table: object - :param schema_type_properties_schema: The schema name of the Impala. Type: string (or - Expression with resultType string). - :type schema_type_properties_schema: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ImpalaObjectDataset, self).__init__(**kwargs) - self.type = 'ImpalaObject' # type: str - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - - -class ImpalaSource(TabularSource): - """A copy activity Impala server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ImpalaSource, self).__init__(**kwargs) - self.type = 'ImpalaSource' # type: str - self.query = kwargs.get('query', None) - - -class InformixLinkedService(LinkedService): - """Informix linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The non-access credential portion of the connection string - as well as an optional encrypted credential. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the Informix as ODBC data - store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType - string). - :type authentication_type: object - :param credential: The access credential portion of the connection string specified in driver- - specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(InformixLinkedService, self).__init__(**kwargs) - self.type = 'Informix' # type: str - self.connection_string = kwargs['connection_string'] - self.authentication_type = kwargs.get('authentication_type', None) - self.credential = kwargs.get('credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class InformixSink(CopySink): - """A copy activity Informix sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param pre_copy_script: A query to execute before starting the copy. Type: string (or - Expression with resultType string). 
- :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(InformixSink, self).__init__(**kwargs) - self.type = 'InformixSink' # type: str - self.pre_copy_script = kwargs.get('pre_copy_script', None) - - -class InformixSource(TabularSource): - """A copy activity source for Informix. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: Database query. Type: string (or Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(InformixSource, self).__init__(**kwargs) - self.type = 'InformixSource' # type: str - self.query = kwargs.get('query', None) - - -class InformixTableDataset(Dataset): - """The Informix table dataset. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The Informix table name. Type: string (or Expression with resultType - string). - :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(InformixTableDataset, self).__init__(**kwargs) - self.type = 'InformixTable' # type: str - self.table_name = kwargs.get('table_name', None) - - -class IntegrationRuntime(msrest.serialization.Model): - """Azure Data Factory nested object which serves as a compute resource for activities. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ManagedIntegrationRuntime, SelfHostedIntegrationRuntime. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of integration runtime.Constant filled by server. Possible values - include: "Managed", "SelfHosted". - :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType - :param description: Integration runtime description. 
- :type description: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'Managed': 'ManagedIntegrationRuntime', 'SelfHosted': 'SelfHostedIntegrationRuntime'} - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntime, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'IntegrationRuntime' # type: str - self.description = kwargs.get('description', None) - - -class IntegrationRuntimeAuthKeys(msrest.serialization.Model): - """The integration runtime authentication keys. - - :param auth_key1: The primary integration runtime authentication key. - :type auth_key1: str - :param auth_key2: The secondary integration runtime authentication key. - :type auth_key2: str - """ - - _attribute_map = { - 'auth_key1': {'key': 'authKey1', 'type': 'str'}, - 'auth_key2': {'key': 'authKey2', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeAuthKeys, self).__init__(**kwargs) - self.auth_key1 = kwargs.get('auth_key1', None) - self.auth_key2 = kwargs.get('auth_key2', None) - - -class IntegrationRuntimeComputeProperties(msrest.serialization.Model): - """The compute resource properties for managed integration runtime. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param location: The location for managed integration runtime. The supported regions could be - found on https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement- - activities. - :type location: str - :param node_size: The node size requirement to managed integration runtime. - :type node_size: str - :param number_of_nodes: The required number of nodes for managed integration runtime. - :type number_of_nodes: int - :param max_parallel_executions_per_node: Maximum parallel executions count per node for managed - integration runtime. - :type max_parallel_executions_per_node: int - :param data_flow_properties: Data flow properties for managed integration runtime. - :type data_flow_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeDataFlowProperties - :param v_net_properties: VNet properties for managed integration runtime. 
- :type v_net_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties - """ - - _validation = { - 'number_of_nodes': {'minimum': 1}, - 'max_parallel_executions_per_node': {'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'node_size': {'key': 'nodeSize', 'type': 'str'}, - 'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'}, - 'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'}, - 'data_flow_properties': {'key': 'dataFlowProperties', 'type': 'IntegrationRuntimeDataFlowProperties'}, - 'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.location = kwargs.get('location', None) - self.node_size = kwargs.get('node_size', None) - self.number_of_nodes = kwargs.get('number_of_nodes', None) - self.max_parallel_executions_per_node = kwargs.get('max_parallel_executions_per_node', None) - self.data_flow_properties = kwargs.get('data_flow_properties', None) - self.v_net_properties = kwargs.get('v_net_properties', None) - - -class IntegrationRuntimeConnectionInfo(msrest.serialization.Model): - """Connection information for encrypting the on-premises data source credentials. - - Variables are only populated by the server, and will be ignored when sending a request. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :ivar service_token: The token generated in service. Callers use this token to authenticate to - integration runtime. - :vartype service_token: str - :ivar identity_cert_thumbprint: The integration runtime SSL certificate thumbprint. Click-Once - application uses it to do server validation. - :vartype identity_cert_thumbprint: str - :ivar host_service_uri: The on-premises integration runtime host URL. - :vartype host_service_uri: str - :ivar version: The integration runtime version. - :vartype version: str - :ivar public_key: The public key for encrypting a credential when transferring the credential - to the integration runtime. - :vartype public_key: str - :ivar is_identity_cert_exprired: Whether the identity certificate is expired. 
- :vartype is_identity_cert_exprired: bool - """ - - _validation = { - 'service_token': {'readonly': True}, - 'identity_cert_thumbprint': {'readonly': True}, - 'host_service_uri': {'readonly': True}, - 'version': {'readonly': True}, - 'public_key': {'readonly': True}, - 'is_identity_cert_exprired': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'service_token': {'key': 'serviceToken', 'type': 'str'}, - 'identity_cert_thumbprint': {'key': 'identityCertThumbprint', 'type': 'str'}, - 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, - 'public_key': {'key': 'publicKey', 'type': 'str'}, - 'is_identity_cert_exprired': {'key': 'isIdentityCertExprired', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.service_token = None - self.identity_cert_thumbprint = None - self.host_service_uri = None - self.version = None - self.public_key = None - self.is_identity_cert_exprired = None - - -class IntegrationRuntimeCustomSetupScriptProperties(msrest.serialization.Model): - """Custom setup script properties for a managed dedicated integration runtime. - - :param blob_container_uri: The URI of the Azure blob container that contains the custom setup - script. - :type blob_container_uri: str - :param sas_token: The SAS token of the Azure blob container. - :type sas_token: ~azure.mgmt.datafactory.models.SecureString - """ - - _attribute_map = { - 'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'}, - 'sas_token': {'key': 'sasToken', 'type': 'SecureString'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) - self.blob_container_uri = kwargs.get('blob_container_uri', None) - self.sas_token = kwargs.get('sas_token', None) - - -class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model): - """Data flow properties for managed integration runtime. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param compute_type: Compute type of the cluster which will execute data flow job. Possible - values include: "General", "MemoryOptimized", "ComputeOptimized". - :type compute_type: str or ~azure.mgmt.datafactory.models.DataFlowComputeType - :param core_count: Core count of the cluster which will execute data flow job. Supported values - are: 8, 16, 32, 48, 80, 144 and 272. - :type core_count: int - :param time_to_live: Time to live (in minutes) setting of the cluster which will execute data - flow job. - :type time_to_live: int - :param cleanup: Cluster will not be recycled and it will be used in next data flow activity run - until TTL (time to live) is reached if this is set as false. Default is true. 
- :type cleanup: bool - """ - - _validation = { - 'time_to_live': {'minimum': 0}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'core_count': {'key': 'coreCount', 'type': 'int'}, - 'time_to_live': {'key': 'timeToLive', 'type': 'int'}, - 'cleanup': {'key': 'cleanup', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeDataFlowProperties, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.compute_type = kwargs.get('compute_type', None) - self.core_count = kwargs.get('core_count', None) - self.time_to_live = kwargs.get('time_to_live', None) - self.cleanup = kwargs.get('cleanup', None) - - -class IntegrationRuntimeDataProxyProperties(msrest.serialization.Model): - """Data proxy properties for a managed dedicated integration runtime. - - :param connect_via: The self-hosted integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.EntityReference - :param staging_linked_service: The staging linked service reference. - :type staging_linked_service: ~azure.mgmt.datafactory.models.EntityReference - :param path: The path to contain the staged data in the Blob storage. - :type path: str - """ - - _attribute_map = { - 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, - 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, - 'path': {'key': 'path', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) - self.connect_via = kwargs.get('connect_via', None) - self.staging_linked_service = kwargs.get('staging_linked_service', None) - self.path = kwargs.get('path', None) - - -class IntegrationRuntimeDebugResource(SubResourceDebugResource): - """Integration runtime debug resource. - - All required parameters must be populated in order to send to Azure. - - :param name: The resource name. - :type name: str - :param properties: Required. Integration runtime properties. - :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime - """ - - _validation = { - 'properties': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeDebugResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class IntegrationRuntimeListResponse(msrest.serialization.Model): - """A list of integration runtime resources. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of integration runtimes. - :type value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[IntegrationRuntimeResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) - - -class IntegrationRuntimeMonitoringData(msrest.serialization.Model): - """Get monitoring data response. - - :param name: Integration runtime name. 
- :type name: str - :param nodes: Integration runtime node monitoring data. - :type nodes: list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'nodes': {'key': 'nodes', 'type': '[IntegrationRuntimeNodeMonitoringData]'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeMonitoringData, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.nodes = kwargs.get('nodes', None) - - -class IntegrationRuntimeNodeIpAddress(msrest.serialization.Model): - """The IP address of self-hosted integration runtime node. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar ip_address: The IP address of self-hosted integration runtime node. - :vartype ip_address: str - """ - - _validation = { - 'ip_address': {'readonly': True}, - } - - _attribute_map = { - 'ip_address': {'key': 'ipAddress', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeNodeIpAddress, self).__init__(**kwargs) - self.ip_address = None - - -class IntegrationRuntimeNodeMonitoringData(msrest.serialization.Model): - """Monitoring data for integration runtime node. - - Variables are only populated by the server, and will be ignored when sending a request. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :ivar node_name: Name of the integration runtime node. - :vartype node_name: str - :ivar available_memory_in_mb: Available memory (MB) on the integration runtime node. - :vartype available_memory_in_mb: int - :ivar cpu_utilization: CPU percentage on the integration runtime node. - :vartype cpu_utilization: int - :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration runtime node. - :vartype concurrent_jobs_limit: int - :ivar concurrent_jobs_running: The number of jobs currently running on the integration runtime - node. - :vartype concurrent_jobs_running: int - :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration runtime. - :vartype max_concurrent_jobs: int - :ivar sent_bytes: Sent bytes on the integration runtime node. - :vartype sent_bytes: float - :ivar received_bytes: Received bytes on the integration runtime node. 
- :vartype received_bytes: float - """ - - _validation = { - 'node_name': {'readonly': True}, - 'available_memory_in_mb': {'readonly': True}, - 'cpu_utilization': {'readonly': True}, - 'concurrent_jobs_limit': {'readonly': True}, - 'concurrent_jobs_running': {'readonly': True}, - 'max_concurrent_jobs': {'readonly': True}, - 'sent_bytes': {'readonly': True}, - 'received_bytes': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'node_name': {'key': 'nodeName', 'type': 'str'}, - 'available_memory_in_mb': {'key': 'availableMemoryInMB', 'type': 'int'}, - 'cpu_utilization': {'key': 'cpuUtilization', 'type': 'int'}, - 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, - 'concurrent_jobs_running': {'key': 'concurrentJobsRunning', 'type': 'int'}, - 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, - 'sent_bytes': {'key': 'sentBytes', 'type': 'float'}, - 'received_bytes': {'key': 'receivedBytes', 'type': 'float'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.node_name = None - self.available_memory_in_mb = None - self.cpu_utilization = None - self.concurrent_jobs_limit = None - self.concurrent_jobs_running = None - self.max_concurrent_jobs = None - self.sent_bytes = None - self.received_bytes = None - - -class IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint(msrest.serialization.Model): - """Azure-SSIS integration runtime outbound network dependency endpoints for one category. - - :param category: The category of outbound network dependency. - :type category: str - :param endpoints: The endpoints for outbound network dependency. - :type endpoints: - list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpoint] - """ - - _attribute_map = { - 'category': {'key': 'category', 'type': 'str'}, - 'endpoints': {'key': 'endpoints', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesEndpoint]'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint, self).__init__(**kwargs) - self.category = kwargs.get('category', None) - self.endpoints = kwargs.get('endpoints', None) - - -class IntegrationRuntimeOutboundNetworkDependenciesEndpoint(msrest.serialization.Model): - """The endpoint for Azure-SSIS integration runtime outbound network dependency. - - :param domain_name: The domain name of endpoint. - :type domain_name: str - :param endpoint_details: The details of endpoint. - :type endpoint_details: - list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails] - """ - - _attribute_map = { - 'domain_name': {'key': 'domainName', 'type': 'str'}, - 'endpoint_details': {'key': 'endpointDetails', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails]'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeOutboundNetworkDependenciesEndpoint, self).__init__(**kwargs) - self.domain_name = kwargs.get('domain_name', None) - self.endpoint_details = kwargs.get('endpoint_details', None) - - -class IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(msrest.serialization.Model): - """The details of Azure-SSIS integration runtime outbound network dependency endpoint. - - :param port: The port of endpoint. 
- :type port: int - """ - - _attribute_map = { - 'port': {'key': 'port', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails, self).__init__(**kwargs) - self.port = kwargs.get('port', None) - - -class IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse(msrest.serialization.Model): - """Azure-SSIS integration runtime outbound network dependency endpoints. - - :param value: The list of outbound network dependency endpoints. - :type value: - list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint] - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint]'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - - -class IntegrationRuntimeReference(msrest.serialization.Model): - """Integration runtime reference type. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Type of integration runtime. Default value: - "IntegrationRuntimeReference". - :vartype type: str - :param reference_name: Required. Reference integration runtime name. - :type reference_name: str - :param parameters: Arguments for integration runtime. - :type parameters: dict[str, object] - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - type = "IntegrationRuntimeReference" - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeReference, self).__init__(**kwargs) - self.reference_name = kwargs['reference_name'] - self.parameters = kwargs.get('parameters', None) - - -class IntegrationRuntimeRegenerateKeyParameters(msrest.serialization.Model): - """Parameters to regenerate the authentication key. - - :param key_name: The name of the authentication key to regenerate. Possible values include: - "authKey1", "authKey2". - :type key_name: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName - """ - - _attribute_map = { - 'key_name': {'key': 'keyName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeRegenerateKeyParameters, self).__init__(**kwargs) - self.key_name = kwargs.get('key_name', None) - - -class IntegrationRuntimeResource(SubResource): - """Integration runtime resource type. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Integration runtime properties. 
- :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): - """Catalog information for managed dedicated integration runtime. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param catalog_server_endpoint: The catalog database server URL. - :type catalog_server_endpoint: str - :param catalog_admin_user_name: The administrator user name of catalog database. - :type catalog_admin_user_name: str - :param catalog_admin_password: The password of the administrator user account of the catalog - database. - :type catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString - :param catalog_pricing_tier: The pricing tier for the catalog database. The valid values could - be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/. Possible values - include: "Basic", "Standard", "Premium", "PremiumRS". - :type catalog_pricing_tier: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier - :param dual_standby_pair_name: The dual standby pair name of Azure-SSIS Integration Runtimes to - support SSISDB failover. - :type dual_standby_pair_name: str - """ - - _validation = { - 'catalog_admin_user_name': {'max_length': 128, 'min_length': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'}, - 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, - 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, - 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, - 'dual_standby_pair_name': {'key': 'dualStandbyPairName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.catalog_server_endpoint = kwargs.get('catalog_server_endpoint', None) - self.catalog_admin_user_name = kwargs.get('catalog_admin_user_name', None) - self.catalog_admin_password = kwargs.get('catalog_admin_password', None) - self.catalog_pricing_tier = kwargs.get('catalog_pricing_tier', None) - self.dual_standby_pair_name = kwargs.get('dual_standby_pair_name', None) - - -class IntegrationRuntimeSsisProperties(msrest.serialization.Model): - """SSIS properties for managed integration runtime. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param catalog_info: Catalog information for managed dedicated integration runtime. - :type catalog_info: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo - :param license_type: License type for bringing your own license scenario. 
Possible values - include: "BasePrice", "LicenseIncluded". - :type license_type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType - :param custom_setup_script_properties: Custom setup script properties for a managed dedicated - integration runtime. - :type custom_setup_script_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties - :param data_proxy_properties: Data proxy properties for a managed dedicated integration - runtime. - :type data_proxy_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties - :param edition: The edition for the SSIS Integration Runtime. Possible values include: - "Standard", "Enterprise". - :type edition: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition - :param express_custom_setup_properties: Custom setup without script properties for a SSIS - integration runtime. - :type express_custom_setup_properties: list[~azure.mgmt.datafactory.models.CustomSetupBase] - :param package_stores: Package stores for the SSIS Integration Runtime. - :type package_stores: list[~azure.mgmt.datafactory.models.PackageStore] - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'}, - 'license_type': {'key': 'licenseType', 'type': 'str'}, - 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, - 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, - 'edition': {'key': 'edition', 'type': 'str'}, - 'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'}, - 'package_stores': {'key': 'packageStores', 'type': '[PackageStore]'}, - 'credential': {'key': 'credential', 'type': 'CredentialReference'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.catalog_info = kwargs.get('catalog_info', None) - self.license_type = kwargs.get('license_type', None) - self.custom_setup_script_properties = kwargs.get('custom_setup_script_properties', None) - self.data_proxy_properties = kwargs.get('data_proxy_properties', None) - self.edition = kwargs.get('edition', None) - self.express_custom_setup_properties = kwargs.get('express_custom_setup_properties', None) - self.package_stores = kwargs.get('package_stores', None) - self.credential = kwargs.get('credential', None) - - -class IntegrationRuntimeStatus(msrest.serialization.Model): - """Integration runtime status. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ManagedIntegrationRuntimeStatus, SelfHostedIntegrationRuntimeStatus. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of integration runtime.Constant filled by server. Possible values - include: "Managed", "SelfHosted". 
- :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType - :ivar data_factory_name: The data factory name which the integration runtime belong to. - :vartype data_factory_name: str - :ivar state: The state of integration runtime. Possible values include: "Initial", "Stopped", - "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", - "AccessDenied". - :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState - """ - - _validation = { - 'type': {'required': True}, - 'data_factory_name': {'readonly': True}, - 'state': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'Managed': 'ManagedIntegrationRuntimeStatus', 'SelfHosted': 'SelfHostedIntegrationRuntimeStatus'} - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeStatus, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'IntegrationRuntimeStatus' # type: str - self.data_factory_name = None - self.state = None - - -class IntegrationRuntimeStatusListResponse(msrest.serialization.Model): - """A list of integration runtime status. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of integration runtime status. - :type value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[IntegrationRuntimeStatusResponse]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeStatusListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) - - -class IntegrationRuntimeStatusResponse(msrest.serialization.Model): - """Integration runtime status response. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar name: The integration runtime name. - :vartype name: str - :param properties: Required. Integration runtime properties. - :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus - """ - - _validation = { - 'name': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'IntegrationRuntimeStatus'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs) - self.name = None - self.properties = kwargs['properties'] - - -class IntegrationRuntimeVNetProperties(msrest.serialization.Model): - """VNet properties for managed integration runtime. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param v_net_id: The ID of the VNet that this integration runtime will join. - :type v_net_id: str - :param subnet: The name of the subnet this integration runtime will join. 
- :type subnet: str - :param public_i_ps: Resource IDs of the public IP addresses that this integration runtime will - use. - :type public_i_ps: list[str] - :param subnet_id: The ID of subnet, to which this Azure-SSIS integration runtime will be - joined. - :type subnet_id: str - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'v_net_id': {'key': 'vNetId', 'type': 'str'}, - 'subnet': {'key': 'subnet', 'type': 'str'}, - 'public_i_ps': {'key': 'publicIPs', 'type': '[str]'}, - 'subnet_id': {'key': 'subnetId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.v_net_id = kwargs.get('v_net_id', None) - self.subnet = kwargs.get('subnet', None) - self.public_i_ps = kwargs.get('public_i_ps', None) - self.subnet_id = kwargs.get('subnet_id', None) - - -class JiraLinkedService(LinkedService): - """Jira Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The IP address or host name of the Jira service. (e.g. - jira.example.com). - :type host: object - :param port: The TCP port that the Jira server uses to listen for client connections. The - default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. - :type port: object - :param username: Required. The user name that you use to access Jira Service. - :type username: object - :param password: The password corresponding to the user name that you provided in the username - field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(JiraLinkedService, self).__init__(**kwargs) - self.type = 'Jira' # type: str - self.host = kwargs['host'] - self.port = kwargs.get('port', None) - self.username = kwargs['username'] - self.password = kwargs.get('password', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class JiraObjectDataset(Dataset): - """Jira Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(JiraObjectDataset, self).__init__(**kwargs) - self.type = 'JiraObject' # type: str - self.table_name = kwargs.get('table_name', None) - - -class JiraSource(TabularSource): - """A copy activity Jira Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(JiraSource, self).__init__(**kwargs) - self.type = 'JiraSource' # type: str - self.query = kwargs.get('query', None) - - -class JsonDataset(Dataset): - """Json dataset. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the json data storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param encoding_name: The code page name of the preferred encoding. If not specified, the - default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column - of the table in the following link to set supported values: - https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with - resultType string). - :type encoding_name: object - :param compression: The data compression method used for the json dataset. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__( - self, - **kwargs - ): - super(JsonDataset, self).__init__(**kwargs) - self.type = 'Json' # type: str - self.location = kwargs.get('location', None) - self.encoding_name = kwargs.get('encoding_name', None) - self.compression = kwargs.get('compression', None) - - -class JsonFormat(DatasetStorageFormat): - """The data stored in JSON format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. 
Type of dataset storage format.Constant filled by server. - :type type: str - :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: object - :param file_pattern: File pattern of JSON. To be more specific, the way of separating a - collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. - :type file_pattern: object - :param nesting_separator: The character used to separate nesting levels. Default value is '.' - (dot). Type: string (or Expression with resultType string). - :type nesting_separator: object - :param encoding_name: The code page name of the preferred encoding. If not provided, the - default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. - The full list of supported values can be found in the 'Name' column of the table of encodings - in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or - Expression with resultType string). - :type encoding_name: object - :param json_node_reference: The JSONPath of the JSON array element to be flattened. Example: - "$.ArrayPath". Type: string (or Expression with resultType string). - :type json_node_reference: object - :param json_path_definition: The JSONPath definition for each column mapping with a customized - column name to extract data from JSON file. For fields under root object, start with "$"; for - fields inside the array chosen by jsonNodeReference property, start from the array element. - Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object (or - Expression with resultType object). - :type json_path_definition: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'file_pattern': {'key': 'filePattern', 'type': 'object'}, - 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, - 'encoding_name': {'key': 'encodingName', 'type': 'object'}, - 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, - 'json_path_definition': {'key': 'jsonPathDefinition', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(JsonFormat, self).__init__(**kwargs) - self.type = 'JsonFormat' # type: str - self.file_pattern = kwargs.get('file_pattern', None) - self.nesting_separator = kwargs.get('nesting_separator', None) - self.encoding_name = kwargs.get('encoding_name', None) - self.json_node_reference = kwargs.get('json_node_reference', None) - self.json_path_definition = kwargs.get('json_path_definition', None) - - -class JsonReadSettings(FormatReadSettings): - """Json read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param compression_properties: Compression settings. 
- :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(JsonReadSettings, self).__init__(**kwargs) - self.type = 'JsonReadSettings' # type: str - self.compression_properties = kwargs.get('compression_properties', None) - - -class JsonSink(CopySink): - """A copy activity Json sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param store_settings: Json store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: Json format settings. - :type format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(JsonSink, self).__init__(**kwargs) - self.type = 'JsonSink' # type: str - self.store_settings = kwargs.get('store_settings', None) - self.format_settings = kwargs.get('format_settings', None) - - -class JsonSource(CopySource): - """A copy activity Json source. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param store_settings: Json store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param format_settings: Json format settings. - :type format_settings: ~azure.mgmt.datafactory.models.JsonReadSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'JsonReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(JsonSource, self).__init__(**kwargs) - self.type = 'JsonSource' # type: str - self.store_settings = kwargs.get('store_settings', None) - self.format_settings = kwargs.get('format_settings', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class JsonWriteSettings(FormatWriteSettings): - """Json write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON - objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. 
- :type file_pattern: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(JsonWriteSettings, self).__init__(**kwargs) - self.type = 'JsonWriteSettings' # type: str - self.file_pattern = kwargs.get('file_pattern', None) - - -class LinkedIntegrationRuntime(msrest.serialization.Model): - """The linked integration runtime information. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar name: The name of the linked integration runtime. - :vartype name: str - :ivar subscription_id: The subscription ID for which the linked integration runtime belong to. - :vartype subscription_id: str - :ivar data_factory_name: The name of the data factory for which the linked integration runtime - belong to. - :vartype data_factory_name: str - :ivar data_factory_location: The location of the data factory for which the linked integration - runtime belong to. - :vartype data_factory_location: str - :ivar create_time: The creating time of the linked integration runtime. - :vartype create_time: ~datetime.datetime - """ - - _validation = { - 'name': {'readonly': True}, - 'subscription_id': {'readonly': True}, - 'data_factory_name': {'readonly': True}, - 'data_factory_location': {'readonly': True}, - 'create_time': {'readonly': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'}, - 'create_time': {'key': 'createTime', 'type': 'iso-8601'}, - } - - def __init__( - self, - **kwargs - ): - super(LinkedIntegrationRuntime, self).__init__(**kwargs) - self.name = None - self.subscription_id = None - self.data_factory_name = None - self.data_factory_location = None - self.create_time = None - - -class LinkedIntegrationRuntimeType(msrest.serialization.Model): - """The base definition of a linked integration runtime. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: LinkedIntegrationRuntimeKeyAuthorization, LinkedIntegrationRuntimeRbacAuthorization. - - All required parameters must be populated in order to send to Azure. - - :param authorization_type: Required. The authorization type for integration runtime - sharing.Constant filled by server. - :type authorization_type: str - """ - - _validation = { - 'authorization_type': {'required': True}, - } - - _attribute_map = { - 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, - } - - _subtype_map = { - 'authorization_type': {'Key': 'LinkedIntegrationRuntimeKeyAuthorization', 'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization'} - } - - def __init__( - self, - **kwargs - ): - super(LinkedIntegrationRuntimeType, self).__init__(**kwargs) - self.authorization_type = None # type: Optional[str] - - -class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): - """The key authorization type integration runtime. - - All required parameters must be populated in order to send to Azure. - - :param authorization_type: Required. The authorization type for integration runtime - sharing.Constant filled by server. - :type authorization_type: str - :param key: Required. 
The key used for authorization. - :type key: ~azure.mgmt.datafactory.models.SecureString - """ - - _validation = { - 'authorization_type': {'required': True}, - 'key': {'required': True}, - } - - _attribute_map = { - 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, - 'key': {'key': 'key', 'type': 'SecureString'}, - } - - def __init__( - self, - **kwargs - ): - super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) - self.authorization_type = 'Key' # type: str - self.key = kwargs['key'] - - -class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType): - """The role based access control (RBAC) authorization type integration runtime. - - All required parameters must be populated in order to send to Azure. - - :param authorization_type: Required. The authorization type for integration runtime - sharing.Constant filled by server. - :type authorization_type: str - :param resource_id: Required. The resource identifier of the integration runtime to be shared. - :type resource_id: str - """ - - _validation = { - 'authorization_type': {'required': True}, - 'resource_id': {'required': True}, - } - - _attribute_map = { - 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) - self.authorization_type = 'RBAC' # type: str - self.resource_id = kwargs['resource_id'] - - -class LinkedIntegrationRuntimeRequest(msrest.serialization.Model): - """Data factory name for linked integration runtime request. - - All required parameters must be populated in order to send to Azure. - - :param linked_factory_name: Required. The data factory name for linked integration runtime. - :type linked_factory_name: str - """ - - _validation = { - 'linked_factory_name': {'required': True}, - } - - _attribute_map = { - 'linked_factory_name': {'key': 'factoryName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(LinkedIntegrationRuntimeRequest, self).__init__(**kwargs) - self.linked_factory_name = kwargs['linked_factory_name'] - - -class LinkedServiceDebugResource(SubResourceDebugResource): - """Linked service debug resource. - - All required parameters must be populated in order to send to Azure. - - :param name: The resource name. - :type name: str - :param properties: Required. Properties of linked service. - :type properties: ~azure.mgmt.datafactory.models.LinkedService - """ - - _validation = { - 'properties': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'LinkedService'}, - } - - def __init__( - self, - **kwargs - ): - super(LinkedServiceDebugResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class LinkedServiceListResponse(msrest.serialization.Model): - """A list of linked service resources. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of linked services. - :type value: list[~azure.mgmt.datafactory.models.LinkedServiceResource] - :param next_link: The link to the next page of results, if any remaining results exist. 
- :type next_link: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[LinkedServiceResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(LinkedServiceListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) - - -class LinkedServiceReference(msrest.serialization.Model): - """Linked service reference type. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Linked service reference type. Default value: "LinkedServiceReference". - :vartype type: str - :param reference_name: Required. Reference LinkedService name. - :type reference_name: str - :param parameters: Arguments for LinkedService. - :type parameters: dict[str, object] - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - type = "LinkedServiceReference" - - def __init__( - self, - **kwargs - ): - super(LinkedServiceReference, self).__init__(**kwargs) - self.reference_name = kwargs['reference_name'] - self.parameters = kwargs.get('parameters', None) - - -class LinkedServiceResource(SubResource): - """Linked service resource type. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of linked service. - :type properties: ~azure.mgmt.datafactory.models.LinkedService - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'LinkedService'}, - } - - def __init__( - self, - **kwargs - ): - super(LinkedServiceResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class LogLocationSettings(msrest.serialization.Model): - """Log location settings. - - All required parameters must be populated in order to send to Azure. - - :param linked_service_name: Required. Log storage linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param path: The path to storage for storing detailed logs of activity execution. Type: string - (or Expression with resultType string). 
- :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'path': {'key': 'path', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(LogLocationSettings, self).__init__(**kwargs) - self.linked_service_name = kwargs['linked_service_name'] - self.path = kwargs.get('path', None) - - -class LogSettings(msrest.serialization.Model): - """Log settings. - - All required parameters must be populated in order to send to Azure. - - :param enable_copy_activity_log: Specifies whether to enable copy activity log. Type: boolean - (or Expression with resultType boolean). - :type enable_copy_activity_log: object - :param copy_activity_log_settings: Specifies settings for copy activity log. - :type copy_activity_log_settings: ~azure.mgmt.datafactory.models.CopyActivityLogSettings - :param log_location_settings: Required. Log location settings customer needs to provide when - enabling log. - :type log_location_settings: ~azure.mgmt.datafactory.models.LogLocationSettings - """ - - _validation = { - 'log_location_settings': {'required': True}, - } - - _attribute_map = { - 'enable_copy_activity_log': {'key': 'enableCopyActivityLog', 'type': 'object'}, - 'copy_activity_log_settings': {'key': 'copyActivityLogSettings', 'type': 'CopyActivityLogSettings'}, - 'log_location_settings': {'key': 'logLocationSettings', 'type': 'LogLocationSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(LogSettings, self).__init__(**kwargs) - self.enable_copy_activity_log = kwargs.get('enable_copy_activity_log', None) - self.copy_activity_log_settings = kwargs.get('copy_activity_log_settings', None) - self.log_location_settings = kwargs['log_location_settings'] - - -class LogStorageSettings(msrest.serialization.Model): - """(Deprecated. Please use LogSettings) Log storage settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param linked_service_name: Required. Log storage linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param path: The path to storage for storing detailed logs of activity execution. Type: string - (or Expression with resultType string). - :type path: object - :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or - Expression with resultType string). - :type log_level: object - :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or - Expression with resultType boolean). 
- :type enable_reliable_logging: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'path': {'key': 'path', 'type': 'object'}, - 'log_level': {'key': 'logLevel', 'type': 'object'}, - 'enable_reliable_logging': {'key': 'enableReliableLogging', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(LogStorageSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.linked_service_name = kwargs['linked_service_name'] - self.path = kwargs.get('path', None) - self.log_level = kwargs.get('log_level', None) - self.enable_reliable_logging = kwargs.get('enable_reliable_logging', None) - - -class LookupActivity(ExecutionActivity): - """Lookup activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param source: Required. Dataset-specific source properties, same as copy activity source. - :type source: ~azure.mgmt.datafactory.models.CopySource - :param dataset: Required. Lookup activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param first_row_only: Whether to return first row or all rows. Default value is true. Type: - boolean (or Expression with resultType boolean). - :type first_row_only: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'source': {'required': True}, - 'dataset': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - 'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(LookupActivity, self).__init__(**kwargs) - self.type = 'Lookup' # type: str - self.source = kwargs['source'] - self.dataset = kwargs['dataset'] - self.first_row_only = kwargs.get('first_row_only', None) - - -class MagentoLinkedService(LinkedService): - """Magento server linked service. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The URL of the Magento instance. (i.e. 192.168.222.110/magento3). - :type host: object - :param access_token: The access token from Magento. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MagentoLinkedService, self).__init__(**kwargs) - self.type = 'Magento' # type: str - self.host = kwargs['host'] - self.access_token = kwargs.get('access_token', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class MagentoObjectDataset(Dataset): - """Magento server dataset. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MagentoObjectDataset, self).__init__(**kwargs) - self.type = 'MagentoObject' # type: str - self.table_name = kwargs.get('table_name', None) - - -class MagentoSource(TabularSource): - """A copy activity Magento server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MagentoSource, self).__init__(**kwargs) - self.type = 'MagentoSource' # type: str - self.query = kwargs.get('query', None) - - -class ManagedIdentityCredential(Credential): - """Managed identity credential. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of credential.Constant filled by server. - :type type: str - :param description: Credential description. - :type description: str - :param annotations: List of tags that can be used for describing the Credential. - :type annotations: list[object] - :param resource_id: The resource id of user assigned managed identity. - :type resource_id: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'resource_id': {'key': 'typeProperties.resourceId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ManagedIdentityCredential, self).__init__(**kwargs) - self.type = 'ManagedIdentity' # type: str - self.resource_id = kwargs.get('resource_id', None) - - -class ManagedIntegrationRuntime(IntegrationRuntime): - """Managed integration runtime, including managed elastic and managed dedicated integration runtimes. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of integration runtime.Constant filled by server. Possible values - include: "Managed", "SelfHosted". - :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType - :param description: Integration runtime description. - :type description: str - :ivar state: Integration runtime state, only valid for managed dedicated integration runtime. 
- Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", - "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". - :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param managed_virtual_network: Managed Virtual Network reference. - :type managed_virtual_network: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkReference - :param compute_properties: The compute resource for managed integration runtime. - :type compute_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties - :param ssis_properties: SSIS properties for managed integration runtime. - :type ssis_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties - """ - - _validation = { - 'type': {'required': True}, - 'state': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'managed_virtual_network': {'key': 'managedVirtualNetwork', 'type': 'ManagedVirtualNetworkReference'}, - 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, - 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(ManagedIntegrationRuntime, self).__init__(**kwargs) - self.type = 'Managed' # type: str - self.state = None - self.managed_virtual_network = kwargs.get('managed_virtual_network', None) - self.compute_properties = kwargs.get('compute_properties', None) - self.ssis_properties = kwargs.get('ssis_properties', None) - - -class ManagedIntegrationRuntimeError(msrest.serialization.Model): - """Error definition for managed integration runtime. - - Variables are only populated by the server, and will be ignored when sending a request. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :ivar time: The time when the error occurred. - :vartype time: ~datetime.datetime - :ivar code: Error code. - :vartype code: str - :ivar parameters: Managed integration runtime error parameters. - :vartype parameters: list[str] - :ivar message: Error message. - :vartype message: str - """ - - _validation = { - 'time': {'readonly': True}, - 'code': {'readonly': True}, - 'parameters': {'readonly': True}, - 'message': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'time': {'key': 'time', 'type': 'iso-8601'}, - 'code': {'key': 'code', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '[str]'}, - 'message': {'key': 'message', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ManagedIntegrationRuntimeError, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.time = None - self.code = None - self.parameters = None - self.message = None - - -class ManagedIntegrationRuntimeNode(msrest.serialization.Model): - """Properties of integration runtime node. - - Variables are only populated by the server, and will be ignored when sending a request. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :ivar node_id: The managed integration runtime node id. 
- :vartype node_id: str - :ivar status: The managed integration runtime node status. Possible values include: "Starting", - "Available", "Recycling", "Unavailable". - :vartype status: str or ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNodeStatus - :param errors: The errors that occurred on this integration runtime node. - :type errors: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] - """ - - _validation = { - 'node_id': {'readonly': True}, - 'status': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'node_id': {'key': 'nodeId', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'errors': {'key': 'errors', 'type': '[ManagedIntegrationRuntimeError]'}, - } - - def __init__( - self, - **kwargs - ): - super(ManagedIntegrationRuntimeNode, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.node_id = None - self.status = None - self.errors = kwargs.get('errors', None) - - -class ManagedIntegrationRuntimeOperationResult(msrest.serialization.Model): - """Properties of managed integration runtime operation result. - - Variables are only populated by the server, and will be ignored when sending a request. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :ivar type: The operation type. Could be start or stop. - :vartype type: str - :ivar start_time: The start time of the operation. - :vartype start_time: ~datetime.datetime - :ivar result: The operation result. - :vartype result: str - :ivar error_code: The error code. - :vartype error_code: str - :ivar parameters: Managed integration runtime error parameters. - :vartype parameters: list[str] - :ivar activity_id: The activity id for the operation request. - :vartype activity_id: str - """ - - _validation = { - 'type': {'readonly': True}, - 'start_time': {'readonly': True}, - 'result': {'readonly': True}, - 'error_code': {'readonly': True}, - 'parameters': {'readonly': True}, - 'activity_id': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'result': {'key': 'result', 'type': 'str'}, - 'error_code': {'key': 'errorCode', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '[str]'}, - 'activity_id': {'key': 'activityId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = None - self.start_time = None - self.result = None - self.error_code = None - self.parameters = None - self.activity_id = None - - -class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): - """Managed integration runtime status. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of integration runtime.Constant filled by server. Possible values - include: "Managed", "SelfHosted". 
- :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType - :ivar data_factory_name: The data factory name which the integration runtime belong to. - :vartype data_factory_name: str - :ivar state: The state of integration runtime. Possible values include: "Initial", "Stopped", - "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", - "AccessDenied". - :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :ivar create_time: The time at which the integration runtime was created, in ISO8601 format. - :vartype create_time: ~datetime.datetime - :ivar nodes: The list of nodes for managed integration runtime. - :vartype nodes: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNode] - :ivar other_errors: The errors that occurred on this integration runtime. - :vartype other_errors: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] - :ivar last_operation: The last operation result that occurred on this integration runtime. - :vartype last_operation: - ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeOperationResult - """ - - _validation = { - 'type': {'required': True}, - 'data_factory_name': {'readonly': True}, - 'state': {'readonly': True}, - 'create_time': {'readonly': True}, - 'nodes': {'readonly': True}, - 'other_errors': {'readonly': True}, - 'last_operation': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, - 'nodes': {'key': 'typeProperties.nodes', 'type': '[ManagedIntegrationRuntimeNode]'}, - 'other_errors': {'key': 'typeProperties.otherErrors', 'type': '[ManagedIntegrationRuntimeError]'}, - 'last_operation': {'key': 'typeProperties.lastOperation', 'type': 'ManagedIntegrationRuntimeOperationResult'}, - } - - def __init__( - self, - **kwargs - ): - super(ManagedIntegrationRuntimeStatus, self).__init__(**kwargs) - self.type = 'Managed' # type: str - self.create_time = None - self.nodes = None - self.other_errors = None - self.last_operation = None - - -class ManagedPrivateEndpoint(msrest.serialization.Model): - """Properties of a managed private endpoint. - - Variables are only populated by the server, and will be ignored when sending a request. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param connection_state: The managed private endpoint connection state. - :type connection_state: ~azure.mgmt.datafactory.models.ConnectionStateProperties - :param fqdns: Fully qualified domain names. - :type fqdns: list[str] - :param group_id: The groupId to which the managed private endpoint is created. - :type group_id: str - :ivar is_reserved: Denotes whether the managed private endpoint is reserved. - :vartype is_reserved: bool - :param private_link_resource_id: The ARM resource ID of the resource to which the managed - private endpoint is created. - :type private_link_resource_id: str - :ivar provisioning_state: The managed private endpoint provisioning state. 
- :vartype provisioning_state: str - """ - - _validation = { - 'is_reserved': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connection_state': {'key': 'connectionState', 'type': 'ConnectionStateProperties'}, - 'fqdns': {'key': 'fqdns', 'type': '[str]'}, - 'group_id': {'key': 'groupId', 'type': 'str'}, - 'is_reserved': {'key': 'isReserved', 'type': 'bool'}, - 'private_link_resource_id': {'key': 'privateLinkResourceId', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ManagedPrivateEndpoint, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.connection_state = kwargs.get('connection_state', None) - self.fqdns = kwargs.get('fqdns', None) - self.group_id = kwargs.get('group_id', None) - self.is_reserved = None - self.private_link_resource_id = kwargs.get('private_link_resource_id', None) - self.provisioning_state = None - - -class ManagedPrivateEndpointListResponse(msrest.serialization.Model): - """A list of managed private endpoint resources. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of managed private endpoints. - :type value: list[~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[ManagedPrivateEndpointResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ManagedPrivateEndpointListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) - - -class ManagedPrivateEndpointResource(SubResource): - """Managed private endpoint resource type. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Managed private endpoint properties. - :type properties: ~azure.mgmt.datafactory.models.ManagedPrivateEndpoint - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'ManagedPrivateEndpoint'}, - } - - def __init__( - self, - **kwargs - ): - super(ManagedPrivateEndpointResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class ManagedVirtualNetwork(msrest.serialization.Model): - """A managed Virtual Network associated with the Azure Data Factory. - - Variables are only populated by the server, and will be ignored when sending a request. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. 
- :type additional_properties: dict[str, object] - :ivar v_net_id: Managed Virtual Network ID. - :vartype v_net_id: str - :ivar alias: Managed Virtual Network alias. - :vartype alias: str - """ - - _validation = { - 'v_net_id': {'readonly': True}, - 'alias': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'v_net_id': {'key': 'vNetId', 'type': 'str'}, - 'alias': {'key': 'alias', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ManagedVirtualNetwork, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.v_net_id = None - self.alias = None - - -class ManagedVirtualNetworkListResponse(msrest.serialization.Model): - """A list of managed Virtual Network resources. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of managed Virtual Networks. - :type value: list[~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[ManagedVirtualNetworkResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ManagedVirtualNetworkListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) - - -class ManagedVirtualNetworkReference(msrest.serialization.Model): - """Managed Virtual Network reference type. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Managed Virtual Network reference type. Default value: - "ManagedVirtualNetworkReference". - :vartype type: str - :param reference_name: Required. Reference ManagedVirtualNetwork name. - :type reference_name: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - } - - type = "ManagedVirtualNetworkReference" - - def __init__( - self, - **kwargs - ): - super(ManagedVirtualNetworkReference, self).__init__(**kwargs) - self.reference_name = kwargs['reference_name'] - - -class ManagedVirtualNetworkResource(SubResource): - """Managed Virtual Network resource type. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Managed Virtual Network properties. 
- :type properties: ~azure.mgmt.datafactory.models.ManagedVirtualNetwork - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'ManagedVirtualNetwork'}, - } - - def __init__( - self, - **kwargs - ): - super(ManagedVirtualNetworkResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class MappingDataFlow(DataFlow): - """Mapping data flow. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Type of data flow.Constant filled by server. - :type type: str - :param description: The description of the data flow. - :type description: str - :param annotations: List of tags that can be used for describing the data flow. - :type annotations: list[object] - :param folder: The folder that this data flow is in. If not specified, Data flow will appear at - the root level. - :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder - :param sources: List of sources in data flow. - :type sources: list[~azure.mgmt.datafactory.models.DataFlowSource] - :param sinks: List of sinks in data flow. - :type sinks: list[~azure.mgmt.datafactory.models.DataFlowSink] - :param transformations: List of transformations in data flow. - :type transformations: list[~azure.mgmt.datafactory.models.Transformation] - :param script: DataFlow script. - :type script: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, - 'sources': {'key': 'typeProperties.sources', 'type': '[DataFlowSource]'}, - 'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'}, - 'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'}, - 'script': {'key': 'typeProperties.script', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(MappingDataFlow, self).__init__(**kwargs) - self.type = 'MappingDataFlow' # type: str - self.sources = kwargs.get('sources', None) - self.sinks = kwargs.get('sinks', None) - self.transformations = kwargs.get('transformations', None) - self.script = kwargs.get('script', None) - - -class MariaDbLinkedService(LinkedService): - """MariaDB server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. 
Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MariaDbLinkedService, self).__init__(**kwargs) - self.type = 'MariaDB' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class MariaDbSource(TabularSource): - """A copy activity MariaDB server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MariaDbSource, self).__init__(**kwargs) - self.type = 'MariaDBSource' # type: str - self.query = kwargs.get('query', None) - - -class MariaDbTableDataset(Dataset): - """MariaDB server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MariaDbTableDataset, self).__init__(**kwargs) - self.type = 'MariaDBTable' # type: str - self.table_name = kwargs.get('table_name', None) - - -class MarketoLinkedService(LinkedService): - """Marketo server linked service. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param endpoint: Required. The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com). - :type endpoint: object - :param client_id: Required. The client Id of your Marketo service. - :type client_id: object - :param client_secret: The client secret of your Marketo service. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MarketoLinkedService, self).__init__(**kwargs) - self.type = 'Marketo' # type: str - self.endpoint = kwargs['endpoint'] - self.client_id = kwargs['client_id'] - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class MarketoObjectDataset(Dataset): - """Marketo server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MarketoObjectDataset, self).__init__(**kwargs) - self.type = 'MarketoObject' # type: str - self.table_name = kwargs.get('table_name', None) - - -class MarketoSource(TabularSource): - """A copy activity Marketo server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MarketoSource, self).__init__(**kwargs) - self.type = 'MarketoSource' # type: str - self.query = kwargs.get('query', None) - - -class MetadataItem(msrest.serialization.Model): - """Specify the name and value of custom metadata item. - - :param name: Metadata item key name. Type: string (or Expression with resultType string). - :type name: object - :param value: Metadata item value. Type: string (or Expression with resultType string). - :type value: object - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'object'}, - 'value': {'key': 'value', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MetadataItem, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.value = kwargs.get('value', None) - - -class MicrosoftAccessLinkedService(LinkedService): - """Microsoft Access linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The non-access credential portion of the connection string - as well as an optional encrypted credential. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the Microsoft Access as - ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with - resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string specified in driver- - specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MicrosoftAccessLinkedService, self).__init__(**kwargs) - self.type = 'MicrosoftAccess' # type: str - self.connection_string = kwargs['connection_string'] - self.authentication_type = kwargs.get('authentication_type', None) - self.credential = kwargs.get('credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class MicrosoftAccessSink(CopySink): - """A copy activity Microsoft Access sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param pre_copy_script: A query to execute before starting the copy. Type: string (or - Expression with resultType string). 
- :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MicrosoftAccessSink, self).__init__(**kwargs) - self.type = 'MicrosoftAccessSink' # type: str - self.pre_copy_script = kwargs.get('pre_copy_script', None) - - -class MicrosoftAccessSource(CopySource): - """A copy activity source for Microsoft Access. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query: Database query. Type: string (or Expression with resultType string). - :type query: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MicrosoftAccessSource, self).__init__(**kwargs) - self.type = 'MicrosoftAccessSource' # type: str - self.query = kwargs.get('query', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class MicrosoftAccessTableDataset(Dataset): - """The Microsoft Access table dataset. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType - string). - :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MicrosoftAccessTableDataset, self).__init__(**kwargs) - self.type = 'MicrosoftAccessTable' # type: str - self.table_name = kwargs.get('table_name', None) - - -class MongoDbAtlasCollectionDataset(Dataset): - """The MongoDB Atlas database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. 
- :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param collection: Required. The collection name of the MongoDB Atlas database. Type: string - (or Expression with resultType string). - :type collection: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MongoDbAtlasCollectionDataset, self).__init__(**kwargs) - self.type = 'MongoDbAtlasCollection' # type: str - self.collection = kwargs['collection'] - - -class MongoDbAtlasLinkedService(LinkedService): - """Linked service for MongoDB Atlas data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The MongoDB Atlas connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param database: Required. The name of the MongoDB Atlas database that you want to access. - Type: string (or Expression with resultType string). 
- :type database: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MongoDbAtlasLinkedService, self).__init__(**kwargs) - self.type = 'MongoDbAtlas' # type: str - self.connection_string = kwargs['connection_string'] - self.database = kwargs['database'] - - -class MongoDbAtlasSink(CopySink): - """A copy activity MongoDB Atlas sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param write_behavior: Specifies whether the document with same key to be overwritten (upsert) - rather than throw exception (insert). The default value is "insert". Type: string (or - Expression with resultType string). Type: string (or Expression with resultType string). 
- :type write_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MongoDbAtlasSink, self).__init__(**kwargs) - self.type = 'MongoDbAtlasSink' # type: str - self.write_behavior = kwargs.get('write_behavior', None) - - -class MongoDbAtlasSource(CopySource): - """A copy activity source for a MongoDB Atlas database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param filter: Specifies selection filter using query operators. To return all documents in a - collection, omit this parameter or pass an empty document ({}). Type: string (or Expression - with resultType string). - :type filter: object - :param cursor_methods: Cursor methods for Mongodb query. - :type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties - :param batch_size: Specifies the number of documents to return in each batch of the response - from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user - or the application. This property's main purpose is to avoid hit the limitation of response - size. Type: integer (or Expression with resultType integer). - :type batch_size: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'filter': {'key': 'filter', 'type': 'object'}, - 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, - 'batch_size': {'key': 'batchSize', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MongoDbAtlasSource, self).__init__(**kwargs) - self.type = 'MongoDbAtlasSource' # type: str - self.filter = kwargs.get('filter', None) - self.cursor_methods = kwargs.get('cursor_methods', None) - self.batch_size = kwargs.get('batch_size', None) - self.query_timeout = kwargs.get('query_timeout', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class MongoDbCollectionDataset(Dataset): - """The MongoDB database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param collection_name: Required. The table name of the MongoDB database. Type: string (or - Expression with resultType string). 
- :type collection_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MongoDbCollectionDataset, self).__init__(**kwargs) - self.type = 'MongoDbCollection' # type: str - self.collection_name = kwargs['collection_name'] - - -class MongoDbCursorMethodsProperties(msrest.serialization.Model): - """Cursor methods for Mongodb query. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param project: Specifies the fields to return in the documents that match the query filter. To - return all fields in the matching documents, omit this parameter. Type: string (or Expression - with resultType string). - :type project: object - :param sort: Specifies the order in which the query returns matching documents. Type: string - (or Expression with resultType string). Type: string (or Expression with resultType string). - :type sort: object - :param skip: Specifies the how many documents skipped and where MongoDB begins returning - results. This approach may be useful in implementing paginated results. Type: integer (or - Expression with resultType integer). - :type skip: object - :param limit: Specifies the maximum number of documents the server returns. limit() is - analogous to the LIMIT statement in a SQL database. Type: integer (or Expression with - resultType integer). - :type limit: object - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'project': {'key': 'project', 'type': 'object'}, - 'sort': {'key': 'sort', 'type': 'object'}, - 'skip': {'key': 'skip', 'type': 'object'}, - 'limit': {'key': 'limit', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.project = kwargs.get('project', None) - self.sort = kwargs.get('sort', None) - self.skip = kwargs.get('skip', None) - self.limit = kwargs.get('limit', None) - - -class MongoDbLinkedService(LinkedService): - """Linked service for MongoDb data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param server: Required. The IP address or server name of the MongoDB server. Type: string (or - Expression with resultType string). - :type server: object - :param authentication_type: The authentication type to be used to connect to the MongoDB - database. Possible values include: "Basic", "Anonymous". - :type authentication_type: str or ~azure.mgmt.datafactory.models.MongoDbAuthenticationType - :param database_name: Required. The name of the MongoDB database that you want to access. Type: - string (or Expression with resultType string). - :type database_name: object - :param username: Username for authentication. Type: string (or Expression with resultType - string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param auth_source: Database to verify the username and password. Type: string (or Expression - with resultType string). - :type auth_source: object - :param port: The TCP port number that the MongoDB server uses to listen for client connections. - The default value is 27017. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. Type: boolean (or Expression with resultType boolean). - :type enable_ssl: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. Type: boolean (or Expression with resultType boolean). - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'database_name': {'key': 'typeProperties.databaseName', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'auth_source': {'key': 'typeProperties.authSource', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MongoDbLinkedService, self).__init__(**kwargs) - self.type = 'MongoDb' # type: str - self.server = kwargs['server'] - self.authentication_type = kwargs.get('authentication_type', None) - self.database_name = kwargs['database_name'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.auth_source = kwargs.get('auth_source', None) - self.port = kwargs.get('port', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class MongoDbSource(CopySource): - """A copy activity source for a MongoDB database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression - with resultType string). - :type query: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MongoDbSource, self).__init__(**kwargs) - self.type = 'MongoDbSource' # type: str - self.query = kwargs.get('query', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class MongoDbV2CollectionDataset(Dataset): - """The MongoDB database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param collection: Required. The collection name of the MongoDB database. Type: string (or - Expression with resultType string). - :type collection: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MongoDbV2CollectionDataset, self).__init__(**kwargs) - self.type = 'MongoDbV2Collection' # type: str - self.collection = kwargs['collection'] - - -class MongoDbV2LinkedService(LinkedService): - """Linked service for MongoDB data source. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The MongoDB connection string. Type: string, SecureString - or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param database: Required. The name of the MongoDB database that you want to access. Type: - string (or Expression with resultType string). - :type database: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MongoDbV2LinkedService, self).__init__(**kwargs) - self.type = 'MongoDbV2' # type: str - self.connection_string = kwargs['connection_string'] - self.database = kwargs['database'] - - -class MongoDbV2Sink(CopySink): - """A copy activity MongoDB sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: object - :param write_behavior: Specifies whether the document with same key to be overwritten (upsert) - rather than throw exception (insert). The default value is "insert". Type: string (or - Expression with resultType string). Type: string (or Expression with resultType string). - :type write_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MongoDbV2Sink, self).__init__(**kwargs) - self.type = 'MongoDbV2Sink' # type: str - self.write_behavior = kwargs.get('write_behavior', None) - - -class MongoDbV2Source(CopySource): - """A copy activity source for a MongoDB database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param filter: Specifies selection filter using query operators. To return all documents in a - collection, omit this parameter or pass an empty document ({}). Type: string (or Expression - with resultType string). - :type filter: object - :param cursor_methods: Cursor methods for Mongodb query. - :type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties - :param batch_size: Specifies the number of documents to return in each batch of the response - from MongoDB instance. In most cases, modifying the batch size will not affect the user or the - application. This property's main purpose is to avoid hit the limitation of response size. - Type: integer (or Expression with resultType integer). - :type batch_size: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'filter': {'key': 'filter', 'type': 'object'}, - 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, - 'batch_size': {'key': 'batchSize', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MongoDbV2Source, self).__init__(**kwargs) - self.type = 'MongoDbV2Source' # type: str - self.filter = kwargs.get('filter', None) - self.cursor_methods = kwargs.get('cursor_methods', None) - self.batch_size = kwargs.get('batch_size', None) - self.query_timeout = kwargs.get('query_timeout', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class MySqlLinkedService(LinkedService): - """Linked service for MySQL data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MySqlLinkedService, self).__init__(**kwargs) - self.type = 'MySql' # type: str - self.connection_string = kwargs['connection_string'] - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class MySqlSource(TabularSource): - """A copy activity source for MySQL databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: Database query. Type: string (or Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MySqlSource, self).__init__(**kwargs) - self.type = 'MySqlSource' # type: str - self.query = kwargs.get('query', None) - - -class MySqlTableDataset(Dataset): - """The MySQL table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The MySQL table name. Type: string (or Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MySqlTableDataset, self).__init__(**kwargs) - self.type = 'MySqlTable' # type: str - self.table_name = kwargs.get('table_name', None) - - -class NetezzaLinkedService(LinkedService): - """Netezza linked service. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(NetezzaLinkedService, self).__init__(**kwargs) - self.type = 'Netezza' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class NetezzaPartitionSettings(msrest.serialization.Model): - """The settings that will be leveraged for Netezza source partitioning. - - :param partition_column_name: The name of the column in integer type that will be used for - proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in partitionColumnName that - will be used for proceeding range partitioning. Type: string (or Expression with resultType - string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in partitionColumnName that - will be used for proceeding range partitioning. Type: string (or Expression with resultType - string). 
- :type partition_lower_bound: object - """ - - _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(NetezzaPartitionSettings, self).__init__(**kwargs) - self.partition_column_name = kwargs.get('partition_column_name', None) - self.partition_upper_bound = kwargs.get('partition_upper_bound', None) - self.partition_lower_bound = kwargs.get('partition_lower_bound', None) - - -class NetezzaSource(TabularSource): - """A copy activity Netezza source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). - :type query: object - :param partition_option: The partition mechanism that will be used for Netezza read in - parallel. Possible values include: "None", "DataSlice", "DynamicRange". - :type partition_option: object - :param partition_settings: The settings that will be leveraged for Netezza source partitioning. 
- :type partition_settings: ~azure.mgmt.datafactory.models.NetezzaPartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(NetezzaSource, self).__init__(**kwargs) - self.type = 'NetezzaSource' # type: str - self.query = kwargs.get('query', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) - - -class NetezzaTableDataset(Dataset): - """Netezza dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of the Netezza. Type: string (or Expression with resultType - string). - :type table: object - :param schema_type_properties_schema: The schema name of the Netezza. Type: string (or - Expression with resultType string). 
- :type schema_type_properties_schema: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(NetezzaTableDataset, self).__init__(**kwargs) - self.type = 'NetezzaTable' # type: str - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - - -class ODataLinkedService(LinkedService): - """Open Data Protocol (OData) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. The URL of the OData service endpoint. Type: string (or Expression with - resultType string). - :type url: object - :param authentication_type: Type of authentication used to connect to the OData service. - Possible values include: "Basic", "Anonymous", "Windows", "AadServicePrincipal", - "ManagedServiceIdentity". - :type authentication_type: str or ~azure.mgmt.datafactory.models.ODataAuthenticationType - :param user_name: User name of the OData service. Type: string (or Expression with resultType - string). - :type user_name: object - :param password: Password of the OData service. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param auth_headers: The additional HTTP headers in the request to RESTful API used for - authorization. Type: object (or Expression with resultType object). - :type auth_headers: object - :param tenant: Specify the tenant information (domain name or tenant ID) under which your - application resides. Type: string (or Expression with resultType string). - :type tenant: object - :param service_principal_id: Specify the application id of your application registered in Azure - Active Directory. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. 
Allowed
- values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data
- factory regions’ cloud type. Type: string (or Expression with resultType string).
- :type azure_cloud_type: object
- :param aad_resource_id: Specify the resource you are requesting authorization to use Directory.
- Type: string (or Expression with resultType string).
- :type aad_resource_id: object
- :param aad_service_principal_credential_type: Specify the credential type (key or cert) that is
- used for the service principal. Possible values include: "ServicePrincipalKey", "ServicePrincipalCert".
- :type aad_service_principal_credential_type: str or
- ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType
- :param service_principal_key: Specify the secret of your application registered in Azure Active
- Directory. Type: string (or Expression with resultType string).
- :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
- :param service_principal_embedded_cert: Specify the base64 encoded certificate of your
- application registered in Azure Active Directory. Type: string (or Expression with resultType
- string).
- :type service_principal_embedded_cert: ~azure.mgmt.datafactory.models.SecretBase
- :param service_principal_embedded_cert_password: Specify the password of your certificate if
- your certificate has a password and you are using AadServicePrincipal authentication. Type:
- string (or Expression with resultType string).
- :type service_principal_embedded_cert_password: ~azure.mgmt.datafactory.models.SecretBase
- :param encrypted_credential: The encrypted credential used for authentication. Credentials are
- encrypted using the integration runtime credential manager. Type: string (or Expression with
- resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'auth_headers': {'key': 'typeProperties.authHeaders', 'type': 'object'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, - 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ODataLinkedService, self).__init__(**kwargs) - self.type = 'OData' # type: str - self.url = kwargs['url'] - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.auth_headers = kwargs.get('auth_headers', None) - self.tenant = kwargs.get('tenant', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.azure_cloud_type = kwargs.get('azure_cloud_type', None) - self.aad_resource_id = kwargs.get('aad_resource_id', None) - self.aad_service_principal_credential_type = kwargs.get('aad_service_principal_credential_type', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.service_principal_embedded_cert = kwargs.get('service_principal_embedded_cert', None) - self.service_principal_embedded_cert_password = kwargs.get('service_principal_embedded_cert_password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class ODataResourceDataset(Dataset): - """The Open Data Protocol (OData) resource dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. 
- :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param path: The OData resource path. Type: string (or Expression with resultType string). - :type path: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ODataResourceDataset, self).__init__(**kwargs) - self.type = 'ODataResource' # type: str - self.path = kwargs.get('path', None) - - -class ODataSource(CopySource): - """A copy activity source for OData source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType - string). - :type query: object - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout - to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string - (or Expression with resultType string), pattern: - ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. 
Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ODataSource, self).__init__(**kwargs) - self.type = 'ODataSource' # type: str - self.query = kwargs.get('query', None) - self.http_request_timeout = kwargs.get('http_request_timeout', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class OdbcLinkedService(LinkedService): - """Open Database Connectivity (ODBC) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The non-access credential portion of the connection string - as well as an optional encrypted credential. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the ODBC data store. - Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string specified in driver- - specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(OdbcLinkedService, self).__init__(**kwargs) - self.type = 'Odbc' # type: str - self.connection_string = kwargs['connection_string'] - self.authentication_type = kwargs.get('authentication_type', None) - self.credential = kwargs.get('credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class OdbcSink(CopySink): - """A copy activity ODBC sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param pre_copy_script: A query to execute before starting the copy. Type: string (or - Expression with resultType string). 
- :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(OdbcSink, self).__init__(**kwargs) - self.type = 'OdbcSink' # type: str - self.pre_copy_script = kwargs.get('pre_copy_script', None) - - -class OdbcSource(TabularSource): - """A copy activity source for ODBC databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: Database query. Type: string (or Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(OdbcSource, self).__init__(**kwargs) - self.type = 'OdbcSource' # type: str - self.query = kwargs.get('query', None) - - -class OdbcTableDataset(Dataset): - """The ODBC table dataset. 
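# Illustrative sketch only, not part of the original patch: how the ODBC models
# documented above could be instantiated. It assumes these generated classes are
# importable from azure.mgmt.datafactory.models, as the docstrings reference, and
# that SecureString (not shown in this hunk) exists in the same package. All
# connection values and credentials below are placeholders.
from azure.mgmt.datafactory.models import OdbcLinkedService, OdbcSource, SecureString

# Linked service: connection_string is the only required type property.
odbc_ls = OdbcLinkedService(
    connection_string="Driver={ODBC Driver 17 for SQL Server};Server=example;Database=example;",
    authentication_type="Basic",
    user_name="example_user",
    password=SecureString(value="example-placeholder"),  # placeholder secret
)

# Copy-activity source from the same family of models.
odbc_source = OdbcSource(query="SELECT * FROM dbo.Orders", query_timeout="02:00:00")

print(odbc_ls.type, odbc_source.type)  # Odbc OdbcSource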
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The ODBC table name. Type: string (or Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(OdbcTableDataset, self).__init__(**kwargs) - self.type = 'OdbcTable' # type: str - self.table_name = kwargs.get('table_name', None) - - -class Office365Dataset(Dataset): - """The Office365 account. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. 
- :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: Required. Name of the dataset to extract from Office 365. Type: string (or - Expression with resultType string). - :type table_name: object - :param predicate: A predicate expression that can be used to filter the specific rows to - extract from Office 365. Type: string (or Expression with resultType string). - :type predicate: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'table_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(Office365Dataset, self).__init__(**kwargs) - self.type = 'Office365Table' # type: str - self.table_name = kwargs['table_name'] - self.predicate = kwargs.get('predicate', None) - - -class Office365LinkedService(LinkedService): - """Office365 linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param office365_tenant_id: Required. Azure tenant ID to which the Office 365 account belongs. - Type: string (or Expression with resultType string). - :type office365_tenant_id: object - :param service_principal_tenant_id: Required. Specify the tenant information under which your - Azure AD web application resides. Type: string (or Expression with resultType string). - :type service_principal_tenant_id: object - :param service_principal_id: Required. Specify the application's client ID. Type: string (or - Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. Specify the application's key. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'office365_tenant_id': {'required': True}, - 'service_principal_tenant_id': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, - 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(Office365LinkedService, self).__init__(**kwargs) - self.type = 'Office365' # type: str - self.office365_tenant_id = kwargs['office365_tenant_id'] - self.service_principal_tenant_id = kwargs['service_principal_tenant_id'] - self.service_principal_id = kwargs['service_principal_id'] - self.service_principal_key = kwargs['service_principal_key'] - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class Office365Source(CopySource): - """A copy activity source for an Office 365 service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param allowed_groups: The groups containing all the users. Type: array of strings (or - Expression with resultType array of strings). - :type allowed_groups: object - :param user_scope_filter_uri: The user scope uri. Type: string (or Expression with resultType - string). - :type user_scope_filter_uri: object - :param date_filter_column: The Column to apply the :code:`` and - :code:``. Type: string (or Expression with resultType string). - :type date_filter_column: object - :param start_time: Start time of the requested range for this dataset. Type: string (or - Expression with resultType string). - :type start_time: object - :param end_time: End time of the requested range for this dataset. Type: string (or Expression - with resultType string). 
- :type end_time: object - :param output_columns: The columns to be read out from the Office 365 table. Type: array of - objects (or Expression with resultType array of objects). Example: [ { "name": "Id" }, { - "name": "CreatedDateTime" } ]. - :type output_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, - 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, - 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, - 'start_time': {'key': 'startTime', 'type': 'object'}, - 'end_time': {'key': 'endTime', 'type': 'object'}, - 'output_columns': {'key': 'outputColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(Office365Source, self).__init__(**kwargs) - self.type = 'Office365Source' # type: str - self.allowed_groups = kwargs.get('allowed_groups', None) - self.user_scope_filter_uri = kwargs.get('user_scope_filter_uri', None) - self.date_filter_column = kwargs.get('date_filter_column', None) - self.start_time = kwargs.get('start_time', None) - self.end_time = kwargs.get('end_time', None) - self.output_columns = kwargs.get('output_columns', None) - - -class Operation(msrest.serialization.Model): - """Azure Data Factory API operation definition. - - :param name: Operation name: {provider}/{resource}/{operation}. - :type name: str - :param origin: The intended executor of the operation. - :type origin: str - :param display: Metadata associated with the operation. - :type display: ~azure.mgmt.datafactory.models.OperationDisplay - :param service_specification: Details about a service operation. - :type service_specification: ~azure.mgmt.datafactory.models.OperationServiceSpecification - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'origin': {'key': 'origin', 'type': 'str'}, - 'display': {'key': 'display', 'type': 'OperationDisplay'}, - 'service_specification': {'key': 'properties.serviceSpecification', 'type': 'OperationServiceSpecification'}, - } - - def __init__( - self, - **kwargs - ): - super(Operation, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.origin = kwargs.get('origin', None) - self.display = kwargs.get('display', None) - self.service_specification = kwargs.get('service_specification', None) - - -class OperationDisplay(msrest.serialization.Model): - """Metadata associated with the operation. - - :param description: The description of the operation. - :type description: str - :param provider: The name of the provider. - :type provider: str - :param resource: The name of the resource type on which the operation is performed. - :type resource: str - :param operation: The type of operation: get, read, delete, etc. 
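# Illustrative sketch only, not part of the original patch: a minimal use of the
# Office365Source copy source defined above. The group id, column names, and time
# window are made-up placeholders; output_columns mirrors the docstring's example.
from azure.mgmt.datafactory.models import Office365Source

o365_source = Office365Source(
    allowed_groups=["00000000-0000-0000-0000-000000000000"],  # placeholder group id
    date_filter_column="CreatedDateTime",
    start_time="2021-01-01T00:00:00Z",
    end_time="2021-02-01T00:00:00Z",
    output_columns=[{"name": "Id"}, {"name": "CreatedDateTime"}],
)
print(o365_source.type)  # Office365Source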
- :type operation: str - """ - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(OperationDisplay, self).__init__(**kwargs) - self.description = kwargs.get('description', None) - self.provider = kwargs.get('provider', None) - self.resource = kwargs.get('resource', None) - self.operation = kwargs.get('operation', None) - - -class OperationListResponse(msrest.serialization.Model): - """A list of operations that can be performed by the Data Factory service. - - :param value: List of Data Factory operations supported by the Data Factory resource provider. - :type value: list[~azure.mgmt.datafactory.models.Operation] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[Operation]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(OperationListResponse, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) - - -class OperationLogSpecification(msrest.serialization.Model): - """Details about an operation related to logs. - - :param name: The name of the log category. - :type name: str - :param display_name: Localized display name. - :type display_name: str - :param blob_duration: Blobs created in the customer storage account, per hour. - :type blob_duration: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(OperationLogSpecification, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.display_name = kwargs.get('display_name', None) - self.blob_duration = kwargs.get('blob_duration', None) - - -class OperationMetricAvailability(msrest.serialization.Model): - """Defines how often data for a metric becomes available. - - :param time_grain: The granularity for the metric. - :type time_grain: str - :param blob_duration: Blob created in the customer storage account, per hour. - :type blob_duration: str - """ - - _attribute_map = { - 'time_grain': {'key': 'timeGrain', 'type': 'str'}, - 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(OperationMetricAvailability, self).__init__(**kwargs) - self.time_grain = kwargs.get('time_grain', None) - self.blob_duration = kwargs.get('blob_duration', None) - - -class OperationMetricDimension(msrest.serialization.Model): - """Defines the metric dimension. - - :param name: The name of the dimension for the metric. - :type name: str - :param display_name: The display name of the metric dimension. - :type display_name: str - :param to_be_exported_for_shoebox: Whether the dimension should be exported to Azure Monitor. 
- :type to_be_exported_for_shoebox: bool - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'to_be_exported_for_shoebox': {'key': 'toBeExportedForShoebox', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - super(OperationMetricDimension, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.display_name = kwargs.get('display_name', None) - self.to_be_exported_for_shoebox = kwargs.get('to_be_exported_for_shoebox', None) - - -class OperationMetricSpecification(msrest.serialization.Model): - """Details about an operation related to metrics. - - :param name: The name of the metric. - :type name: str - :param display_name: Localized display name of the metric. - :type display_name: str - :param display_description: The description of the metric. - :type display_description: str - :param unit: The unit that the metric is measured in. - :type unit: str - :param aggregation_type: The type of metric aggregation. - :type aggregation_type: str - :param enable_regional_mdm_account: Whether or not the service is using regional MDM accounts. - :type enable_regional_mdm_account: str - :param source_mdm_account: The name of the MDM account. - :type source_mdm_account: str - :param source_mdm_namespace: The name of the MDM namespace. - :type source_mdm_namespace: str - :param availabilities: Defines how often data for metrics becomes available. - :type availabilities: list[~azure.mgmt.datafactory.models.OperationMetricAvailability] - :param dimensions: Defines the metric dimension. - :type dimensions: list[~azure.mgmt.datafactory.models.OperationMetricDimension] - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'display_description': {'key': 'displayDescription', 'type': 'str'}, - 'unit': {'key': 'unit', 'type': 'str'}, - 'aggregation_type': {'key': 'aggregationType', 'type': 'str'}, - 'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'str'}, - 'source_mdm_account': {'key': 'sourceMdmAccount', 'type': 'str'}, - 'source_mdm_namespace': {'key': 'sourceMdmNamespace', 'type': 'str'}, - 'availabilities': {'key': 'availabilities', 'type': '[OperationMetricAvailability]'}, - 'dimensions': {'key': 'dimensions', 'type': '[OperationMetricDimension]'}, - } - - def __init__( - self, - **kwargs - ): - super(OperationMetricSpecification, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.display_name = kwargs.get('display_name', None) - self.display_description = kwargs.get('display_description', None) - self.unit = kwargs.get('unit', None) - self.aggregation_type = kwargs.get('aggregation_type', None) - self.enable_regional_mdm_account = kwargs.get('enable_regional_mdm_account', None) - self.source_mdm_account = kwargs.get('source_mdm_account', None) - self.source_mdm_namespace = kwargs.get('source_mdm_namespace', None) - self.availabilities = kwargs.get('availabilities', None) - self.dimensions = kwargs.get('dimensions', None) - - -class OperationServiceSpecification(msrest.serialization.Model): - """Details about a service operation. - - :param log_specifications: Details about operations related to logs. - :type log_specifications: list[~azure.mgmt.datafactory.models.OperationLogSpecification] - :param metric_specifications: Details about operations related to metrics. 
- :type metric_specifications: list[~azure.mgmt.datafactory.models.OperationMetricSpecification] - """ - - _attribute_map = { - 'log_specifications': {'key': 'logSpecifications', 'type': '[OperationLogSpecification]'}, - 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecification]'}, - } - - def __init__( - self, - **kwargs - ): - super(OperationServiceSpecification, self).__init__(**kwargs) - self.log_specifications = kwargs.get('log_specifications', None) - self.metric_specifications = kwargs.get('metric_specifications', None) - - -class OracleCloudStorageLinkedService(LinkedService): - """Linked service for Oracle Cloud Storage. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param access_key_id: The access key identifier of the Oracle Cloud Storage Identity and Access - Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: object - :param secret_access_key: The secret access key of the Oracle Cloud Storage Identity and Access - Management (IAM) user. - :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_url: This value specifies the endpoint to access with the Oracle Cloud Storage - Connector. This is an optional property; change it only if you want to try a different service - endpoint or want to switch between https and http. Type: string (or Expression with resultType - string). - :type service_url: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, - 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(OracleCloudStorageLinkedService, self).__init__(**kwargs) - self.type = 'OracleCloudStorage' # type: str - self.access_key_id = kwargs.get('access_key_id', None) - self.secret_access_key = kwargs.get('secret_access_key', None) - self.service_url = kwargs.get('service_url', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class OracleCloudStorageLocation(DatasetLocation): - """The location of Oracle Cloud Storage dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType - string). - :type file_name: object - :param bucket_name: Specify the bucketName of Oracle Cloud Storage. Type: string (or Expression - with resultType string). - :type bucket_name: object - :param version: Specify the version of Oracle Cloud Storage. Type: string (or Expression with - resultType string). - :type version: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'bucket_name': {'key': 'bucketName', 'type': 'object'}, - 'version': {'key': 'version', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(OracleCloudStorageLocation, self).__init__(**kwargs) - self.type = 'OracleCloudStorageLocation' # type: str - self.bucket_name = kwargs.get('bucket_name', None) - self.version = kwargs.get('version', None) - - -class OracleCloudStorageReadSettings(StoreReadSettings): - """Oracle Cloud Storage read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). 
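# Illustrative sketch only, not part of the original patch: the Oracle Cloud
# Storage linked service and dataset location defined above. SecureString is
# assumed to exist in the same models package (it is not defined in this hunk),
# and every value shown is a placeholder.
from azure.mgmt.datafactory.models import (
    OracleCloudStorageLinkedService,
    OracleCloudStorageLocation,
    SecureString,
)

ocs_ls = OracleCloudStorageLinkedService(
    access_key_id="example-access-key-id",
    secret_access_key=SecureString(value="example-placeholder"),
    service_url="https://example.compat.objectstorage.example-region.oraclecloud.com",
)
ocs_location = OracleCloudStorageLocation(
    bucket_name="example-bucket",
    folder_path="raw/orders",
    file_name="orders.csv",
)
print(ocs_ls.type, ocs_location.type)  # OracleCloudStorage OracleCloudStorageLocation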
- :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param recursive: If true, files under the folder path will be read recursively. Default is - true. Type: boolean (or Expression with resultType boolean). - :type recursive: object - :param wildcard_folder_path: Oracle Cloud Storage wildcardFolderPath. Type: string (or - Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Oracle Cloud Storage wildcardFileName. Type: string (or Expression - with resultType string). - :type wildcard_file_name: object - :param prefix: The prefix filter for the Oracle Cloud Storage object name. Type: string (or - Expression with resultType string). - :type prefix: object - :param file_list_path: Point to a text file that lists each file (relative path to the path - configured in the dataset) that you want to copy. Type: string (or Expression with resultType - string). - :type file_list_path: object - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: - string (or Expression with resultType string). - :type partition_root_path: object - :param delete_files_after_completion: Indicates whether the source files need to be deleted - after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object - :param modified_datetime_start: The start of file's modified datetime. Type: string (or - Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression - with resultType string). 
- :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'prefix': {'key': 'prefix', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(OracleCloudStorageReadSettings, self).__init__(**kwargs) - self.type = 'OracleCloudStorageReadSettings' # type: str - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.prefix = kwargs.get('prefix', None) - self.file_list_path = kwargs.get('file_list_path', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.partition_root_path = kwargs.get('partition_root_path', None) - self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - - -class OracleLinkedService(LinkedService): - """Oracle database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
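# Illustrative sketch only, not part of the original patch: the Oracle Cloud
# Storage read settings defined above, with placeholder wildcard and datetime values.
from azure.mgmt.datafactory.models import OracleCloudStorageReadSettings

ocs_read = OracleCloudStorageReadSettings(
    recursive=True,
    wildcard_folder_path="raw/*",
    wildcard_file_name="*.csv",
    enable_partition_discovery=False,
    modified_datetime_start="2021-01-01T00:00:00Z",
)
print(ocs_read.type)  # OracleCloudStorageReadSettings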
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(OracleLinkedService, self).__init__(**kwargs) - self.type = 'Oracle' # type: str - self.connection_string = kwargs['connection_string'] - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class OraclePartitionSettings(msrest.serialization.Model): - """The settings that will be leveraged for Oracle source partitioning. - - :param partition_names: Names of the physical partitions of Oracle table. - :type partition_names: object - :param partition_column_name: The name of the column in integer type that will be used for - proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in partitionColumnName that - will be used for proceeding range partitioning. Type: string (or Expression with resultType - string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in partitionColumnName that - will be used for proceeding range partitioning. Type: string (or Expression with resultType - string). - :type partition_lower_bound: object - """ - - _attribute_map = { - 'partition_names': {'key': 'partitionNames', 'type': 'object'}, - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(OraclePartitionSettings, self).__init__(**kwargs) - self.partition_names = kwargs.get('partition_names', None) - self.partition_column_name = kwargs.get('partition_column_name', None) - self.partition_upper_bound = kwargs.get('partition_upper_bound', None) - self.partition_lower_bound = kwargs.get('partition_lower_bound', None) - - -class OracleServiceCloudLinkedService(LinkedService): - """Oracle Service Cloud linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
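# Illustrative sketch only, not part of the original patch: the Oracle linked
# service defined above requires only a connection string; the value here is a
# placeholder.
from azure.mgmt.datafactory.models import OracleLinkedService

oracle_ls = OracleLinkedService(
    connection_string="Host=example-host;Port=1521;Sid=example;User Id=example_user;",
)
print(oracle_ls.type)  # Oracle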
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The URL of the Oracle Service Cloud instance. - :type host: object - :param username: Required. The user name that you use to access Oracle Service Cloud server. - :type username: object - :param password: Required. The password corresponding to the user name that you provided in the - username key. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. Type: boolean (or Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. Type: boolean (or Expression with resultType - boolean). - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'username': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(OracleServiceCloudLinkedService, self).__init__(**kwargs) - self.type = 'OracleServiceCloud' # type: str - self.host = kwargs['host'] - self.username = kwargs['username'] - self.password = kwargs['password'] - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class OracleServiceCloudObjectDataset(Dataset): - """Oracle Service Cloud dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. 
- :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(OracleServiceCloudObjectDataset, self).__init__(**kwargs) - self.type = 'OracleServiceCloudObject' # type: str - self.table_name = kwargs.get('table_name', None) - - -class OracleServiceCloudSource(TabularSource): - """A copy activity Oracle Service Cloud source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(OracleServiceCloudSource, self).__init__(**kwargs) - self.type = 'OracleServiceCloudSource' # type: str - self.query = kwargs.get('query', None) - - -class OracleSink(CopySink): - """A copy activity Oracle sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType - string). 
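# Illustrative sketch only, not part of the original patch: a minimal Oracle
# Service Cloud copy source as defined above; the query is a placeholder.
from azure.mgmt.datafactory.models import OracleServiceCloudSource

osc_source = OracleServiceCloudSource(
    query="SELECT * FROM incidents",  # placeholder query
    query_timeout="02:00:00",
)
print(osc_source.type)  # OracleServiceCloudSource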
- :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(OracleSink, self).__init__(**kwargs) - self.type = 'OracleSink' # type: str - self.pre_copy_script = kwargs.get('pre_copy_script', None) - - -class OracleSource(CopySource): - """A copy activity Oracle source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType - string). - :type oracle_reader_query: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param partition_option: The partition mechanism that will be used for Oracle read in parallel. - Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: object - :param partition_settings: The settings that will be leveraged for Oracle source partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.OraclePartitionSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(OracleSource, self).__init__(**kwargs) - self.type = 'OracleSource' # type: str - self.oracle_reader_query = kwargs.get('oracle_reader_query', None) - self.query_timeout = kwargs.get('query_timeout', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class OracleTableDataset(Dataset): - """The on-premises Oracle database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The schema name of the on-premises Oracle database. Type: - string (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of the on-premises Oracle database. Type: string (or Expression - with resultType string). 
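# Illustrative sketch only, not part of the original patch: combining the
# OracleSource defined above with the OraclePartitionSettings model defined
# earlier in this file for a dynamic-range parallel read. Table and column
# names are placeholders.
from azure.mgmt.datafactory.models import OraclePartitionSettings, OracleSource

oracle_source = OracleSource(
    oracle_reader_query="SELECT * FROM SALES.ORDERS",
    partition_option="DynamicRange",
    partition_settings=OraclePartitionSettings(
        partition_column_name="ORDER_ID",
        partition_lower_bound="1",
        partition_upper_bound="1000000",
    ),
)
print(oracle_source.type)  # OracleSource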
- :type table: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(OracleTableDataset, self).__init__(**kwargs) - self.type = 'OracleTable' # type: str - self.table_name = kwargs.get('table_name', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - self.table = kwargs.get('table', None) - - -class OrcDataset(Dataset): - """ORC dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the ORC data storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param orc_compression_codec: The data orcCompressionCodec. Type: string (or Expression with - resultType string). 
- :type orc_compression_codec: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(OrcDataset, self).__init__(**kwargs) - self.type = 'Orc' # type: str - self.location = kwargs.get('location', None) - self.orc_compression_codec = kwargs.get('orc_compression_codec', None) - - -class OrcFormat(DatasetStorageFormat): - """The data stored in Optimized Row Columnar (ORC) format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage format.Constant filled by server. - :type type: str - :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(OrcFormat, self).__init__(**kwargs) - self.type = 'OrcFormat' # type: str - - -class OrcSink(CopySink): - """A copy activity ORC sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param store_settings: ORC store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: ORC format settings. - :type format_settings: ~azure.mgmt.datafactory.models.OrcWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(OrcSink, self).__init__(**kwargs) - self.type = 'OrcSink' # type: str - self.store_settings = kwargs.get('store_settings', None) - self.format_settings = kwargs.get('format_settings', None) - - -class OrcSource(CopySource): - """A copy activity ORC source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param store_settings: ORC store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(OrcSource, self).__init__(**kwargs) - self.type = 'OrcSource' # type: str - self.store_settings = kwargs.get('store_settings', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class OrcWriteSettings(FormatWriteSettings): - """Orc write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the - specified count. Type: integer (or Expression with resultType integer). - :type max_rows_per_file: object - :param file_name_prefix: Specifies the file name pattern - :code:``_:code:``.:code:`` when copy from non-file - based store without partitionOptions. Type: string (or Expression with resultType string). - :type file_name_prefix: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, - 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(OrcWriteSettings, self).__init__(**kwargs) - self.type = 'OrcWriteSettings' # type: str - self.max_rows_per_file = kwargs.get('max_rows_per_file', None) - self.file_name_prefix = kwargs.get('file_name_prefix', None) - - -class PackageStore(msrest.serialization.Model): - """Package store for the SSIS integration runtime. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. The name of the package store. - :type name: str - :param package_store_linked_service: Required. The package store linked service reference. - :type package_store_linked_service: ~azure.mgmt.datafactory.models.EntityReference - """ - - _validation = { - 'name': {'required': True}, - 'package_store_linked_service': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'package_store_linked_service': {'key': 'packageStoreLinkedService', 'type': 'EntityReference'}, - } - - def __init__( - self, - **kwargs - ): - super(PackageStore, self).__init__(**kwargs) - self.name = kwargs['name'] - self.package_store_linked_service = kwargs['package_store_linked_service'] - - -class ParameterSpecification(msrest.serialization.Model): - """Definition of a single parameter for an entity. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Parameter type. 
Possible values include: "Object", "String", "Int", - "Float", "Bool", "Array", "SecureString". - :type type: str or ~azure.mgmt.datafactory.models.ParameterType - :param default_value: Default value of parameter. - :type default_value: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ParameterSpecification, self).__init__(**kwargs) - self.type = kwargs['type'] - self.default_value = kwargs.get('default_value', None) - - -class ParquetDataset(Dataset): - """Parquet dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the parquet storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param compression_codec: The data compressionCodec. Type: string (or Expression with - resultType string). - :type compression_codec: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ParquetDataset, self).__init__(**kwargs) - self.type = 'Parquet' # type: str - self.location = kwargs.get('location', None) - self.compression_codec = kwargs.get('compression_codec', None) - - -class ParquetFormat(DatasetStorageFormat): - """The data stored in Parquet format. - - All required parameters must be populated in order to send to Azure. 
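Every generated model removed in this hunk follows the same msrest pattern: an _attribute_map that maps Python attribute names to REST payload keys for (de)serialization, a _validation dict for required/readonly constraints, and a **kwargs constructor that reads required fields with kwargs['...'] (so omitting them raises KeyError) and optional fields with kwargs.get(..., None). A minimal sketch of that construction pattern, assuming these vendored models are importable under azure.mgmt.datafactory.models as the docstrings above indicate (the parameter values are illustrative only):

    from azure.mgmt.datafactory.models import ParameterSpecification

    # 'type' is required (read via kwargs['type']); 'default_value' is optional.
    param = ParameterSpecification(type="String", default_value="dbo")
    print(param.type, param.default_value)  # -> String dbo

    # Omitting a required field fails at construction time with a KeyError,
    # per the kwargs['type'] lookup in the generated __init__ shown above.
    try:
        ParameterSpecification()
    except KeyError:
        pass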
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage format.Constant filled by server. - :type type: str - :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ParquetFormat, self).__init__(**kwargs) - self.type = 'ParquetFormat' # type: str - - -class ParquetSink(CopySink): - """A copy activity Parquet sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param store_settings: Parquet store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: Parquet format settings. 
- :type format_settings: ~azure.mgmt.datafactory.models.ParquetWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(ParquetSink, self).__init__(**kwargs) - self.type = 'ParquetSink' # type: str - self.store_settings = kwargs.get('store_settings', None) - self.format_settings = kwargs.get('format_settings', None) - - -class ParquetSource(CopySource): - """A copy activity Parquet source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param store_settings: Parquet store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ParquetSource, self).__init__(**kwargs) - self.type = 'ParquetSource' # type: str - self.store_settings = kwargs.get('store_settings', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class ParquetWriteSettings(FormatWriteSettings): - """Parquet write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the - specified count. Type: integer (or Expression with resultType integer). - :type max_rows_per_file: object - :param file_name_prefix: Specifies the file name pattern - :code:``_:code:``.:code:`` when copy from non-file - based store without partitionOptions. Type: string (or Expression with resultType string). - :type file_name_prefix: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, - 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ParquetWriteSettings, self).__init__(**kwargs) - self.type = 'ParquetWriteSettings' # type: str - self.max_rows_per_file = kwargs.get('max_rows_per_file', None) - self.file_name_prefix = kwargs.get('file_name_prefix', None) - - -class PaypalLinkedService(LinkedService): - """Paypal Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). - :type host: object - :param client_id: Required. The client ID associated with your PayPal application. 
- :type client_id: object - :param client_secret: The client secret associated with your PayPal application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(PaypalLinkedService, self).__init__(**kwargs) - self.type = 'Paypal' # type: str - self.host = kwargs['host'] - self.client_id = kwargs['client_id'] - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class PaypalObjectDataset(Dataset): - """Paypal Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. 
Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(PaypalObjectDataset, self).__init__(**kwargs) - self.type = 'PaypalObject' # type: str - self.table_name = kwargs.get('table_name', None) - - -class PaypalSource(TabularSource): - """A copy activity Paypal Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(PaypalSource, self).__init__(**kwargs) - self.type = 'PaypalSource' # type: str - self.query = kwargs.get('query', None) - - -class PhoenixLinkedService(LinkedService): - """Phoenix server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The IP address or host name of the Phoenix server. (i.e. - 192.168.222.160). - :type host: object - :param port: The TCP port that the Phoenix server uses to listen for client connections. The - default value is 8765. - :type port: object - :param http_path: The partial URL corresponding to the Phoenix server. (i.e. - /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using - WindowsAzureHDInsightService. - :type http_path: object - :param authentication_type: Required. The authentication mechanism used to connect to the - Phoenix server. Possible values include: "Anonymous", "UsernameAndPassword", - "WindowsAzureHDInsightService". - :type authentication_type: str or ~azure.mgmt.datafactory.models.PhoenixAuthenticationType - :param username: The user name used to connect to the Phoenix server. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. 
- :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(PhoenixLinkedService, self).__init__(**kwargs) - self.type = 'Phoenix' # type: str - self.host = kwargs['host'] - self.port = kwargs.get('port', None) - self.http_path = kwargs.get('http_path', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class PhoenixObjectDataset(Dataset): - """Phoenix server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of the Phoenix. Type: string (or Expression with resultType - string). - :type table: object - :param schema_type_properties_schema: The schema name of the Phoenix. Type: string (or - Expression with resultType string). - :type schema_type_properties_schema: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(PhoenixObjectDataset, self).__init__(**kwargs) - self.type = 'PhoenixObject' # type: str - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - - -class PhoenixSource(TabularSource): - """A copy activity Phoenix server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(PhoenixSource, self).__init__(**kwargs) - self.type = 'PhoenixSource' # type: str - self.query = kwargs.get('query', None) - - -class PipelineElapsedTimeMetricPolicy(msrest.serialization.Model): - """Pipeline ElapsedTime Metric Policy. - - :param duration: TimeSpan value, after which an Azure Monitoring Metric is fired. - :type duration: object - """ - - _attribute_map = { - 'duration': {'key': 'duration', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(PipelineElapsedTimeMetricPolicy, self).__init__(**kwargs) - self.duration = kwargs.get('duration', None) - - -class PipelineFolder(msrest.serialization.Model): - """The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. - - :param name: The name of the folder that this Pipeline is in. - :type name: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PipelineFolder, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - - -class PipelineListResponse(msrest.serialization.Model): - """A list of pipeline resources. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of pipelines. - :type value: list[~azure.mgmt.datafactory.models.PipelineResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[PipelineResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PipelineListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) - - -class PipelinePolicy(msrest.serialization.Model): - """Pipeline Policy. - - :param elapsed_time_metric: Pipeline ElapsedTime Metric Policy. 
- :type elapsed_time_metric: ~azure.mgmt.datafactory.models.PipelineElapsedTimeMetricPolicy - """ - - _attribute_map = { - 'elapsed_time_metric': {'key': 'elapsedTimeMetric', 'type': 'PipelineElapsedTimeMetricPolicy'}, - } - - def __init__( - self, - **kwargs - ): - super(PipelinePolicy, self).__init__(**kwargs) - self.elapsed_time_metric = kwargs.get('elapsed_time_metric', None) - - -class PipelineReference(msrest.serialization.Model): - """Pipeline reference type. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Pipeline reference type. Default value: "PipelineReference". - :vartype type: str - :param reference_name: Required. Reference pipeline name. - :type reference_name: str - :param name: Reference name. - :type name: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - } - - type = "PipelineReference" - - def __init__( - self, - **kwargs - ): - super(PipelineReference, self).__init__(**kwargs) - self.reference_name = kwargs['reference_name'] - self.name = kwargs.get('name', None) - - -class PipelineResource(SubResource): - """Pipeline resource type. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param description: The description of the pipeline. - :type description: str - :param activities: List of activities in pipeline. - :type activities: list[~azure.mgmt.datafactory.models.Activity] - :param parameters: List of parameters for pipeline. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param variables: List of variables for pipeline. - :type variables: dict[str, ~azure.mgmt.datafactory.models.VariableSpecification] - :param concurrency: The max number of concurrent runs for the pipeline. - :type concurrency: int - :param annotations: List of tags that can be used for describing the Pipeline. - :type annotations: list[object] - :param run_dimensions: Dimensions emitted by Pipeline. - :type run_dimensions: dict[str, object] - :param folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at - the root level. - :type folder: ~azure.mgmt.datafactory.models.PipelineFolder - :param policy: Pipeline Policy. 
- :type policy: ~azure.mgmt.datafactory.models.PipelinePolicy - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'concurrency': {'minimum': 1}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'properties.description', 'type': 'str'}, - 'activities': {'key': 'properties.activities', 'type': '[Activity]'}, - 'parameters': {'key': 'properties.parameters', 'type': '{ParameterSpecification}'}, - 'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'}, - 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, - 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, - 'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'}, - 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, - 'policy': {'key': 'properties.policy', 'type': 'PipelinePolicy'}, - } - - def __init__( - self, - **kwargs - ): - super(PipelineResource, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.description = kwargs.get('description', None) - self.activities = kwargs.get('activities', None) - self.parameters = kwargs.get('parameters', None) - self.variables = kwargs.get('variables', None) - self.concurrency = kwargs.get('concurrency', None) - self.annotations = kwargs.get('annotations', None) - self.run_dimensions = kwargs.get('run_dimensions', None) - self.folder = kwargs.get('folder', None) - self.policy = kwargs.get('policy', None) - - -class PipelineRun(msrest.serialization.Model): - """Information about a pipeline run. - - Variables are only populated by the server, and will be ignored when sending a request. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :ivar run_id: Identifier of a run. - :vartype run_id: str - :ivar run_group_id: Identifier that correlates all the recovery runs of a pipeline run. - :vartype run_group_id: str - :ivar is_latest: Indicates if the recovered pipeline run is the latest in its group. - :vartype is_latest: bool - :ivar pipeline_name: The pipeline name. - :vartype pipeline_name: str - :ivar parameters: The full or partial list of parameter name, value pair used in the pipeline - run. - :vartype parameters: dict[str, str] - :ivar run_dimensions: Run dimensions emitted by Pipeline run. - :vartype run_dimensions: dict[str, str] - :ivar invoked_by: Entity that started the pipeline run. - :vartype invoked_by: ~azure.mgmt.datafactory.models.PipelineRunInvokedBy - :ivar last_updated: The last updated timestamp for the pipeline run event in ISO8601 format. - :vartype last_updated: ~datetime.datetime - :ivar run_start: The start time of a pipeline run in ISO8601 format. - :vartype run_start: ~datetime.datetime - :ivar run_end: The end time of a pipeline run in ISO8601 format. - :vartype run_end: ~datetime.datetime - :ivar duration_in_ms: The duration of a pipeline run. - :vartype duration_in_ms: int - :ivar status: The status of a pipeline run. - :vartype status: str - :ivar message: The message from a pipeline run. 
- :vartype message: str - """ - - _validation = { - 'run_id': {'readonly': True}, - 'run_group_id': {'readonly': True}, - 'is_latest': {'readonly': True}, - 'pipeline_name': {'readonly': True}, - 'parameters': {'readonly': True}, - 'run_dimensions': {'readonly': True}, - 'invoked_by': {'readonly': True}, - 'last_updated': {'readonly': True}, - 'run_start': {'readonly': True}, - 'run_end': {'readonly': True}, - 'duration_in_ms': {'readonly': True}, - 'status': {'readonly': True}, - 'message': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'run_id': {'key': 'runId', 'type': 'str'}, - 'run_group_id': {'key': 'runGroupId', 'type': 'str'}, - 'is_latest': {'key': 'isLatest', 'type': 'bool'}, - 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{str}'}, - 'run_dimensions': {'key': 'runDimensions', 'type': '{str}'}, - 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, - 'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'}, - 'run_start': {'key': 'runStart', 'type': 'iso-8601'}, - 'run_end': {'key': 'runEnd', 'type': 'iso-8601'}, - 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, - 'status': {'key': 'status', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PipelineRun, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.run_id = None - self.run_group_id = None - self.is_latest = None - self.pipeline_name = None - self.parameters = None - self.run_dimensions = None - self.invoked_by = None - self.last_updated = None - self.run_start = None - self.run_end = None - self.duration_in_ms = None - self.status = None - self.message = None - - -class PipelineRunInvokedBy(msrest.serialization.Model): - """Provides entity name and id that started the pipeline run. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar name: Name of the entity that started the pipeline run. - :vartype name: str - :ivar id: The ID of the entity that started the run. - :vartype id: str - :ivar invoked_by_type: The type of the entity that started the run. - :vartype invoked_by_type: str - :ivar pipeline_name: The name of the pipeline that triggered the run, if any. - :vartype pipeline_name: str - :ivar pipeline_run_id: The run id of the pipeline that triggered the run, if any. - :vartype pipeline_run_id: str - """ - - _validation = { - 'name': {'readonly': True}, - 'id': {'readonly': True}, - 'invoked_by_type': {'readonly': True}, - 'pipeline_name': {'readonly': True}, - 'pipeline_run_id': {'readonly': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'id': {'key': 'id', 'type': 'str'}, - 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, - 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, - 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PipelineRunInvokedBy, self).__init__(**kwargs) - self.name = None - self.id = None - self.invoked_by_type = None - self.pipeline_name = None - self.pipeline_run_id = None - - -class PipelineRunsQueryResponse(msrest.serialization.Model): - """A list pipeline runs. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of pipeline runs. 
- :type value: list[~azure.mgmt.datafactory.models.PipelineRun] - :param continuation_token: The continuation token for getting the next page of results, if any - remaining results exist, null otherwise. - :type continuation_token: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[PipelineRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PipelineRunsQueryResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.continuation_token = kwargs.get('continuation_token', None) - - -class PolybaseSettings(msrest.serialization.Model): - """PolyBase settings. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param reject_type: Reject type. Possible values include: "value", "percentage". - :type reject_type: str or ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType - :param reject_value: Specifies the value or the percentage of rows that can be rejected before - the query fails. Type: number (or Expression with resultType number), minimum: 0. - :type reject_value: object - :param reject_sample_value: Determines the number of rows to attempt to retrieve before the - PolyBase recalculates the percentage of rejected rows. Type: integer (or Expression with - resultType integer), minimum: 0. - :type reject_sample_value: object - :param use_type_default: Specifies how to handle missing values in delimited text files when - PolyBase retrieves data from the text file. Type: boolean (or Expression with resultType - boolean). - :type use_type_default: object - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'reject_type': {'key': 'rejectType', 'type': 'str'}, - 'reject_value': {'key': 'rejectValue', 'type': 'object'}, - 'reject_sample_value': {'key': 'rejectSampleValue', 'type': 'object'}, - 'use_type_default': {'key': 'useTypeDefault', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(PolybaseSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.reject_type = kwargs.get('reject_type', None) - self.reject_value = kwargs.get('reject_value', None) - self.reject_sample_value = kwargs.get('reject_sample_value', None) - self.use_type_default = kwargs.get('use_type_default', None) - - -class PostgreSqlLinkedService(LinkedService): - """Linked service for PostgreSQL data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. 
- :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(PostgreSqlLinkedService, self).__init__(**kwargs) - self.type = 'PostgreSql' # type: str - self.connection_string = kwargs['connection_string'] - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class PostgreSqlSource(TabularSource): - """A copy activity source for PostgreSQL databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: Database query. Type: string (or Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(PostgreSqlSource, self).__init__(**kwargs) - self.type = 'PostgreSqlSource' # type: str - self.query = kwargs.get('query', None) - - -class PostgreSqlTableDataset(Dataset): - """The PostgreSQL table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The PostgreSQL table name. Type: string (or Expression with resultType string). - :type table: object - :param schema_type_properties_schema: The PostgreSQL schema name. Type: string (or Expression - with resultType string). 
- :type schema_type_properties_schema: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(PostgreSqlTableDataset, self).__init__(**kwargs) - self.type = 'PostgreSqlTable' # type: str - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - - -class PrestoLinkedService(LinkedService): - """Presto server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The IP address or host name of the Presto server. (i.e. - 192.168.222.160). - :type host: object - :param server_version: Required. The version of the Presto server. (i.e. 0.148-t). - :type server_version: object - :param catalog: Required. The catalog context for all request against the server. - :type catalog: object - :param port: The TCP port that the Presto server uses to listen for client connections. The - default value is 8080. - :type port: object - :param authentication_type: Required. The authentication mechanism used to connect to the - Presto server. Possible values include: "Anonymous", "LDAP". - :type authentication_type: str or ~azure.mgmt.datafactory.models.PrestoAuthenticationType - :param username: The user name used to connect to the Presto server. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. 
- :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object - :param time_zone_id: The local time zone used by the connection. Valid values for this option - are specified in the IANA Time Zone Database. The default value is the system time zone. - :type time_zone_id: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'server_version': {'required': True}, - 'catalog': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'server_version': {'key': 'typeProperties.serverVersion', 'type': 'object'}, - 'catalog': {'key': 'typeProperties.catalog', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'time_zone_id': {'key': 'typeProperties.timeZoneID', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(PrestoLinkedService, self).__init__(**kwargs) - self.type = 'Presto' # type: str - self.host = kwargs['host'] - self.server_version = kwargs['server_version'] - self.catalog = kwargs['catalog'] - self.port = kwargs.get('port', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', 
None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.time_zone_id = kwargs.get('time_zone_id', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class PrestoObjectDataset(Dataset): - """Presto server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of the Presto. Type: string (or Expression with resultType - string). - :type table: object - :param schema_type_properties_schema: The schema name of the Presto. Type: string (or - Expression with resultType string). - :type schema_type_properties_schema: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(PrestoObjectDataset, self).__init__(**kwargs) - self.type = 'PrestoObject' # type: str - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - - -class PrestoSource(TabularSource): - """A copy activity Presto server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. 
- :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(PrestoSource, self).__init__(**kwargs) - self.type = 'PrestoSource' # type: str - self.query = kwargs.get('query', None) - - -class PrivateEndpointConnectionListResponse(msrest.serialization.Model): - """A list of linked service resources. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of Private Endpoint Connections. - :type value: list[~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[PrivateEndpointConnectionResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateEndpointConnectionListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) - - -class PrivateEndpointConnectionResource(SubResource): - """Private Endpoint Connection ARM resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. 
- :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Core resource properties. - :type properties: ~azure.mgmt.datafactory.models.RemotePrivateEndpointConnection - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'RemotePrivateEndpointConnection'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateEndpointConnectionResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class PrivateLinkConnectionApprovalRequest(msrest.serialization.Model): - """A request to approve or reject a private endpoint connection. - - :param private_link_service_connection_state: The state of a private link connection. - :type private_link_service_connection_state: - ~azure.mgmt.datafactory.models.PrivateLinkConnectionState - """ - - _attribute_map = { - 'private_link_service_connection_state': {'key': 'privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateLinkConnectionApprovalRequest, self).__init__(**kwargs) - self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) - - -class PrivateLinkConnectionApprovalRequestResource(SubResource): - """Private Endpoint Connection Approval ARM resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Core resource properties. - :type properties: ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequest - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'PrivateLinkConnectionApprovalRequest'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateLinkConnectionApprovalRequestResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class PrivateLinkConnectionState(msrest.serialization.Model): - """The state of a private link connection. - - :param status: Status of a private link connection. - :type status: str - :param description: Description of a private link connection. - :type description: str - :param actions_required: ActionsRequired for a private link connection. 
- :type actions_required: str - """ - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateLinkConnectionState, self).__init__(**kwargs) - self.status = kwargs.get('status', None) - self.description = kwargs.get('description', None) - self.actions_required = kwargs.get('actions_required', None) - - -class PrivateLinkResource(SubResource): - """A private link resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Core resource properties. - :type properties: ~azure.mgmt.datafactory.models.PrivateLinkResourceProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'PrivateLinkResourceProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateLinkResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class PrivateLinkResourceProperties(msrest.serialization.Model): - """Properties of a private link resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar group_id: GroupId of a private link resource. - :vartype group_id: str - :ivar required_members: RequiredMembers of a private link resource. - :vartype required_members: list[str] - :ivar required_zone_names: RequiredZoneNames of a private link resource. - :vartype required_zone_names: list[str] - """ - - _validation = { - 'group_id': {'readonly': True}, - 'required_members': {'readonly': True}, - 'required_zone_names': {'readonly': True}, - } - - _attribute_map = { - 'group_id': {'key': 'groupId', 'type': 'str'}, - 'required_members': {'key': 'requiredMembers', 'type': '[str]'}, - 'required_zone_names': {'key': 'requiredZoneNames', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateLinkResourceProperties, self).__init__(**kwargs) - self.group_id = None - self.required_members = None - self.required_zone_names = None - - -class PrivateLinkResourcesWrapper(msrest.serialization.Model): - """Wrapper for a collection of private link resources. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. - :type value: list[~azure.mgmt.datafactory.models.PrivateLinkResource] - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateLinkResourcesWrapper, self).__init__(**kwargs) - self.value = kwargs['value'] - - -class QueryDataFlowDebugSessionsResponse(msrest.serialization.Model): - """A list of active debug sessions. - - :param value: Array with all active debug sessions. 
- :type value: list[~azure.mgmt.datafactory.models.DataFlowDebugSessionInfo] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[DataFlowDebugSessionInfo]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(QueryDataFlowDebugSessionsResponse, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) - - -class QuickBooksLinkedService(LinkedService): - """QuickBooks server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_properties: Properties used to connect to QuickBooks. It is mutually - exclusive with any other properties in the linked service. Type: object. - :type connection_properties: object - :param endpoint: The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com). - :type endpoint: object - :param company_id: The company ID of the QuickBooks company to authorize. - :type company_id: object - :param consumer_key: The consumer key for OAuth 1.0 authentication. - :type consumer_key: object - :param consumer_secret: The consumer secret for OAuth 1.0 authentication. - :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase - :param access_token: The access token for OAuth 1.0 authentication. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param access_token_secret: The access token secret for OAuth 1.0 authentication. - :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, - 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, - 'consumer_secret': {'key': 'typeProperties.consumerSecret', 'type': 'SecretBase'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'access_token_secret': {'key': 'typeProperties.accessTokenSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(QuickBooksLinkedService, self).__init__(**kwargs) - self.type = 'QuickBooks' # type: str - self.connection_properties = kwargs.get('connection_properties', None) - self.endpoint = kwargs.get('endpoint', None) - self.company_id = kwargs.get('company_id', None) - self.consumer_key = kwargs.get('consumer_key', None) - self.consumer_secret = kwargs.get('consumer_secret', None) - self.access_token = kwargs.get('access_token', None) - self.access_token_secret = kwargs.get('access_token_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class QuickBooksObjectDataset(Dataset): - """QuickBooks server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(QuickBooksObjectDataset, self).__init__(**kwargs) - self.type = 'QuickBooksObject' # type: str - self.table_name = kwargs.get('table_name', None) - - -class QuickBooksSource(TabularSource): - """A copy activity QuickBooks server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(QuickBooksSource, self).__init__(**kwargs) - self.type = 'QuickBooksSource' # type: str - self.query = kwargs.get('query', None) - - -class RecurrenceSchedule(msrest.serialization.Model): - """The recurrence schedule. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param minutes: The minutes. - :type minutes: list[int] - :param hours: The hours. - :type hours: list[int] - :param week_days: The days of the week. - :type week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] - :param month_days: The month days. - :type month_days: list[int] - :param monthly_occurrences: The monthly occurrences. - :type monthly_occurrences: list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'minutes': {'key': 'minutes', 'type': '[int]'}, - 'hours': {'key': 'hours', 'type': '[int]'}, - 'week_days': {'key': 'weekDays', 'type': '[str]'}, - 'month_days': {'key': 'monthDays', 'type': '[int]'}, - 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, - } - - def __init__( - self, - **kwargs - ): - super(RecurrenceSchedule, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.minutes = kwargs.get('minutes', None) - self.hours = kwargs.get('hours', None) - self.week_days = kwargs.get('week_days', None) - self.month_days = kwargs.get('month_days', None) - self.monthly_occurrences = kwargs.get('monthly_occurrences', None) - - -class RecurrenceScheduleOccurrence(msrest.serialization.Model): - """The recurrence schedule occurrence. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param day: The day of the week. Possible values include: "Sunday", "Monday", "Tuesday", - "Wednesday", "Thursday", "Friday", "Saturday". - :type day: str or ~azure.mgmt.datafactory.models.DayOfWeek - :param occurrence: The occurrence. - :type occurrence: int - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'day': {'key': 'day', 'type': 'str'}, - 'occurrence': {'key': 'occurrence', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.day = kwargs.get('day', None) - self.occurrence = kwargs.get('occurrence', None) - - -class RedirectIncompatibleRowSettings(msrest.serialization.Model): - """Redirect incompatible row settings. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param linked_service_name: Required. Name of the Azure Storage, Storage SAS, or Azure Data - Lake Store linked service used for redirecting incompatible row. Must be specified if - redirectIncompatibleRowSettings is specified. Type: string (or Expression with resultType - string). - :type linked_service_name: object - :param path: The path for storing the redirect incompatible row data. Type: string (or - Expression with resultType string). - :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'object'}, - 'path': {'key': 'path', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.linked_service_name = kwargs['linked_service_name'] - self.path = kwargs.get('path', None) - - -class RedshiftUnloadSettings(msrest.serialization.Model): - """The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. - - All required parameters must be populated in order to send to Azure. - - :param s3_linked_service_name: Required. The name of the Amazon S3 linked service which will be - used for the unload operation when copying from the Amazon Redshift source. - :type s3_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param bucket_name: Required. The bucket of the interim Amazon S3 which will be used to store - the unloaded data from Amazon Redshift source. The bucket must be in the same region as the - Amazon Redshift source. Type: string (or Expression with resultType string). - :type bucket_name: object - """ - - _validation = { - 's3_linked_service_name': {'required': True}, - 'bucket_name': {'required': True}, - } - - _attribute_map = { - 's3_linked_service_name': {'key': 's3LinkedServiceName', 'type': 'LinkedServiceReference'}, - 'bucket_name': {'key': 'bucketName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(RedshiftUnloadSettings, self).__init__(**kwargs) - self.s3_linked_service_name = kwargs['s3_linked_service_name'] - self.bucket_name = kwargs['bucket_name'] - - -class RelationalSource(CopySource): - """A copy activity source for various relational databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query: Database query. Type: string (or Expression with resultType string). - :type query: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(RelationalSource, self).__init__(**kwargs) - self.type = 'RelationalSource' # type: str - self.query = kwargs.get('query', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class RelationalTableDataset(Dataset): - """The relational table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The relational table name. Type: string (or Expression with resultType - string). 
- :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(RelationalTableDataset, self).__init__(**kwargs) - self.type = 'RelationalTable' # type: str - self.table_name = kwargs.get('table_name', None) - - -class RemotePrivateEndpointConnection(msrest.serialization.Model): - """A remote private endpoint connection. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar provisioning_state: - :vartype provisioning_state: str - :param private_endpoint: PrivateEndpoint of a remote private endpoint connection. - :type private_endpoint: ~azure.mgmt.datafactory.models.ArmIdWrapper - :param private_link_service_connection_state: The state of a private link connection. - :type private_link_service_connection_state: - ~azure.mgmt.datafactory.models.PrivateLinkConnectionState - """ - - _validation = { - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'private_endpoint': {'key': 'privateEndpoint', 'type': 'ArmIdWrapper'}, - 'private_link_service_connection_state': {'key': 'privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'}, - } - - def __init__( - self, - **kwargs - ): - super(RemotePrivateEndpointConnection, self).__init__(**kwargs) - self.provisioning_state = None - self.private_endpoint = kwargs.get('private_endpoint', None) - self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) - - -class RerunTumblingWindowTrigger(Trigger): - """Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] - :param parent_trigger: Required. The parent trigger reference. - :type parent_trigger: object - :param requested_start_time: Required. The start time for the time period for which restatement - is initiated. Only UTC time is currently supported. 
- :type requested_start_time: ~datetime.datetime - :param requested_end_time: Required. The end time for the time period for which restatement is - initiated. Only UTC time is currently supported. - :type requested_end_time: ~datetime.datetime - :param rerun_concurrency: Required. The max number of parallel time windows (ready for - execution) for which a rerun is triggered. - :type rerun_concurrency: int - """ - - _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, - 'parent_trigger': {'required': True}, - 'requested_start_time': {'required': True}, - 'requested_end_time': {'required': True}, - 'rerun_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, - 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, - 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, - 'rerun_concurrency': {'key': 'typeProperties.rerunConcurrency', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(RerunTumblingWindowTrigger, self).__init__(**kwargs) - self.type = 'RerunTumblingWindowTrigger' # type: str - self.parent_trigger = kwargs['parent_trigger'] - self.requested_start_time = kwargs['requested_start_time'] - self.requested_end_time = kwargs['requested_end_time'] - self.rerun_concurrency = kwargs['rerun_concurrency'] - - -class ResponsysLinkedService(LinkedService): - """Responsys linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param endpoint: Required. The endpoint of the Responsys server. - :type endpoint: object - :param client_id: Required. The client ID associated with the Responsys application. Type: - string (or Expression with resultType string). - :type client_id: object - :param client_secret: The client secret associated with the Responsys application. Type: string - (or Expression with resultType string). - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. 
Type: boolean (or Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. Type: boolean (or Expression with resultType - boolean). - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ResponsysLinkedService, self).__init__(**kwargs) - self.type = 'Responsys' # type: str - self.endpoint = kwargs['endpoint'] - self.client_id = kwargs['client_id'] - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class ResponsysObjectDataset(Dataset): - """Responsys dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ResponsysObjectDataset, self).__init__(**kwargs) - self.type = 'ResponsysObject' # type: str - self.table_name = kwargs.get('table_name', None) - - -class ResponsysSource(TabularSource): - """A copy activity Responsys source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ResponsysSource, self).__init__(**kwargs) - self.type = 'ResponsysSource' # type: str - self.query = kwargs.get('query', None) - - -class RestResourceDataset(Dataset): - """A Rest service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param relative_url: The relative URL to the resource that the RESTful API provides. Type: - string (or Expression with resultType string). - :type relative_url: object - :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: - string (or Expression with resultType string). - :type request_method: object - :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: - string (or Expression with resultType string). - :type request_body: object - :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: - string (or Expression with resultType string). - :type additional_headers: object - :param pagination_rules: The pagination rules to compose next page requests. Type: string (or - Expression with resultType string). 
- :type pagination_rules: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, - 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, - 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, - 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(RestResourceDataset, self).__init__(**kwargs) - self.type = 'RestResource' # type: str - self.relative_url = kwargs.get('relative_url', None) - self.request_method = kwargs.get('request_method', None) - self.request_body = kwargs.get('request_body', None) - self.additional_headers = kwargs.get('additional_headers', None) - self.pagination_rules = kwargs.get('pagination_rules', None) - - -class RestServiceLinkedService(LinkedService): - """Rest Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. The base URL of the REST service. - :type url: object - :param enable_server_certificate_validation: Whether to validate server side SSL certificate - when connecting to the endpoint.The default value is true. Type: boolean (or Expression with - resultType boolean). - :type enable_server_certificate_validation: object - :param authentication_type: Required. Type of authentication used to connect to the REST - service. Possible values include: "Anonymous", "Basic", "AadServicePrincipal", - "ManagedServiceIdentity". - :type authentication_type: str or ~azure.mgmt.datafactory.models.RestServiceAuthenticationType - :param user_name: The user name used in Basic authentication type. - :type user_name: object - :param password: The password used in Basic authentication type. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param auth_headers: The additional HTTP headers in the request to RESTful API used for - authorization. Type: object (or Expression with resultType object). 
- :type auth_headers: object
- :param service_principal_id: The application's client ID used in AadServicePrincipal
- authentication type.
- :type service_principal_id: object
- :param service_principal_key: The application's key used in AadServicePrincipal authentication
- type.
- :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
- :param tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal
- authentication type under which your application resides.
- :type tenant: object
- :param azure_cloud_type: Indicates the Azure cloud type of the service principal auth. Allowed
- values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data
- factory region's cloud type. Type: string (or Expression with resultType string).
- :type azure_cloud_type: object
- :param aad_resource_id: The resource you are requesting authorization to use.
- :type aad_resource_id: object
- :param encrypted_credential: The encrypted credential used for authentication. Credentials are
- encrypted using the integration runtime credential manager. Type: string (or Expression with
- resultType string).
- :type encrypted_credential: object
- :param credential: The credential reference containing authentication information.
- :type credential: ~azure.mgmt.datafactory.models.CredentialReference
- """
-
- _validation = {
- 'type': {'required': True},
- 'url': {'required': True},
- 'authentication_type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'url': {'key': 'typeProperties.url', 'type': 'object'},
- 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
- 'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
- 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
- 'auth_headers': {'key': 'typeProperties.authHeaders', 'type': 'object'},
- 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
- 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
- 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
- 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
- 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'},
- 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
- 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
- }
-
- def __init__(
- self,
- **kwargs
- ):
- super(RestServiceLinkedService, self).__init__(**kwargs)
- self.type = 'RestService' # type: str
- self.url = kwargs['url']
- self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None)
- self.authentication_type = kwargs['authentication_type']
- self.user_name = kwargs.get('user_name', None)
- self.password = kwargs.get('password', None)
- self.auth_headers = kwargs.get('auth_headers', None)
- self.service_principal_id = kwargs.get('service_principal_id', None)
- self.service_principal_key = kwargs.get('service_principal_key', None)
- self.tenant = kwargs.get('tenant', None)
- self.azure_cloud_type = kwargs.get('azure_cloud_type', None)
- self.aad_resource_id = kwargs.get('aad_resource_id', None)
- self.encrypted_credential = kwargs.get('encrypted_credential', None)
- self.credential = kwargs.get('credential', None)
-
-
-class RestSink(CopySink):
- """A copy activity Rest service Sink.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are deserialized to this
- collection.
- :type additional_properties: dict[str, object]
- :param type: Required. Copy sink type.Constant filled by server.
- :type type: str
- :param write_batch_size: Write batch size. Type: integer (or Expression with resultType
- integer), minimum: 0.
- :type write_batch_size: object
- :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType
- string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type write_batch_timeout: object
- :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType
- integer).
- :type sink_retry_count: object
- :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
- pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type sink_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count for the sink data
- store. Type: integer (or Expression with resultType integer).
- :type max_concurrent_connections: object
- :param disable_metrics_collection: If true, disable data store metrics collection. Default is
- false. Type: boolean (or Expression with resultType boolean).
- :type disable_metrics_collection: object
- :param request_method: The HTTP method used to call the RESTful API. The default is POST. Type:
- string (or Expression with resultType string).
- :type request_method: object
- :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type:
- string (or Expression with resultType string).
- :type additional_headers: object
- :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout
- to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string
- (or Expression with resultType string), pattern:
- ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type http_request_timeout: object
- :param request_interval: The time to wait before sending the next request, in milliseconds.
- :type request_interval: object
- :param http_compression_type: HTTP compression type used to send data in compressed format
- with optimal compression level. Default is None. The only supported option is Gzip.
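# A minimal usage sketch (not from the patched sources) for the kwargs-based model defined
# above: constructing a REST linked service with AAD service-principal authentication.
# The endpoint, IDs, tenant, and secret below are placeholders, and SecureString is assumed
# to be available from the same azure.mgmt.datafactory.models module.
from azure.mgmt.datafactory.models import RestServiceLinkedService, SecureString

rest_linked_service = RestServiceLinkedService(
    url="https://example.invalid/odata/v1",         # required
    authentication_type="AadServicePrincipal",      # required; one of RestServiceAuthenticationType
    service_principal_id="00000000-0000-0000-0000-000000000000",
    service_principal_key=SecureString(value="<placeholder-secret>"),
    tenant="contoso.onmicrosoft.com",
    azure_cloud_type="AzurePublic",
)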
- :type http_compression_type: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'request_method': {'key': 'requestMethod', 'type': 'object'}, - 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - 'request_interval': {'key': 'requestInterval', 'type': 'object'}, - 'http_compression_type': {'key': 'httpCompressionType', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(RestSink, self).__init__(**kwargs) - self.type = 'RestSink' # type: str - self.request_method = kwargs.get('request_method', None) - self.additional_headers = kwargs.get('additional_headers', None) - self.http_request_timeout = kwargs.get('http_request_timeout', None) - self.request_interval = kwargs.get('request_interval', None) - self.http_compression_type = kwargs.get('http_compression_type', None) - - -class RestSource(CopySource): - """A copy activity Rest service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: - string (or Expression with resultType string). - :type request_method: object - :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: - string (or Expression with resultType string). - :type request_body: object - :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: - string (or Expression with resultType string). - :type additional_headers: object - :param pagination_rules: The pagination rules to compose next page requests. Type: string (or - Expression with resultType string). - :type pagination_rules: object - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout - to get a response, not the timeout to read response data. Default value: 00:01:40. 
Type: string - (or Expression with resultType string), pattern: - ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object - :param request_interval: The time to await before sending next page request. - :type request_interval: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'request_method': {'key': 'requestMethod', 'type': 'object'}, - 'request_body': {'key': 'requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, - 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - 'request_interval': {'key': 'requestInterval', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(RestSource, self).__init__(**kwargs) - self.type = 'RestSource' # type: str - self.request_method = kwargs.get('request_method', None) - self.request_body = kwargs.get('request_body', None) - self.additional_headers = kwargs.get('additional_headers', None) - self.pagination_rules = kwargs.get('pagination_rules', None) - self.http_request_timeout = kwargs.get('http_request_timeout', None) - self.request_interval = kwargs.get('request_interval', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class RetryPolicy(msrest.serialization.Model): - """Execution policy for an activity. - - :param count: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with - resultType integer), minimum: 0. - :type count: object - :param interval_in_seconds: Interval between retries in seconds. Default is 30. - :type interval_in_seconds: int - """ - - _validation = { - 'interval_in_seconds': {'maximum': 86400, 'minimum': 30}, - } - - _attribute_map = { - 'count': {'key': 'count', 'type': 'object'}, - 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(RetryPolicy, self).__init__(**kwargs) - self.count = kwargs.get('count', None) - self.interval_in_seconds = kwargs.get('interval_in_seconds', None) - - -class RunFilterParameters(msrest.serialization.Model): - """Query parameters for listing runs. - - All required parameters must be populated in order to send to Azure. - - :param continuation_token: The continuation token for getting the next page of results. Null - for first page. - :type continuation_token: str - :param last_updated_after: Required. The time at or after which the run event was updated in - 'ISO 8601' format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: Required. The time at or before which the run event was updated in - 'ISO 8601' format. - :type last_updated_before: ~datetime.datetime - :param filters: List of filters. 
- :type filters: list[~azure.mgmt.datafactory.models.RunQueryFilter] - :param order_by: List of OrderBy option. - :type order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy] - """ - - _validation = { - 'last_updated_after': {'required': True}, - 'last_updated_before': {'required': True}, - } - - _attribute_map = { - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - 'last_updated_after': {'key': 'lastUpdatedAfter', 'type': 'iso-8601'}, - 'last_updated_before': {'key': 'lastUpdatedBefore', 'type': 'iso-8601'}, - 'filters': {'key': 'filters', 'type': '[RunQueryFilter]'}, - 'order_by': {'key': 'orderBy', 'type': '[RunQueryOrderBy]'}, - } - - def __init__( - self, - **kwargs - ): - super(RunFilterParameters, self).__init__(**kwargs) - self.continuation_token = kwargs.get('continuation_token', None) - self.last_updated_after = kwargs['last_updated_after'] - self.last_updated_before = kwargs['last_updated_before'] - self.filters = kwargs.get('filters', None) - self.order_by = kwargs.get('order_by', None) - - -class RunQueryFilter(msrest.serialization.Model): - """Query filter option for listing runs. - - All required parameters must be populated in order to send to Azure. - - :param operand: Required. Parameter name to be used for filter. The allowed operands to query - pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are - ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger - runs are TriggerName, TriggerRunTimestamp and Status. Possible values include: "PipelineName", - "Status", "RunStart", "RunEnd", "ActivityName", "ActivityRunStart", "ActivityRunEnd", - "ActivityType", "TriggerName", "TriggerRunTimestamp", "RunGroupId", "LatestOnly". - :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand - :param operator: Required. Operator to be used for filter. Possible values include: "Equals", - "NotEquals", "In", "NotIn". - :type operator: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperator - :param values: Required. List of filter values. - :type values: list[str] - """ - - _validation = { - 'operand': {'required': True}, - 'operator': {'required': True}, - 'values': {'required': True}, - } - - _attribute_map = { - 'operand': {'key': 'operand', 'type': 'str'}, - 'operator': {'key': 'operator', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - super(RunQueryFilter, self).__init__(**kwargs) - self.operand = kwargs['operand'] - self.operator = kwargs['operator'] - self.values = kwargs['values'] - - -class RunQueryOrderBy(msrest.serialization.Model): - """An object to provide order by options for listing runs. - - All required parameters must be populated in order to send to Azure. - - :param order_by: Required. Parameter name to be used for order by. The allowed parameters to - order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are - ActivityName, ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, - TriggerRunTimestamp and Status. Possible values include: "RunStart", "RunEnd", "PipelineName", - "Status", "ActivityName", "ActivityRunStart", "ActivityRunEnd", "TriggerName", - "TriggerRunTimestamp". - :type order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField - :param order: Required. Sorting order of the parameter. Possible values include: "ASC", "DESC". 
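# A brief illustrative sketch of how the run-query models above combine: filter pipeline
# runs that succeeded in the last day, ordered by start time descending. The time window
# is an arbitrary example and is not taken from the patched sources.
from datetime import datetime, timedelta
from azure.mgmt.datafactory.models import (
    RunFilterParameters,
    RunQueryFilter,
    RunQueryOrderBy,
)

run_filter = RunFilterParameters(
    last_updated_after=datetime.utcnow() - timedelta(days=1),   # required
    last_updated_before=datetime.utcnow(),                      # required
    filters=[RunQueryFilter(operand="Status", operator="Equals", values=["Succeeded"])],
    order_by=[RunQueryOrderBy(order_by="RunStart", order="DESC")],
)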
- :type order: str or ~azure.mgmt.datafactory.models.RunQueryOrder - """ - - _validation = { - 'order_by': {'required': True}, - 'order': {'required': True}, - } - - _attribute_map = { - 'order_by': {'key': 'orderBy', 'type': 'str'}, - 'order': {'key': 'order', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(RunQueryOrderBy, self).__init__(**kwargs) - self.order_by = kwargs['order_by'] - self.order = kwargs['order'] - - -class SalesforceLinkedService(LinkedService): - """Linked service for Salesforce. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param environment_url: The URL of Salesforce instance. Default is - 'https://login.salesforce.com'. To copy data from sandbox, specify - 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, - 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). - :type environment_url: object - :param username: The username for Basic authentication of the Salesforce instance. Type: string - (or Expression with resultType string). - :type username: object - :param password: The password for Basic authentication of the Salesforce instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is optional to remotely access Salesforce instance. - :type security_token: ~azure.mgmt.datafactory.models.SecretBase - :param api_version: The Salesforce API version used in ADF. Type: string (or Expression with - resultType string). - :type api_version: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, - 'api_version': {'key': 'typeProperties.apiVersion', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SalesforceLinkedService, self).__init__(**kwargs) - self.type = 'Salesforce' # type: str - self.environment_url = kwargs.get('environment_url', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.security_token = kwargs.get('security_token', None) - self.api_version = kwargs.get('api_version', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class SalesforceMarketingCloudLinkedService(LinkedService): - """Salesforce Marketing Cloud linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_properties: Properties used to connect to Salesforce Marketing Cloud. It is - mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: object - :param client_id: The client ID associated with the Salesforce Marketing Cloud application. - Type: string (or Expression with resultType string). - :type client_id: object - :param client_secret: The client secret associated with the Salesforce Marketing Cloud - application. Type: string (or Expression with resultType string). - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. Type: boolean (or Expression with resultType boolean). 
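# An illustrative construction of the Salesforce linked service defined above, pointed at
# a sandbox org as described in the environment_url docstring. The username, password,
# security token, and API version are placeholders, and SecureString is assumed to come
# from the same models module.
from azure.mgmt.datafactory.models import SalesforceLinkedService, SecureString

salesforce_ls = SalesforceLinkedService(
    environment_url="https://test.salesforce.com",
    username="integration.user@example.com",
    password=SecureString(value="<placeholder-password>"),
    security_token=SecureString(value="<placeholder-token>"),
    api_version="53.0",  # example value only
)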
- :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. Type: boolean (or Expression with resultType - boolean). - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SalesforceMarketingCloudLinkedService, self).__init__(**kwargs) - self.type = 'SalesforceMarketingCloud' # type: str - self.connection_properties = kwargs.get('connection_properties', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class SalesforceMarketingCloudObjectDataset(Dataset): - """Salesforce Marketing Cloud dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SalesforceMarketingCloudObjectDataset, self).__init__(**kwargs) - self.type = 'SalesforceMarketingCloudObject' # type: str - self.table_name = kwargs.get('table_name', None) - - -class SalesforceMarketingCloudSource(TabularSource): - """A copy activity Salesforce Marketing Cloud source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SalesforceMarketingCloudSource, self).__init__(**kwargs) - self.type = 'SalesforceMarketingCloudSource' # type: str - self.query = kwargs.get('query', None) - - -class SalesforceObjectDataset(Dataset): - """The Salesforce object dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param object_api_name: The Salesforce object API name. Type: string (or Expression with - resultType string). - :type object_api_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SalesforceObjectDataset, self).__init__(**kwargs) - self.type = 'SalesforceObject' # type: str - self.object_api_name = kwargs.get('object_api_name', None) - - -class SalesforceServiceCloudLinkedService(LinkedService): - """Linked service for Salesforce Service Cloud. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param environment_url: The URL of Salesforce Service Cloud instance. Default is - 'https://login.salesforce.com'. To copy data from sandbox, specify - 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, - 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). - :type environment_url: object - :param username: The username for Basic authentication of the Salesforce instance. Type: string - (or Expression with resultType string). - :type username: object - :param password: The password for Basic authentication of the Salesforce instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is optional to remotely access Salesforce instance. - :type security_token: ~azure.mgmt.datafactory.models.SecretBase - :param api_version: The Salesforce API version used in ADF. Type: string (or Expression with - resultType string). - :type api_version: object - :param extended_properties: Extended properties appended to the connection string. Type: string - (or Expression with resultType string). - :type extended_properties: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, - 'api_version': {'key': 'typeProperties.apiVersion', 'type': 'object'}, - 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SalesforceServiceCloudLinkedService, self).__init__(**kwargs) - self.type = 'SalesforceServiceCloud' # type: str - self.environment_url = kwargs.get('environment_url', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.security_token = kwargs.get('security_token', None) - self.api_version = kwargs.get('api_version', None) - self.extended_properties = kwargs.get('extended_properties', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class SalesforceServiceCloudObjectDataset(Dataset): - """The Salesforce Service Cloud object dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param object_api_name: The Salesforce Service Cloud object API name. Type: string (or - Expression with resultType string). 
- :type object_api_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SalesforceServiceCloudObjectDataset, self).__init__(**kwargs) - self.type = 'SalesforceServiceCloudObject' # type: str - self.object_api_name = kwargs.get('object_api_name', None) - - -class SalesforceServiceCloudSink(CopySink): - """A copy activity Salesforce Service Cloud sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param write_behavior: The write behavior for the operation. Default is Insert. Possible values - include: "Insert", "Upsert". - :type write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior - :param external_id_field_name: The name of the external ID field for upsert operation. Default - value is 'Id' column. Type: string (or Expression with resultType string). - :type external_id_field_name: object - :param ignore_null_values: The flag indicating whether or not to ignore null values from input - dataset (except key fields) during write operation. Default value is false. If set it to true, - it means ADF will leave the data in the destination object unchanged when doing upsert/update - operation and insert defined default value when doing insert operation, versus ADF will update - the data in the destination object to NULL when doing upsert/update operation and insert NULL - value when doing insert operation. Type: boolean (or Expression with resultType boolean). 
- :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SalesforceServiceCloudSink, self).__init__(**kwargs) - self.type = 'SalesforceServiceCloudSink' # type: str - self.write_behavior = kwargs.get('write_behavior', None) - self.external_id_field_name = kwargs.get('external_id_field_name', None) - self.ignore_null_values = kwargs.get('ignore_null_values', None) - - -class SalesforceServiceCloudSource(CopySource): - """A copy activity Salesforce Service Cloud source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query: Database query. Type: string (or Expression with resultType string). - :type query: object - :param read_behavior: The read behavior for the operation. Default is Query. Possible values - include: "Query", "QueryAll". - :type read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SalesforceServiceCloudSource, self).__init__(**kwargs) - self.type = 'SalesforceServiceCloudSource' # type: str - self.query = kwargs.get('query', None) - self.read_behavior = kwargs.get('read_behavior', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class SalesforceSink(CopySink): - """A copy activity Salesforce sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param write_behavior: The write behavior for the operation. Default is Insert. Possible values - include: "Insert", "Upsert". - :type write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior - :param external_id_field_name: The name of the external ID field for upsert operation. Default - value is 'Id' column. Type: string (or Expression with resultType string). - :type external_id_field_name: object - :param ignore_null_values: The flag indicating whether or not to ignore null values from input - dataset (except key fields) during write operation. Default value is false. If set it to true, - it means ADF will leave the data in the destination object unchanged when doing upsert/update - operation and insert defined default value when doing insert operation, versus ADF will update - the data in the destination object to NULL when doing upsert/update operation and insert NULL - value when doing insert operation. Type: boolean (or Expression with resultType boolean). 
- :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SalesforceSink, self).__init__(**kwargs) - self.type = 'SalesforceSink' # type: str - self.write_behavior = kwargs.get('write_behavior', None) - self.external_id_field_name = kwargs.get('external_id_field_name', None) - self.ignore_null_values = kwargs.get('ignore_null_values', None) - - -class SalesforceSource(TabularSource): - """A copy activity Salesforce source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: Database query. Type: string (or Expression with resultType string). - :type query: object - :param read_behavior: The read behavior for the operation. Default is Query. Possible values - include: "Query", "QueryAll". 
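# A short sketch (not part of the patched sources) tying together the Salesforce source
# and sink models defined above: a source that also reads deleted records (QueryAll) and
# a sink configured for upsert on a custom external-ID field. The SOQL query and the
# External_Id__c field name are hypothetical.
from azure.mgmt.datafactory.models import SalesforceSink, SalesforceSource

sf_source = SalesforceSource(
    query="SELECT Id, Name FROM Account",
    read_behavior="QueryAll",
)
sf_sink = SalesforceSink(
    write_behavior="Upsert",
    external_id_field_name="External_Id__c",  # hypothetical external ID field
    ignore_null_values=True,                  # keep existing destination values when input is NULL
)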
- :type read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SalesforceSource, self).__init__(**kwargs) - self.type = 'SalesforceSource' # type: str - self.query = kwargs.get('query', None) - self.read_behavior = kwargs.get('read_behavior', None) - - -class SapBwCubeDataset(Dataset): - """The SAP BW cube dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - } - - def __init__( - self, - **kwargs - ): - super(SapBwCubeDataset, self).__init__(**kwargs) - self.type = 'SapBwCube' # type: str - - -class SapBwLinkedService(LinkedService): - """SAP Business Warehouse Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. 
- :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param server: Required. Host name of the SAP BW instance. Type: string (or Expression with - resultType string). - :type server: object - :param system_number: Required. System number of the BW system. (Usually a two-digit decimal - number represented as a string.) Type: string (or Expression with resultType string). - :type system_number: object - :param client_id: Required. Client ID of the client on the BW system. (Usually a three-digit - decimal number represented as a string) Type: string (or Expression with resultType string). - :type client_id: object - :param user_name: Username to access the SAP BW server. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password to access the SAP BW server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'system_number': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SapBwLinkedService, self).__init__(**kwargs) - self.type = 'SapBW' # type: str - self.server = kwargs['server'] - self.system_number = kwargs['system_number'] - self.client_id = kwargs['client_id'] - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class SapBwSource(TabularSource): - """A copy activity source for SapBW server via MDX. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. 
- :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: MDX query. Type: string (or Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SapBwSource, self).__init__(**kwargs) - self.type = 'SapBwSource' # type: str - self.query = kwargs.get('query', None) - - -class SapCloudForCustomerLinkedService(LinkedService): - """Linked service for SAP Cloud for Customer. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. The URL of SAP Cloud for Customer OData API. For example, - '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with - resultType string). - :type url: object - :param username: The username for Basic authentication. Type: string (or Expression with - resultType string). - :type username: object - :param password: The password for Basic authentication. 
- :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Either encryptedCredential or - username/password must be provided. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SapCloudForCustomerLinkedService, self).__init__(**kwargs) - self.type = 'SapCloudForCustomer' # type: str - self.url = kwargs['url'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class SapCloudForCustomerResourceDataset(Dataset): - """The path of the SAP Cloud for Customer OData entity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param path: Required. The path of the SAP Cloud for Customer OData entity. Type: string (or - Expression with resultType string). 
- :type path: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SapCloudForCustomerResourceDataset, self).__init__(**kwargs) - self.type = 'SapCloudForCustomerResource' # type: str - self.path = kwargs['path'] - - -class SapCloudForCustomerSink(CopySink): - """A copy activity SAP Cloud for Customer sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param write_behavior: The write behavior for the operation. Default is 'Insert'. Possible - values include: "Insert", "Update". - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout - to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string - (or Expression with resultType string), pattern: - ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type http_request_timeout: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SapCloudForCustomerSink, self).__init__(**kwargs) - self.type = 'SapCloudForCustomerSink' # type: str - self.write_behavior = kwargs.get('write_behavior', None) - self.http_request_timeout = kwargs.get('http_request_timeout', None) - - -class SapCloudForCustomerSource(TabularSource): - """A copy activity source for SAP Cloud for Customer source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or - Expression with resultType string). - :type query: object - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout - to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string - (or Expression with resultType string), pattern: - ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type http_request_timeout: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SapCloudForCustomerSource, self).__init__(**kwargs) - self.type = 'SapCloudForCustomerSource' # type: str - self.query = kwargs.get('query', None) - self.http_request_timeout = kwargs.get('http_request_timeout', None) - - -class SapEccLinkedService(LinkedService): - """Linked service for SAP ERP Central Component(SAP ECC). - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. The URL of SAP ECC OData API. For example, - '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with - resultType string). - :type url: str - :param username: The username for Basic authentication. Type: string (or Expression with - resultType string). - :type username: str - :param password: The password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Either encryptedCredential or - username/password must be provided. Type: string (or Expression with resultType string). 
- :type encrypted_credential: str - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'str'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SapEccLinkedService, self).__init__(**kwargs) - self.type = 'SapEcc' # type: str - self.url = kwargs['url'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class SapEccResourceDataset(Dataset): - """The path of the SAP ECC OData entity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with - resultType string). 
- :type path: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SapEccResourceDataset, self).__init__(**kwargs) - self.type = 'SapEccResource' # type: str - self.path = kwargs['path'] - - -class SapEccSource(TabularSource): - """A copy activity source for SAP ECC source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with - resultType string). - :type query: object - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout - to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string - (or Expression with resultType string), pattern: - ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type http_request_timeout: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SapEccSource, self).__init__(**kwargs) - self.type = 'SapEccSource' # type: str - self.query = kwargs.get('query', None) - self.http_request_timeout = kwargs.get('http_request_timeout', None) - - -class SapHanaLinkedService(LinkedService): - """SAP HANA Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: SAP HANA ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param server: Host name of the SAP HANA server. Type: string (or Expression with resultType - string). - :type server: object - :param authentication_type: The authentication type to be used to connect to the SAP HANA - server. Possible values include: "Basic", "Windows". - :type authentication_type: str or ~azure.mgmt.datafactory.models.SapHanaAuthenticationType - :param user_name: Username to access the SAP HANA server. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password to access the SAP HANA server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SapHanaLinkedService, self).__init__(**kwargs) - self.type = 'SapHana' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.server = kwargs.get('server', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class SapHanaPartitionSettings(msrest.serialization.Model): - """The settings that will be leveraged for SAP HANA source partitioning. - - :param partition_column_name: The name of the column that will be used for proceeding range - partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: object - """ - - _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SapHanaPartitionSettings, self).__init__(**kwargs) - self.partition_column_name = kwargs.get('partition_column_name', None) - - -class SapHanaSource(TabularSource): - """A copy activity source for SAP HANA source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. 
Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). - :type query: object - :param packet_size: The packet size of data read from SAP HANA. Type: integer(or Expression - with resultType integer). - :type packet_size: object - :param partition_option: The partition mechanism that will be used for SAP HANA read in - parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange". - :type partition_option: object - :param partition_settings: The settings that will be leveraged for SAP HANA source - partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.SapHanaPartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'packet_size': {'key': 'packetSize', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SapHanaPartitionSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(SapHanaSource, self).__init__(**kwargs) - self.type = 'SapHanaSource' # type: str - self.query = kwargs.get('query', None) - self.packet_size = kwargs.get('packet_size', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) - - -class SapHanaTableDataset(Dataset): - """SAP HANA Table properties. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param schema_type_properties_schema: The schema name of SAP HANA. 
Type: string (or Expression - with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of SAP HANA. Type: string (or Expression with resultType string). - :type table: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SapHanaTableDataset, self).__init__(**kwargs) - self.type = 'SapHanaTable' # type: str - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - self.table = kwargs.get('table', None) - - -class SapOpenHubLinkedService(LinkedService): - """SAP Business Warehouse Open Hub Destination Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param server: Host name of the SAP BW instance where the open hub destination is located. - Type: string (or Expression with resultType string). - :type server: object - :param system_number: System number of the BW system where the open hub destination is located. - (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with - resultType string). - :type system_number: object - :param client_id: Client ID of the client on the BW system where the open hub destination is - located. (Usually a three-digit decimal number represented as a string) Type: string (or - Expression with resultType string). - :type client_id: object - :param language: Language of the BW system where the open hub destination is located. The - default value is EN. Type: string (or Expression with resultType string). - :type language: object - :param system_id: SystemID of the SAP system where the table is located. Type: string (or - Expression with resultType string). - :type system_id: object - :param user_name: Username to access the SAP BW server where the open hub destination is - located. Type: string (or Expression with resultType string). - :type user_name: object - :param password: Password to access the SAP BW server where the open hub destination is - located. 
- :type password: ~azure.mgmt.datafactory.models.SecretBase - :param message_server: The hostname of the SAP Message Server. Type: string (or Expression with - resultType string). - :type message_server: object - :param message_server_service: The service name or port number of the Message Server. Type: - string (or Expression with resultType string). - :type message_server_service: object - :param logon_group: The Logon Group for the SAP System. Type: string (or Expression with - resultType string). - :type logon_group: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'language': {'key': 'typeProperties.language', 'type': 'object'}, - 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, - 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, - 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SapOpenHubLinkedService, self).__init__(**kwargs) - self.type = 'SapOpenHub' # type: str - self.server = kwargs.get('server', None) - self.system_number = kwargs.get('system_number', None) - self.client_id = kwargs.get('client_id', None) - self.language = kwargs.get('language', None) - self.system_id = kwargs.get('system_id', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.message_server = kwargs.get('message_server', None) - self.message_server_service = kwargs.get('message_server_service', None) - self.logon_group = kwargs.get('logon_group', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class SapOpenHubSource(TabularSource): - """A copy activity source for SAP Business Warehouse Open Hub Destination source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param exclude_last_request: Whether to exclude the records of the last request. The default - value is true. Type: boolean (or Expression with resultType boolean). - :type exclude_last_request: object - :param base_request_id: The ID of request for delta loading. Once it is set, only data with - requestId larger than the value of this property will be retrieved. The default value is 0. - Type: integer (or Expression with resultType integer ). - :type base_request_id: object - :param custom_rfc_read_table_function_module: Specifies the custom RFC function module that - will be used to read data from SAP Table. Type: string (or Expression with resultType string). - :type custom_rfc_read_table_function_module: object - :param sap_data_column_delimiter: The single character that will be used as delimiter passed to - SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with - resultType string). - :type sap_data_column_delimiter: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, - 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, - 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, - 'sap_data_column_delimiter': {'key': 'sapDataColumnDelimiter', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SapOpenHubSource, self).__init__(**kwargs) - self.type = 'SapOpenHubSource' # type: str - self.exclude_last_request = kwargs.get('exclude_last_request', None) - self.base_request_id = kwargs.get('base_request_id', None) - self.custom_rfc_read_table_function_module = kwargs.get('custom_rfc_read_table_function_module', None) - self.sap_data_column_delimiter = kwargs.get('sap_data_column_delimiter', None) - - -class SapOpenHubTableDataset(Dataset): - """Sap Business Warehouse Open Hub Destination Table properties. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param open_hub_destination_name: Required. The name of the Open Hub Destination with - destination type as Database Table. Type: string (or Expression with resultType string). - :type open_hub_destination_name: object - :param exclude_last_request: Whether to exclude the records of the last request. The default - value is true. Type: boolean (or Expression with resultType boolean). - :type exclude_last_request: object - :param base_request_id: The ID of request for delta loading. Once it is set, only data with - requestId larger than the value of this property will be retrieved. The default value is 0. - Type: integer (or Expression with resultType integer ). - :type base_request_id: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'open_hub_destination_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, - 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, - 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SapOpenHubTableDataset, self).__init__(**kwargs) - self.type = 'SapOpenHubTable' # type: str - self.open_hub_destination_name = kwargs['open_hub_destination_name'] - self.exclude_last_request = kwargs.get('exclude_last_request', None) - self.base_request_id = kwargs.get('base_request_id', None) - - -class SapTableLinkedService(LinkedService): - """SAP Table Linked Service. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param server: Host name of the SAP instance where the table is located. Type: string (or - Expression with resultType string). - :type server: object - :param system_number: System number of the SAP system where the table is located. (Usually a - two-digit decimal number represented as a string.) Type: string (or Expression with resultType - string). - :type system_number: object - :param client_id: Client ID of the client on the SAP system where the table is located. - (Usually a three-digit decimal number represented as a string) Type: string (or Expression with - resultType string). - :type client_id: object - :param language: Language of the SAP system where the table is located. The default value is - EN. Type: string (or Expression with resultType string). - :type language: object - :param system_id: SystemID of the SAP system where the table is located. Type: string (or - Expression with resultType string). - :type system_id: object - :param user_name: Username to access the SAP server where the table is located. Type: string - (or Expression with resultType string). - :type user_name: object - :param password: Password to access the SAP server where the table is located. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param message_server: The hostname of the SAP Message Server. Type: string (or Expression with - resultType string). - :type message_server: object - :param message_server_service: The service name or port number of the Message Server. Type: - string (or Expression with resultType string). - :type message_server_service: object - :param snc_mode: SNC activation indicator to access the SAP server where the table is located. - Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). - :type snc_mode: object - :param snc_my_name: Initiator's SNC name to access the SAP server where the table is located. - Type: string (or Expression with resultType string). - :type snc_my_name: object - :param snc_partner_name: Communication partner's SNC name to access the SAP server where the - table is located. Type: string (or Expression with resultType string). - :type snc_partner_name: object - :param snc_library_path: External security product's library to access the SAP server where the - table is located. Type: string (or Expression with resultType string). - :type snc_library_path: object - :param snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string - (or Expression with resultType string). - :type snc_qop: object - :param logon_group: The Logon Group for the SAP System. Type: string (or Expression with - resultType string). - :type logon_group: object - :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'language': {'key': 'typeProperties.language', 'type': 'object'}, - 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, - 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, - 'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'}, - 'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'}, - 'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'}, - 'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'}, - 'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'}, - 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SapTableLinkedService, self).__init__(**kwargs) - self.type = 'SapTable' # type: str - self.server = kwargs.get('server', None) - self.system_number = kwargs.get('system_number', None) - self.client_id = kwargs.get('client_id', None) - self.language = kwargs.get('language', None) - self.system_id = kwargs.get('system_id', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.message_server = kwargs.get('message_server', None) - self.message_server_service = kwargs.get('message_server_service', None) - self.snc_mode = kwargs.get('snc_mode', None) - self.snc_my_name = kwargs.get('snc_my_name', None) - self.snc_partner_name = kwargs.get('snc_partner_name', None) - self.snc_library_path = kwargs.get('snc_library_path', None) - self.snc_qop = kwargs.get('snc_qop', None) - self.logon_group = kwargs.get('logon_group', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class SapTablePartitionSettings(msrest.serialization.Model): - """The settings that will be leveraged for SAP table source partitioning. - - :param partition_column_name: The name of the column that will be used for proceeding range - partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in partitionColumnName that - will be used for proceeding range partitioning. Type: string (or Expression with resultType - string). 
- :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in partitionColumnName that - will be used for proceeding range partitioning. Type: string (or Expression with resultType - string). - :type partition_lower_bound: object - :param max_partitions_number: The maximum value of partitions the table will be split into. - Type: integer (or Expression with resultType string). - :type max_partitions_number: object - """ - - _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SapTablePartitionSettings, self).__init__(**kwargs) - self.partition_column_name = kwargs.get('partition_column_name', None) - self.partition_upper_bound = kwargs.get('partition_upper_bound', None) - self.partition_lower_bound = kwargs.get('partition_lower_bound', None) - self.max_partitions_number = kwargs.get('max_partitions_number', None) - - -class SapTableResourceDataset(Dataset): - """SAP Table Resource properties. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: Required. The name of the SAP Table. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'table_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SapTableResourceDataset, self).__init__(**kwargs) - self.type = 'SapTableResource' # type: str - self.table_name = kwargs['table_name'] - - -class SapTableSource(TabularSource): - """A copy activity source for SAP Table source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param row_count: The number of rows to be retrieved. Type: integer(or Expression with - resultType integer). - :type row_count: object - :param row_skips: The number of rows that will be skipped. Type: integer (or Expression with - resultType integer). - :type row_skips: object - :param rfc_table_fields: The fields of the SAP table that will be retrieved. For example, - column0, column1. Type: string (or Expression with resultType string). - :type rfc_table_fields: object - :param rfc_table_options: The options for the filtering of the SAP Table. For example, COLUMN0 - EQ SOME VALUE. Type: string (or Expression with resultType string). - :type rfc_table_options: object - :param batch_size: Specifies the maximum number of rows that will be retrieved at a time when - retrieving data from SAP Table. Type: integer (or Expression with resultType integer). 
- :type batch_size: object - :param custom_rfc_read_table_function_module: Specifies the custom RFC function module that - will be used to read data from SAP Table. Type: string (or Expression with resultType string). - :type custom_rfc_read_table_function_module: object - :param sap_data_column_delimiter: The single character that will be used as delimiter passed to - SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with - resultType string). - :type sap_data_column_delimiter: object - :param partition_option: The partition mechanism that will be used for SAP table read in - parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", - "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". - :type partition_option: object - :param partition_settings: The settings that will be leveraged for SAP table source - partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.SapTablePartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'row_count': {'key': 'rowCount', 'type': 'object'}, - 'row_skips': {'key': 'rowSkips', 'type': 'object'}, - 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, - 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, - 'batch_size': {'key': 'batchSize', 'type': 'object'}, - 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, - 'sap_data_column_delimiter': {'key': 'sapDataColumnDelimiter', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(SapTableSource, self).__init__(**kwargs) - self.type = 'SapTableSource' # type: str - self.row_count = kwargs.get('row_count', None) - self.row_skips = kwargs.get('row_skips', None) - self.rfc_table_fields = kwargs.get('rfc_table_fields', None) - self.rfc_table_options = kwargs.get('rfc_table_options', None) - self.batch_size = kwargs.get('batch_size', None) - self.custom_rfc_read_table_function_module = kwargs.get('custom_rfc_read_table_function_module', None) - self.sap_data_column_delimiter = kwargs.get('sap_data_column_delimiter', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) - - -class ScheduleTrigger(MultiplePipelineTrigger): - """Trigger that creates pipeline runs periodically, on schedule. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Trigger type.Constant filled by server. 
- :type type: str - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] - :param pipelines: Pipelines that need to be started. - :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param recurrence: Required. Recurrence schedule configuration. - :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence - """ - - _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, - 'recurrence': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, - } - - def __init__( - self, - **kwargs - ): - super(ScheduleTrigger, self).__init__(**kwargs) - self.type = 'ScheduleTrigger' # type: str - self.recurrence = kwargs['recurrence'] - - -class ScheduleTriggerRecurrence(msrest.serialization.Model): - """The workflow trigger recurrence. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param frequency: The frequency. Possible values include: "NotSpecified", "Minute", "Hour", - "Day", "Week", "Month", "Year". - :type frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency - :param interval: The interval. - :type interval: int - :param start_time: The start time. - :type start_time: ~datetime.datetime - :param end_time: The end time. - :type end_time: ~datetime.datetime - :param time_zone: The time zone. - :type time_zone: str - :param schedule: The recurrence schedule. - :type schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'frequency': {'key': 'frequency', 'type': 'str'}, - 'interval': {'key': 'interval', 'type': 'int'}, - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, - 'time_zone': {'key': 'timeZone', 'type': 'str'}, - 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, - } - - def __init__( - self, - **kwargs - ): - super(ScheduleTriggerRecurrence, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.frequency = kwargs.get('frequency', None) - self.interval = kwargs.get('interval', None) - self.start_time = kwargs.get('start_time', None) - self.end_time = kwargs.get('end_time', None) - self.time_zone = kwargs.get('time_zone', None) - self.schedule = kwargs.get('schedule', None) - - -class ScriptAction(msrest.serialization.Model): - """Custom script action to run on HDI ondemand cluster once it's up. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. The user provided name of the script action. 
- :type name: str - :param uri: Required. The URI for the script action. - :type uri: str - :param roles: Required. The node types on which the script action should be executed. - :type roles: str - :param parameters: The parameters for the script action. - :type parameters: str - """ - - _validation = { - 'name': {'required': True}, - 'uri': {'required': True}, - 'roles': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'roles': {'key': 'roles', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ScriptAction, self).__init__(**kwargs) - self.name = kwargs['name'] - self.uri = kwargs['uri'] - self.roles = kwargs['roles'] - self.parameters = kwargs.get('parameters', None) - - -class SecureString(SecretBase): - """Azure Data Factory secure string definition. The string value will be masked with asterisks '*' during Get or List API calls. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Type of the secret.Constant filled by server. - :type type: str - :param value: Required. Value of secure string. - :type value: str - """ - - _validation = { - 'type': {'required': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SecureString, self).__init__(**kwargs) - self.type = 'SecureString' # type: str - self.value = kwargs['value'] - - -class SelfDependencyTumblingWindowTriggerReference(DependencyReference): - """Self referenced tumbling window trigger dependency. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. The type of dependency reference.Constant filled by server. - :type type: str - :param offset: Required. Timespan applied to the start time of a tumbling window when - evaluating dependency. - :type offset: str - :param size: The size of the window when evaluating the dependency. If undefined the frequency - of the tumbling window will be used. - :type size: str - """ - - _validation = { - 'type': {'required': True}, - 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'-((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'offset': {'key': 'offset', 'type': 'str'}, - 'size': {'key': 'size', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) - self.type = 'SelfDependencyTumblingWindowTriggerReference' # type: str - self.offset = kwargs['offset'] - self.size = kwargs.get('size', None) - - -class SelfHostedIntegrationRuntime(IntegrationRuntime): - """Self-hosted integration runtime. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of integration runtime.Constant filled by server. Possible values - include: "Managed", "SelfHosted". - :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType - :param description: Integration runtime description. 
- :type description: str - :param linked_info: The base definition of a linked integration runtime. - :type linked_info: ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'}, - } - - def __init__( - self, - **kwargs - ): - super(SelfHostedIntegrationRuntime, self).__init__(**kwargs) - self.type = 'SelfHosted' # type: str - self.linked_info = kwargs.get('linked_info', None) - - -class SelfHostedIntegrationRuntimeNode(msrest.serialization.Model): - """Properties of Self-hosted integration runtime node. - - Variables are only populated by the server, and will be ignored when sending a request. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :ivar node_name: Name of the integration runtime node. - :vartype node_name: str - :ivar machine_name: Machine name of the integration runtime node. - :vartype machine_name: str - :ivar host_service_uri: URI for the host machine of the integration runtime. - :vartype host_service_uri: str - :ivar status: Status of the integration runtime node. Possible values include: - "NeedRegistration", "Online", "Limited", "Offline", "Upgrading", "Initializing", - "InitializeFailed". - :vartype status: str or ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus - :ivar capabilities: The integration runtime capabilities dictionary. - :vartype capabilities: dict[str, str] - :ivar version_status: Status of the integration runtime node version. - :vartype version_status: str - :ivar version: Version of the integration runtime node. - :vartype version: str - :ivar register_time: The time at which the integration runtime node was registered in ISO8601 - format. - :vartype register_time: ~datetime.datetime - :ivar last_connect_time: The most recent time at which the integration runtime was connected in - ISO8601 format. - :vartype last_connect_time: ~datetime.datetime - :ivar expiry_time: The time at which the integration runtime will expire in ISO8601 format. - :vartype expiry_time: ~datetime.datetime - :ivar last_start_time: The time the node last started up. - :vartype last_start_time: ~datetime.datetime - :ivar last_stop_time: The integration runtime node last stop time. - :vartype last_stop_time: ~datetime.datetime - :ivar last_update_result: The result of the last integration runtime node update. Possible - values include: "None", "Succeed", "Fail". - :vartype last_update_result: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeUpdateResult - :ivar last_start_update_time: The last time for the integration runtime node update start. - :vartype last_start_update_time: ~datetime.datetime - :ivar last_end_update_time: The last time for the integration runtime node update end. - :vartype last_end_update_time: ~datetime.datetime - :ivar is_active_dispatcher: Indicates whether this node is the active dispatcher for - integration runtime requests. - :vartype is_active_dispatcher: bool - :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration runtime node. - :vartype concurrent_jobs_limit: int - :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration runtime. 
- :vartype max_concurrent_jobs: int - """ - - _validation = { - 'node_name': {'readonly': True}, - 'machine_name': {'readonly': True}, - 'host_service_uri': {'readonly': True}, - 'status': {'readonly': True}, - 'capabilities': {'readonly': True}, - 'version_status': {'readonly': True}, - 'version': {'readonly': True}, - 'register_time': {'readonly': True}, - 'last_connect_time': {'readonly': True}, - 'expiry_time': {'readonly': True}, - 'last_start_time': {'readonly': True}, - 'last_stop_time': {'readonly': True}, - 'last_update_result': {'readonly': True}, - 'last_start_update_time': {'readonly': True}, - 'last_end_update_time': {'readonly': True}, - 'is_active_dispatcher': {'readonly': True}, - 'concurrent_jobs_limit': {'readonly': True}, - 'max_concurrent_jobs': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'node_name': {'key': 'nodeName', 'type': 'str'}, - 'machine_name': {'key': 'machineName', 'type': 'str'}, - 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'capabilities': {'key': 'capabilities', 'type': '{str}'}, - 'version_status': {'key': 'versionStatus', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, - 'register_time': {'key': 'registerTime', 'type': 'iso-8601'}, - 'last_connect_time': {'key': 'lastConnectTime', 'type': 'iso-8601'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'last_start_time': {'key': 'lastStartTime', 'type': 'iso-8601'}, - 'last_stop_time': {'key': 'lastStopTime', 'type': 'iso-8601'}, - 'last_update_result': {'key': 'lastUpdateResult', 'type': 'str'}, - 'last_start_update_time': {'key': 'lastStartUpdateTime', 'type': 'iso-8601'}, - 'last_end_update_time': {'key': 'lastEndUpdateTime', 'type': 'iso-8601'}, - 'is_active_dispatcher': {'key': 'isActiveDispatcher', 'type': 'bool'}, - 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, - 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.node_name = None - self.machine_name = None - self.host_service_uri = None - self.status = None - self.capabilities = None - self.version_status = None - self.version = None - self.register_time = None - self.last_connect_time = None - self.expiry_time = None - self.last_start_time = None - self.last_stop_time = None - self.last_update_result = None - self.last_start_update_time = None - self.last_end_update_time = None - self.is_active_dispatcher = None - self.concurrent_jobs_limit = None - self.max_concurrent_jobs = None - - -class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): - """Self-hosted integration runtime status. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of integration runtime.Constant filled by server. Possible values - include: "Managed", "SelfHosted". - :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType - :ivar data_factory_name: The data factory name which the integration runtime belong to. 
- :vartype data_factory_name: str - :ivar state: The state of integration runtime. Possible values include: "Initial", "Stopped", - "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", - "AccessDenied". - :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :ivar create_time: The time at which the integration runtime was created, in ISO8601 format. - :vartype create_time: ~datetime.datetime - :ivar task_queue_id: The task queue id of the integration runtime. - :vartype task_queue_id: str - :ivar internal_channel_encryption: It is used to set the encryption mode for node-node - communication channel (when more than 2 self-hosted integration runtime nodes exist). Possible - values include: "NotSet", "SslEncrypted", "NotEncrypted". - :vartype internal_channel_encryption: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode - :ivar version: Version of the integration runtime. - :vartype version: str - :param nodes: The list of nodes for this integration runtime. - :type nodes: list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode] - :ivar scheduled_update_date: The date at which the integration runtime will be scheduled to - update, in ISO8601 format. - :vartype scheduled_update_date: ~datetime.datetime - :ivar update_delay_offset: The time in the date scheduled by service to update the integration - runtime, e.g., PT03H is 3 hours. - :vartype update_delay_offset: str - :ivar local_time_zone_offset: The local time zone offset in hours. - :vartype local_time_zone_offset: str - :ivar capabilities: Object with additional information about integration runtime capabilities. - :vartype capabilities: dict[str, str] - :ivar service_urls: The URLs for the services used in integration runtime backend service. - :vartype service_urls: list[str] - :ivar auto_update: Whether Self-hosted integration runtime auto update has been turned on. - Possible values include: "On", "Off". - :vartype auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate - :ivar version_status: Status of the integration runtime version. - :vartype version_status: str - :param links: The list of linked integration runtimes that are created to share with this - integration runtime. - :type links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime] - :ivar pushed_version: The version that the integration runtime is going to update to. - :vartype pushed_version: str - :ivar latest_version: The latest version on download center. - :vartype latest_version: str - :ivar auto_update_eta: The estimated time when the self-hosted integration runtime will be - updated. 
- :vartype auto_update_eta: ~datetime.datetime - """ - - _validation = { - 'type': {'required': True}, - 'data_factory_name': {'readonly': True}, - 'state': {'readonly': True}, - 'create_time': {'readonly': True}, - 'task_queue_id': {'readonly': True}, - 'internal_channel_encryption': {'readonly': True}, - 'version': {'readonly': True}, - 'scheduled_update_date': {'readonly': True}, - 'update_delay_offset': {'readonly': True}, - 'local_time_zone_offset': {'readonly': True}, - 'capabilities': {'readonly': True}, - 'service_urls': {'readonly': True}, - 'auto_update': {'readonly': True}, - 'version_status': {'readonly': True}, - 'pushed_version': {'readonly': True}, - 'latest_version': {'readonly': True}, - 'auto_update_eta': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, - 'task_queue_id': {'key': 'typeProperties.taskQueueId', 'type': 'str'}, - 'internal_channel_encryption': {'key': 'typeProperties.internalChannelEncryption', 'type': 'str'}, - 'version': {'key': 'typeProperties.version', 'type': 'str'}, - 'nodes': {'key': 'typeProperties.nodes', 'type': '[SelfHostedIntegrationRuntimeNode]'}, - 'scheduled_update_date': {'key': 'typeProperties.scheduledUpdateDate', 'type': 'iso-8601'}, - 'update_delay_offset': {'key': 'typeProperties.updateDelayOffset', 'type': 'str'}, - 'local_time_zone_offset': {'key': 'typeProperties.localTimeZoneOffset', 'type': 'str'}, - 'capabilities': {'key': 'typeProperties.capabilities', 'type': '{str}'}, - 'service_urls': {'key': 'typeProperties.serviceUrls', 'type': '[str]'}, - 'auto_update': {'key': 'typeProperties.autoUpdate', 'type': 'str'}, - 'version_status': {'key': 'typeProperties.versionStatus', 'type': 'str'}, - 'links': {'key': 'typeProperties.links', 'type': '[LinkedIntegrationRuntime]'}, - 'pushed_version': {'key': 'typeProperties.pushedVersion', 'type': 'str'}, - 'latest_version': {'key': 'typeProperties.latestVersion', 'type': 'str'}, - 'auto_update_eta': {'key': 'typeProperties.autoUpdateETA', 'type': 'iso-8601'}, - } - - def __init__( - self, - **kwargs - ): - super(SelfHostedIntegrationRuntimeStatus, self).__init__(**kwargs) - self.type = 'SelfHosted' # type: str - self.create_time = None - self.task_queue_id = None - self.internal_channel_encryption = None - self.version = None - self.nodes = kwargs.get('nodes', None) - self.scheduled_update_date = None - self.update_delay_offset = None - self.local_time_zone_offset = None - self.capabilities = None - self.service_urls = None - self.auto_update = None - self.version_status = None - self.links = kwargs.get('links', None) - self.pushed_version = None - self.latest_version = None - self.auto_update_eta = None - - -class ServiceNowLinkedService(LinkedService): - """ServiceNow server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param endpoint: Required. The endpoint of the ServiceNow server. (i.e. - :code:``.service-now.com). - :type endpoint: object - :param authentication_type: Required. The authentication type to use. Possible values include: - "Basic", "OAuth2". - :type authentication_type: str or ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType - :param username: The user name used to connect to the ServiceNow server for Basic and OAuth2 - authentication. - :type username: object - :param password: The password corresponding to the user name for Basic and OAuth2 - authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param client_id: The client id for OAuth2 authentication. - :type client_id: object - :param client_secret: The client secret for OAuth2 authentication. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ServiceNowLinkedService, self).__init__(**kwargs) - self.type = 'ServiceNow' # type: str - self.endpoint = kwargs['endpoint'] - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class ServiceNowObjectDataset(Dataset): - """ServiceNow server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ServiceNowObjectDataset, self).__init__(**kwargs) - self.type = 'ServiceNowObject' # type: str - self.table_name = kwargs.get('table_name', None) - - -class ServiceNowSource(TabularSource): - """A copy activity ServiceNow server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ServiceNowSource, self).__init__(**kwargs) - self.type = 'ServiceNowSource' # type: str - self.query = kwargs.get('query', None) - - -class ServicePrincipalCredential(Credential): - """Service principal credential. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of credential.Constant filled by server. - :type type: str - :param description: Credential description. - :type description: str - :param annotations: List of tags that can be used for describing the Credential. - :type annotations: list[object] - :param service_principal_id: The app ID of the service principal used to authenticate. - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate. - :type service_principal_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param tenant: The ID of the tenant to which the service principal belongs. - :type tenant: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'AzureKeyVaultSecretReference'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ServicePrincipalCredential, self).__init__(**kwargs) - self.type = 'ServicePrincipal' # type: str - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - - -class SetVariableActivity(Activity): - """Set value for a Variable. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. 
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param variable_name: Name of the variable whose value needs to be set. - :type variable_name: str - :param value: Value to be set. Could be a static value or Expression. - :type value: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'value': {'key': 'typeProperties.value', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SetVariableActivity, self).__init__(**kwargs) - self.type = 'SetVariable' # type: str - self.variable_name = kwargs.get('variable_name', None) - self.value = kwargs.get('value', None) - - -class SftpLocation(DatasetLocation): - """The location of SFTP dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType - string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SftpLocation, self).__init__(**kwargs) - self.type = 'SftpLocation' # type: str - - -class SftpReadSettings(StoreReadSettings): - """Sftp read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param recursive: If true, files under the folder path will be read recursively. Default is - true. Type: boolean (or Expression with resultType boolean). - :type recursive: object - :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or Expression with - resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType - string). 
- :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: - string (or Expression with resultType string). - :type partition_root_path: object - :param file_list_path: Point to a text file that lists each file (relative path to the path - configured in the dataset) that you want to copy. Type: string (or Expression with resultType - string). - :type file_list_path: object - :param delete_files_after_completion: Indicates whether the source files need to be deleted - after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object - :param modified_datetime_start: The start of file's modified datetime. Type: string (or - Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression - with resultType string). - :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SftpReadSettings, self).__init__(**kwargs) - self.type = 'SftpReadSettings' # type: str - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.partition_root_path = kwargs.get('partition_root_path', None) - self.file_list_path = kwargs.get('file_list_path', None) - self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - - -class SftpServerLinkedService(LinkedService): - """A linked service for an SSH File Transfer Protocol (SFTP) server. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. 
- :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The SFTP server host name. Type: string (or Expression with resultType - string). - :type host: object - :param port: The TCP port number that the SFTP server uses to listen for client connections. - Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param authentication_type: The authentication type to be used to connect to the FTP server. - Possible values include: "Basic", "SshPublicKey", "MultiFactor". - :type authentication_type: str or ~azure.mgmt.datafactory.models.SftpAuthenticationType - :param user_name: The username used to log on to the SFTP server. Type: string (or Expression - with resultType string). - :type user_name: object - :param password: Password to logon the SFTP server for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param private_key_path: The SSH private key file path for SshPublicKey authentication. Only - valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either - PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH - format. Type: string (or Expression with resultType string). - :type private_key_path: object - :param private_key_content: Base64 encoded SSH private key content for SshPublicKey - authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or - PrivateKeyContent should be specified. SSH private key should be OpenSSH format. - :type private_key_content: ~azure.mgmt.datafactory.models.SecretBase - :param pass_phrase: The password to decrypt the SSH private key if the SSH private key is - encrypted. - :type pass_phrase: ~azure.mgmt.datafactory.models.SecretBase - :param skip_host_key_validation: If true, skip the SSH host key validation. Default value is - false. Type: boolean (or Expression with resultType boolean). - :type skip_host_key_validation: object - :param host_key_fingerprint: The host key finger-print of the SFTP server. When - SkipHostKeyValidation is false, HostKeyFingerprint should be specified. Type: string (or - Expression with resultType string). 
- :type host_key_fingerprint: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'private_key_path': {'key': 'typeProperties.privateKeyPath', 'type': 'object'}, - 'private_key_content': {'key': 'typeProperties.privateKeyContent', 'type': 'SecretBase'}, - 'pass_phrase': {'key': 'typeProperties.passPhrase', 'type': 'SecretBase'}, - 'skip_host_key_validation': {'key': 'typeProperties.skipHostKeyValidation', 'type': 'object'}, - 'host_key_fingerprint': {'key': 'typeProperties.hostKeyFingerprint', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SftpServerLinkedService, self).__init__(**kwargs) - self.type = 'Sftp' # type: str - self.host = kwargs['host'] - self.port = kwargs.get('port', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.private_key_path = kwargs.get('private_key_path', None) - self.private_key_content = kwargs.get('private_key_content', None) - self.pass_phrase = kwargs.get('pass_phrase', None) - self.skip_host_key_validation = kwargs.get('skip_host_key_validation', None) - self.host_key_fingerprint = kwargs.get('host_key_fingerprint', None) - - -class SftpWriteSettings(StoreWriteSettings): - """Sftp write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param operation_timeout: Specifies the timeout for writing each chunk to SFTP server. Default - value: 01:00:00 (one hour). Type: string (or Expression with resultType string). - :type operation_timeout: object - :param use_temp_file_rename: Upload to temporary file(s) and rename. Disable this option if - your SFTP server doesn't support rename operation. Type: boolean (or Expression with resultType - boolean). 
- :type use_temp_file_rename: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'operation_timeout': {'key': 'operationTimeout', 'type': 'object'}, - 'use_temp_file_rename': {'key': 'useTempFileRename', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SftpWriteSettings, self).__init__(**kwargs) - self.type = 'SftpWriteSettings' # type: str - self.operation_timeout = kwargs.get('operation_timeout', None) - self.use_temp_file_rename = kwargs.get('use_temp_file_rename', None) - - -class SharePointOnlineListLinkedService(LinkedService): - """SharePoint Online List linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param site_url: Required. The URL of the SharePoint Online site. For example, - https://contoso.sharepoint.com/sites/siteName. Type: string (or Expression with resultType - string). - :type site_url: object - :param tenant_id: Required. The tenant ID under which your application resides. You can find it - from Azure portal Active Directory overview page. Type: string (or Expression with resultType - string). - :type tenant_id: object - :param service_principal_id: Required. The application (client) ID of your application - registered in Azure Active Directory. Make sure to grant SharePoint site permission to this - application. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. The client secret of your application registered in - Azure Active Directory. Type: string (or Expression with resultType string). - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'site_url': {'required': True}, - 'tenant_id': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'site_url': {'key': 'typeProperties.siteUrl', 'type': 'object'}, - 'tenant_id': {'key': 'typeProperties.tenantId', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SharePointOnlineListLinkedService, self).__init__(**kwargs) - self.type = 'SharePointOnlineList' # type: str - self.site_url = kwargs['site_url'] - self.tenant_id = kwargs['tenant_id'] - self.service_principal_id = kwargs['service_principal_id'] - self.service_principal_key = kwargs['service_principal_key'] - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class SharePointOnlineListResourceDataset(Dataset): - """The sharepoint online list resource dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param list_name: The name of the SharePoint Online list. Type: string (or Expression with - resultType string). 
- :type list_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'list_name': {'key': 'typeProperties.listName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SharePointOnlineListResourceDataset, self).__init__(**kwargs) - self.type = 'SharePointOnlineListResource' # type: str - self.list_name = kwargs.get('list_name', None) - - -class SharePointOnlineListSource(CopySource): - """A copy activity source for sharePoint online list source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query: The OData query to filter the data in SharePoint Online list. For example, - "$top=1". Type: string (or Expression with resultType string). - :type query: object - :param http_request_timeout: The wait time to get a response from SharePoint Online. Default - value is 5 minutes (00:05:00). Type: string (or Expression with resultType string), pattern: - ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type http_request_timeout: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SharePointOnlineListSource, self).__init__(**kwargs) - self.type = 'SharePointOnlineListSource' # type: str - self.query = kwargs.get('query', None) - self.http_request_timeout = kwargs.get('http_request_timeout', None) - - -class ShopifyLinkedService(LinkedService): - """Shopify Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The endpoint of the Shopify server. (i.e. mystore.myshopify.com). - :type host: object - :param access_token: The API access token that can be used to access Shopify’s data. The token - won't expire if it is offline mode. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ShopifyLinkedService, self).__init__(**kwargs) - self.type = 'Shopify' # type: str - self.host = kwargs['host'] - self.access_token = kwargs.get('access_token', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class ShopifyObjectDataset(Dataset): - """Shopify Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ShopifyObjectDataset, self).__init__(**kwargs) - self.type = 'ShopifyObject' # type: str - self.table_name = kwargs.get('table_name', None) - - -class ShopifySource(TabularSource): - """A copy activity Shopify Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ShopifySource, self).__init__(**kwargs) - self.type = 'ShopifySource' # type: str - self.query = kwargs.get('query', None) - - -class SkipErrorFile(msrest.serialization.Model): - """Skip error file. - - :param file_missing: Skip if file is deleted by other client during copy. Default is true. - Type: boolean (or Expression with resultType boolean). - :type file_missing: object - :param data_inconsistency: Skip if source/sink file changed by other concurrent write. Default - is false. Type: boolean (or Expression with resultType boolean). - :type data_inconsistency: object - """ - - _attribute_map = { - 'file_missing': {'key': 'fileMissing', 'type': 'object'}, - 'data_inconsistency': {'key': 'dataInconsistency', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SkipErrorFile, self).__init__(**kwargs) - self.file_missing = kwargs.get('file_missing', None) - self.data_inconsistency = kwargs.get('data_inconsistency', None) - - -class SnowflakeDataset(Dataset): - """The snowflake dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param schema_type_properties_schema: The schema name of the Snowflake database. Type: string - (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of the Snowflake database. Type: string (or Expression with - resultType string). 
- :type table: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SnowflakeDataset, self).__init__(**kwargs) - self.type = 'SnowflakeTable' # type: str - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - self.table = kwargs.get('table', None) - - -class SnowflakeExportCopyCommand(ExportSettings): - """Snowflake export command settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The export setting type.Constant filled by server. - :type type: str - :param additional_copy_options: Additional copy options directly passed to snowflake Copy - Command. Type: key value pairs (value should be string type) (or Expression with resultType - object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": - "'HH24:MI:SS.FF'" }. - :type additional_copy_options: dict[str, object] - :param additional_format_options: Additional format options directly passed to snowflake Copy - Command. Type: key value pairs (value should be string type) (or Expression with resultType - object). Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'" - }. - :type additional_format_options: dict[str, object] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'additional_copy_options': {'key': 'additionalCopyOptions', 'type': '{object}'}, - 'additional_format_options': {'key': 'additionalFormatOptions', 'type': '{object}'}, - } - - def __init__( - self, - **kwargs - ): - super(SnowflakeExportCopyCommand, self).__init__(**kwargs) - self.type = 'SnowflakeExportCopyCommand' # type: str - self.additional_copy_options = kwargs.get('additional_copy_options', None) - self.additional_format_options = kwargs.get('additional_format_options', None) - - -class SnowflakeImportCopyCommand(ImportSettings): - """Snowflake import command settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The import setting type.Constant filled by server. - :type type: str - :param additional_copy_options: Additional copy options directly passed to snowflake Copy - Command. Type: key value pairs (value should be string type) (or Expression with resultType - object). 
Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": - "'HH24:MI:SS.FF'" }. - :type additional_copy_options: dict[str, object] - :param additional_format_options: Additional format options directly passed to snowflake Copy - Command. Type: key value pairs (value should be string type) (or Expression with resultType - object). Example: "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": - "'FALSE'" }. - :type additional_format_options: dict[str, object] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'additional_copy_options': {'key': 'additionalCopyOptions', 'type': '{object}'}, - 'additional_format_options': {'key': 'additionalFormatOptions', 'type': '{object}'}, - } - - def __init__( - self, - **kwargs - ): - super(SnowflakeImportCopyCommand, self).__init__(**kwargs) - self.type = 'SnowflakeImportCopyCommand' # type: str - self.additional_copy_options = kwargs.get('additional_copy_options', None) - self.additional_format_options = kwargs.get('additional_format_options', None) - - -class SnowflakeLinkedService(LinkedService): - """Snowflake linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string of snowflake. Type: string, - SecureString. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SnowflakeLinkedService, self).__init__(**kwargs) - self.type = 'Snowflake' # type: str - self.connection_string = kwargs['connection_string'] - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class SnowflakeSink(CopySink): - """A copy activity snowflake sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType - string). - :type pre_copy_script: object - :param import_settings: Snowflake import settings. 
- :type import_settings: ~azure.mgmt.datafactory.models.SnowflakeImportCopyCommand - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'import_settings': {'key': 'importSettings', 'type': 'SnowflakeImportCopyCommand'}, - } - - def __init__( - self, - **kwargs - ): - super(SnowflakeSink, self).__init__(**kwargs) - self.type = 'SnowflakeSink' # type: str - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.import_settings = kwargs.get('import_settings', None) - - -class SnowflakeSource(CopySource): - """A copy activity snowflake source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query: Snowflake Sql query. Type: string (or Expression with resultType string). - :type query: object - :param export_settings: Snowflake export settings. - :type export_settings: ~azure.mgmt.datafactory.models.SnowflakeExportCopyCommand - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'export_settings': {'key': 'exportSettings', 'type': 'SnowflakeExportCopyCommand'}, - } - - def __init__( - self, - **kwargs - ): - super(SnowflakeSource, self).__init__(**kwargs) - self.type = 'SnowflakeSource' # type: str - self.query = kwargs.get('query', None) - self.export_settings = kwargs.get('export_settings', None) - - -class SparkLinkedService(LinkedService): - """Spark Server linked service. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. IP address or host name of the Spark server. - :type host: object - :param port: Required. The TCP port that the Spark server uses to listen for client - connections. - :type port: object - :param server_type: The type of Spark server. Possible values include: "SharkServer", - "SharkServer2", "SparkThriftServer". - :type server_type: str or ~azure.mgmt.datafactory.models.SparkServerType - :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible - values include: "Binary", "SASL", "HTTP ". - :type thrift_transport_protocol: str or - ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol - :param authentication_type: Required. The authentication method used to access the Spark - server. Possible values include: "Anonymous", "Username", "UsernameAndPassword", - "WindowsAzureHDInsightService". - :type authentication_type: str or ~azure.mgmt.datafactory.models.SparkAuthenticationType - :param username: The user name that you use to access Spark Server. - :type username: object - :param password: The password corresponding to the user name that you provided in the Username - field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param http_path: The partial URL corresponding to the Spark server. - :type http_path: object - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'port': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, - 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SparkLinkedService, self).__init__(**kwargs) - self.type = 'Spark' # type: str - self.host = kwargs['host'] - self.port = kwargs['port'] - self.server_type = kwargs.get('server_type', None) - self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.http_path = kwargs.get('http_path', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class SparkObjectDataset(Dataset): - """Spark Server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. 
Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of the Spark. Type: string (or Expression with resultType string). - :type table: object - :param schema_type_properties_schema: The schema name of the Spark. Type: string (or Expression - with resultType string). - :type schema_type_properties_schema: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SparkObjectDataset, self).__init__(**kwargs) - self.type = 'SparkObject' # type: str - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - - -class SparkSource(TabularSource): - """A copy activity Spark Server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SparkSource, self).__init__(**kwargs) - self.type = 'SparkSource' # type: str - self.query = kwargs.get('query', None) - - -class SqlAlwaysEncryptedProperties(msrest.serialization.Model): - """Sql always encrypted properties. - - All required parameters must be populated in order to send to Azure. - - :param always_encrypted_akv_auth_type: Required. Sql always encrypted AKV authentication type. - Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipal", "ManagedIdentity". - :type always_encrypted_akv_auth_type: str or - ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedAkvAuthType - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Azure Key Vault authentication. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Azure Key Vault. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'always_encrypted_akv_auth_type': {'required': True}, - } - - _attribute_map = { - 'always_encrypted_akv_auth_type': {'key': 'alwaysEncryptedAkvAuthType', 'type': 'str'}, - 'service_principal_id': {'key': 'servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'servicePrincipalKey', 'type': 'SecretBase'}, - } - - def __init__( - self, - **kwargs - ): - super(SqlAlwaysEncryptedProperties, self).__init__(**kwargs) - self.always_encrypted_akv_auth_type = kwargs['always_encrypted_akv_auth_type'] - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - - -class SqlDwSink(CopySink): - """A copy activity SQL Data Warehouse sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType - string). - :type pre_copy_script: object - :param allow_poly_base: Indicates to use PolyBase to copy data into SQL Data Warehouse when - applicable. Type: boolean (or Expression with resultType boolean). - :type allow_poly_base: object - :param poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. - :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings - :param allow_copy_command: Indicates to use Copy Command to copy data into SQL Data Warehouse. - Type: boolean (or Expression with resultType boolean). - :type allow_copy_command: object - :param copy_command_settings: Specifies Copy Command related settings when allowCopyCommand is - true. - :type copy_command_settings: ~azure.mgmt.datafactory.models.DwCopyCommandSettings - :param table_option: The option to handle sink table, such as autoCreate. For now only - 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :type table_option: object - :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or - Expression with resultType boolean). - :type sql_writer_use_table_lock: object - :param write_behavior: Write behavior when copying data into azure SQL DW. Type: - SqlDWWriteBehaviorEnum (or Expression with resultType SqlDWWriteBehaviorEnum). - :type write_behavior: object - :param upsert_settings: SQL DW upsert settings. 
- :type upsert_settings: ~azure.mgmt.datafactory.models.SqlDwUpsertSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, - 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, - 'allow_copy_command': {'key': 'allowCopyCommand', 'type': 'object'}, - 'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DwCopyCommandSettings'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, - 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlDwUpsertSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(SqlDwSink, self).__init__(**kwargs) - self.type = 'SqlDWSink' # type: str - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.allow_poly_base = kwargs.get('allow_poly_base', None) - self.poly_base_settings = kwargs.get('poly_base_settings', None) - self.allow_copy_command = kwargs.get('allow_copy_command', None) - self.copy_command_settings = kwargs.get('copy_command_settings', None) - self.table_option = kwargs.get('table_option', None) - self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None) - self.write_behavior = kwargs.get('write_behavior', None) - self.upsert_settings = kwargs.get('upsert_settings', None) - - -class SqlDwSource(TabularSource): - """A copy activity SQL Data Warehouse source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. 
Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with - resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Data Warehouse - source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression - with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored procedure parameters. - Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType - object), itemType: StoredProcedureParameter. - :type stored_procedure_parameters: object - :param partition_option: The partition mechanism that will be used for Sql read in parallel. - Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: object - :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(SqlDwSource, self).__init__(**kwargs) - self.type = 'SqlDWSource' # type: str - self.sql_reader_query = kwargs.get('sql_reader_query', None) - self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) - - -class SqlDwUpsertSettings(msrest.serialization.Model): - """Sql DW upsert option settings. - - :param interim_schema_name: Schema name for interim table. Type: string (or Expression with - resultType string). - :type interim_schema_name: object - :param keys: Key column names for unique row identification. Type: array of strings (or - Expression with resultType array of strings). 
- :type keys: object - """ - - _attribute_map = { - 'interim_schema_name': {'key': 'interimSchemaName', 'type': 'object'}, - 'keys': {'key': 'keys', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SqlDwUpsertSettings, self).__init__(**kwargs) - self.interim_schema_name = kwargs.get('interim_schema_name', None) - self.keys = kwargs.get('keys', None) - - -class SqlMiSink(CopySink): - """A copy activity Azure SQL Managed Instance sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or - Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with - resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType - string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the - table type. Type: string (or Expression with resultType string). - :type stored_procedure_table_type_parameter_name: object - :param table_option: The option to handle sink table, such as autoCreate. For now only - 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :type table_option: object - :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or - Expression with resultType boolean). - :type sql_writer_use_table_lock: object - :param write_behavior: White behavior when copying data into azure SQL MI. Type: - SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). - :type write_behavior: object - :param upsert_settings: SQL upsert settings. 
- :type upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, - 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(SqlMiSink, self).__init__(**kwargs) - self.type = 'SqlMISink' # type: str - self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) - self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) - self.table_option = kwargs.get('table_option', None) - self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None) - self.write_behavior = kwargs.get('write_behavior', None) - self.upsert_settings = kwargs.get('upsert_settings', None) - - -class SqlMiSource(TabularSource): - """A copy activity Azure SQL Managed Instance source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for a Azure SQL Managed - Instance source. This cannot be used at the same time as SqlReaderQuery. Type: string (or - Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored procedure parameters. - Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: object - :param partition_option: The partition mechanism that will be used for Sql read in parallel. - Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: object - :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(SqlMiSource, self).__init__(**kwargs) - self.type = 'SqlMISource' # type: str - self.sql_reader_query = kwargs.get('sql_reader_query', None) - self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.produce_additional_types = kwargs.get('produce_additional_types', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) - - -class SqlPartitionSettings(msrest.serialization.Model): - """The settings that will be leveraged for Sql source partitioning. - - :param partition_column_name: The name of the column in integer or datetime type that will be - used for proceeding partitioning. 
If not specified, the primary key of the table is auto- - detected and used as the partition column. Type: string (or Expression with resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of the partition column for partition range - splitting. This value is used to decide the partition stride, not for filtering the rows in - table. All rows in the table or query result will be partitioned and copied. Type: string (or - Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of the partition column for partition range - splitting. This value is used to decide the partition stride, not for filtering the rows in - table. All rows in the table or query result will be partitioned and copied. Type: string (or - Expression with resultType string). - :type partition_lower_bound: object - """ - - _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SqlPartitionSettings, self).__init__(**kwargs) - self.partition_column_name = kwargs.get('partition_column_name', None) - self.partition_upper_bound = kwargs.get('partition_upper_bound', None) - self.partition_lower_bound = kwargs.get('partition_lower_bound', None) - - -class SqlServerLinkedService(LinkedService): - """SQL Server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param user_name: The on-premises Windows authentication user name. Type: string (or Expression - with resultType string). - :type user_name: object - :param password: The on-premises Windows authentication password. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param always_encrypted_settings: Sql always encrypted properties. 
- :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(SqlServerLinkedService, self).__init__(**kwargs) - self.type = 'SqlServer' # type: str - self.connection_string = kwargs['connection_string'] - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.always_encrypted_settings = kwargs.get('always_encrypted_settings', None) - - -class SqlServerSink(CopySink): - """A copy activity SQL server sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or - Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with - resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType - string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. 
- :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the - table type. Type: string (or Expression with resultType string). - :type stored_procedure_table_type_parameter_name: object - :param table_option: The option to handle sink table, such as autoCreate. For now only - 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :type table_option: object - :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or - Expression with resultType boolean). - :type sql_writer_use_table_lock: object - :param write_behavior: Write behavior when copying data into sql server. Type: - SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). - :type write_behavior: object - :param upsert_settings: SQL upsert settings. - :type upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, - 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(SqlServerSink, self).__init__(**kwargs) - self.type = 'SqlServerSink' # type: str - self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) - self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) - self.table_option = kwargs.get('table_option', None) - self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None) - self.write_behavior = kwargs.get('write_behavior', None) - self.upsert_settings = kwargs.get('upsert_settings', None) - - -class SqlServerSource(TabularSource): - """A copy activity SQL server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. 
- :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database - source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression - with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored procedure parameters. - Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: object - :param partition_option: The partition mechanism that will be used for Sql read in parallel. - Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: object - :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
- :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(SqlServerSource, self).__init__(**kwargs) - self.type = 'SqlServerSource' # type: str - self.sql_reader_query = kwargs.get('sql_reader_query', None) - self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.produce_additional_types = kwargs.get('produce_additional_types', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) - - -class SqlServerStoredProcedureActivity(ExecutionActivity): - """SQL stored procedure activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param stored_procedure_name: Required. Stored procedure name. Type: string (or Expression with - resultType string). - :type stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored procedure parameters. - Example: "{Parameter1: {value: "1", type: "int"}}". 
- :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'stored_procedure_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'stored_procedure_name': {'key': 'typeProperties.storedProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - } - - def __init__( - self, - **kwargs - ): - super(SqlServerStoredProcedureActivity, self).__init__(**kwargs) - self.type = 'SqlServerStoredProcedure' # type: str - self.stored_procedure_name = kwargs['stored_procedure_name'] - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - - -class SqlServerTableDataset(Dataset): - """The on-premises SQL Server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The schema name of the SQL Server dataset. Type: string - (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of the SQL Server dataset. Type: string (or Expression with - resultType string). 
- :type table: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SqlServerTableDataset, self).__init__(**kwargs) - self.type = 'SqlServerTable' # type: str - self.table_name = kwargs.get('table_name', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - self.table = kwargs.get('table', None) - - -class SqlSink(CopySink): - """A copy activity SQL sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or - Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with - resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType - string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the - table type. Type: string (or Expression with resultType string). 
- :type stored_procedure_table_type_parameter_name: object - :param table_option: The option to handle sink table, such as autoCreate. For now only - 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :type table_option: object - :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or - Expression with resultType boolean). - :type sql_writer_use_table_lock: object - :param write_behavior: Write behavior when copying data into sql. Type: SqlWriteBehaviorEnum - (or Expression with resultType SqlWriteBehaviorEnum). - :type write_behavior: object - :param upsert_settings: SQL upsert settings. - :type upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, - 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(SqlSink, self).__init__(**kwargs) - self.type = 'SqlSink' # type: str - self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) - self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) - self.table_option = kwargs.get('table_option', None) - self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None) - self.write_behavior = kwargs.get('write_behavior', None) - self.upsert_settings = kwargs.get('upsert_settings', None) - - -class SqlSource(TabularSource): - """A copy activity SQL source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database - source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression - with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored procedure parameters. - Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param isolation_level: Specifies the transaction locking behavior for the SQL source. Allowed - values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value - is ReadCommitted. Type: string (or Expression with resultType string). - :type isolation_level: object - :param partition_option: The partition mechanism that will be used for Sql read in parallel. - Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: object - :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
- :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'isolation_level': {'key': 'isolationLevel', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(SqlSource, self).__init__(**kwargs) - self.type = 'SqlSource' # type: str - self.sql_reader_query = kwargs.get('sql_reader_query', None) - self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.isolation_level = kwargs.get('isolation_level', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) - - -class SqlUpsertSettings(msrest.serialization.Model): - """Sql upsert option settings. - - :param use_temp_db: Specifies whether to use temp db for upsert interim table. Type: boolean - (or Expression with resultType boolean). - :type use_temp_db: object - :param interim_schema_name: Schema name for interim table. Type: string (or Expression with - resultType string). - :type interim_schema_name: object - :param keys: Key column names for unique row identification. Type: array of strings (or - Expression with resultType array of strings). - :type keys: object - """ - - _attribute_map = { - 'use_temp_db': {'key': 'useTempDB', 'type': 'object'}, - 'interim_schema_name': {'key': 'interimSchemaName', 'type': 'object'}, - 'keys': {'key': 'keys', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SqlUpsertSettings, self).__init__(**kwargs) - self.use_temp_db = kwargs.get('use_temp_db', None) - self.interim_schema_name = kwargs.get('interim_schema_name', None) - self.keys = kwargs.get('keys', None) - - -class SquareLinkedService(LinkedService): - """Square Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_properties: Properties used to connect to Square. It is mutually exclusive - with any other properties in the linked service. Type: object. - :type connection_properties: object - :param host: The URL of the Square instance. (i.e. mystore.mysquare.com). - :type host: object - :param client_id: The client ID associated with your Square application. - :type client_id: object - :param client_secret: The client secret associated with your Square application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param redirect_uri: The redirect URL assigned in the Square application dashboard. (i.e. - http://localhost:2500). - :type redirect_uri: object - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'redirect_uri': {'key': 'typeProperties.redirectUri', 'type': 'object'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SquareLinkedService, self).__init__(**kwargs) - self.type = 'Square' # type: str - self.connection_properties = kwargs.get('connection_properties', None) - self.host = kwargs.get('host', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.redirect_uri = kwargs.get('redirect_uri', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = 
kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class SquareObjectDataset(Dataset): - """Square Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SquareObjectDataset, self).__init__(**kwargs) - self.type = 'SquareObject' # type: str - self.table_name = kwargs.get('table_name', None) - - -class SquareSource(TabularSource): - """A copy activity Square Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SquareSource, self).__init__(**kwargs) - self.type = 'SquareSource' # type: str - self.query = kwargs.get('query', None) - - -class SsisAccessCredential(msrest.serialization.Model): - """SSIS access credential. - - All required parameters must be populated in order to send to Azure. - - :param domain: Required. Domain for windows authentication. - :type domain: object - :param user_name: Required. UseName for windows authentication. - :type user_name: object - :param password: Required. Password for windows authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'domain': {'required': True}, - 'user_name': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'domain': {'key': 'domain', 'type': 'object'}, - 'user_name': {'key': 'userName', 'type': 'object'}, - 'password': {'key': 'password', 'type': 'SecretBase'}, - } - - def __init__( - self, - **kwargs - ): - super(SsisAccessCredential, self).__init__(**kwargs) - self.domain = kwargs['domain'] - self.user_name = kwargs['user_name'] - self.password = kwargs['password'] - - -class SsisChildPackage(msrest.serialization.Model): - """SSIS embedded child package. - - All required parameters must be populated in order to send to Azure. - - :param package_path: Required. Path for embedded child package. Type: string (or Expression - with resultType string). - :type package_path: object - :param package_name: Name for embedded child package. - :type package_name: str - :param package_content: Required. Content for embedded child package. Type: string (or - Expression with resultType string). - :type package_content: object - :param package_last_modified_date: Last modified date for embedded child package. 
- :type package_last_modified_date: str - """ - - _validation = { - 'package_path': {'required': True}, - 'package_content': {'required': True}, - } - - _attribute_map = { - 'package_path': {'key': 'packagePath', 'type': 'object'}, - 'package_name': {'key': 'packageName', 'type': 'str'}, - 'package_content': {'key': 'packageContent', 'type': 'object'}, - 'package_last_modified_date': {'key': 'packageLastModifiedDate', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SsisChildPackage, self).__init__(**kwargs) - self.package_path = kwargs['package_path'] - self.package_name = kwargs.get('package_name', None) - self.package_content = kwargs['package_content'] - self.package_last_modified_date = kwargs.get('package_last_modified_date', None) - - -class SsisObjectMetadata(msrest.serialization.Model): - """SSIS object metadata. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SsisEnvironment, SsisFolder, SsisPackage, SsisProject. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Type of metadata.Constant filled by server. Possible values include: - "Folder", "Project", "Package", "Environment". - :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'Environment': 'SsisEnvironment', 'Folder': 'SsisFolder', 'Package': 'SsisPackage', 'Project': 'SsisProject'} - } - - def __init__( - self, - **kwargs - ): - super(SsisObjectMetadata, self).__init__(**kwargs) - self.type = None # type: Optional[str] - self.id = kwargs.get('id', None) - self.name = kwargs.get('name', None) - self.description = kwargs.get('description', None) - - -class SsisEnvironment(SsisObjectMetadata): - """Ssis environment. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Type of metadata.Constant filled by server. Possible values include: - "Folder", "Project", "Package", "Environment". - :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param folder_id: Folder id which contains environment. - :type folder_id: long - :param variables: Variable in environment. 
- :type variables: list[~azure.mgmt.datafactory.models.SsisVariable] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'folder_id': {'key': 'folderId', 'type': 'long'}, - 'variables': {'key': 'variables', 'type': '[SsisVariable]'}, - } - - def __init__( - self, - **kwargs - ): - super(SsisEnvironment, self).__init__(**kwargs) - self.type = 'Environment' # type: str - self.folder_id = kwargs.get('folder_id', None) - self.variables = kwargs.get('variables', None) - - -class SsisEnvironmentReference(msrest.serialization.Model): - """Ssis environment reference. - - :param id: Environment reference id. - :type id: long - :param environment_folder_name: Environment folder name. - :type environment_folder_name: str - :param environment_name: Environment name. - :type environment_name: str - :param reference_type: Reference type. - :type reference_type: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'}, - 'environment_name': {'key': 'environmentName', 'type': 'str'}, - 'reference_type': {'key': 'referenceType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SsisEnvironmentReference, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.environment_folder_name = kwargs.get('environment_folder_name', None) - self.environment_name = kwargs.get('environment_name', None) - self.reference_type = kwargs.get('reference_type', None) - - -class SsisExecutionCredential(msrest.serialization.Model): - """SSIS package execution credential. - - All required parameters must be populated in order to send to Azure. - - :param domain: Required. Domain for windows authentication. - :type domain: object - :param user_name: Required. UseName for windows authentication. - :type user_name: object - :param password: Required. Password for windows authentication. - :type password: ~azure.mgmt.datafactory.models.SecureString - """ - - _validation = { - 'domain': {'required': True}, - 'user_name': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'domain': {'key': 'domain', 'type': 'object'}, - 'user_name': {'key': 'userName', 'type': 'object'}, - 'password': {'key': 'password', 'type': 'SecureString'}, - } - - def __init__( - self, - **kwargs - ): - super(SsisExecutionCredential, self).__init__(**kwargs) - self.domain = kwargs['domain'] - self.user_name = kwargs['user_name'] - self.password = kwargs['password'] - - -class SsisExecutionParameter(msrest.serialization.Model): - """SSIS execution parameter. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. SSIS package execution parameter value. Type: string (or Expression - with resultType string). - :type value: object - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SsisExecutionParameter, self).__init__(**kwargs) - self.value = kwargs['value'] - - -class SsisFolder(SsisObjectMetadata): - """Ssis folder. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Type of metadata.Constant filled by server. 
Possible values include: - "Folder", "Project", "Package", "Environment". - :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SsisFolder, self).__init__(**kwargs) - self.type = 'Folder' # type: str - - -class SsisLogLocation(msrest.serialization.Model): - """SSIS package execution log location. - - All required parameters must be populated in order to send to Azure. - - :param log_path: Required. The SSIS package execution log path. Type: string (or Expression - with resultType string). - :type log_path: object - :param type: Required. The type of SSIS log location. Possible values include: "File". - :type type: str or ~azure.mgmt.datafactory.models.SsisLogLocationType - :param access_credential: The package execution log access credential. - :type access_credential: ~azure.mgmt.datafactory.models.SsisAccessCredential - :param log_refresh_interval: Specifies the interval to refresh log. The default interval is 5 - minutes. Type: string (or Expression with resultType string), pattern: - ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type log_refresh_interval: object - """ - - _validation = { - 'log_path': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'log_path': {'key': 'logPath', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SsisAccessCredential'}, - 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SsisLogLocation, self).__init__(**kwargs) - self.log_path = kwargs['log_path'] - self.type = kwargs['type'] - self.access_credential = kwargs.get('access_credential', None) - self.log_refresh_interval = kwargs.get('log_refresh_interval', None) - - -class SsisObjectMetadataListResponse(msrest.serialization.Model): - """A list of SSIS object metadata. - - :param value: List of SSIS object metadata. - :type value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[SsisObjectMetadata]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SsisObjectMetadataListResponse, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) - - -class SsisObjectMetadataStatusResponse(msrest.serialization.Model): - """The status of the operation. - - :param status: The status of the operation. - :type status: str - :param name: The operation name. - :type name: str - :param properties: The operation properties. - :type properties: str - :param error: The operation error message. 
- :type error: str - """ - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SsisObjectMetadataStatusResponse, self).__init__(**kwargs) - self.status = kwargs.get('status', None) - self.name = kwargs.get('name', None) - self.properties = kwargs.get('properties', None) - self.error = kwargs.get('error', None) - - -class SsisPackage(SsisObjectMetadata): - """Ssis Package. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Type of metadata.Constant filled by server. Possible values include: - "Folder", "Project", "Package", "Environment". - :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param folder_id: Folder id which contains package. - :type folder_id: long - :param project_version: Project version which contains package. - :type project_version: long - :param project_id: Project id which contains package. - :type project_id: long - :param parameters: Parameters in package. - :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'folder_id': {'key': 'folderId', 'type': 'long'}, - 'project_version': {'key': 'projectVersion', 'type': 'long'}, - 'project_id': {'key': 'projectId', 'type': 'long'}, - 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, - } - - def __init__( - self, - **kwargs - ): - super(SsisPackage, self).__init__(**kwargs) - self.type = 'Package' # type: str - self.folder_id = kwargs.get('folder_id', None) - self.project_version = kwargs.get('project_version', None) - self.project_id = kwargs.get('project_id', None) - self.parameters = kwargs.get('parameters', None) - - -class SsisPackageLocation(msrest.serialization.Model): - """SSIS package location. - - :param package_path: The SSIS package path. Type: string (or Expression with resultType - string). - :type package_path: object - :param type: The type of SSIS package location. Possible values include: "SSISDB", "File", - "InlinePackage", "PackageStore". - :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType - :param package_password: Password of the package. - :type package_password: ~azure.mgmt.datafactory.models.SecretBase - :param access_credential: The package access credential. - :type access_credential: ~azure.mgmt.datafactory.models.SsisAccessCredential - :param configuration_path: The configuration file of the package execution. Type: string (or - Expression with resultType string). - :type configuration_path: object - :param configuration_access_credential: The configuration file access credential. - :type configuration_access_credential: ~azure.mgmt.datafactory.models.SsisAccessCredential - :param package_name: The package name. - :type package_name: str - :param package_content: The embedded package content. Type: string (or Expression with - resultType string). 
- :type package_content: object - :param package_last_modified_date: The embedded package last modified date. - :type package_last_modified_date: str - :param child_packages: The embedded child package list. - :type child_packages: list[~azure.mgmt.datafactory.models.SsisChildPackage] - """ - - _attribute_map = { - 'package_path': {'key': 'packagePath', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecretBase'}, - 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SsisAccessCredential'}, - 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, - 'configuration_access_credential': {'key': 'typeProperties.configurationAccessCredential', 'type': 'SsisAccessCredential'}, - 'package_name': {'key': 'typeProperties.packageName', 'type': 'str'}, - 'package_content': {'key': 'typeProperties.packageContent', 'type': 'object'}, - 'package_last_modified_date': {'key': 'typeProperties.packageLastModifiedDate', 'type': 'str'}, - 'child_packages': {'key': 'typeProperties.childPackages', 'type': '[SsisChildPackage]'}, - } - - def __init__( - self, - **kwargs - ): - super(SsisPackageLocation, self).__init__(**kwargs) - self.package_path = kwargs.get('package_path', None) - self.type = kwargs.get('type', None) - self.package_password = kwargs.get('package_password', None) - self.access_credential = kwargs.get('access_credential', None) - self.configuration_path = kwargs.get('configuration_path', None) - self.configuration_access_credential = kwargs.get('configuration_access_credential', None) - self.package_name = kwargs.get('package_name', None) - self.package_content = kwargs.get('package_content', None) - self.package_last_modified_date = kwargs.get('package_last_modified_date', None) - self.child_packages = kwargs.get('child_packages', None) - - -class SsisParameter(msrest.serialization.Model): - """Ssis parameter. - - :param id: Parameter id. - :type id: long - :param name: Parameter name. - :type name: str - :param description: Parameter description. - :type description: str - :param data_type: Parameter type. - :type data_type: str - :param required: Whether parameter is required. - :type required: bool - :param sensitive: Whether parameter is sensitive. - :type sensitive: bool - :param design_default_value: Design default value of parameter. - :type design_default_value: str - :param default_value: Default value of parameter. - :type default_value: str - :param sensitive_default_value: Default sensitive value of parameter. - :type sensitive_default_value: str - :param value_type: Parameter value type. - :type value_type: str - :param value_set: Parameter value set. - :type value_set: bool - :param variable: Parameter reference variable. 
- :type variable: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'required': {'key': 'required', 'type': 'bool'}, - 'sensitive': {'key': 'sensitive', 'type': 'bool'}, - 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'str'}, - 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, - 'value_type': {'key': 'valueType', 'type': 'str'}, - 'value_set': {'key': 'valueSet', 'type': 'bool'}, - 'variable': {'key': 'variable', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SsisParameter, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.name = kwargs.get('name', None) - self.description = kwargs.get('description', None) - self.data_type = kwargs.get('data_type', None) - self.required = kwargs.get('required', None) - self.sensitive = kwargs.get('sensitive', None) - self.design_default_value = kwargs.get('design_default_value', None) - self.default_value = kwargs.get('default_value', None) - self.sensitive_default_value = kwargs.get('sensitive_default_value', None) - self.value_type = kwargs.get('value_type', None) - self.value_set = kwargs.get('value_set', None) - self.variable = kwargs.get('variable', None) - - -class SsisProject(SsisObjectMetadata): - """Ssis project. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Type of metadata.Constant filled by server. Possible values include: - "Folder", "Project", "Package", "Environment". - :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param folder_id: Folder id which contains project. - :type folder_id: long - :param version: Project version. - :type version: long - :param environment_refs: Environment reference in project. - :type environment_refs: list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] - :param parameters: Parameters in project. - :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'folder_id': {'key': 'folderId', 'type': 'long'}, - 'version': {'key': 'version', 'type': 'long'}, - 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, - 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, - } - - def __init__( - self, - **kwargs - ): - super(SsisProject, self).__init__(**kwargs) - self.type = 'Project' # type: str - self.folder_id = kwargs.get('folder_id', None) - self.version = kwargs.get('version', None) - self.environment_refs = kwargs.get('environment_refs', None) - self.parameters = kwargs.get('parameters', None) - - -class SsisPropertyOverride(msrest.serialization.Model): - """SSIS property override. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. SSIS package property override value. Type: string (or Expression with - resultType string). 
- :type value: object - :param is_sensitive: Whether SSIS package property override value is sensitive data. Value will - be encrypted in SSISDB if it is true. - :type is_sensitive: bool - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': 'object'}, - 'is_sensitive': {'key': 'isSensitive', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - super(SsisPropertyOverride, self).__init__(**kwargs) - self.value = kwargs['value'] - self.is_sensitive = kwargs.get('is_sensitive', None) - - -class SsisVariable(msrest.serialization.Model): - """Ssis variable. - - :param id: Variable id. - :type id: long - :param name: Variable name. - :type name: str - :param description: Variable description. - :type description: str - :param data_type: Variable type. - :type data_type: str - :param sensitive: Whether variable is sensitive. - :type sensitive: bool - :param value: Variable value. - :type value: str - :param sensitive_value: Variable sensitive value. - :type sensitive_value: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'sensitive': {'key': 'sensitive', 'type': 'bool'}, - 'value': {'key': 'value', 'type': 'str'}, - 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SsisVariable, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.name = kwargs.get('name', None) - self.description = kwargs.get('description', None) - self.data_type = kwargs.get('data_type', None) - self.sensitive = kwargs.get('sensitive', None) - self.value = kwargs.get('value', None) - self.sensitive_value = kwargs.get('sensitive_value', None) - - -class StagingSettings(msrest.serialization.Model): - """Staging settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param linked_service_name: Required. Staging linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param path: The path to storage for storing the interim data. Type: string (or Expression with - resultType string). - :type path: object - :param enable_compression: Specifies whether to use compression when copying data via an - interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). - :type enable_compression: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'path': {'key': 'path', 'type': 'object'}, - 'enable_compression': {'key': 'enableCompression', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(StagingSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.linked_service_name = kwargs['linked_service_name'] - self.path = kwargs.get('path', None) - self.enable_compression = kwargs.get('enable_compression', None) - - -class StoredProcedureParameter(msrest.serialization.Model): - """SQL stored procedure parameter. - - :param value: Stored procedure parameter value. 
Type: string (or Expression with resultType - string). - :type value: object - :param type: Stored procedure parameter type. Possible values include: "String", "Int", - "Int64", "Decimal", "Guid", "Boolean", "Date". - :type type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(StoredProcedureParameter, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.type = kwargs.get('type', None) - - -class SwitchActivity(Activity): - """This activity evaluates an expression and executes activities under the cases property that correspond to the expression evaluation expected in the equals property. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param on: Required. An expression that would evaluate to a string or integer. This is used to - determine the block of activities in cases that will be executed. - :type on: ~azure.mgmt.datafactory.models.Expression - :param cases: List of cases that correspond to expected values of the 'on' property. This is an - optional property and if not provided, the activity will execute activities provided in - defaultActivities. - :type cases: list[~azure.mgmt.datafactory.models.SwitchCase] - :param default_activities: List of activities to execute if no case condition is satisfied. - This is an optional property and if not provided, the activity will exit without any action. - :type default_activities: list[~azure.mgmt.datafactory.models.Activity] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'on': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'on': {'key': 'typeProperties.on', 'type': 'Expression'}, - 'cases': {'key': 'typeProperties.cases', 'type': '[SwitchCase]'}, - 'default_activities': {'key': 'typeProperties.defaultActivities', 'type': '[Activity]'}, - } - - def __init__( - self, - **kwargs - ): - super(SwitchActivity, self).__init__(**kwargs) - self.type = 'Switch' # type: str - self.on = kwargs['on'] - self.cases = kwargs.get('cases', None) - self.default_activities = kwargs.get('default_activities', None) - - -class SwitchCase(msrest.serialization.Model): - """Switch cases with have a value and corresponding activities. - - :param value: Expected value that satisfies the expression result of the 'on' property. - :type value: str - :param activities: List of activities to execute for satisfied case condition. 
- :type activities: list[~azure.mgmt.datafactory.models.Activity] - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': 'str'}, - 'activities': {'key': 'activities', 'type': '[Activity]'}, - } - - def __init__( - self, - **kwargs - ): - super(SwitchCase, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.activities = kwargs.get('activities', None) - - -class SybaseLinkedService(LinkedService): - """Linked service for Sybase data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param server: Required. Server name for connection. Type: string (or Expression with - resultType string). - :type server: object - :param database: Required. Database name for connection. Type: string (or Expression with - resultType string). - :type database: object - :param schema: Schema name for connection. Type: string (or Expression with resultType string). - :type schema: object - :param authentication_type: AuthenticationType to be used for connection. Possible values - include: "Basic", "Windows". - :type authentication_type: str or ~azure.mgmt.datafactory.models.SybaseAuthenticationType - :param username: Username for authentication. Type: string (or Expression with resultType - string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SybaseLinkedService, self).__init__(**kwargs) - self.type = 'Sybase' # type: str - self.server = kwargs['server'] - self.database = kwargs['database'] - self.schema = kwargs.get('schema', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class SybaseSource(TabularSource): - """A copy activity source for Sybase databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: Database query. Type: string (or Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SybaseSource, self).__init__(**kwargs) - self.type = 'SybaseSource' # type: str - self.query = kwargs.get('query', None) - - -class SybaseTableDataset(Dataset): - """The Sybase table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The Sybase table name. Type: string (or Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(SybaseTableDataset, self).__init__(**kwargs) - self.type = 'SybaseTable' # type: str - self.table_name = kwargs.get('table_name', None) - - -class TabularTranslator(CopyTranslator): - """A copy activity tabular translator. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy translator type.Constant filled by server. - :type type: str - :param column_mappings: Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: - MyName" Type: string (or Expression with resultType string). This property will be retired. - Please use mappings property. - :type column_mappings: object - :param schema_mapping: The schema mapping to map between tabular data and hierarchical data. - Example: {"Column1": "$.Column1", "Column2": "$.Column2.Property1", "Column3": - "$.Column2.Property2"}. Type: object (or Expression with resultType object). This property will - be retired. Please use mappings property. - :type schema_mapping: object - :param collection_reference: The JSON Path of the Nested Array that is going to do cross-apply. - Type: object (or Expression with resultType object). - :type collection_reference: object - :param map_complex_values_to_string: Whether to map complex (array and object) values to simple - strings in json format. Type: boolean (or Expression with resultType boolean). - :type map_complex_values_to_string: object - :param mappings: Column mappings with logical types. Tabular->tabular example: - [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. - Hierarchical->tabular example: - [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. - Type: object (or Expression with resultType object). - :type mappings: object - :param type_conversion: Whether to enable the advanced type conversion feature in the Copy - activity. Type: boolean (or Expression with resultType boolean). - :type type_conversion: object - :param type_conversion_settings: Type conversion settings. 
- :type type_conversion_settings: ~azure.mgmt.datafactory.models.TypeConversionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, - 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, - 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, - 'map_complex_values_to_string': {'key': 'mapComplexValuesToString', 'type': 'object'}, - 'mappings': {'key': 'mappings', 'type': 'object'}, - 'type_conversion': {'key': 'typeConversion', 'type': 'object'}, - 'type_conversion_settings': {'key': 'typeConversionSettings', 'type': 'TypeConversionSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(TabularTranslator, self).__init__(**kwargs) - self.type = 'TabularTranslator' # type: str - self.column_mappings = kwargs.get('column_mappings', None) - self.schema_mapping = kwargs.get('schema_mapping', None) - self.collection_reference = kwargs.get('collection_reference', None) - self.map_complex_values_to_string = kwargs.get('map_complex_values_to_string', None) - self.mappings = kwargs.get('mappings', None) - self.type_conversion = kwargs.get('type_conversion', None) - self.type_conversion_settings = kwargs.get('type_conversion_settings', None) - - -class TarGZipReadSettings(CompressionReadSettings): - """The TarGZip compression read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The Compression setting type.Constant filled by server. - :type type: str - :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder - path. Type: boolean (or Expression with resultType boolean). - :type preserve_compression_file_name_as_folder: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(TarGZipReadSettings, self).__init__(**kwargs) - self.type = 'TarGZipReadSettings' # type: str - self.preserve_compression_file_name_as_folder = kwargs.get('preserve_compression_file_name_as_folder', None) - - -class TarReadSettings(CompressionReadSettings): - """The Tar compression read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The Compression setting type.Constant filled by server. - :type type: str - :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder - path. Type: boolean (or Expression with resultType boolean). 
- :type preserve_compression_file_name_as_folder: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(TarReadSettings, self).__init__(**kwargs) - self.type = 'TarReadSettings' # type: str - self.preserve_compression_file_name_as_folder = kwargs.get('preserve_compression_file_name_as_folder', None) - - -class TeradataLinkedService(LinkedService): - """Linked service for Teradata data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Teradata ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param server: Server name for connection. Type: string (or Expression with resultType string). - :type server: object - :param authentication_type: AuthenticationType to be used for connection. Possible values - include: "Basic", "Windows". - :type authentication_type: str or ~azure.mgmt.datafactory.models.TeradataAuthenticationType - :param username: Username for authentication. Type: string (or Expression with resultType - string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(TeradataLinkedService, self).__init__(**kwargs) - self.type = 'Teradata' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.server = kwargs.get('server', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class TeradataPartitionSettings(msrest.serialization.Model): - """The settings that will be leveraged for teradata source partitioning. - - :param partition_column_name: The name of the column that will be used for proceeding range or - hash partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in partitionColumnName that - will be used for proceeding range partitioning. Type: string (or Expression with resultType - string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in partitionColumnName that - will be used for proceeding range partitioning. Type: string (or Expression with resultType - string). - :type partition_lower_bound: object - """ - - _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(TeradataPartitionSettings, self).__init__(**kwargs) - self.partition_column_name = kwargs.get('partition_column_name', None) - self.partition_upper_bound = kwargs.get('partition_upper_bound', None) - self.partition_lower_bound = kwargs.get('partition_lower_bound', None) - - -class TeradataSource(TabularSource): - """A copy activity Teradata source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: Teradata query. Type: string (or Expression with resultType string). - :type query: object - :param partition_option: The partition mechanism that will be used for teradata read in - parallel. Possible values include: "None", "Hash", "DynamicRange". - :type partition_option: object - :param partition_settings: The settings that will be leveraged for teradata source - partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.TeradataPartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(TeradataSource, self).__init__(**kwargs) - self.type = 'TeradataSource' # type: str - self.query = kwargs.get('query', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) - - -class TeradataTableDataset(Dataset): - """The Teradata database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
- root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param database: The database name of Teradata. Type: string (or Expression with resultType
- string).
- :type database: object
- :param table: The table name of Teradata. Type: string (or Expression with resultType string).
- :type table: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'linked_service_name': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'database': {'key': 'typeProperties.database', 'type': 'object'},
- 'table': {'key': 'typeProperties.table', 'type': 'object'},
- }
-
- def __init__(
- self,
- **kwargs
- ):
- super(TeradataTableDataset, self).__init__(**kwargs)
- self.type = 'TeradataTable' # type: str
- self.database = kwargs.get('database', None)
- self.table = kwargs.get('table', None)
-
-
-class TextFormat(DatasetStorageFormat):
- """The data stored in text format.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are deserialized to this
- collection.
- :type additional_properties: dict[str, object]
- :param type: Required. Type of dataset storage format.Constant filled by server.
- :type type: str
- :param serializer: Serializer. Type: string (or Expression with resultType string).
- :type serializer: object
- :param deserializer: Deserializer. Type: string (or Expression with resultType string).
- :type deserializer: object
- :param column_delimiter: The column delimiter. Type: string (or Expression with resultType
- string).
- :type column_delimiter: object
- :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string).
- :type row_delimiter: object
- :param escape_char: The escape character. Type: string (or Expression with resultType string).
- :type escape_char: object
- :param quote_char: The quote character. Type: string (or Expression with resultType string).
- :type quote_char: object
- :param null_value: The null value string. Type: string (or Expression with resultType string).
- :type null_value: object
- :param encoding_name: The code page name of the preferred encoding. If miss, the default value
- is “utf-8”, unless BOM denotes another Unicode encoding. Refer to the “Name” column of
- the table in the following link to set supported values:
- https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with
- resultType string).
- :type encoding_name: object
- :param treat_empty_as_null: Treat empty column values in the text file as null. 
The default - value is true. Type: boolean (or Expression with resultType boolean). - :type treat_empty_as_null: object - :param skip_line_count: The number of lines/rows to be skipped when parsing text files. The - default value is 0. Type: integer (or Expression with resultType integer). - :type skip_line_count: object - :param first_row_as_header: When used as input, treat the first row of data as headers. When - used as output,write the headers into the output as the first row of data. The default value is - false. Type: boolean (or Expression with resultType boolean). - :type first_row_as_header: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'column_delimiter': {'key': 'columnDelimiter', 'type': 'object'}, - 'row_delimiter': {'key': 'rowDelimiter', 'type': 'object'}, - 'escape_char': {'key': 'escapeChar', 'type': 'object'}, - 'quote_char': {'key': 'quoteChar', 'type': 'object'}, - 'null_value': {'key': 'nullValue', 'type': 'object'}, - 'encoding_name': {'key': 'encodingName', 'type': 'object'}, - 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, - 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, - 'first_row_as_header': {'key': 'firstRowAsHeader', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(TextFormat, self).__init__(**kwargs) - self.type = 'TextFormat' # type: str - self.column_delimiter = kwargs.get('column_delimiter', None) - self.row_delimiter = kwargs.get('row_delimiter', None) - self.escape_char = kwargs.get('escape_char', None) - self.quote_char = kwargs.get('quote_char', None) - self.null_value = kwargs.get('null_value', None) - self.encoding_name = kwargs.get('encoding_name', None) - self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) - self.skip_line_count = kwargs.get('skip_line_count', None) - self.first_row_as_header = kwargs.get('first_row_as_header', None) - - -class TriggerDependencyReference(DependencyReference): - """Trigger referenced dependency. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: TumblingWindowTriggerDependencyReference. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. The type of dependency reference.Constant filled by server. - :type type: str - :param reference_trigger: Required. Referenced trigger. - :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference - """ - - _validation = { - 'type': {'required': True}, - 'reference_trigger': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, - } - - _subtype_map = { - 'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'} - } - - def __init__( - self, - **kwargs - ): - super(TriggerDependencyReference, self).__init__(**kwargs) - self.type = 'TriggerDependencyReference' # type: str - self.reference_trigger = kwargs['reference_trigger'] - - -class TriggerFilterParameters(msrest.serialization.Model): - """Query parameters for triggers. - - :param continuation_token: The continuation token for getting the next page of results. Null - for first page. 
- :type continuation_token: str - :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun - triggers. - :type parent_trigger_name: str - """ - - _attribute_map = { - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - 'parent_trigger_name': {'key': 'parentTriggerName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(TriggerFilterParameters, self).__init__(**kwargs) - self.continuation_token = kwargs.get('continuation_token', None) - self.parent_trigger_name = kwargs.get('parent_trigger_name', None) - - -class TriggerListResponse(msrest.serialization.Model): - """A list of trigger resources. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of triggers. - :type value: list[~azure.mgmt.datafactory.models.TriggerResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[TriggerResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(TriggerListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) - - -class TriggerPipelineReference(msrest.serialization.Model): - """Pipeline that needs to be triggered with the given parameters. - - :param pipeline_reference: Pipeline reference. - :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference - :param parameters: Pipeline parameters. - :type parameters: dict[str, object] - """ - - _attribute_map = { - 'pipeline_reference': {'key': 'pipelineReference', 'type': 'PipelineReference'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - def __init__( - self, - **kwargs - ): - super(TriggerPipelineReference, self).__init__(**kwargs) - self.pipeline_reference = kwargs.get('pipeline_reference', None) - self.parameters = kwargs.get('parameters', None) - - -class TriggerQueryResponse(msrest.serialization.Model): - """A query of triggers. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of triggers. - :type value: list[~azure.mgmt.datafactory.models.TriggerResource] - :param continuation_token: The continuation token for getting the next page of results, if any - remaining results exist, null otherwise. - :type continuation_token: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[TriggerResource]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(TriggerQueryResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.continuation_token = kwargs.get('continuation_token', None) - - -class TriggerReference(msrest.serialization.Model): - """Trigger reference type. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Trigger reference type. Default value: "TriggerReference". - :vartype type: str - :param reference_name: Required. Reference trigger name. 
- :type reference_name: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - } - - type = "TriggerReference" - - def __init__( - self, - **kwargs - ): - super(TriggerReference, self).__init__(**kwargs) - self.reference_name = kwargs['reference_name'] - - -class TriggerResource(SubResource): - """Trigger resource type. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of the trigger. - :type properties: ~azure.mgmt.datafactory.models.Trigger - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Trigger'}, - } - - def __init__( - self, - **kwargs - ): - super(TriggerResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class TriggerRun(msrest.serialization.Model): - """Trigger runs. - - Variables are only populated by the server, and will be ignored when sending a request. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :ivar trigger_run_id: Trigger run id. - :vartype trigger_run_id: str - :ivar trigger_name: Trigger name. - :vartype trigger_name: str - :ivar trigger_type: Trigger type. - :vartype trigger_type: str - :ivar trigger_run_timestamp: Trigger run start time. - :vartype trigger_run_timestamp: ~datetime.datetime - :ivar status: Trigger run status. Possible values include: "Succeeded", "Failed", "Inprogress". - :vartype status: str or ~azure.mgmt.datafactory.models.TriggerRunStatus - :ivar message: Trigger error message. - :vartype message: str - :ivar properties: List of property name and value related to trigger run. Name, value pair - depends on type of trigger. - :vartype properties: dict[str, str] - :ivar triggered_pipelines: List of pipeline name and run Id triggered by the trigger run. - :vartype triggered_pipelines: dict[str, str] - :ivar run_dimension: Run dimension for which trigger was fired. - :vartype run_dimension: dict[str, str] - :ivar dependency_status: Status of the upstream pipelines. 
- :vartype dependency_status: dict[str, object] - """ - - _validation = { - 'trigger_run_id': {'readonly': True}, - 'trigger_name': {'readonly': True}, - 'trigger_type': {'readonly': True}, - 'trigger_run_timestamp': {'readonly': True}, - 'status': {'readonly': True}, - 'message': {'readonly': True}, - 'properties': {'readonly': True}, - 'triggered_pipelines': {'readonly': True}, - 'run_dimension': {'readonly': True}, - 'dependency_status': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'trigger_run_id': {'key': 'triggerRunId', 'type': 'str'}, - 'trigger_name': {'key': 'triggerName', 'type': 'str'}, - 'trigger_type': {'key': 'triggerType', 'type': 'str'}, - 'trigger_run_timestamp': {'key': 'triggerRunTimestamp', 'type': 'iso-8601'}, - 'status': {'key': 'status', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'triggered_pipelines': {'key': 'triggeredPipelines', 'type': '{str}'}, - 'run_dimension': {'key': 'runDimension', 'type': '{str}'}, - 'dependency_status': {'key': 'dependencyStatus', 'type': '{object}'}, - } - - def __init__( - self, - **kwargs - ): - super(TriggerRun, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.trigger_run_id = None - self.trigger_name = None - self.trigger_type = None - self.trigger_run_timestamp = None - self.status = None - self.message = None - self.properties = None - self.triggered_pipelines = None - self.run_dimension = None - self.dependency_status = None - - -class TriggerRunsQueryResponse(msrest.serialization.Model): - """A list of trigger runs. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of trigger runs. - :type value: list[~azure.mgmt.datafactory.models.TriggerRun] - :param continuation_token: The continuation token for getting the next page of results, if any - remaining results exist, null otherwise. - :type continuation_token: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[TriggerRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(TriggerRunsQueryResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.continuation_token = kwargs.get('continuation_token', None) - - -class TriggerSubscriptionOperationStatus(msrest.serialization.Model): - """Defines the response of a trigger subscription operation. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar trigger_name: Trigger name. - :vartype trigger_name: str - :ivar status: Event Subscription Status. Possible values include: "Enabled", "Provisioning", - "Deprovisioning", "Disabled", "Unknown". 
- :vartype status: str or ~azure.mgmt.datafactory.models.EventSubscriptionStatus - """ - - _validation = { - 'trigger_name': {'readonly': True}, - 'status': {'readonly': True}, - } - - _attribute_map = { - 'trigger_name': {'key': 'triggerName', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) - self.trigger_name = None - self.status = None - - -class TumblingWindowTrigger(Trigger): - """Trigger that schedules pipeline runs for all fixed time interval windows from a start time without gaps and also supports backfill scenarios (when start time is in the past). - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] - :param pipeline: Required. Pipeline for which runs are created when an event is fired for - trigger window that is ready. - :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference - :param frequency: Required. The frequency of the time windows. Possible values include: - "Minute", "Hour", "Month". - :type frequency: str or ~azure.mgmt.datafactory.models.TumblingWindowFrequency - :param interval: Required. The interval of the time windows. The minimum interval allowed is 15 - Minutes. - :type interval: int - :param start_time: Required. The start time for the time period for the trigger during which - events are fired for windows that are ready. Only UTC time is currently supported. - :type start_time: ~datetime.datetime - :param end_time: The end time for the time period for the trigger during which events are fired - for windows that are ready. Only UTC time is currently supported. - :type end_time: ~datetime.datetime - :param delay: Specifies how long the trigger waits past due time before triggering new run. It - doesn't alter window start and end time. The default is 0. Type: string (or Expression with - resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type delay: object - :param max_concurrency: Required. The max number of parallel time windows (ready for execution) - for which a new run is triggered. - :type max_concurrency: int - :param retry_policy: Retry policy that will be applied for failed pipeline runs. - :type retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy - :param depends_on: Triggers that this trigger depends on. Only tumbling window triggers are - supported. 
- :type depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] - """ - - _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, - 'pipeline': {'required': True}, - 'frequency': {'required': True}, - 'interval': {'required': True}, - 'start_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, - 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, - 'interval': {'key': 'typeProperties.interval', 'type': 'int'}, - 'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'typeProperties.endTime', 'type': 'iso-8601'}, - 'delay': {'key': 'typeProperties.delay', 'type': 'object'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, - 'retry_policy': {'key': 'typeProperties.retryPolicy', 'type': 'RetryPolicy'}, - 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, - } - - def __init__( - self, - **kwargs - ): - super(TumblingWindowTrigger, self).__init__(**kwargs) - self.type = 'TumblingWindowTrigger' # type: str - self.pipeline = kwargs['pipeline'] - self.frequency = kwargs['frequency'] - self.interval = kwargs['interval'] - self.start_time = kwargs['start_time'] - self.end_time = kwargs.get('end_time', None) - self.delay = kwargs.get('delay', None) - self.max_concurrency = kwargs['max_concurrency'] - self.retry_policy = kwargs.get('retry_policy', None) - self.depends_on = kwargs.get('depends_on', None) - - -class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): - """Referenced tumbling window trigger dependency. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. The type of dependency reference.Constant filled by server. - :type type: str - :param reference_trigger: Required. Referenced trigger. - :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference - :param offset: Timespan applied to the start time of a tumbling window when evaluating - dependency. - :type offset: str - :param size: The size of the window when evaluating the dependency. If undefined the frequency - of the tumbling window will be used. - :type size: str - """ - - _validation = { - 'type': {'required': True}, - 'reference_trigger': {'required': True}, - 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'-?((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, - 'offset': {'key': 'offset', 'type': 'str'}, - 'size': {'key': 'size', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(TumblingWindowTriggerDependencyReference, self).__init__(**kwargs) - self.type = 'TumblingWindowTriggerDependencyReference' # type: str - self.offset = kwargs.get('offset', None) - self.size = kwargs.get('size', None) - - -class TypeConversionSettings(msrest.serialization.Model): - """Type conversion settings. 
- - :param allow_data_truncation: Whether to allow data truncation when converting the data. Type: - boolean (or Expression with resultType boolean). - :type allow_data_truncation: object - :param treat_boolean_as_number: Whether to treat boolean values as numbers. Type: boolean (or - Expression with resultType boolean). - :type treat_boolean_as_number: object - :param date_time_format: The format for DateTime values. Type: string (or Expression with - resultType string). - :type date_time_format: object - :param date_time_offset_format: The format for DateTimeOffset values. Type: string (or - Expression with resultType string). - :type date_time_offset_format: object - :param time_span_format: The format for TimeSpan values. Type: string (or Expression with - resultType string). - :type time_span_format: object - :param culture: The culture used to convert data from/to string. Type: string (or Expression - with resultType string). - :type culture: object - """ - - _attribute_map = { - 'allow_data_truncation': {'key': 'allowDataTruncation', 'type': 'object'}, - 'treat_boolean_as_number': {'key': 'treatBooleanAsNumber', 'type': 'object'}, - 'date_time_format': {'key': 'dateTimeFormat', 'type': 'object'}, - 'date_time_offset_format': {'key': 'dateTimeOffsetFormat', 'type': 'object'}, - 'time_span_format': {'key': 'timeSpanFormat', 'type': 'object'}, - 'culture': {'key': 'culture', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(TypeConversionSettings, self).__init__(**kwargs) - self.allow_data_truncation = kwargs.get('allow_data_truncation', None) - self.treat_boolean_as_number = kwargs.get('treat_boolean_as_number', None) - self.date_time_format = kwargs.get('date_time_format', None) - self.date_time_offset_format = kwargs.get('date_time_offset_format', None) - self.time_span_format = kwargs.get('time_span_format', None) - self.culture = kwargs.get('culture', None) - - -class UntilActivity(Activity): - """This activity executes inner activities until the specified boolean expression results to true or timeout is reached, whichever is earlier. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param expression: Required. An expression that would evaluate to Boolean. The loop will - continue until this expression evaluates to true. - :type expression: ~azure.mgmt.datafactory.models.Expression - :param timeout: Specifies the timeout for the activity to run. If there is no value specified, - it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or - Expression with resultType string), pattern: - ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). Type: string (or Expression with - resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: object - :param activities: Required. List of activities to execute. 
- :type activities: list[~azure.mgmt.datafactory.models.Activity] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'expression': {'required': True}, - 'activities': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, - 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, - 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, - } - - def __init__( - self, - **kwargs - ): - super(UntilActivity, self).__init__(**kwargs) - self.type = 'Until' # type: str - self.expression = kwargs['expression'] - self.timeout = kwargs.get('timeout', None) - self.activities = kwargs['activities'] - - -class UpdateIntegrationRuntimeNodeRequest(msrest.serialization.Model): - """Update integration runtime node request. - - :param concurrent_jobs_limit: The number of concurrent jobs permitted to run on the integration - runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed. - :type concurrent_jobs_limit: int - """ - - _validation = { - 'concurrent_jobs_limit': {'minimum': 1}, - } - - _attribute_map = { - 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(UpdateIntegrationRuntimeNodeRequest, self).__init__(**kwargs) - self.concurrent_jobs_limit = kwargs.get('concurrent_jobs_limit', None) - - -class UpdateIntegrationRuntimeRequest(msrest.serialization.Model): - """Update integration runtime request. - - :param auto_update: Enables or disables the auto-update feature of the self-hosted integration - runtime. See https://go.microsoft.com/fwlink/?linkid=854189. Possible values include: "On", - "Off". - :type auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate - :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The - integration runtime auto update will happen on that time. - :type update_delay_offset: str - """ - - _attribute_map = { - 'auto_update': {'key': 'autoUpdate', 'type': 'str'}, - 'update_delay_offset': {'key': 'updateDelayOffset', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(UpdateIntegrationRuntimeRequest, self).__init__(**kwargs) - self.auto_update = kwargs.get('auto_update', None) - self.update_delay_offset = kwargs.get('update_delay_offset', None) - - -class UserAccessPolicy(msrest.serialization.Model): - """Get Data Plane read only token request definition. - - :param permissions: The string with permissions for Data Plane access. Currently only 'r' is - supported which grants read only access. - :type permissions: str - :param access_resource_path: The resource path to get access relative to factory. Currently - only empty string is supported which corresponds to the factory resource. - :type access_resource_path: str - :param profile_name: The name of the profile. Currently only the default is supported. The - default value is DefaultProfile. - :type profile_name: str - :param start_time: Start time for the token. If not specified the current time will be used. - :type start_time: str - :param expire_time: Expiration time for the token. 
Maximum duration for the token is eight - hours and by default the token will expire in eight hours. - :type expire_time: str - """ - - _attribute_map = { - 'permissions': {'key': 'permissions', 'type': 'str'}, - 'access_resource_path': {'key': 'accessResourcePath', 'type': 'str'}, - 'profile_name': {'key': 'profileName', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'expire_time': {'key': 'expireTime', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(UserAccessPolicy, self).__init__(**kwargs) - self.permissions = kwargs.get('permissions', None) - self.access_resource_path = kwargs.get('access_resource_path', None) - self.profile_name = kwargs.get('profile_name', None) - self.start_time = kwargs.get('start_time', None) - self.expire_time = kwargs.get('expire_time', None) - - -class UserProperty(msrest.serialization.Model): - """User property. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. User property name. - :type name: str - :param value: Required. User property value. Type: string (or Expression with resultType - string). - :type value: object - """ - - _validation = { - 'name': {'required': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(UserProperty, self).__init__(**kwargs) - self.name = kwargs['name'] - self.value = kwargs['value'] - - -class ValidationActivity(Activity): - """This activity verifies that an external resource exists. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param timeout: Specifies the timeout for the activity to run. If there is no value specified, - it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or - Expression with resultType string), pattern: - ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: object - :param sleep: A delay in seconds between validation attempts. If no value is specified, 10 - seconds will be used as the default. Type: integer (or Expression with resultType integer). - :type sleep: object - :param minimum_size: Can be used if dataset points to a file. The file must be greater than or - equal in size to the value specified. Type: integer (or Expression with resultType integer). - :type minimum_size: object - :param child_items: Can be used if dataset points to a folder. If set to true, the folder must - have at least one file. If set to false, the folder must be empty. Type: boolean (or Expression - with resultType boolean). - :type child_items: object - :param dataset: Required. Validation activity dataset reference. 
- :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'dataset': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, - 'sleep': {'key': 'typeProperties.sleep', 'type': 'object'}, - 'minimum_size': {'key': 'typeProperties.minimumSize', 'type': 'object'}, - 'child_items': {'key': 'typeProperties.childItems', 'type': 'object'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - } - - def __init__( - self, - **kwargs - ): - super(ValidationActivity, self).__init__(**kwargs) - self.type = 'Validation' # type: str - self.timeout = kwargs.get('timeout', None) - self.sleep = kwargs.get('sleep', None) - self.minimum_size = kwargs.get('minimum_size', None) - self.child_items = kwargs.get('child_items', None) - self.dataset = kwargs['dataset'] - - -class VariableSpecification(msrest.serialization.Model): - """Definition of a single variable for a Pipeline. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Variable type. Possible values include: "String", "Bool", "Array". - :type type: str or ~azure.mgmt.datafactory.models.VariableType - :param default_value: Default value of variable. - :type default_value: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(VariableSpecification, self).__init__(**kwargs) - self.type = kwargs['type'] - self.default_value = kwargs.get('default_value', None) - - -class VerticaLinkedService(LinkedService): - """Vertica linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
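The same kwargs pattern applies to every model in this file: required values are read with kwargs['...'] (so omitting them raises KeyError), optional ones with kwargs.get(...), and serialization uses the _attribute_map keys. A small, self-contained sketch using the VariableSpecification model defined above:

    from azure.mgmt.datafactory.models import VariableSpecification

    var = VariableSpecification(type="String", default_value="dev")

    # msrest models serialize via the _attribute_map keys, so this should print
    # something close to {'type': 'String', 'defaultValue': 'dev'}.
    print(var.serialize())

    # Omitting a required kwarg fails fast:
    # VariableSpecification(default_value="dev")  ->  KeyError: 'type'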
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(VerticaLinkedService, self).__init__(**kwargs) - self.type = 'Vertica' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class VerticaSource(TabularSource): - """A copy activity Vertica source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(VerticaSource, self).__init__(**kwargs) - self.type = 'VerticaSource' # type: str - self.query = kwargs.get('query', None) - - -class VerticaTableDataset(Dataset): - """Vertica dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of the Vertica. Type: string (or Expression with resultType - string). - :type table: object - :param schema_type_properties_schema: The schema name of the Vertica. Type: string (or - Expression with resultType string). 
- :type schema_type_properties_schema: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(VerticaTableDataset, self).__init__(**kwargs) - self.type = 'VerticaTable' # type: str - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - - -class WaitActivity(Activity): - """This activity suspends pipeline execution for the specified interval. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param wait_time_in_seconds: Required. Duration in seconds. - :type wait_time_in_seconds: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'wait_time_in_seconds': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'wait_time_in_seconds': {'key': 'typeProperties.waitTimeInSeconds', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(WaitActivity, self).__init__(**kwargs) - self.type = 'Wait' # type: str - self.wait_time_in_seconds = kwargs['wait_time_in_seconds'] - - -class WebActivity(ExecutionActivity): - """Web activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. 
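A minimal sketch for the WaitActivity model defined above; name and wait_time_in_seconds are the only kwargs a caller must supply, since type is fixed to 'Wait' by __init__:

    from azure.mgmt.datafactory.models import WaitActivity

    wait = WaitActivity(
        name="WaitBeforeRetry",
        description="Pause the pipeline for 30 seconds",
        wait_time_in_seconds=30,   # typed as object, so an expression dict is also accepted
    )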
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param method: Required. Rest API method for target endpoint. Possible values include: "GET", - "POST", "PUT", "DELETE". - :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod - :param url: Required. Web activity target endpoint and path. Type: string (or Expression with - resultType string). - :type url: object - :param headers: Represents the headers that will be sent to the request. For example, to set - the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": - "application/json" }. Type: string (or Expression with resultType string). - :type headers: object - :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT - method, not allowed for GET method Type: string (or Expression with resultType string). - :type body: object - :param authentication: Authentication method used for calling the endpoint. - :type authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication - :param datasets: List of datasets passed to web endpoint. - :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] - :param linked_services: List of linked services passed to web endpoint. - :type linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'method': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'method': {'key': 'typeProperties.method', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, - 'body': {'key': 'typeProperties.body', 'type': 'object'}, - 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, - 'datasets': {'key': 'typeProperties.datasets', 'type': '[DatasetReference]'}, - 'linked_services': {'key': 'typeProperties.linkedServices', 'type': '[LinkedServiceReference]'}, - 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, - } - - def __init__( - self, - **kwargs - ): - super(WebActivity, self).__init__(**kwargs) - self.type = 'WebActivity' # type: str - self.method = kwargs['method'] - self.url = kwargs['url'] - self.headers = kwargs.get('headers', None) - self.body = kwargs.get('body', None) - self.authentication = kwargs.get('authentication', None) - self.datasets = kwargs.get('datasets', None) - self.linked_services = 
kwargs.get('linked_services', None) - self.connect_via = kwargs.get('connect_via', None) - - -class WebActivityAuthentication(msrest.serialization.Model): - """Web activity authentication properties. - - :param type: Web activity authentication (Basic/ClientCertificate/MSI/ServicePrincipal). - :type type: str - :param pfx: Base64-encoded contents of a PFX file or Certificate when used for - ServicePrincipal. - :type pfx: ~azure.mgmt.datafactory.models.SecretBase - :param username: Web activity authentication user name for basic authentication or ClientID - when used for ServicePrincipal. Type: string (or Expression with resultType string). - :type username: object - :param password: Password for the PFX file or basic authentication / Secret when used for - ServicePrincipal. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param resource: Resource for which Azure Auth token will be requested when using MSI - Authentication. Type: string (or Expression with resultType string). - :type resource: object - :param user_tenant: TenantId for which Azure Auth token will be requested when using - ServicePrincipal Authentication. Type: string (or Expression with resultType string). - :type user_tenant: object - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference - """ - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, - 'username': {'key': 'username', 'type': 'object'}, - 'password': {'key': 'password', 'type': 'SecretBase'}, - 'resource': {'key': 'resource', 'type': 'object'}, - 'user_tenant': {'key': 'userTenant', 'type': 'object'}, - 'credential': {'key': 'credential', 'type': 'CredentialReference'}, - } - - def __init__( - self, - **kwargs - ): - super(WebActivityAuthentication, self).__init__(**kwargs) - self.type = kwargs.get('type', None) - self.pfx = kwargs.get('pfx', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.resource = kwargs.get('resource', None) - self.user_tenant = kwargs.get('user_tenant', None) - self.credential = kwargs.get('credential', None) - - -class WebLinkedServiceTypeProperties(msrest.serialization.Model): - """Base definition of WebLinkedServiceTypeProperties, this typeProperties is polymorphic based on authenticationType, so not flattened in SDK models. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: WebAnonymousAuthentication, WebBasicAuthentication, WebClientCertificateAuthentication. - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . - Type: string (or Expression with resultType string). - :type url: object - :param authentication_type: Required. Type of authentication used to connect to the web table - source.Constant filled by server. Possible values include: "Basic", "Anonymous", - "ClientCertificate". 
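A hedged usage sketch for the WebActivity and WebActivityAuthentication models completed above. The name, method, and url kwargs are required; the MSI authentication shape follows the kwargs read in WebActivityAuthentication.__init__, with placeholder values:

    from azure.mgmt.datafactory.models import WebActivity, WebActivityAuthentication

    call = WebActivity(
        name="CallHealthEndpoint",
        method="GET",                             # GET, POST, PUT or DELETE
        url="https://example.com/api/health",     # placeholder endpoint
        headers={"Accept-Language": "en-us"},
        authentication=WebActivityAuthentication(
            type="MSI",
            resource="https://management.azure.com/",
        ),
    )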
- :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - } - - _subtype_map = { - 'authentication_type': {'Anonymous': 'WebAnonymousAuthentication', 'Basic': 'WebBasicAuthentication', 'ClientCertificate': 'WebClientCertificateAuthentication'} - } - - def __init__( - self, - **kwargs - ): - super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) - self.url = kwargs['url'] - self.authentication_type = None # type: Optional[str] - - -class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): - """A WebLinkedService that uses anonymous authentication to communicate with an HTTP endpoint. - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . - Type: string (or Expression with resultType string). - :type url: object - :param authentication_type: Required. Type of authentication used to connect to the web table - source.Constant filled by server. Possible values include: "Basic", "Anonymous", - "ClientCertificate". - :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(WebAnonymousAuthentication, self).__init__(**kwargs) - self.authentication_type = 'Anonymous' # type: str - - -class WebBasicAuthentication(WebLinkedServiceTypeProperties): - """A WebLinkedService that uses basic authentication to communicate with an HTTP endpoint. - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . - Type: string (or Expression with resultType string). - :type url: object - :param authentication_type: Required. Type of authentication used to connect to the web table - source.Constant filled by server. Possible values include: "Basic", "Anonymous", - "ClientCertificate". - :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType - :param username: Required. User name for Basic authentication. Type: string (or Expression with - resultType string). - :type username: object - :param password: Required. The password for Basic authentication. 
- :type password: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - 'username': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - 'username': {'key': 'username', 'type': 'object'}, - 'password': {'key': 'password', 'type': 'SecretBase'}, - } - - def __init__( - self, - **kwargs - ): - super(WebBasicAuthentication, self).__init__(**kwargs) - self.authentication_type = 'Basic' # type: str - self.username = kwargs['username'] - self.password = kwargs['password'] - - -class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): - """A WebLinkedService that uses client certificate based authentication to communicate with an HTTP endpoint. This scheme follows mutual authentication; the server must also provide valid credentials to the client. - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . - Type: string (or Expression with resultType string). - :type url: object - :param authentication_type: Required. Type of authentication used to connect to the web table - source.Constant filled by server. Possible values include: "Basic", "Anonymous", - "ClientCertificate". - :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType - :param pfx: Required. Base64-encoded contents of a PFX file. - :type pfx: ~azure.mgmt.datafactory.models.SecretBase - :param password: Required. Password for the PFX file. - :type password: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - 'pfx': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, - 'password': {'key': 'password', 'type': 'SecretBase'}, - } - - def __init__( - self, - **kwargs - ): - super(WebClientCertificateAuthentication, self).__init__(**kwargs) - self.authentication_type = 'ClientCertificate' # type: str - self.pfx = kwargs['pfx'] - self.password = kwargs['password'] - - -class WebHookActivity(Activity): - """WebHook activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param method: Required. Rest API method for target endpoint. Possible values include: "POST". - :type method: str or ~azure.mgmt.datafactory.models.WebHookActivityMethod - :param url: Required. WebHook activity target endpoint and path. Type: string (or Expression - with resultType string). 
- :type url: object - :param timeout: The timeout within which the webhook should be called back. If there is no - value specified, it defaults to 10 minutes. Type: string. Pattern: - ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: str - :param headers: Represents the headers that will be sent to the request. For example, to set - the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": - "application/json" }. Type: string (or Expression with resultType string). - :type headers: object - :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT - method, not allowed for GET method Type: string (or Expression with resultType string). - :type body: object - :param authentication: Authentication method used for calling the endpoint. - :type authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication - :param report_status_on_call_back: When set to true, statusCode, output and error in callback - request body will be consumed by activity. The activity can be marked as failed by setting - statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with - resultType boolean). - :type report_status_on_call_back: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'method': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'method': {'key': 'typeProperties.method', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'timeout': {'key': 'typeProperties.timeout', 'type': 'str'}, - 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, - 'body': {'key': 'typeProperties.body', 'type': 'object'}, - 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, - 'report_status_on_call_back': {'key': 'typeProperties.reportStatusOnCallBack', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(WebHookActivity, self).__init__(**kwargs) - self.type = 'WebHook' # type: str - self.method = kwargs['method'] - self.url = kwargs['url'] - self.timeout = kwargs.get('timeout', None) - self.headers = kwargs.get('headers', None) - self.body = kwargs.get('body', None) - self.authentication = kwargs.get('authentication', None) - self.report_status_on_call_back = kwargs.get('report_status_on_call_back', None) - - -class WebLinkedService(LinkedService): - """Web linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param type_properties: Required. Web linked service properties. - :type type_properties: ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties - """ - - _validation = { - 'type': {'required': True}, - 'type_properties': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type_properties': {'key': 'typeProperties', 'type': 'WebLinkedServiceTypeProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(WebLinkedService, self).__init__(**kwargs) - self.type = 'Web' # type: str - self.type_properties = kwargs['type_properties'] - - -class WebSource(CopySource): - """A copy activity source for web page table. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(WebSource, self).__init__(**kwargs) - self.type = 'WebSource' # type: str - self.additional_columns = kwargs.get('additional_columns', None) - - -class WebTableDataset(Dataset): - """The dataset points to a HTML table in the web page. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. 
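A short sketch tying WebLinkedService to the polymorphic WebLinkedServiceTypeProperties hierarchy above: the anonymous variant needs only a url, and its __init__ pins authentication_type to 'Anonymous', which is the key the _subtype_map resolves on:

    from azure.mgmt.datafactory.models import (
        WebAnonymousAuthentication,
        WebLinkedService,
    )

    web_ls = WebLinkedService(
        description="Public web table source, no credentials",
        type_properties=WebAnonymousAuthentication(url="https://example.com/tables"),
    )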
- :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param index: Required. The zero-based index of the table in the web page. Type: integer (or - Expression with resultType integer), minimum: 0. - :type index: object - :param path: The relative URL to the web page from the linked service URL. Type: string (or - Expression with resultType string). - :type path: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'index': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'index': {'key': 'typeProperties.index', 'type': 'object'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(WebTableDataset, self).__init__(**kwargs) - self.type = 'WebTable' # type: str - self.index = kwargs['index'] - self.path = kwargs.get('path', None) - - -class XeroLinkedService(LinkedService): - """Xero Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_properties: Properties used to connect to Xero. It is mutually exclusive with - any other properties in the linked service. Type: object. 
- :type connection_properties: object - :param host: The endpoint of the Xero server. (i.e. api.xero.com). - :type host: object - :param consumer_key: The consumer key associated with the Xero application. - :type consumer_key: ~azure.mgmt.datafactory.models.SecretBase - :param private_key: The private key from the .pem file that was generated for your Xero private - application. You must include all the text from the .pem file, including the Unix line endings( - ). - :type private_key: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'SecretBase'}, - 'private_key': {'key': 'typeProperties.privateKey', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(XeroLinkedService, self).__init__(**kwargs) - self.type = 'Xero' # type: str - self.connection_properties = kwargs.get('connection_properties', None) - self.host = kwargs.get('host', None) - self.consumer_key = kwargs.get('consumer_key', None) - self.private_key = kwargs.get('private_key', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class XeroObjectDataset(Dataset): - """Xero Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. 
- :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(XeroObjectDataset, self).__init__(**kwargs) - self.type = 'XeroObject' # type: str - self.table_name = kwargs.get('table_name', None) - - -class XeroSource(TabularSource): - """A copy activity Xero Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. 
Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(XeroSource, self).__init__(**kwargs) - self.type = 'XeroSource' # type: str - self.query = kwargs.get('query', None) - - -class XmlDataset(Dataset): - """Xml dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the json data storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param encoding_name: The code page name of the preferred encoding. If not specified, the - default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column - of the table in the following link to set supported values: - https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with - resultType string). - :type encoding_name: object - :param null_value: The null value string. Type: string (or Expression with resultType string). - :type null_value: object - :param compression: The data compression method used for the json dataset. 
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__( - self, - **kwargs - ): - super(XmlDataset, self).__init__(**kwargs) - self.type = 'Xml' # type: str - self.location = kwargs.get('location', None) - self.encoding_name = kwargs.get('encoding_name', None) - self.null_value = kwargs.get('null_value', None) - self.compression = kwargs.get('compression', None) - - -class XmlReadSettings(FormatReadSettings): - """Xml read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param compression_properties: Compression settings. - :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings - :param validation_mode: Indicates what validation method is used when reading the xml files. - Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). - :type validation_mode: object - :param detect_data_type: Indicates whether type detection is enabled when reading the xml - files. Type: boolean (or Expression with resultType boolean). - :type detect_data_type: object - :param namespaces: Indicates whether namespace is enabled when reading the xml files. Type: - boolean (or Expression with resultType boolean). - :type namespaces: object - :param namespace_prefixes: Namespace uri to prefix mappings to override the prefixes in column - names when namespace is enabled, if no prefix is defined for a namespace uri, the prefix of xml - element/attribute name in the xml data file will be used. Example: - "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression with resultType object). 
- :type namespace_prefixes: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, - 'validation_mode': {'key': 'validationMode', 'type': 'object'}, - 'detect_data_type': {'key': 'detectDataType', 'type': 'object'}, - 'namespaces': {'key': 'namespaces', 'type': 'object'}, - 'namespace_prefixes': {'key': 'namespacePrefixes', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(XmlReadSettings, self).__init__(**kwargs) - self.type = 'XmlReadSettings' # type: str - self.compression_properties = kwargs.get('compression_properties', None) - self.validation_mode = kwargs.get('validation_mode', None) - self.detect_data_type = kwargs.get('detect_data_type', None) - self.namespaces = kwargs.get('namespaces', None) - self.namespace_prefixes = kwargs.get('namespace_prefixes', None) - - -class XmlSource(CopySource): - """A copy activity Xml source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param store_settings: Xml store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param format_settings: Xml format settings. - :type format_settings: ~azure.mgmt.datafactory.models.XmlReadSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'XmlReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(XmlSource, self).__init__(**kwargs) - self.type = 'XmlSource' # type: str - self.store_settings = kwargs.get('store_settings', None) - self.format_settings = kwargs.get('format_settings', None) - self.additional_columns = kwargs.get('additional_columns', None) - - -class ZipDeflateReadSettings(CompressionReadSettings): - """The ZipDeflate compression read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The Compression setting type.Constant filled by server. - :type type: str - :param preserve_zip_file_name_as_folder: Preserve the zip file name as folder path. Type: - boolean (or Expression with resultType boolean). - :type preserve_zip_file_name_as_folder: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'preserve_zip_file_name_as_folder': {'key': 'preserveZipFileNameAsFolder', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ZipDeflateReadSettings, self).__init__(**kwargs) - self.type = 'ZipDeflateReadSettings' # type: str - self.preserve_zip_file_name_as_folder = kwargs.get('preserve_zip_file_name_as_folder', None) - - -class ZohoLinkedService(LinkedService): - """Zoho server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_properties: Properties used to connect to Zoho. It is mutually exclusive with - any other properties in the linked service. Type: object. - :type connection_properties: object - :param endpoint: The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private). - :type endpoint: object - :param access_token: The access token for Zoho authentication. 
- :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ZohoLinkedService, self).__init__(**kwargs) - self.type = 'Zoho' # type: str - self.connection_properties = kwargs.get('connection_properties', None) - self.endpoint = kwargs.get('endpoint', None) - self.access_token = kwargs.get('access_token', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class ZohoObjectDataset(Dataset): - """Zoho server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ZohoObjectDataset, self).__init__(**kwargs) - self.type = 'ZohoObject' # type: str - self.table_name = kwargs.get('table_name', None) - - -class ZohoSource(TabularSource): - """A copy activity Zoho server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param disable_metrics_collection: If true, disable data store metrics collection. Default is - false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ZohoSource, self).__init__(**kwargs) - self.type = 'ZohoSource' # type: str - self.query = kwargs.get('query', None) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py index 27cfed06684..c34ac740123 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py @@ -1,4 +1,5 @@ # coding=utf-8 +# pylint: disable=too-many-lines # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. @@ -7,16 +8,22 @@ # -------------------------------------------------------------------------- import datetime +import sys from typing import Any, Dict, List, Optional, TYPE_CHECKING, Union -import msrest.serialization +from .. import _serialization if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - import __init__ as _models + from .. import models as _models +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object -class AccessPolicyResponse(msrest.serialization.Model): +class AccessPolicyResponse(_serialization.Model): """Get Data Plane read only token response definition. :ivar policy: The user access policy. @@ -28,9 +35,9 @@ class AccessPolicyResponse(msrest.serialization.Model): """ _attribute_map = { - 'policy': {'key': 'policy', 'type': 'UserAccessPolicy'}, - 'access_token': {'key': 'accessToken', 'type': 'str'}, - 'data_plane_url': {'key': 'dataPlaneUrl', 'type': 'str'}, + "policy": {"key": "policy", "type": "UserAccessPolicy"}, + "access_token": {"key": "accessToken", "type": "str"}, + "data_plane_url": {"key": "dataPlaneUrl", "type": "str"}, } def __init__( @@ -49,26 +56,26 @@ def __init__( :keyword data_plane_url: Data Plane service base URL. :paramtype data_plane_url: str """ - super(AccessPolicyResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.policy = policy self.access_token = access_token self.data_plane_url = data_plane_url -class Activity(msrest.serialization.Model): +class Activity(_serialization.Model): """A pipeline activity. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ControlActivity, ExecuteWranglingDataflowActivity, ExecutionActivity. + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: + ControlActivity, ExecuteWranglingDataflowActivity, ExecutionActivity All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -79,28 +86,32 @@ class Activity(msrest.serialization.Model): """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, } _subtype_map = { - 'type': {'Container': 'ControlActivity', 'ExecuteWranglingDataflow': 'ExecuteWranglingDataflowActivity', 'Execution': 'ExecutionActivity'} + "type": { + "Container": "ControlActivity", + "ExecuteWranglingDataflow": "ExecuteWranglingDataflowActivity", + "Execution": "ExecutionActivity", + } } def __init__( self, *, name: str, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, @@ -109,8 +120,8 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -119,38 +130,38 @@ def __init__( :keyword user_properties: Activity user properties. :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] """ - super(Activity, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.name = name - self.type = 'Activity' # type: str + self.type = None # type: Optional[str] self.description = description self.depends_on = depends_on self.user_properties = user_properties -class ActivityDependency(msrest.serialization.Model): +class ActivityDependency(_serialization.Model): """Activity dependency information. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar activity: Required. Activity name. 
+ :vartype additional_properties: dict[str, JSON] + :ivar activity: Activity name. Required. :vartype activity: str - :ivar dependency_conditions: Required. Match-Condition for the dependency. + :ivar dependency_conditions: Match-Condition for the dependency. Required. :vartype dependency_conditions: list[str or ~azure.mgmt.datafactory.models.DependencyCondition] """ _validation = { - 'activity': {'required': True}, - 'dependency_conditions': {'required': True}, + "activity": {"required": True}, + "dependency_conditions": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'activity': {'key': 'activity', 'type': 'str'}, - 'dependency_conditions': {'key': 'dependencyConditions', 'type': '[str]'}, + "additional_properties": {"key": "", "type": "{object}"}, + "activity": {"key": "activity", "type": "str"}, + "dependency_conditions": {"key": "dependencyConditions", "type": "[str]"}, } def __init__( @@ -158,38 +169,38 @@ def __init__( *, activity: str, dependency_conditions: List[Union[str, "_models.DependencyCondition"]], - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword activity: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword activity: Activity name. Required. :paramtype activity: str - :keyword dependency_conditions: Required. Match-Condition for the dependency. + :keyword dependency_conditions: Match-Condition for the dependency. Required. :paramtype dependency_conditions: list[str or ~azure.mgmt.datafactory.models.DependencyCondition] """ - super(ActivityDependency, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.activity = activity self.dependency_conditions = dependency_conditions -class ActivityPolicy(msrest.serialization.Model): +class ActivityPolicy(_serialization.Model): """Execution policy for an activity. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar timeout: Specifies the timeout for the activity to run. The default timeout is 7 days. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype timeout: any + :vartype timeout: JSON :ivar retry: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype retry: any + :vartype retry: JSON :ivar retry_interval_in_seconds: Interval between each retry attempt (in seconds). The default is 30 sec. 
:vartype retry_interval_in_seconds: int @@ -202,24 +213,24 @@ class ActivityPolicy(msrest.serialization.Model): """ _validation = { - 'retry_interval_in_seconds': {'maximum': 86400, 'minimum': 30}, + "retry_interval_in_seconds": {"maximum": 86400, "minimum": 30}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'timeout': {'key': 'timeout', 'type': 'object'}, - 'retry': {'key': 'retry', 'type': 'object'}, - 'retry_interval_in_seconds': {'key': 'retryIntervalInSeconds', 'type': 'int'}, - 'secure_input': {'key': 'secureInput', 'type': 'bool'}, - 'secure_output': {'key': 'secureOutput', 'type': 'bool'}, + "additional_properties": {"key": "", "type": "{object}"}, + "timeout": {"key": "timeout", "type": "object"}, + "retry": {"key": "retry", "type": "object"}, + "retry_interval_in_seconds": {"key": "retryIntervalInSeconds", "type": "int"}, + "secure_input": {"key": "secureInput", "type": "bool"}, + "secure_output": {"key": "secureOutput", "type": "bool"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - timeout: Optional[Any] = None, - retry: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + timeout: Optional[JSON] = None, + retry: Optional[JSON] = None, retry_interval_in_seconds: Optional[int] = None, secure_input: Optional[bool] = None, secure_output: Optional[bool] = None, @@ -228,14 +239,14 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword timeout: Specifies the timeout for the activity to run. The default timeout is 7 days. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype timeout: any + :paramtype timeout: JSON :keyword retry: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype retry: any + :paramtype retry: JSON :keyword retry_interval_in_seconds: Interval between each retry attempt (in seconds). The default is 30 sec. :paramtype retry_interval_in_seconds: int @@ -246,7 +257,7 @@ def __init__( not be logged to monitoring. :paramtype secure_output: bool """ - super(ActivityPolicy, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.timeout = timeout self.retry = retry @@ -255,14 +266,14 @@ def __init__( self.secure_output = secure_output -class ActivityRun(msrest.serialization.Model): +class ActivityRun(_serialization.Model): # pylint: disable=too-many-instance-attributes """Information about an activity run in a pipeline. Variables are only populated by the server, and will be ignored when sending a request. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar pipeline_name: The name of the pipeline. :vartype pipeline_name: str :ivar pipeline_run_id: The id of the pipeline run. @@ -284,58 +295,53 @@ class ActivityRun(msrest.serialization.Model): :ivar duration_in_ms: The duration of the activity run. :vartype duration_in_ms: int :ivar input: The input for the activity. - :vartype input: any + :vartype input: JSON :ivar output: The output for the activity. 
- :vartype output: any + :vartype output: JSON :ivar error: The error if any from the activity run. - :vartype error: any + :vartype error: JSON """ _validation = { - 'pipeline_name': {'readonly': True}, - 'pipeline_run_id': {'readonly': True}, - 'activity_name': {'readonly': True}, - 'activity_type': {'readonly': True}, - 'activity_run_id': {'readonly': True}, - 'linked_service_name': {'readonly': True}, - 'status': {'readonly': True}, - 'activity_run_start': {'readonly': True}, - 'activity_run_end': {'readonly': True}, - 'duration_in_ms': {'readonly': True}, - 'input': {'readonly': True}, - 'output': {'readonly': True}, - 'error': {'readonly': True}, + "pipeline_name": {"readonly": True}, + "pipeline_run_id": {"readonly": True}, + "activity_name": {"readonly": True}, + "activity_type": {"readonly": True}, + "activity_run_id": {"readonly": True}, + "linked_service_name": {"readonly": True}, + "status": {"readonly": True}, + "activity_run_start": {"readonly": True}, + "activity_run_end": {"readonly": True}, + "duration_in_ms": {"readonly": True}, + "input": {"readonly": True}, + "output": {"readonly": True}, + "error": {"readonly": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, - 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, - 'activity_name': {'key': 'activityName', 'type': 'str'}, - 'activity_type': {'key': 'activityType', 'type': 'str'}, - 'activity_run_id': {'key': 'activityRunId', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'activity_run_start': {'key': 'activityRunStart', 'type': 'iso-8601'}, - 'activity_run_end': {'key': 'activityRunEnd', 'type': 'iso-8601'}, - 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, - 'input': {'key': 'input', 'type': 'object'}, - 'output': {'key': 'output', 'type': 'object'}, - 'error': {'key': 'error', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "pipeline_name": {"key": "pipelineName", "type": "str"}, + "pipeline_run_id": {"key": "pipelineRunId", "type": "str"}, + "activity_name": {"key": "activityName", "type": "str"}, + "activity_type": {"key": "activityType", "type": "str"}, + "activity_run_id": {"key": "activityRunId", "type": "str"}, + "linked_service_name": {"key": "linkedServiceName", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "activity_run_start": {"key": "activityRunStart", "type": "iso-8601"}, + "activity_run_end": {"key": "activityRunEnd", "type": "iso-8601"}, + "duration_in_ms": {"key": "durationInMs", "type": "int"}, + "input": {"key": "input", "type": "object"}, + "output": {"key": "output", "type": "object"}, + "error": {"key": "error", "type": "object"}, } - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - **kwargs - ): + def __init__(self, *, additional_properties: Optional[Dict[str, JSON]] = None, **kwargs): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] """ - super(ActivityRun, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.pipeline_name = None self.pipeline_run_id = None @@ -352,12 +358,12 @@ def __init__( self.error = None -class ActivityRunsQueryResponse(msrest.serialization.Model): +class ActivityRunsQueryResponse(_serialization.Model): """A list activity runs. All required parameters must be populated in order to send to Azure. - :ivar value: Required. List of activity runs. + :ivar value: List of activity runs. Required. :vartype value: list[~azure.mgmt.datafactory.models.ActivityRun] :ivar continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. @@ -365,34 +371,28 @@ class ActivityRunsQueryResponse(msrest.serialization.Model): """ _validation = { - 'value': {'required': True}, + "value": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[ActivityRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + "value": {"key": "value", "type": "[ActivityRun]"}, + "continuation_token": {"key": "continuationToken", "type": "str"}, } - def __init__( - self, - *, - value: List["_models.ActivityRun"], - continuation_token: Optional[str] = None, - **kwargs - ): + def __init__(self, *, value: List["_models.ActivityRun"], continuation_token: Optional[str] = None, **kwargs): """ - :keyword value: Required. List of activity runs. + :keyword value: List of activity runs. Required. :paramtype value: list[~azure.mgmt.datafactory.models.ActivityRun] :keyword continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. :paramtype continuation_token: str """ - super(ActivityRunsQueryResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.continuation_token = continuation_token -class AddDataFlowToDebugSessionResponse(msrest.serialization.Model): +class AddDataFlowToDebugSessionResponse(_serialization.Model): """Response body structure for starting data flow debug session. :ivar job_version: The ID of data flow debug job version. @@ -400,67 +400,90 @@ class AddDataFlowToDebugSessionResponse(msrest.serialization.Model): """ _attribute_map = { - 'job_version': {'key': 'jobVersion', 'type': 'str'}, + "job_version": {"key": "jobVersion", "type": "str"}, } - def __init__( - self, - *, - job_version: Optional[str] = None, - **kwargs - ): + def __init__(self, *, job_version: Optional[str] = None, **kwargs): """ :keyword job_version: The ID of data flow debug job version. :paramtype job_version: str """ - super(AddDataFlowToDebugSessionResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.job_version = job_version -class AdditionalColumns(msrest.serialization.Model): +class AdditionalColumns(_serialization.Model): """Specify the column name and value of additional columns. :ivar name: Additional column name. Type: string (or Expression with resultType string). - :vartype name: any + :vartype name: JSON :ivar value: Additional column value. Type: string (or Expression with resultType string). 
- :vartype value: any + :vartype value: JSON """ _attribute_map = { - 'name': {'key': 'name', 'type': 'object'}, - 'value': {'key': 'value', 'type': 'object'}, + "name": {"key": "name", "type": "object"}, + "value": {"key": "value", "type": "object"}, } - def __init__( - self, - *, - name: Optional[Any] = None, - value: Optional[Any] = None, - **kwargs - ): + def __init__(self, *, name: Optional[JSON] = None, value: Optional[JSON] = None, **kwargs): """ :keyword name: Additional column name. Type: string (or Expression with resultType string). - :paramtype name: any + :paramtype name: JSON :keyword value: Additional column value. Type: string (or Expression with resultType string). - :paramtype value: any + :paramtype value: JSON """ - super(AdditionalColumns, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name self.value = value -class LinkedService(msrest.serialization.Model): - """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonMWSLinkedService, AmazonRdsForOracleLinkedService, AmazonRdsForSqlServerLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AmazonS3CompatibleLinkedService, AppFiguresLinkedService, AsanaLinkedService, AzureBatchLinkedService, AzureBlobFSLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureDatabricksDeltaLakeLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMLLinkedService, AzureMLServiceLinkedService, AzureMariaDBLinkedService, AzureMySqlLinkedService, AzurePostgreSqlLinkedService, AzureSearchLinkedService, AzureSqlDWLinkedService, AzureSqlDatabaseLinkedService, AzureSqlMILinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDbLinkedService, CosmosDbMongoDbApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, DataworldLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAXLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HDInsightLinkedService, HDInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDBLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDbLinkedService, MongoDbAtlasLinkedService, MongoDbV2LinkedService, MySqlLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleCloudStorageLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSqlLinkedService, PrestoLinkedService, QuickBooksLinkedService, QuickbaseLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBWLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOdpLinkedService, SapOpenHubLinkedService, 
SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SmartsheetLinkedService, SnowflakeLinkedService, SparkLinkedService, SqlServerLinkedService, SquareLinkedService, SybaseLinkedService, TeamDeskLinkedService, TeradataLinkedService, TwilioLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZendeskLinkedService, ZohoLinkedService. - - All required parameters must be populated in order to send to Azure. - - :ivar additional_properties: Unmatched properties from the message are deserialized to this - collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. +class LinkedService(_serialization.Model): + """The nested object which contains the information and credential which can be used to connect with related store or compute resource. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AmazonMWSLinkedService, AmazonRdsForOracleLinkedService, AmazonRdsForSqlServerLinkedService, + AmazonRedshiftLinkedService, AmazonS3LinkedService, AmazonS3CompatibleLinkedService, + AppFiguresLinkedService, AsanaLinkedService, AzureBatchLinkedService, AzureBlobFSLinkedService, + AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, + AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, + AzureDatabricksLinkedService, AzureDatabricksDeltaLakeLinkedService, + AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, + AzureMLLinkedService, AzureMLServiceLinkedService, AzureMariaDBLinkedService, + AzureMySqlLinkedService, AzurePostgreSqlLinkedService, AzureSearchLinkedService, + AzureSqlDWLinkedService, AzureSqlDatabaseLinkedService, AzureSqlMILinkedService, + AzureStorageLinkedService, AzureSynapseArtifactsLinkedService, AzureTableStorageLinkedService, + CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, + CosmosDbLinkedService, CosmosDbMongoDbApiLinkedService, CouchbaseLinkedService, + CustomDataSourceLinkedService, DataworldLinkedService, Db2LinkedService, DrillLinkedService, + DynamicsLinkedService, DynamicsAXLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, + FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, + GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GoogleSheetsLinkedService, + GreenplumLinkedService, HBaseLinkedService, HDInsightLinkedService, + HDInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, + HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, + MagentoLinkedService, MariaDBLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, + MongoDbLinkedService, MongoDbAtlasLinkedService, MongoDbV2LinkedService, MySqlLinkedService, + NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, + OracleLinkedService, OracleCloudStorageLinkedService, OracleServiceCloudLinkedService, + PaypalLinkedService, PhoenixLinkedService, PostgreSqlLinkedService, PrestoLinkedService, + QuickBooksLinkedService, QuickbaseLinkedService, ResponsysLinkedService, + RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, + SalesforceServiceCloudLinkedService, SapBWLinkedService, SapCloudForCustomerLinkedService, + SapEccLinkedService, SapHanaLinkedService, SapOdpLinkedService, SapOpenHubLinkedService, + SapTableLinkedService, 
ServiceNowLinkedService, SftpServerLinkedService, + SharePointOnlineListLinkedService, ShopifyLinkedService, SmartsheetLinkedService, + SnowflakeLinkedService, SparkLinkedService, SqlServerLinkedService, SquareLinkedService, + SybaseLinkedService, TeamDeskLinkedService, TeradataLinkedService, TwilioLinkedService, + VerticaLinkedService, WebLinkedService, XeroLinkedService, ZendeskLinkedService, + ZohoLinkedService + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -469,40 +492,154 @@ class LinkedService(msrest.serialization.Model): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, } _subtype_map = { - 'type': {'AmazonMWS': 'AmazonMWSLinkedService', 'AmazonRdsForOracle': 'AmazonRdsForOracleLinkedService', 'AmazonRdsForSqlServer': 'AmazonRdsForSqlServerLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AmazonS3Compatible': 'AmazonS3CompatibleLinkedService', 'AppFigures': 'AppFiguresLinkedService', 'Asana': 'AsanaLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMLLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 
'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Dataworld': 'DataworldLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HDInsightLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDbLinkedService', 'MongoDbAtlas': 'MongoDbAtlasLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MySql': 'MySqlLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleCloudStorage': 'OracleCloudStorageLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Quickbase': 'QuickbaseLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBWLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOdp': 'SapOdpLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Smartsheet': 'SmartsheetLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SqlServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'TeamDesk': 'TeamDeskLinkedService', 'Teradata': 'TeradataLinkedService', 'Twilio': 'TwilioLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zendesk': 'ZendeskLinkedService', 'Zoho': 'ZohoLinkedService'} - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, + "type": { + "AmazonMWS": "AmazonMWSLinkedService", + "AmazonRdsForOracle": "AmazonRdsForOracleLinkedService", + "AmazonRdsForSqlServer": "AmazonRdsForSqlServerLinkedService", + "AmazonRedshift": "AmazonRedshiftLinkedService", + "AmazonS3": "AmazonS3LinkedService", + 
"AmazonS3Compatible": "AmazonS3CompatibleLinkedService", + "AppFigures": "AppFiguresLinkedService", + "Asana": "AsanaLinkedService", + "AzureBatch": "AzureBatchLinkedService", + "AzureBlobFS": "AzureBlobFSLinkedService", + "AzureBlobStorage": "AzureBlobStorageLinkedService", + "AzureDataExplorer": "AzureDataExplorerLinkedService", + "AzureDataLakeAnalytics": "AzureDataLakeAnalyticsLinkedService", + "AzureDataLakeStore": "AzureDataLakeStoreLinkedService", + "AzureDatabricks": "AzureDatabricksLinkedService", + "AzureDatabricksDeltaLake": "AzureDatabricksDeltaLakeLinkedService", + "AzureFileStorage": "AzureFileStorageLinkedService", + "AzureFunction": "AzureFunctionLinkedService", + "AzureKeyVault": "AzureKeyVaultLinkedService", + "AzureML": "AzureMLLinkedService", + "AzureMLService": "AzureMLServiceLinkedService", + "AzureMariaDB": "AzureMariaDBLinkedService", + "AzureMySql": "AzureMySqlLinkedService", + "AzurePostgreSql": "AzurePostgreSqlLinkedService", + "AzureSearch": "AzureSearchLinkedService", + "AzureSqlDW": "AzureSqlDWLinkedService", + "AzureSqlDatabase": "AzureSqlDatabaseLinkedService", + "AzureSqlMI": "AzureSqlMILinkedService", + "AzureStorage": "AzureStorageLinkedService", + "AzureSynapseArtifacts": "AzureSynapseArtifactsLinkedService", + "AzureTableStorage": "AzureTableStorageLinkedService", + "Cassandra": "CassandraLinkedService", + "CommonDataServiceForApps": "CommonDataServiceForAppsLinkedService", + "Concur": "ConcurLinkedService", + "CosmosDb": "CosmosDbLinkedService", + "CosmosDbMongoDbApi": "CosmosDbMongoDbApiLinkedService", + "Couchbase": "CouchbaseLinkedService", + "CustomDataSource": "CustomDataSourceLinkedService", + "Dataworld": "DataworldLinkedService", + "Db2": "Db2LinkedService", + "Drill": "DrillLinkedService", + "Dynamics": "DynamicsLinkedService", + "DynamicsAX": "DynamicsAXLinkedService", + "DynamicsCrm": "DynamicsCrmLinkedService", + "Eloqua": "EloquaLinkedService", + "FileServer": "FileServerLinkedService", + "FtpServer": "FtpServerLinkedService", + "GoogleAdWords": "GoogleAdWordsLinkedService", + "GoogleBigQuery": "GoogleBigQueryLinkedService", + "GoogleCloudStorage": "GoogleCloudStorageLinkedService", + "GoogleSheets": "GoogleSheetsLinkedService", + "Greenplum": "GreenplumLinkedService", + "HBase": "HBaseLinkedService", + "HDInsight": "HDInsightLinkedService", + "HDInsightOnDemand": "HDInsightOnDemandLinkedService", + "Hdfs": "HdfsLinkedService", + "Hive": "HiveLinkedService", + "HttpServer": "HttpLinkedService", + "Hubspot": "HubspotLinkedService", + "Impala": "ImpalaLinkedService", + "Informix": "InformixLinkedService", + "Jira": "JiraLinkedService", + "Magento": "MagentoLinkedService", + "MariaDB": "MariaDBLinkedService", + "Marketo": "MarketoLinkedService", + "MicrosoftAccess": "MicrosoftAccessLinkedService", + "MongoDb": "MongoDbLinkedService", + "MongoDbAtlas": "MongoDbAtlasLinkedService", + "MongoDbV2": "MongoDbV2LinkedService", + "MySql": "MySqlLinkedService", + "Netezza": "NetezzaLinkedService", + "OData": "ODataLinkedService", + "Odbc": "OdbcLinkedService", + "Office365": "Office365LinkedService", + "Oracle": "OracleLinkedService", + "OracleCloudStorage": "OracleCloudStorageLinkedService", + "OracleServiceCloud": "OracleServiceCloudLinkedService", + "Paypal": "PaypalLinkedService", + "Phoenix": "PhoenixLinkedService", + "PostgreSql": "PostgreSqlLinkedService", + "Presto": "PrestoLinkedService", + "QuickBooks": "QuickBooksLinkedService", + "Quickbase": "QuickbaseLinkedService", + "Responsys": "ResponsysLinkedService", + "RestService": 
"RestServiceLinkedService", + "Salesforce": "SalesforceLinkedService", + "SalesforceMarketingCloud": "SalesforceMarketingCloudLinkedService", + "SalesforceServiceCloud": "SalesforceServiceCloudLinkedService", + "SapBW": "SapBWLinkedService", + "SapCloudForCustomer": "SapCloudForCustomerLinkedService", + "SapEcc": "SapEccLinkedService", + "SapHana": "SapHanaLinkedService", + "SapOdp": "SapOdpLinkedService", + "SapOpenHub": "SapOpenHubLinkedService", + "SapTable": "SapTableLinkedService", + "ServiceNow": "ServiceNowLinkedService", + "Sftp": "SftpServerLinkedService", + "SharePointOnlineList": "SharePointOnlineListLinkedService", + "Shopify": "ShopifyLinkedService", + "Smartsheet": "SmartsheetLinkedService", + "Snowflake": "SnowflakeLinkedService", + "Spark": "SparkLinkedService", + "SqlServer": "SqlServerLinkedService", + "Square": "SquareLinkedService", + "Sybase": "SybaseLinkedService", + "TeamDesk": "TeamDeskLinkedService", + "Teradata": "TeradataLinkedService", + "Twilio": "TwilioLinkedService", + "Vertica": "VerticaLinkedService", + "Web": "WebLinkedService", + "Xero": "XeroLinkedService", + "Zendesk": "ZendeskLinkedService", + "Zoho": "ZohoLinkedService", + } + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -510,26 +647,26 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] """ - super(LinkedService, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties - self.type = 'LinkedService' # type: str + self.type = None # type: Optional[str] self.connect_via = connect_via self.description = description self.parameters = parameters self.annotations = annotations -class AmazonMWSLinkedService(LinkedService): +class AmazonMWSLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Amazon Marketplace Web Service linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -538,88 +675,88 @@ class AmazonMWSLinkedService(LinkedService): :ivar parameters: Parameters for linked service. 
     :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :ivar annotations: List of tags that can be used for describing the linked service.
-    :vartype annotations: list[any]
-    :ivar endpoint: Required. The endpoint of the Amazon MWS server, (i.e. mws.amazonservices.com).
-    :vartype endpoint: any
-    :ivar marketplace_id: Required. The Amazon Marketplace ID you want to retrieve data from. To
-     retrieve data from multiple Marketplace IDs, separate them with a comma (,). (i.e.
-     A2EUQ1WTGCTBG2).
-    :vartype marketplace_id: any
-    :ivar seller_id: Required. The Amazon seller ID.
-    :vartype seller_id: any
+    :vartype annotations: list[JSON]
+    :ivar endpoint: The endpoint of the Amazon MWS server, (i.e. mws.amazonservices.com). Required.
+    :vartype endpoint: JSON
+    :ivar marketplace_id: The Amazon Marketplace ID you want to retrieve data from. To retrieve
+     data from multiple Marketplace IDs, separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2).
+     Required.
+    :vartype marketplace_id: JSON
+    :ivar seller_id: The Amazon seller ID. Required.
+    :vartype seller_id: JSON
     :ivar mws_auth_token: The Amazon MWS authentication token.
     :vartype mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase
-    :ivar access_key_id: Required. The access key id used to access data.
-    :vartype access_key_id: any
+    :ivar access_key_id: The access key id used to access data. Required.
+    :vartype access_key_id: JSON
     :ivar secret_key: The secret key used to access data.
     :vartype secret_key: ~azure.mgmt.datafactory.models.SecretBase
     :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using
      HTTPS. The default value is true.
-    :vartype use_encrypted_endpoints: any
+    :vartype use_encrypted_endpoints: JSON
     :ivar use_host_verification: Specifies whether to require the host name in the server's
      certificate to match the host name of the server when connecting over SSL. The default value
      is true.
-    :vartype use_host_verification: any
+    :vartype use_host_verification: JSON
     :ivar use_peer_verification: Specifies whether to verify the identity of the server when
      connecting over SSL. The default value is true.
-    :vartype use_peer_verification: any
+    :vartype use_peer_verification: JSON
     :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are
      encrypted using the integration runtime credential manager. Type: string (or Expression with
      resultType string).
-    :vartype encrypted_credential: any
+    :vartype encrypted_credential: JSON
     """

     _validation = {
-        'type': {'required': True},
-        'endpoint': {'required': True},
-        'marketplace_id': {'required': True},
-        'seller_id': {'required': True},
-        'access_key_id': {'required': True},
+        "type": {"required": True},
+        "endpoint": {"required": True},
+        "marketplace_id": {"required": True},
+        "seller_id": {"required": True},
+        "access_key_id": {"required": True},
     }

     _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
-        'marketplace_id': {'key': 'typeProperties.marketplaceID', 'type': 'object'},
-        'seller_id': {'key': 'typeProperties.sellerID', 'type': 'object'},
-        'mws_auth_token': {'key': 'typeProperties.mwsAuthToken', 'type': 'SecretBase'},
-        'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'},
-        'secret_key': {'key': 'typeProperties.secretKey', 'type': 'SecretBase'},
-        'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
-        'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
-        'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+        "additional_properties": {"key": "", "type": "{object}"},
+        "type": {"key": "type", "type": "str"},
+        "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"},
+        "description": {"key": "description", "type": "str"},
+        "parameters": {"key": "parameters", "type": "{ParameterSpecification}"},
+        "annotations": {"key": "annotations", "type": "[object]"},
+        "endpoint": {"key": "typeProperties.endpoint", "type": "object"},
+        "marketplace_id": {"key": "typeProperties.marketplaceID", "type": "object"},
+        "seller_id": {"key": "typeProperties.sellerID", "type": "object"},
+        "mws_auth_token": {"key": "typeProperties.mwsAuthToken", "type": "SecretBase"},
+        "access_key_id": {"key": "typeProperties.accessKeyId", "type": "object"},
+        "secret_key": {"key": "typeProperties.secretKey", "type": "SecretBase"},
+        "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"},
+        "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"},
+        "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"},
+        "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"},
     }

     def __init__(
         self,
         *,
-        endpoint: Any,
-        marketplace_id: Any,
-        seller_id: Any,
-        access_key_id: Any,
-        additional_properties: Optional[Dict[str, Any]] = None,
+        endpoint: JSON,
+        marketplace_id: JSON,
+        seller_id: JSON,
+        access_key_id: JSON,
+        additional_properties: Optional[Dict[str, JSON]] = None,
         connect_via: Optional["_models.IntegrationRuntimeReference"] = None,
         description: Optional[str] = None,
         parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None,
-        annotations: Optional[List[Any]] = None,
+        annotations: Optional[List[JSON]] = None,
         mws_auth_token: Optional["_models.SecretBase"] = None,
         secret_key: Optional["_models.SecretBase"] = None,
-        use_encrypted_endpoints: Optional[Any] = None,
-        use_host_verification: Optional[Any] = None,
-        use_peer_verification: Optional[Any] = None,
-        encrypted_credential: Optional[Any] = None,
+        use_encrypted_endpoints: Optional[JSON] = None,
+        use_host_verification: Optional[JSON] = None,
+        use_peer_verification: Optional[JSON] = None,
+        encrypted_credential: Optional[JSON] = None,
         **kwargs
     ):
         """
         :keyword additional_properties: Unmatched properties from the message are deserialized to this
          collection.
-        :paramtype additional_properties: dict[str, any]
+        :paramtype additional_properties: dict[str, JSON]
         :keyword connect_via: The integration runtime reference.
         :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
         :keyword description: Linked service description.
@@ -627,39 +764,46 @@ def __init__(
         :keyword parameters: Parameters for linked service.
         :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
         :keyword annotations: List of tags that can be used for describing the linked service.
-        :paramtype annotations: list[any]
-        :keyword endpoint: Required. The endpoint of the Amazon MWS server, (i.e.
-         mws.amazonservices.com).
-        :paramtype endpoint: any
-        :keyword marketplace_id: Required. The Amazon Marketplace ID you want to retrieve data from. To
-         retrieve data from multiple Marketplace IDs, separate them with a comma (,). (i.e.
-         A2EUQ1WTGCTBG2).
-        :paramtype marketplace_id: any
-        :keyword seller_id: Required. The Amazon seller ID.
-        :paramtype seller_id: any
+        :paramtype annotations: list[JSON]
+        :keyword endpoint: The endpoint of the Amazon MWS server, (i.e. mws.amazonservices.com).
+         Required.
+        :paramtype endpoint: JSON
+        :keyword marketplace_id: The Amazon Marketplace ID you want to retrieve data from. To retrieve
+         data from multiple Marketplace IDs, separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2).
+         Required.
+        :paramtype marketplace_id: JSON
+        :keyword seller_id: The Amazon seller ID. Required.
+        :paramtype seller_id: JSON
         :keyword mws_auth_token: The Amazon MWS authentication token.
         :paramtype mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase
-        :keyword access_key_id: Required. The access key id used to access data.
-        :paramtype access_key_id: any
+        :keyword access_key_id: The access key id used to access data. Required.
+        :paramtype access_key_id: JSON
         :keyword secret_key: The secret key used to access data.
         :paramtype secret_key: ~azure.mgmt.datafactory.models.SecretBase
         :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using
          HTTPS. The default value is true.
-        :paramtype use_encrypted_endpoints: any
+        :paramtype use_encrypted_endpoints: JSON
        :keyword use_host_verification: Specifies whether to require the host name in the server's
         certificate to match the host name of the server when connecting over SSL. The default value
         is true.
-        :paramtype use_host_verification: any
+        :paramtype use_host_verification: JSON
        :keyword use_peer_verification: Specifies whether to verify the identity of the server when
         connecting over SSL. The default value is true.
-        :paramtype use_peer_verification: any
+        :paramtype use_peer_verification: JSON
        :keyword encrypted_credential: The encrypted credential used for authentication. Credentials
         are encrypted using the integration runtime credential manager. Type: string (or Expression
         with resultType string).
- :paramtype encrypted_credential: any - """ - super(AmazonMWSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AmazonMWS' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AmazonMWS" # type: str self.endpoint = endpoint self.marketplace_id = marketplace_id self.seller_id = seller_id @@ -672,97 +816,219 @@ def __init__( self.encrypted_credential = encrypted_credential -class Dataset(msrest.serialization.Model): +class Dataset(_serialization.Model): """The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonMWSObjectDataset, AmazonRdsForOracleTableDataset, AmazonRdsForSqlServerTableDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFSDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureDatabricksDeltaLakeDataset, AzureMariaDBTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAXResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbAtlasCollectionDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOdpResourceDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. - - All required parameters must be populated in order to send to Azure. - - :ivar additional_properties: Unmatched properties from the message are deserialized to this - collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. 
Type of dataset.Constant filled by server. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AmazonMWSObjectDataset, AmazonRdsForOracleTableDataset, AmazonRdsForSqlServerTableDataset, + AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFSDataset, + AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureDatabricksDeltaLakeDataset, + AzureMariaDBTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, + AzureSearchIndexDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, AzureSqlTableDataset, + AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, + ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, + CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, + DocumentDbCollectionDataset, DrillTableDataset, DynamicsAXResourceDataset, + DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, + FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, + GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, + HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, + JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, + MicrosoftAccessTableDataset, MongoDbAtlasCollectionDataset, MongoDbCollectionDataset, + MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, + OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, + OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, + PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, + RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, + SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, + SapEccResourceDataset, SapHanaTableDataset, SapOdpResourceDataset, SapOpenHubTableDataset, + SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, + ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SqlServerTableDataset, + SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, + WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. 
:vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, } _subtype_map = { - 'type': {'AmazonMWSObject': 'AmazonMWSObjectDataset', 'AmazonRdsForOracleTable': 'AmazonRdsForOracleTableDataset', 'AmazonRdsForSqlServerTable': 'AmazonRdsForSqlServerTableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureDatabricksDeltaLakeDataset': 'AzureDatabricksDeltaLakeDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 
'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbAtlasCollection': 'MongoDbAtlasCollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOdpResource': 'SapOdpResourceDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} + "type": { + "AmazonMWSObject": "AmazonMWSObjectDataset", + "AmazonRdsForOracleTable": "AmazonRdsForOracleTableDataset", + "AmazonRdsForSqlServerTable": "AmazonRdsForSqlServerTableDataset", + "AmazonRedshiftTable": "AmazonRedshiftTableDataset", + "AmazonS3Object": "AmazonS3Dataset", + "Avro": "AvroDataset", + "AzureBlob": "AzureBlobDataset", + "AzureBlobFSFile": "AzureBlobFSDataset", + "AzureDataExplorerTable": "AzureDataExplorerTableDataset", + "AzureDataLakeStoreFile": "AzureDataLakeStoreDataset", + "AzureDatabricksDeltaLakeDataset": "AzureDatabricksDeltaLakeDataset", + "AzureMariaDBTable": "AzureMariaDBTableDataset", + "AzureMySqlTable": "AzureMySqlTableDataset", + "AzurePostgreSqlTable": "AzurePostgreSqlTableDataset", + "AzureSearchIndex": "AzureSearchIndexDataset", + "AzureSqlDWTable": "AzureSqlDWTableDataset", + "AzureSqlMITable": "AzureSqlMITableDataset", + "AzureSqlTable": "AzureSqlTableDataset", + "AzureTable": "AzureTableDataset", + "Binary": "BinaryDataset", + "CassandraTable": "CassandraTableDataset", + "CommonDataServiceForAppsEntity": "CommonDataServiceForAppsEntityDataset", + "ConcurObject": "ConcurObjectDataset", + 
"CosmosDbMongoDbApiCollection": "CosmosDbMongoDbApiCollectionDataset", + "CosmosDbSqlApiCollection": "CosmosDbSqlApiCollectionDataset", + "CouchbaseTable": "CouchbaseTableDataset", + "CustomDataset": "CustomDataset", + "Db2Table": "Db2TableDataset", + "DelimitedText": "DelimitedTextDataset", + "DocumentDbCollection": "DocumentDbCollectionDataset", + "DrillTable": "DrillTableDataset", + "DynamicsAXResource": "DynamicsAXResourceDataset", + "DynamicsCrmEntity": "DynamicsCrmEntityDataset", + "DynamicsEntity": "DynamicsEntityDataset", + "EloquaObject": "EloquaObjectDataset", + "Excel": "ExcelDataset", + "FileShare": "FileShareDataset", + "GoogleAdWordsObject": "GoogleAdWordsObjectDataset", + "GoogleBigQueryObject": "GoogleBigQueryObjectDataset", + "GreenplumTable": "GreenplumTableDataset", + "HBaseObject": "HBaseObjectDataset", + "HiveObject": "HiveObjectDataset", + "HttpFile": "HttpDataset", + "HubspotObject": "HubspotObjectDataset", + "ImpalaObject": "ImpalaObjectDataset", + "InformixTable": "InformixTableDataset", + "JiraObject": "JiraObjectDataset", + "Json": "JsonDataset", + "MagentoObject": "MagentoObjectDataset", + "MariaDBTable": "MariaDBTableDataset", + "MarketoObject": "MarketoObjectDataset", + "MicrosoftAccessTable": "MicrosoftAccessTableDataset", + "MongoDbAtlasCollection": "MongoDbAtlasCollectionDataset", + "MongoDbCollection": "MongoDbCollectionDataset", + "MongoDbV2Collection": "MongoDbV2CollectionDataset", + "MySqlTable": "MySqlTableDataset", + "NetezzaTable": "NetezzaTableDataset", + "ODataResource": "ODataResourceDataset", + "OdbcTable": "OdbcTableDataset", + "Office365Table": "Office365Dataset", + "OracleServiceCloudObject": "OracleServiceCloudObjectDataset", + "OracleTable": "OracleTableDataset", + "Orc": "OrcDataset", + "Parquet": "ParquetDataset", + "PaypalObject": "PaypalObjectDataset", + "PhoenixObject": "PhoenixObjectDataset", + "PostgreSqlTable": "PostgreSqlTableDataset", + "PrestoObject": "PrestoObjectDataset", + "QuickBooksObject": "QuickBooksObjectDataset", + "RelationalTable": "RelationalTableDataset", + "ResponsysObject": "ResponsysObjectDataset", + "RestResource": "RestResourceDataset", + "SalesforceMarketingCloudObject": "SalesforceMarketingCloudObjectDataset", + "SalesforceObject": "SalesforceObjectDataset", + "SalesforceServiceCloudObject": "SalesforceServiceCloudObjectDataset", + "SapBwCube": "SapBwCubeDataset", + "SapCloudForCustomerResource": "SapCloudForCustomerResourceDataset", + "SapEccResource": "SapEccResourceDataset", + "SapHanaTable": "SapHanaTableDataset", + "SapOdpResource": "SapOdpResourceDataset", + "SapOpenHubTable": "SapOpenHubTableDataset", + "SapTableResource": "SapTableResourceDataset", + "ServiceNowObject": "ServiceNowObjectDataset", + "SharePointOnlineListResource": "SharePointOnlineListResourceDataset", + "ShopifyObject": "ShopifyObjectDataset", + "SnowflakeTable": "SnowflakeDataset", + "SparkObject": "SparkObjectDataset", + "SqlServerTable": "SqlServerTableDataset", + "SquareObject": "SquareObjectDataset", + "SybaseTable": "SybaseTableDataset", + "TeradataTable": "TeradataTableDataset", + "VerticaTable": "VerticaTableDataset", + "WebTable": "WebTableDataset", + "XeroObject": "XeroObjectDataset", + "Xml": "XmlDataset", + "ZohoObject": "ZohoObjectDataset", + } } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: 
Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder """ - super(Dataset, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties - self.type = 'Dataset' # type: str + self.type = None # type: Optional[str] self.description = description self.structure = structure self.schema = schema @@ -779,165 +1045,220 @@ class AmazonMWSObjectDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. 
Type: string (or Expression with resultType string). - :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AmazonMWSObject' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "AmazonMWSObject" # type: str self.table_name = table_name -class CopySource(msrest.serialization.Model): +class CopySource(_serialization.Model): """A copy activity source. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonRdsForOracleSource, AvroSource, AzureBlobFSSource, AzureDataExplorerSource, AzureDataLakeStoreSource, AzureDatabricksDeltaLakeSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDbMongoDbApiSource, CosmosDbSqlApiSource, DelimitedTextSource, DocumentDbCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDbAtlasSource, MongoDbSource, MongoDbV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AmazonRdsForOracleSource, AvroSource, AzureBlobFSSource, AzureDataExplorerSource, + AzureDataLakeStoreSource, AzureDatabricksDeltaLakeSource, BinarySource, BlobSource, + CommonDataServiceForAppsSource, CosmosDbMongoDbApiSource, CosmosDbSqlApiSource, + DelimitedTextSource, DocumentDbCollectionSource, DynamicsCrmSource, DynamicsSource, + ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, + MongoDbAtlasSource, MongoDbSource, MongoDbV2Source, ODataSource, Office365Source, OracleSource, + OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, + SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, } _subtype_map = { - 'type': {'AmazonRdsForOracleSource': 'AmazonRdsForOracleSource', 'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'AzureDatabricksDeltaLakeSource': 'AzureDatabricksDeltaLakeSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbAtlasSource': 'MongoDbAtlasSource', 'MongoDbSource': 'MongoDbSource', 'MongoDbV2Source': 'MongoDbV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - **kwargs - ): - """ - :keyword additional_properties: Unmatched properties from the message are deserialized to this - collection. 
- :paramtype additional_properties: dict[str, any] + "type": { + "AmazonRdsForOracleSource": "AmazonRdsForOracleSource", + "AvroSource": "AvroSource", + "AzureBlobFSSource": "AzureBlobFSSource", + "AzureDataExplorerSource": "AzureDataExplorerSource", + "AzureDataLakeStoreSource": "AzureDataLakeStoreSource", + "AzureDatabricksDeltaLakeSource": "AzureDatabricksDeltaLakeSource", + "BinarySource": "BinarySource", + "BlobSource": "BlobSource", + "CommonDataServiceForAppsSource": "CommonDataServiceForAppsSource", + "CosmosDbMongoDbApiSource": "CosmosDbMongoDbApiSource", + "CosmosDbSqlApiSource": "CosmosDbSqlApiSource", + "DelimitedTextSource": "DelimitedTextSource", + "DocumentDbCollectionSource": "DocumentDbCollectionSource", + "DynamicsCrmSource": "DynamicsCrmSource", + "DynamicsSource": "DynamicsSource", + "ExcelSource": "ExcelSource", + "FileSystemSource": "FileSystemSource", + "HdfsSource": "HdfsSource", + "HttpSource": "HttpSource", + "JsonSource": "JsonSource", + "MicrosoftAccessSource": "MicrosoftAccessSource", + "MongoDbAtlasSource": "MongoDbAtlasSource", + "MongoDbSource": "MongoDbSource", + "MongoDbV2Source": "MongoDbV2Source", + "ODataSource": "ODataSource", + "Office365Source": "Office365Source", + "OracleSource": "OracleSource", + "OrcSource": "OrcSource", + "ParquetSource": "ParquetSource", + "RelationalSource": "RelationalSource", + "RestSource": "RestSource", + "SalesforceServiceCloudSource": "SalesforceServiceCloudSource", + "SharePointOnlineListSource": "SharePointOnlineListSource", + "SnowflakeSource": "SnowflakeSource", + "TabularSource": "TabularSource", + "WebSource": "WebSource", + "XmlSource": "XmlSource", + } + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + **kwargs + ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON """ - super(CopySource, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties - self.type = 'CopySource' # type: str + self.type = None # type: Optional[str] self.source_retry_count = source_retry_count self.source_retry_wait = source_retry_wait self.max_concurrent_connections = max_concurrent_connections @@ -947,92 +1268,169 @@ def __init__( class TabularSource(CopySource): """Copy activity sources of tabular type. 
- You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonMWSSource, AmazonRdsForSqlServerSource, AmazonRedshiftSource, AzureMariaDBSource, AzureMySqlSource, AzurePostgreSqlSource, AzureSqlSource, AzureTableSource, CassandraSource, ConcurSource, CouchbaseSource, Db2Source, DrillSource, DynamicsAXSource, EloquaSource, GoogleAdWordsSource, GoogleBigQuerySource, GreenplumSource, HBaseSource, HiveSource, HubspotSource, ImpalaSource, InformixSource, JiraSource, MagentoSource, MariaDBSource, MarketoSource, MySqlSource, NetezzaSource, OdbcSource, OracleServiceCloudSource, PaypalSource, PhoenixSource, PostgreSqlSource, PrestoSource, QuickBooksSource, ResponsysSource, SalesforceMarketingCloudSource, SalesforceSource, SapBwSource, SapCloudForCustomerSource, SapEccSource, SapHanaSource, SapOdpSource, SapOpenHubSource, SapTableSource, ServiceNowSource, ShopifySource, SparkSource, SqlDWSource, SqlMISource, SqlServerSource, SqlSource, SquareSource, SybaseSource, TeradataSource, VerticaSource, XeroSource, ZohoSource. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AmazonMWSSource, AmazonRdsForSqlServerSource, AmazonRedshiftSource, AzureMariaDBSource, + AzureMySqlSource, AzurePostgreSqlSource, AzureSqlSource, AzureTableSource, CassandraSource, + ConcurSource, CouchbaseSource, Db2Source, DrillSource, DynamicsAXSource, EloquaSource, + GoogleAdWordsSource, GoogleBigQuerySource, GreenplumSource, HBaseSource, HiveSource, + HubspotSource, ImpalaSource, InformixSource, JiraSource, MagentoSource, MariaDBSource, + MarketoSource, MySqlSource, NetezzaSource, OdbcSource, OracleServiceCloudSource, PaypalSource, + PhoenixSource, PostgreSqlSource, PrestoSource, QuickBooksSource, ResponsysSource, + SalesforceMarketingCloudSource, SalesforceSource, SapBwSource, SapCloudForCustomerSource, + SapEccSource, SapHanaSource, SapOdpSource, SapOpenHubSource, SapTableSource, ServiceNowSource, + ShopifySource, SparkSource, SqlDWSource, SqlMISource, SqlServerSource, SqlSource, SquareSource, + SybaseSource, TeradataSource, VerticaSource, XeroSource, ZohoSource All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } _subtype_map = { - 'type': {'AmazonMWSSource': 'AmazonMWSSource', 'AmazonRdsForSqlServerSource': 'AmazonRdsForSqlServerSource', 'AmazonRedshiftSource': 'AmazonRedshiftSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'AzureMySqlSource': 'AzureMySqlSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AzureSqlSource': 'AzureSqlSource', 'AzureTableSource': 'AzureTableSource', 'CassandraSource': 'CassandraSource', 'ConcurSource': 'ConcurSource', 'CouchbaseSource': 'CouchbaseSource', 'Db2Source': 'Db2Source', 'DrillSource': 'DrillSource', 'DynamicsAXSource': 'DynamicsAXSource', 'EloquaSource': 'EloquaSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'GreenplumSource': 'GreenplumSource', 'HBaseSource': 'HBaseSource', 'HiveSource': 'HiveSource', 'HubspotSource': 'HubspotSource', 'ImpalaSource': 'ImpalaSource', 'InformixSource': 'InformixSource', 'JiraSource': 'JiraSource', 'MagentoSource': 'MagentoSource', 'MariaDBSource': 'MariaDBSource', 'MarketoSource': 'MarketoSource', 'MySqlSource': 'MySqlSource', 'NetezzaSource': 'NetezzaSource', 'OdbcSource': 'OdbcSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'PaypalSource': 'PaypalSource', 'PhoenixSource': 'PhoenixSource', 'PostgreSqlSource': 'PostgreSqlSource', 'PrestoSource': 'PrestoSource', 'QuickBooksSource': 'QuickBooksSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SapEccSource': 'SapEccSource', 'SapHanaSource': 'SapHanaSource', 'SapOdpSource': 'SapOdpSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapTableSource': 'SapTableSource', 'ServiceNowSource': 'ServiceNowSource', 'ShopifySource': 'ShopifySource', 'SparkSource': 'SparkSource', 'SqlDWSource': 'SqlDWSource', 
'SqlMISource': 'SqlMISource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'SquareSource': 'SquareSource', 'SybaseSource': 'SybaseSource', 'TeradataSource': 'TeradataSource', 'VerticaSource': 'VerticaSource', 'XeroSource': 'XeroSource', 'ZohoSource': 'ZohoSource'} - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - **kwargs - ): - """ - :keyword additional_properties: Unmatched properties from the message are deserialized to this - collection. - :paramtype additional_properties: dict[str, any] + "type": { + "AmazonMWSSource": "AmazonMWSSource", + "AmazonRdsForSqlServerSource": "AmazonRdsForSqlServerSource", + "AmazonRedshiftSource": "AmazonRedshiftSource", + "AzureMariaDBSource": "AzureMariaDBSource", + "AzureMySqlSource": "AzureMySqlSource", + "AzurePostgreSqlSource": "AzurePostgreSqlSource", + "AzureSqlSource": "AzureSqlSource", + "AzureTableSource": "AzureTableSource", + "CassandraSource": "CassandraSource", + "ConcurSource": "ConcurSource", + "CouchbaseSource": "CouchbaseSource", + "Db2Source": "Db2Source", + "DrillSource": "DrillSource", + "DynamicsAXSource": "DynamicsAXSource", + "EloquaSource": "EloquaSource", + "GoogleAdWordsSource": "GoogleAdWordsSource", + "GoogleBigQuerySource": "GoogleBigQuerySource", + "GreenplumSource": "GreenplumSource", + "HBaseSource": "HBaseSource", + "HiveSource": "HiveSource", + "HubspotSource": "HubspotSource", + "ImpalaSource": "ImpalaSource", + "InformixSource": "InformixSource", + "JiraSource": "JiraSource", + "MagentoSource": "MagentoSource", + "MariaDBSource": "MariaDBSource", + "MarketoSource": "MarketoSource", + "MySqlSource": "MySqlSource", + "NetezzaSource": "NetezzaSource", + "OdbcSource": "OdbcSource", + "OracleServiceCloudSource": "OracleServiceCloudSource", + "PaypalSource": "PaypalSource", + "PhoenixSource": "PhoenixSource", + "PostgreSqlSource": "PostgreSqlSource", + "PrestoSource": "PrestoSource", + "QuickBooksSource": "QuickBooksSource", + "ResponsysSource": "ResponsysSource", + "SalesforceMarketingCloudSource": "SalesforceMarketingCloudSource", + "SalesforceSource": "SalesforceSource", + "SapBwSource": "SapBwSource", + "SapCloudForCustomerSource": "SapCloudForCustomerSource", + "SapEccSource": "SapEccSource", + "SapHanaSource": "SapHanaSource", + "SapOdpSource": "SapOdpSource", + "SapOpenHubSource": "SapOpenHubSource", + "SapTableSource": "SapTableSource", + "ServiceNowSource": "ServiceNowSource", + "ShopifySource": "ShopifySource", + "SparkSource": "SparkSource", + "SqlDWSource": "SqlDWSource", + "SqlMISource": "SqlMISource", + "SqlServerSource": "SqlServerSource", + "SqlSource": "SqlSource", + "SquareSource": "SquareSource", + "SybaseSource": "SybaseSource", + "TeradataSource": "TeradataSource", + "VerticaSource": "VerticaSource", + "XeroSource": "XeroSource", + "ZohoSource": "ZohoSource", + } + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + **kwargs + ): + """ + :keyword 
additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any - """ - super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'TabularSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "TabularSource" # type: str self.query_timeout = query_timeout self.additional_columns = additional_columns @@ -1044,89 +1442,98 @@ class AmazonMWSSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'AmazonMWSSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "AmazonMWSSource" # type: str self.query = query @@ -1137,8 +1544,8 @@ class AmazonRdsForOracleLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -1147,52 +1554,52 @@ class AmazonRdsForOracleLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype annotations: list[JSON] + :ivar connection_string: The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Required. + :vartype connection_string: JSON :ivar password: The Azure key vault secret reference of password in connection string. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, + "type": {"required": True}, + "connection_string": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - connection_string: Any, - additional_properties: Optional[Dict[str, Any]] = None, + connection_string: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -1200,187 +1607,201 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype annotations: list[JSON] + :keyword connection_string: The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Required. + :paramtype connection_string: JSON :keyword password: The Azure key vault secret reference of password in connection string. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :paramtype encrypted_credential: any - """ - super(AmazonRdsForOracleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AmazonRdsForOracle' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AmazonRdsForOracle" # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential -class AmazonRdsForOraclePartitionSettings(msrest.serialization.Model): +class AmazonRdsForOraclePartitionSettings(_serialization.Model): """The settings that will be leveraged for AmazonRdsForOracle source partitioning. :ivar partition_names: Names of the physical partitions of AmazonRdsForOracle table. - :vartype partition_names: any + :vartype partition_names: JSON :ivar partition_column_name: The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :vartype partition_column_name: any + :vartype partition_column_name: JSON :ivar partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :vartype partition_upper_bound: any + :vartype partition_upper_bound: JSON :ivar partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :vartype partition_lower_bound: any + :vartype partition_lower_bound: JSON """ _attribute_map = { - 'partition_names': {'key': 'partitionNames', 'type': 'object'}, - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + "partition_names": {"key": "partitionNames", "type": "object"}, + "partition_column_name": {"key": "partitionColumnName", "type": "object"}, + "partition_upper_bound": {"key": "partitionUpperBound", "type": "object"}, + "partition_lower_bound": {"key": "partitionLowerBound", "type": "object"}, } def __init__( self, *, - partition_names: Optional[Any] = None, - partition_column_name: Optional[Any] = None, - partition_upper_bound: Optional[Any] = None, - partition_lower_bound: Optional[Any] = None, + partition_names: Optional[JSON] = None, + partition_column_name: Optional[JSON] = None, + partition_upper_bound: Optional[JSON] = None, + partition_lower_bound: Optional[JSON] = None, **kwargs ): """ :keyword partition_names: Names of the physical partitions of AmazonRdsForOracle table. - :paramtype partition_names: any + :paramtype partition_names: JSON :keyword partition_column_name: The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :paramtype partition_column_name: any + :paramtype partition_column_name: JSON :keyword partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). 
- :paramtype partition_upper_bound: any + :paramtype partition_upper_bound: JSON :keyword partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :paramtype partition_lower_bound: any + :paramtype partition_lower_bound: JSON """ - super(AmazonRdsForOraclePartitionSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.partition_names = partition_names self.partition_column_name = partition_column_name self.partition_upper_bound = partition_upper_bound self.partition_lower_bound = partition_lower_bound -class AmazonRdsForOracleSource(CopySource): +class AmazonRdsForOracleSource(CopySource): # pylint: disable=too-many-instance-attributes """A copy activity AmazonRdsForOracle source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar oracle_reader_query: AmazonRdsForOracle reader query. Type: string (or Expression with resultType string). - :vartype oracle_reader_query: any + :vartype oracle_reader_query: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar partition_option: The partition mechanism that will be used for AmazonRdsForOracle read in parallel. Type: string (or Expression with resultType string). - :vartype partition_option: any + :vartype partition_option: JSON :ivar partition_settings: The settings that will be leveraged for AmazonRdsForOracle source partitioning. :vartype partition_settings: ~azure.mgmt.datafactory.models.AmazonRdsForOraclePartitionSettings :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'AmazonRdsForOraclePartitionSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "oracle_reader_query": {"key": "oracleReaderQuery", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "partition_option": {"key": "partitionOption", "type": "object"}, + "partition_settings": {"key": "partitionSettings", "type": "AmazonRdsForOraclePartitionSettings"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - oracle_reader_query: Optional[Any] = None, - query_timeout: Optional[Any] = None, - partition_option: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + oracle_reader_query: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + partition_option: Optional[JSON] = None, partition_settings: Optional["_models.AmazonRdsForOraclePartitionSettings"] = None, - additional_columns: Optional[Any] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword oracle_reader_query: AmazonRdsForOracle reader query. Type: string (or Expression with resultType string). - :paramtype oracle_reader_query: any + :paramtype oracle_reader_query: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword partition_option: The partition mechanism that will be used for AmazonRdsForOracle read in parallel. Type: string (or Expression with resultType string). - :paramtype partition_option: any + :paramtype partition_option: JSON :keyword partition_settings: The settings that will be leveraged for AmazonRdsForOracle source partitioning. :paramtype partition_settings: ~azure.mgmt.datafactory.models.AmazonRdsForOraclePartitionSettings :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any - """ - super(AmazonRdsForOracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AmazonRdsForOracleSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AmazonRdsForOracleSource" # type: str self.oracle_reader_query = oracle_reader_query self.query_timeout = query_timeout self.partition_option = partition_option @@ -1388,118 +1809,128 @@ def __init__( self.additional_columns = additional_columns -class AmazonRdsForOracleTableDataset(Dataset): +class AmazonRdsForOracleTableDataset(Dataset): # pylint: disable=too-many-instance-attributes """The AmazonRdsForOracle database dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. 
:vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar schema_type_properties_schema: The schema name of the AmazonRdsForOracle database. Type: string (or Expression with resultType string). - :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON :ivar table: The table name of the AmazonRdsForOracle database. Type: string (or Expression with resultType string). - :vartype table: any + :vartype table: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - schema_type_properties_schema: Optional[Any] = None, - table: Optional[Any] = None, + schema_type_properties_schema: Optional[JSON] = None, + table: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. 
:paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword schema_type_properties_schema: The schema name of the AmazonRdsForOracle database. Type: string (or Expression with resultType string). - :paramtype schema_type_properties_schema: any + :paramtype schema_type_properties_schema: JSON :keyword table: The table name of the AmazonRdsForOracle database. Type: string (or Expression with resultType string). - :paramtype table: any - """ - super(AmazonRdsForOracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AmazonRdsForOracleTable' # type: str + :paramtype table: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "AmazonRdsForOracleTable" # type: str self.schema_type_properties_schema = schema_type_properties_schema self.table = table -class AmazonRdsForSqlServerLinkedService(LinkedService): +class AmazonRdsForSqlServerLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Amazon RDS for SQL Server linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -1508,61 +1939,64 @@ class AmazonRdsForSqlServerLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. 
- :vartype connection_string: any + :vartype annotations: list[JSON] + :ivar connection_string: The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Required. + :vartype connection_string: JSON :ivar user_name: The on-premises Windows authentication user name. Type: string (or Expression with resultType string). - :vartype user_name: any + :vartype user_name: JSON :ivar password: The on-premises Windows authentication password. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON :ivar always_encrypted_settings: Sql always encrypted properties. :vartype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, + "type": {"required": True}, + "connection_string": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "always_encrypted_settings": { + "key": "typeProperties.alwaysEncryptedSettings", + "type": "SqlAlwaysEncryptedProperties", + }, } def __init__( self, *, - connection_string: Any, - additional_properties: Optional[Dict[str, Any]] = None, + connection_string: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - user_name: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, always_encrypted_settings: Optional["_models.SqlAlwaysEncryptedProperties"] = None, **kwargs 
): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -1570,25 +2004,32 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype annotations: list[JSON] + :keyword connection_string: The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Required. + :paramtype connection_string: JSON :keyword user_name: The on-premises Windows authentication user name. Type: string (or Expression with resultType string). - :paramtype user_name: any + :paramtype user_name: JSON :keyword password: The on-premises Windows authentication password. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any + :paramtype encrypted_credential: JSON :keyword always_encrypted_settings: Sql always encrypted properties. :paramtype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ - super(AmazonRdsForSqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AmazonRdsForSqlServer' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AmazonRdsForSqlServer" # type: str self.connection_string = connection_string self.user_name = user_name self.password = password @@ -1596,135 +2037,144 @@ def __init__( self.always_encrypted_settings = always_encrypted_settings -class AmazonRdsForSqlServerSource(TabularSource): +class AmazonRdsForSqlServerSource(TabularSource): # pylint: disable=too-many-instance-attributes """A copy activity Amazon RDS for SQL Server source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :vartype sql_reader_query: any + :vartype sql_reader_query: JSON :ivar sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :vartype sql_reader_stored_procedure_name: any + :vartype sql_reader_stored_procedure_name: JSON :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :ivar produce_additional_types: Which additional types to produce. - :vartype produce_additional_types: any + :vartype produce_additional_types: JSON :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :vartype partition_option: any + :vartype partition_option: JSON :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. 
:vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "sql_reader_query": {"key": "sqlReaderQuery", "type": "object"}, + "sql_reader_stored_procedure_name": {"key": "sqlReaderStoredProcedureName", "type": "object"}, + "stored_procedure_parameters": {"key": "storedProcedureParameters", "type": "{StoredProcedureParameter}"}, + "produce_additional_types": {"key": "produceAdditionalTypes", "type": "object"}, + "partition_option": {"key": "partitionOption", "type": "object"}, + "partition_settings": {"key": "partitionSettings", "type": "SqlPartitionSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - sql_reader_query: Optional[Any] = None, - sql_reader_stored_procedure_name: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + sql_reader_query: Optional[JSON] = None, + sql_reader_stored_procedure_name: Optional[JSON] = None, stored_procedure_parameters: Optional[Dict[str, "_models.StoredProcedureParameter"]] = None, - produce_additional_types: Optional[Any] = None, - partition_option: Optional[Any] = None, + produce_additional_types: Optional[JSON] = None, + partition_option: Optional[JSON] = None, partition_settings: Optional["_models.SqlPartitionSettings"] = None, 
**kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :paramtype sql_reader_query: any + :paramtype sql_reader_query: JSON :keyword sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :paramtype sql_reader_stored_procedure_name: any + :paramtype sql_reader_stored_procedure_name: JSON :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :paramtype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :keyword produce_additional_types: Which additional types to produce. - :paramtype produce_additional_types: any + :paramtype produce_additional_types: JSON :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :paramtype partition_option: any + :paramtype partition_option: JSON :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. 
:paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ - super(AmazonRdsForSqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'AmazonRdsForSqlServerSource' # type: str + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "AmazonRdsForSqlServerSource" # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -1733,118 +2183,128 @@ def __init__( self.partition_settings = partition_settings -class AmazonRdsForSqlServerTableDataset(Dataset): +class AmazonRdsForSqlServerTableDataset(Dataset): # pylint: disable=too-many-instance-attributes """The Amazon RDS for SQL Server dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar schema_type_properties_schema: The schema name of the SQL Server dataset. Type: string (or Expression with resultType string). - :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON :ivar table: The table name of the SQL Server dataset. Type: string (or Expression with resultType string). 
- :vartype table: any + :vartype table: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - schema_type_properties_schema: Optional[Any] = None, - table: Optional[Any] = None, + schema_type_properties_schema: Optional[JSON] = None, + table: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. 
- :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword schema_type_properties_schema: The schema name of the SQL Server dataset. Type: string (or Expression with resultType string). - :paramtype schema_type_properties_schema: any + :paramtype schema_type_properties_schema: JSON :keyword table: The table name of the SQL Server dataset. Type: string (or Expression with resultType string). - :paramtype table: any - """ - super(AmazonRdsForSqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AmazonRdsForSqlServerTable' # type: str + :paramtype table: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "AmazonRdsForSqlServerTable" # type: str self.schema_type_properties_schema = schema_type_properties_schema self.table = table -class AmazonRedshiftLinkedService(LinkedService): +class AmazonRedshiftLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Linked service for Amazon Redshift. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -1853,68 +2313,68 @@ class AmazonRedshiftLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar server: Required. The name of the Amazon Redshift server. Type: string (or Expression - with resultType string). - :vartype server: any + :vartype annotations: list[JSON] + :ivar server: The name of the Amazon Redshift server. Type: string (or Expression with + resultType string). Required. + :vartype server: JSON :ivar username: The username of the Amazon Redshift source. Type: string (or Expression with resultType string). - :vartype username: any + :vartype username: JSON :ivar password: The password of the Amazon Redshift source. :vartype password: ~azure.mgmt.datafactory.models.SecretBase - :ivar database: Required. The database name of the Amazon Redshift source. Type: string (or - Expression with resultType string). - :vartype database: any + :ivar database: The database name of the Amazon Redshift source. Type: string (or Expression + with resultType string). Required. + :vartype database: JSON :ivar port: The TCP port number that the Amazon Redshift server uses to listen for client connections. The default value is 5439. 
Type: integer (or Expression with resultType integer). - :vartype port: any + :vartype port: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, + "type": {"required": True}, + "server": {"required": True}, + "database": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "server": {"key": "typeProperties.server", "type": "object"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "database": {"key": "typeProperties.database", "type": "object"}, + "port": {"key": "typeProperties.port", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - server: Any, - database: Any, - additional_properties: Optional[Dict[str, Any]] = None, + server: JSON, + database: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - username: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - port: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + port: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -1922,28 +2382,35 @@ def __init__( :keyword parameters: Parameters for linked service. 
:paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword server: Required. The name of the Amazon Redshift server. Type: string (or Expression - with resultType string). - :paramtype server: any + :paramtype annotations: list[JSON] + :keyword server: The name of the Amazon Redshift server. Type: string (or Expression with + resultType string). Required. + :paramtype server: JSON :keyword username: The username of the Amazon Redshift source. Type: string (or Expression with resultType string). - :paramtype username: any + :paramtype username: JSON :keyword password: The password of the Amazon Redshift source. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase - :keyword database: Required. The database name of the Amazon Redshift source. Type: string (or - Expression with resultType string). - :paramtype database: any + :keyword database: The database name of the Amazon Redshift source. Type: string (or Expression + with resultType string). Required. + :paramtype database: JSON :keyword port: The TCP port number that the Amazon Redshift server uses to listen for client connections. The default value is 5439. Type: integer (or Expression with resultType integer). - :paramtype port: any + :paramtype port: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AmazonRedshift' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AmazonRedshift" # type: str self.server = server self.username = username self.password = password @@ -1959,29 +2426,29 @@ class AmazonRedshiftSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: Database query. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON :ivar redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. @@ -1989,192 +2456,211 @@ class AmazonRedshiftSource(TabularSource): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "redshift_unload_settings": {"key": "redshiftUnloadSettings", "type": "RedshiftUnloadSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, redshift_unload_settings: Optional["_models.RedshiftUnloadSettings"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: Database query. Type: string (or Expression with resultType string). - :paramtype query: any + :paramtype query: JSON :keyword redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. :paramtype redshift_unload_settings: ~azure.mgmt.datafactory.models.RedshiftUnloadSettings """ - super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'AmazonRedshiftSource' # type: str + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "AmazonRedshiftSource" # type: str self.query = query self.redshift_unload_settings = redshift_unload_settings -class AmazonRedshiftTableDataset(Dataset): +class AmazonRedshiftTableDataset(Dataset): # pylint: disable=too-many-instance-attributes """The Amazon Redshift table dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. 
:vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :vartype table_name: any + :vartype table_name: JSON :ivar table: The Amazon Redshift table name. Type: string (or Expression with resultType string). - :vartype table: any + :vartype table: JSON :ivar schema_type_properties_schema: The Amazon Redshift schema name. Type: string (or Expression with resultType string). - :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] 
= None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, - table: Optional[Any] = None, - schema_type_properties_schema: Optional[Any] = None, + table_name: Optional[JSON] = None, + table: Optional[JSON] = None, + schema_type_properties_schema: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: This property will be retired. Please consider using schema + table properties instead. - :paramtype table_name: any + :paramtype table_name: JSON :keyword table: The Amazon Redshift table name. Type: string (or Expression with resultType string). - :paramtype table: any + :paramtype table: JSON :keyword schema_type_properties_schema: The Amazon Redshift schema name. Type: string (or Expression with resultType string). - :paramtype schema_type_properties_schema: any - """ - super(AmazonRedshiftTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AmazonRedshiftTable' # type: str + :paramtype schema_type_properties_schema: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "AmazonRedshiftTable" # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema -class AmazonS3CompatibleLinkedService(LinkedService): +class AmazonS3CompatibleLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Linked service for Amazon S3 Compatible. 
All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -2183,10 +2669,10 @@ class AmazonS3CompatibleLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar access_key_id: The access key identifier of the Amazon S3 Compatible Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :vartype access_key_id: any + :vartype access_key_id: JSON :ivar secret_access_key: The secret access key of the Amazon S3 Compatible Identity and Access Management (IAM) user. :vartype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase @@ -2194,53 +2680,53 @@ class AmazonS3CompatibleLinkedService(LinkedService): Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). - :vartype service_url: any + :vartype service_url: JSON :ivar force_path_style: If true, use S3 path-style access instead of virtual hosted-style access. Default value is false. Type: boolean (or Expression with resultType boolean). - :vartype force_path_style: any + :vartype force_path_style: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, - 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, - 'force_path_style': {'key': 'typeProperties.forcePathStyle', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "access_key_id": {"key": "typeProperties.accessKeyId", "type": "object"}, + "secret_access_key": {"key": "typeProperties.secretAccessKey", "type": "SecretBase"}, + "service_url": {"key": "typeProperties.serviceUrl", "type": "object"}, + "force_path_style": {"key": "typeProperties.forcePathStyle", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - access_key_id: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + access_key_id: Optional[JSON] = None, secret_access_key: Optional["_models.SecretBase"] = None, - service_url: Optional[Any] = None, - force_path_style: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + service_url: Optional[JSON] = None, + force_path_style: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -2248,10 +2734,10 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword access_key_id: The access key identifier of the Amazon S3 Compatible Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). 
- :paramtype access_key_id: any + :paramtype access_key_id: JSON :keyword secret_access_key: The secret access key of the Amazon S3 Compatible Identity and Access Management (IAM) user. :paramtype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase @@ -2259,17 +2745,24 @@ def __init__( Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). - :paramtype service_url: any + :paramtype service_url: JSON :keyword force_path_style: If true, use S3 path-style access instead of virtual hosted-style access. Default value is false. Type: boolean (or Expression with resultType boolean). - :paramtype force_path_style: any + :paramtype force_path_style: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(AmazonS3CompatibleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AmazonS3Compatible' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AmazonS3Compatible" # type: str self.access_key_id = access_key_id self.secret_access_key = secret_access_key self.service_url = service_url @@ -2277,64 +2770,81 @@ def __init__( self.encrypted_credential = encrypted_credential -class DatasetLocation(msrest.serialization.Model): +class DatasetLocation(_serialization.Model): """Dataset location. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonS3CompatibleLocation, AmazonS3Location, AzureBlobFSLocation, AzureBlobStorageLocation, AzureDataLakeStoreLocation, AzureFileStorageLocation, FileServerLocation, FtpServerLocation, GoogleCloudStorageLocation, HdfsLocation, HttpServerLocation, OracleCloudStorageLocation, SftpLocation. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AmazonS3CompatibleLocation, AmazonS3Location, AzureBlobFSLocation, AzureBlobStorageLocation, + AzureDataLakeStoreLocation, AzureFileStorageLocation, FileServerLocation, FtpServerLocation, + GoogleCloudStorageLocation, HdfsLocation, HttpServerLocation, OracleCloudStorageLocation, + SftpLocation All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage location. Required. :vartype type: str :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :vartype folder_path: any + :vartype folder_path: JSON :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). 
- :vartype file_name: any + :vartype file_name: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "folder_path": {"key": "folderPath", "type": "object"}, + "file_name": {"key": "fileName", "type": "object"}, } _subtype_map = { - 'type': {'AmazonS3CompatibleLocation': 'AmazonS3CompatibleLocation', 'AmazonS3Location': 'AmazonS3Location', 'AzureBlobFSLocation': 'AzureBlobFSLocation', 'AzureBlobStorageLocation': 'AzureBlobStorageLocation', 'AzureDataLakeStoreLocation': 'AzureDataLakeStoreLocation', 'AzureFileStorageLocation': 'AzureFileStorageLocation', 'FileServerLocation': 'FileServerLocation', 'FtpServerLocation': 'FtpServerLocation', 'GoogleCloudStorageLocation': 'GoogleCloudStorageLocation', 'HdfsLocation': 'HdfsLocation', 'HttpServerLocation': 'HttpServerLocation', 'OracleCloudStorageLocation': 'OracleCloudStorageLocation', 'SftpLocation': 'SftpLocation'} + "type": { + "AmazonS3CompatibleLocation": "AmazonS3CompatibleLocation", + "AmazonS3Location": "AmazonS3Location", + "AzureBlobFSLocation": "AzureBlobFSLocation", + "AzureBlobStorageLocation": "AzureBlobStorageLocation", + "AzureDataLakeStoreLocation": "AzureDataLakeStoreLocation", + "AzureFileStorageLocation": "AzureFileStorageLocation", + "FileServerLocation": "FileServerLocation", + "FtpServerLocation": "FtpServerLocation", + "GoogleCloudStorageLocation": "GoogleCloudStorageLocation", + "HdfsLocation": "HdfsLocation", + "HttpServerLocation": "HttpServerLocation", + "OracleCloudStorageLocation": "OracleCloudStorageLocation", + "SftpLocation": "SftpLocation", + } } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - folder_path: Optional[Any] = None, - file_name: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + folder_path: Optional[JSON] = None, + file_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :paramtype folder_path: any + :paramtype folder_path: JSON :keyword file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :paramtype file_name: any + :paramtype file_name: JSON """ - super(DatasetLocation, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties - self.type = 'DatasetLocation' # type: str + self.type = None # type: Optional[str] self.folder_path = folder_path self.file_name = file_name @@ -2346,261 +2856,285 @@ class AmazonS3CompatibleLocation(DatasetLocation): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage location. Required. :vartype type: str :ivar folder_path: Specify the folder path of dataset. 
Type: string (or Expression with resultType string). - :vartype folder_path: any + :vartype folder_path: JSON :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :vartype file_name: any + :vartype file_name: JSON :ivar bucket_name: Specify the bucketName of Amazon S3 Compatible. Type: string (or Expression with resultType string). - :vartype bucket_name: any + :vartype bucket_name: JSON :ivar version: Specify the version of Amazon S3 Compatible. Type: string (or Expression with resultType string). - :vartype version: any + :vartype version: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'bucket_name': {'key': 'bucketName', 'type': 'object'}, - 'version': {'key': 'version', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "folder_path": {"key": "folderPath", "type": "object"}, + "file_name": {"key": "fileName", "type": "object"}, + "bucket_name": {"key": "bucketName", "type": "object"}, + "version": {"key": "version", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - folder_path: Optional[Any] = None, - file_name: Optional[Any] = None, - bucket_name: Optional[Any] = None, - version: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + folder_path: Optional[JSON] = None, + file_name: Optional[JSON] = None, + bucket_name: Optional[JSON] = None, + version: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :paramtype folder_path: any + :paramtype folder_path: JSON :keyword file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :paramtype file_name: any + :paramtype file_name: JSON :keyword bucket_name: Specify the bucketName of Amazon S3 Compatible. Type: string (or Expression with resultType string). - :paramtype bucket_name: any + :paramtype bucket_name: JSON :keyword version: Specify the version of Amazon S3 Compatible. Type: string (or Expression with resultType string). - :paramtype version: any + :paramtype version: JSON """ - super(AmazonS3CompatibleLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type = 'AmazonS3CompatibleLocation' # type: str + super().__init__( + additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs + ) + self.type = "AmazonS3CompatibleLocation" # type: str self.bucket_name = bucket_name self.version = version -class StoreReadSettings(msrest.serialization.Model): +class StoreReadSettings(_serialization.Model): """Connector read setting. - You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonS3CompatibleReadSettings, AmazonS3ReadSettings, AzureBlobFSReadSettings, AzureBlobStorageReadSettings, AzureDataLakeStoreReadSettings, AzureFileStorageReadSettings, FileServerReadSettings, FtpReadSettings, GoogleCloudStorageReadSettings, HdfsReadSettings, HttpReadSettings, OracleCloudStorageReadSettings, SftpReadSettings. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AmazonS3CompatibleReadSettings, AmazonS3ReadSettings, AzureBlobFSReadSettings, + AzureBlobStorageReadSettings, AzureDataLakeStoreReadSettings, AzureFileStorageReadSettings, + FileServerReadSettings, FtpReadSettings, GoogleCloudStorageReadSettings, HdfsReadSettings, + HttpReadSettings, OracleCloudStorageReadSettings, SftpReadSettings All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The read setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. :vartype type: str :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, } _subtype_map = { - 'type': {'AmazonS3CompatibleReadSettings': 'AmazonS3CompatibleReadSettings', 'AmazonS3ReadSettings': 'AmazonS3ReadSettings', 'AzureBlobFSReadSettings': 'AzureBlobFSReadSettings', 'AzureBlobStorageReadSettings': 'AzureBlobStorageReadSettings', 'AzureDataLakeStoreReadSettings': 'AzureDataLakeStoreReadSettings', 'AzureFileStorageReadSettings': 'AzureFileStorageReadSettings', 'FileServerReadSettings': 'FileServerReadSettings', 'FtpReadSettings': 'FtpReadSettings', 'GoogleCloudStorageReadSettings': 'GoogleCloudStorageReadSettings', 'HdfsReadSettings': 'HdfsReadSettings', 'HttpReadSettings': 'HttpReadSettings', 'OracleCloudStorageReadSettings': 'OracleCloudStorageReadSettings', 'SftpReadSettings': 'SftpReadSettings'} + "type": { + "AmazonS3CompatibleReadSettings": "AmazonS3CompatibleReadSettings", + "AmazonS3ReadSettings": "AmazonS3ReadSettings", + "AzureBlobFSReadSettings": "AzureBlobFSReadSettings", + "AzureBlobStorageReadSettings": "AzureBlobStorageReadSettings", + "AzureDataLakeStoreReadSettings": "AzureDataLakeStoreReadSettings", + "AzureFileStorageReadSettings": "AzureFileStorageReadSettings", + "FileServerReadSettings": "FileServerReadSettings", + "FtpReadSettings": 
"FtpReadSettings", + "GoogleCloudStorageReadSettings": "GoogleCloudStorageReadSettings", + "HdfsReadSettings": "HdfsReadSettings", + "HttpReadSettings": "HttpReadSettings", + "OracleCloudStorageReadSettings": "OracleCloudStorageReadSettings", + "SftpReadSettings": "SftpReadSettings", + } } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON """ - super(StoreReadSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties - self.type = 'StoreReadSettings' # type: str + self.type = None # type: Optional[str] self.max_concurrent_connections = max_concurrent_connections self.disable_metrics_collection = disable_metrics_collection -class AmazonS3CompatibleReadSettings(StoreReadSettings): +class AmazonS3CompatibleReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes """Amazon S3 Compatible read settings. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The read setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. :vartype type: str :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :vartype recursive: any + :vartype recursive: JSON :ivar wildcard_folder_path: Amazon S3 Compatible wildcardFolderPath. Type: string (or Expression with resultType string). - :vartype wildcard_folder_path: any + :vartype wildcard_folder_path: JSON :ivar wildcard_file_name: Amazon S3 Compatible wildcardFileName. Type: string (or Expression with resultType string). - :vartype wildcard_file_name: any + :vartype wildcard_file_name: JSON :ivar prefix: The prefix filter for the S3 Compatible object name. Type: string (or Expression with resultType string). 
- :vartype prefix: any + :vartype prefix: JSON :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :vartype file_list_path: any + :vartype file_list_path: JSON :ivar enable_partition_discovery: Indicates whether to enable partition discovery. :vartype enable_partition_discovery: bool :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :vartype partition_root_path: any + :vartype partition_root_path: JSON :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype delete_files_after_completion: any + :vartype delete_files_after_completion: JSON :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_start: any + :vartype modified_datetime_start: JSON :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_end: any + :vartype modified_datetime_end: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'prefix': {'key': 'prefix', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "recursive": {"key": "recursive", "type": "object"}, + "wildcard_folder_path": {"key": "wildcardFolderPath", "type": "object"}, + "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, + "prefix": {"key": "prefix", "type": "object"}, + "file_list_path": {"key": "fileListPath", "type": "object"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "partition_root_path": {"key": "partitionRootPath", "type": "object"}, + "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, + "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, + "modified_datetime_end": {"key": "modifiedDatetimeEnd", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_concurrent_connections: Optional[Any] = None, - 
disable_metrics_collection: Optional[Any] = None, - recursive: Optional[Any] = None, - wildcard_folder_path: Optional[Any] = None, - wildcard_file_name: Optional[Any] = None, - prefix: Optional[Any] = None, - file_list_path: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + recursive: Optional[JSON] = None, + wildcard_folder_path: Optional[JSON] = None, + wildcard_file_name: Optional[JSON] = None, + prefix: Optional[JSON] = None, + file_list_path: Optional[JSON] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[Any] = None, - delete_files_after_completion: Optional[Any] = None, - modified_datetime_start: Optional[Any] = None, - modified_datetime_end: Optional[Any] = None, + partition_root_path: Optional[JSON] = None, + delete_files_after_completion: Optional[JSON] = None, + modified_datetime_start: Optional[JSON] = None, + modified_datetime_end: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :paramtype recursive: any + :paramtype recursive: JSON :keyword wildcard_folder_path: Amazon S3 Compatible wildcardFolderPath. Type: string (or Expression with resultType string). - :paramtype wildcard_folder_path: any + :paramtype wildcard_folder_path: JSON :keyword wildcard_file_name: Amazon S3 Compatible wildcardFileName. Type: string (or Expression with resultType string). - :paramtype wildcard_file_name: any + :paramtype wildcard_file_name: JSON :keyword prefix: The prefix filter for the S3 Compatible object name. Type: string (or Expression with resultType string). - :paramtype prefix: any + :paramtype prefix: JSON :keyword file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :paramtype file_list_path: any + :paramtype file_list_path: JSON :keyword enable_partition_discovery: Indicates whether to enable partition discovery. :paramtype enable_partition_discovery: bool :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :paramtype partition_root_path: any + :paramtype partition_root_path: JSON :keyword delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype delete_files_after_completion: any + :paramtype delete_files_after_completion: JSON :keyword modified_datetime_start: The start of file's modified datetime. 
Type: string (or Expression with resultType string). - :paramtype modified_datetime_start: any + :paramtype modified_datetime_start: JSON :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_end: any - """ - super(AmazonS3CompatibleReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AmazonS3CompatibleReadSettings' # type: str + :paramtype modified_datetime_end: JSON + """ + super().__init__( + additional_properties=additional_properties, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AmazonS3CompatibleReadSettings" # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -2613,51 +3147,51 @@ def __init__( self.modified_datetime_end = modified_datetime_end -class AmazonS3Dataset(Dataset): +class AmazonS3Dataset(Dataset): # pylint: disable=too-many-instance-attributes """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :ivar bucket_name: Required. The name of the Amazon S3 bucket. Type: string (or Expression with - resultType string). - :vartype bucket_name: any + :ivar bucket_name: The name of the Amazon S3 bucket. Type: string (or Expression with + resultType string). Required. + :vartype bucket_name: JSON :ivar key: The key of the Amazon S3 object. Type: string (or Expression with resultType string). - :vartype key: any + :vartype key: JSON :ivar prefix: The prefix filter for the S3 object name. Type: string (or Expression with resultType string). - :vartype prefix: any + :vartype prefix: JSON :ivar version: The version for the S3 object. Type: string (or Expression with resultType string). 
- :vartype version: any + :vartype version: JSON :ivar modified_datetime_start: The start of S3 object's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_start: any + :vartype modified_datetime_start: JSON :ivar modified_datetime_end: The end of S3 object's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_end: any + :vartype modified_datetime_end: JSON :ivar format: The format of files. :vartype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :ivar compression: The data compression method used for the Amazon S3 object. @@ -2665,48 +3199,48 @@ class AmazonS3Dataset(Dataset): """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'bucket_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, + "bucket_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'bucket_name': {'key': 'typeProperties.bucketName', 'type': 'object'}, - 'key': {'key': 'typeProperties.key', 'type': 'object'}, - 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, - 'version': {'key': 'typeProperties.version', 'type': 'object'}, - 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "bucket_name": {"key": "typeProperties.bucketName", "type": "object"}, + "key": {"key": "typeProperties.key", "type": "object"}, + "prefix": {"key": "typeProperties.prefix", "type": "object"}, + "version": {"key": "typeProperties.version", "type": "object"}, + "modified_datetime_start": {"key": "typeProperties.modifiedDatetimeStart", "type": "object"}, + "modified_datetime_end": {"key": "typeProperties.modifiedDatetimeEnd", "type": "object"}, + "format": {"key": "typeProperties.format", "type": "DatasetStorageFormat"}, + "compression": {"key": "typeProperties.compression", "type": "DatasetCompression"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - bucket_name: Any, - additional_properties: Optional[Dict[str, Any]] = None, + bucket_name: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - 
structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - key: Optional[Any] = None, - prefix: Optional[Any] = None, - version: Optional[Any] = None, - modified_datetime_start: Optional[Any] = None, - modified_datetime_end: Optional[Any] = None, + key: Optional[JSON] = None, + prefix: Optional[JSON] = None, + version: Optional[JSON] = None, + modified_datetime_start: Optional[JSON] = None, + modified_datetime_end: Optional[JSON] = None, format: Optional["_models.DatasetStorageFormat"] = None, compression: Optional["_models.DatasetCompression"] = None, **kwargs @@ -2714,49 +3248,59 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :keyword bucket_name: Required. The name of the Amazon S3 bucket. Type: string (or Expression - with resultType string). - :paramtype bucket_name: any + :keyword bucket_name: The name of the Amazon S3 bucket. Type: string (or Expression with + resultType string). Required. + :paramtype bucket_name: JSON :keyword key: The key of the Amazon S3 object. Type: string (or Expression with resultType string). - :paramtype key: any + :paramtype key: JSON :keyword prefix: The prefix filter for the S3 object name. Type: string (or Expression with resultType string). - :paramtype prefix: any + :paramtype prefix: JSON :keyword version: The version for the S3 object. Type: string (or Expression with resultType string). - :paramtype version: any + :paramtype version: JSON :keyword modified_datetime_start: The start of S3 object's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_start: any + :paramtype modified_datetime_start: JSON :keyword modified_datetime_end: The end of S3 object's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_end: any + :paramtype modified_datetime_end: JSON :keyword format: The format of files. 
:paramtype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :keyword compression: The data compression method used for the Amazon S3 object. :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ - super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AmazonS3Object' # type: str + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "AmazonS3Object" # type: str self.bucket_name = bucket_name self.key = key self.prefix = prefix @@ -2767,15 +3311,15 @@ def __init__( self.compression = compression -class AmazonS3LinkedService(LinkedService): +class AmazonS3LinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Linked service for Amazon S3. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -2784,67 +3328,67 @@ class AmazonS3LinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar authentication_type: The authentication type of S3. Allowed value: AccessKey (default) or TemporarySecurityCredentials. Type: string (or Expression with resultType string). - :vartype authentication_type: any + :vartype authentication_type: JSON :ivar access_key_id: The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :vartype access_key_id: any + :vartype access_key_id: JSON :ivar secret_access_key: The secret access key of the Amazon S3 Identity and Access Management (IAM) user. :vartype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase :ivar service_url: This value specifies the endpoint to access with the S3 Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). - :vartype service_url: any + :vartype service_url: JSON :ivar session_token: The session token for the S3 temporary security credential. :vartype session_token: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, - 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, - 'session_token': {'key': 'typeProperties.sessionToken', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "object"}, + "access_key_id": {"key": "typeProperties.accessKeyId", "type": "object"}, + "secret_access_key": {"key": "typeProperties.secretAccessKey", "type": "SecretBase"}, + "service_url": {"key": "typeProperties.serviceUrl", "type": "object"}, + "session_token": {"key": "typeProperties.sessionToken", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - authentication_type: Optional[Any] = None, - access_key_id: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + authentication_type: Optional[JSON] = None, + access_key_id: Optional[JSON] = None, secret_access_key: Optional["_models.SecretBase"] = None, - service_url: Optional[Any] = None, + service_url: Optional[JSON] = None, session_token: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -2852,29 +3396,36 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. 
- :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword authentication_type: The authentication type of S3. Allowed value: AccessKey (default) or TemporarySecurityCredentials. Type: string (or Expression with resultType string). - :paramtype authentication_type: any + :paramtype authentication_type: JSON :keyword access_key_id: The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :paramtype access_key_id: any + :paramtype access_key_id: JSON :keyword secret_access_key: The secret access key of the Amazon S3 Identity and Access Management (IAM) user. :paramtype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase :keyword service_url: This value specifies the endpoint to access with the S3 Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). - :paramtype service_url: any + :paramtype service_url: JSON :keyword session_token: The session token for the S3 temporary security credential. :paramtype session_token: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AmazonS3' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AmazonS3" # type: str self.authentication_type = authentication_type self.access_key_id = access_key_id self.secret_access_key = secret_access_key @@ -2890,199 +3441,206 @@ class AmazonS3Location(DatasetLocation): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage location. Required. :vartype type: str :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :vartype folder_path: any + :vartype folder_path: JSON :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :vartype file_name: any + :vartype file_name: JSON :ivar bucket_name: Specify the bucketName of amazon S3. Type: string (or Expression with resultType string). - :vartype bucket_name: any + :vartype bucket_name: JSON :ivar version: Specify the version of amazon S3. Type: string (or Expression with resultType string). 
- :vartype version: any + :vartype version: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'bucket_name': {'key': 'bucketName', 'type': 'object'}, - 'version': {'key': 'version', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "folder_path": {"key": "folderPath", "type": "object"}, + "file_name": {"key": "fileName", "type": "object"}, + "bucket_name": {"key": "bucketName", "type": "object"}, + "version": {"key": "version", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - folder_path: Optional[Any] = None, - file_name: Optional[Any] = None, - bucket_name: Optional[Any] = None, - version: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + folder_path: Optional[JSON] = None, + file_name: Optional[JSON] = None, + bucket_name: Optional[JSON] = None, + version: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :paramtype folder_path: any + :paramtype folder_path: JSON :keyword file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :paramtype file_name: any + :paramtype file_name: JSON :keyword bucket_name: Specify the bucketName of amazon S3. Type: string (or Expression with resultType string). - :paramtype bucket_name: any + :paramtype bucket_name: JSON :keyword version: Specify the version of amazon S3. Type: string (or Expression with resultType string). - :paramtype version: any + :paramtype version: JSON """ - super(AmazonS3Location, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type = 'AmazonS3Location' # type: str + super().__init__( + additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs + ) + self.type = "AmazonS3Location" # type: str self.bucket_name = bucket_name self.version = version -class AmazonS3ReadSettings(StoreReadSettings): +class AmazonS3ReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes """Amazon S3 read settings. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The read setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. :vartype type: str :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :vartype recursive: any + :vartype recursive: JSON :ivar wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or Expression with resultType string). - :vartype wildcard_folder_path: any + :vartype wildcard_folder_path: JSON :ivar wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or Expression with resultType string). - :vartype wildcard_file_name: any + :vartype wildcard_file_name: JSON :ivar prefix: The prefix filter for the S3 object name. Type: string (or Expression with resultType string). - :vartype prefix: any + :vartype prefix: JSON :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :vartype file_list_path: any + :vartype file_list_path: JSON :ivar enable_partition_discovery: Indicates whether to enable partition discovery. :vartype enable_partition_discovery: bool :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :vartype partition_root_path: any + :vartype partition_root_path: JSON :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype delete_files_after_completion: any + :vartype delete_files_after_completion: JSON :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_start: any + :vartype modified_datetime_start: JSON :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
- :vartype modified_datetime_end: any + :vartype modified_datetime_end: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'prefix': {'key': 'prefix', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "recursive": {"key": "recursive", "type": "object"}, + "wildcard_folder_path": {"key": "wildcardFolderPath", "type": "object"}, + "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, + "prefix": {"key": "prefix", "type": "object"}, + "file_list_path": {"key": "fileListPath", "type": "object"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "partition_root_path": {"key": "partitionRootPath", "type": "object"}, + "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, + "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, + "modified_datetime_end": {"key": "modifiedDatetimeEnd", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - recursive: Optional[Any] = None, - wildcard_folder_path: Optional[Any] = None, - wildcard_file_name: Optional[Any] = None, - prefix: Optional[Any] = None, - file_list_path: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + recursive: Optional[JSON] = None, + wildcard_folder_path: Optional[JSON] = None, + wildcard_file_name: Optional[JSON] = None, + prefix: Optional[JSON] = None, + file_list_path: Optional[JSON] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[Any] = None, - delete_files_after_completion: Optional[Any] = None, - modified_datetime_start: Optional[Any] = None, - modified_datetime_end: Optional[Any] = None, + partition_root_path: Optional[JSON] = None, + delete_files_after_completion: Optional[JSON] = None, + modified_datetime_start: Optional[JSON] = None, + modified_datetime_end: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :paramtype recursive: any + :paramtype recursive: JSON :keyword wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or Expression with resultType string). - :paramtype wildcard_folder_path: any + :paramtype wildcard_folder_path: JSON :keyword wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or Expression with resultType string). - :paramtype wildcard_file_name: any + :paramtype wildcard_file_name: JSON :keyword prefix: The prefix filter for the S3 object name. Type: string (or Expression with resultType string). - :paramtype prefix: any + :paramtype prefix: JSON :keyword file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :paramtype file_list_path: any + :paramtype file_list_path: JSON :keyword enable_partition_discovery: Indicates whether to enable partition discovery. :paramtype enable_partition_discovery: bool :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :paramtype partition_root_path: any + :paramtype partition_root_path: JSON :keyword delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype delete_files_after_completion: any + :paramtype delete_files_after_completion: JSON :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_start: any + :paramtype modified_datetime_start: JSON :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_end: any - """ - super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AmazonS3ReadSettings' # type: str + :paramtype modified_datetime_end: JSON + """ + super().__init__( + additional_properties=additional_properties, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AmazonS3ReadSettings" # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -3098,17 +3656,19 @@ def __init__( class ControlActivity(Activity): """Base class for all control activities like IfCondition, ForEach , Until. - You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AppendVariableActivity, ExecutePipelineActivity, FailActivity, FilterActivity, ForEachActivity, IfConditionActivity, SetVariableActivity, SwitchActivity, UntilActivity, ValidationActivity, WaitActivity, WebHookActivity. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AppendVariableActivity, ExecutePipelineActivity, FailActivity, FilterActivity, ForEachActivity, + IfConditionActivity, SetVariableActivity, SwitchActivity, UntilActivity, ValidationActivity, + WaitActivity, WebHookActivity All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -3119,28 +3679,41 @@ class ControlActivity(Activity): """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, } _subtype_map = { - 'type': {'AppendVariable': 'AppendVariableActivity', 'ExecutePipeline': 'ExecutePipelineActivity', 'Fail': 'FailActivity', 'Filter': 'FilterActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'SetVariable': 'SetVariableActivity', 'Switch': 'SwitchActivity', 'Until': 'UntilActivity', 'Validation': 'ValidationActivity', 'Wait': 'WaitActivity', 'WebHook': 'WebHookActivity'} + "type": { + "AppendVariable": "AppendVariableActivity", + "ExecutePipeline": "ExecutePipelineActivity", + "Fail": "FailActivity", + "Filter": "FilterActivity", + "ForEach": "ForEachActivity", + "IfCondition": "IfConditionActivity", + "SetVariable": "SetVariableActivity", + "Switch": "SwitchActivity", + "Until": "UntilActivity", + "Validation": "ValidationActivity", + "Wait": "WaitActivity", + "WebHook": "WebHookActivity", + } } def __init__( self, *, name: str, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, @@ -3149,8 +3722,8 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. 
:paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -3159,8 +3732,15 @@ def __init__( :keyword user_properties: Activity user properties. :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] """ - super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'Container' # type: str + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + **kwargs + ) + self.type = "Container" # type: str class AppendVariableActivity(ControlActivity): @@ -3170,10 +3750,10 @@ class AppendVariableActivity(ControlActivity): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -3184,42 +3764,42 @@ class AppendVariableActivity(ControlActivity): :ivar variable_name: Name of the variable whose value needs to be appended to. :vartype variable_name: str :ivar value: Value to be appended. Could be a static value or Expression. - :vartype value: any + :vartype value: JSON """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'value': {'key': 'typeProperties.value', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "variable_name": {"key": "typeProperties.variableName", "type": "str"}, + "value": {"key": "typeProperties.value", "type": "object"}, } def __init__( self, *, name: str, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, variable_name: Optional[str] = None, - value: Optional[Any] = None, + value: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. 
:paramtype description: str @@ -3230,10 +3810,17 @@ def __init__( :keyword variable_name: Name of the variable whose value needs to be appended to. :paramtype variable_name: str :keyword value: Value to be appended. Could be a static value or Expression. - :paramtype value: any - """ - super(AppendVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'AppendVariable' # type: str + :paramtype value: JSON + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + **kwargs + ) + self.type = "AppendVariable" # type: str self.variable_name = variable_name self.value = value @@ -3245,8 +3832,8 @@ class AppFiguresLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -3255,51 +3842,51 @@ class AppFiguresLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar user_name: Required. The username of the Appfigures source. - :vartype user_name: any - :ivar password: Required. The password of the AppFigures source. + :vartype annotations: list[JSON] + :ivar user_name: The username of the Appfigures source. Required. + :vartype user_name: JSON + :ivar password: The password of the AppFigures source. Required. :vartype password: ~azure.mgmt.datafactory.models.SecretBase - :ivar client_key: Required. The client key for the AppFigures source. + :ivar client_key: The client key for the AppFigures source. Required. 
:vartype client_key: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { - 'type': {'required': True}, - 'user_name': {'required': True}, - 'password': {'required': True}, - 'client_key': {'required': True}, + "type": {"required": True}, + "user_name": {"required": True}, + "password": {"required": True}, + "client_key": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'client_key': {'key': 'typeProperties.clientKey', 'type': 'SecretBase'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "client_key": {"key": "typeProperties.clientKey", "type": "SecretBase"}, } def __init__( self, *, - user_name: Any, + user_name: JSON, password: "_models.SecretBase", client_key: "_models.SecretBase", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -3307,22 +3894,29 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword user_name: Required. The username of the Appfigures source. - :paramtype user_name: any - :keyword password: Required. The password of the AppFigures source. + :paramtype annotations: list[JSON] + :keyword user_name: The username of the Appfigures source. Required. + :paramtype user_name: JSON + :keyword password: The password of the AppFigures source. Required. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase - :keyword client_key: Required. The client key for the AppFigures source. + :keyword client_key: The client key for the AppFigures source. Required. 
:paramtype client_key: ~azure.mgmt.datafactory.models.SecretBase """ - super(AppFiguresLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AppFigures' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AppFigures" # type: str self.user_name = user_name self.password = password self.client_key = client_key -class ArmIdWrapper(msrest.serialization.Model): +class ArmIdWrapper(_serialization.Model): """A wrapper for an ARM resource id. Variables are only populated by the server, and will be ignored when sending a request. @@ -3332,20 +3926,16 @@ class ArmIdWrapper(msrest.serialization.Model): """ _validation = { - 'id': {'readonly': True}, + "id": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(ArmIdWrapper, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.id = None @@ -3356,8 +3946,8 @@ class AsanaLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -3366,47 +3956,47 @@ class AsanaLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar api_token: Required. The api token for the Asana source. + :vartype annotations: list[JSON] + :ivar api_token: The api token for the Asana source. Required. :vartype api_token: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'api_token': {'required': True}, + "type": {"required": True}, + "api_token": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'api_token': {'key': 'typeProperties.apiToken', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "api_token": {"key": "typeProperties.apiToken", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, api_token: "_models.SecretBase", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - encrypted_credential: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -3414,44 +4004,51 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword api_token: Required. The api token for the Asana source. + :paramtype annotations: list[JSON] + :keyword api_token: The api token for the Asana source. Required. :paramtype api_token: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :paramtype encrypted_credential: any - """ - super(AsanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Asana' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Asana" # type: str self.api_token = api_token self.encrypted_credential = encrypted_credential -class AvroDataset(Dataset): +class AvroDataset(Dataset): # pylint: disable=too-many-instance-attributes """Avro dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -3459,66 +4056,66 @@ class AvroDataset(Dataset): :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation :ivar avro_compression_codec: The data avroCompressionCodec. Type: string (or Expression with resultType string). 
- :vartype avro_compression_codec: any + :vartype avro_compression_codec: JSON :ivar avro_compression_level: :vartype avro_compression_level: int """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'avro_compression_level': {'maximum': 9, 'minimum': 1}, + "type": {"required": True}, + "linked_service_name": {"required": True}, + "avro_compression_level": {"maximum": 9, "minimum": 1}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'object'}, - 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "location": {"key": "typeProperties.location", "type": "DatasetLocation"}, + "avro_compression_codec": {"key": "typeProperties.avroCompressionCodec", "type": "object"}, + "avro_compression_level": {"key": "typeProperties.avroCompressionLevel", "type": "int"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, location: Optional["_models.DatasetLocation"] = None, - avro_compression_codec: Optional[Any] = None, + avro_compression_codec: Optional[JSON] = None, avro_compression_level: Optional[int] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
- :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -3526,71 +4123,87 @@ def __init__( :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation :keyword avro_compression_codec: The data avroCompressionCodec. Type: string (or Expression with resultType string). - :paramtype avro_compression_codec: any + :paramtype avro_compression_codec: JSON :keyword avro_compression_level: :paramtype avro_compression_level: int """ - super(AvroDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'Avro' # type: str + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "Avro" # type: str self.location = location self.avro_compression_codec = avro_compression_codec self.avro_compression_level = avro_compression_level -class DatasetStorageFormat(msrest.serialization.Model): +class DatasetStorageFormat(_serialization.Model): """The format definition of a storage. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroFormat, JsonFormat, OrcFormat, ParquetFormat, TextFormat. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AvroFormat, JsonFormat, OrcFormat, ParquetFormat, TextFormat All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset storage format.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage format. Required. :vartype type: str :ivar serializer: Serializer. Type: string (or Expression with resultType string). - :vartype serializer: any + :vartype serializer: JSON :ivar deserializer: Deserializer. Type: string (or Expression with resultType string). 
- :vartype deserializer: any + :vartype deserializer: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "serializer": {"key": "serializer", "type": "object"}, + "deserializer": {"key": "deserializer", "type": "object"}, } _subtype_map = { - 'type': {'AvroFormat': 'AvroFormat', 'JsonFormat': 'JsonFormat', 'OrcFormat': 'OrcFormat', 'ParquetFormat': 'ParquetFormat', 'TextFormat': 'TextFormat'} + "type": { + "AvroFormat": "AvroFormat", + "JsonFormat": "JsonFormat", + "OrcFormat": "OrcFormat", + "ParquetFormat": "ParquetFormat", + "TextFormat": "TextFormat", + } } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - serializer: Optional[Any] = None, - deserializer: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + serializer: Optional[JSON] = None, + deserializer: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword serializer: Serializer. Type: string (or Expression with resultType string). - :paramtype serializer: any + :paramtype serializer: JSON :keyword deserializer: Deserializer. Type: string (or Expression with resultType string). - :paramtype deserializer: any + :paramtype deserializer: JSON """ - super(DatasetStorageFormat, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties - self.type = 'DatasetStorageFormat' # type: str + self.type = None # type: Optional[str] self.serializer = serializer self.deserializer = deserializer @@ -3602,137 +4215,186 @@ class AvroFormat(DatasetStorageFormat): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset storage format.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage format. Required. :vartype type: str :ivar serializer: Serializer. Type: string (or Expression with resultType string). - :vartype serializer: any + :vartype serializer: JSON :ivar deserializer: Deserializer. Type: string (or Expression with resultType string). 
- :vartype deserializer: any + :vartype deserializer: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "serializer": {"key": "serializer", "type": "object"}, + "deserializer": {"key": "deserializer", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - serializer: Optional[Any] = None, - deserializer: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + serializer: Optional[JSON] = None, + deserializer: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword serializer: Serializer. Type: string (or Expression with resultType string). - :paramtype serializer: any + :paramtype serializer: JSON :keyword deserializer: Deserializer. Type: string (or Expression with resultType string). - :paramtype deserializer: any + :paramtype deserializer: JSON """ - super(AvroFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.type = 'AvroFormat' # type: str + super().__init__( + additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs + ) + self.type = "AvroFormat" # type: str -class CopySink(msrest.serialization.Model): +class CopySink(_serialization.Model): """A copy activity sink. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSink, AzureBlobFSSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, MongoDbAtlasSink, MongoDbV2Sink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDWSink, SqlMISink, SqlServerSink, SqlSink. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AvroSink, AzureBlobFSSink, AzureDataExplorerSink, AzureDataLakeStoreSink, + AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, + AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, + CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, + DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, + JsonSink, MicrosoftAccessSink, MongoDbAtlasSink, MongoDbV2Sink, OdbcSink, OracleSink, OrcSink, + ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, + SnowflakeSink, SqlDWSink, SqlMISink, SqlServerSink, SqlSink All required parameters must be populated in order to send to Azure. 
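The abstract bases touched in this hunk (DatasetStorageFormat above, CopySink here) dispatch on the "type" discriminator through _subtype_map, and after this change their constructors leave self.type as None rather than filling in a placeholder string; only the concrete subclasses set it. A minimal sketch of how that behaves, assuming the regenerated base Model keeps the usual msrest-style deserialize() helper (not shown in this hunk):

    from azure.mgmt.datafactory.models import AvroFormat, DatasetStorageFormat

    # Concrete subclasses set the discriminator themselves.
    fmt = AvroFormat(serializer="my-serializer")
    assert fmt.type == "AvroFormat"

    # The abstract base no longer fills in a placeholder discriminator.
    base = DatasetStorageFormat()
    assert base.type is None

    # Deserializing through the base class picks the subclass from _subtype_map
    # (assumes the generated Model exposes the msrest-style deserialize()).
    roundtripped = DatasetStorageFormat.deserialize({"type": "AvroFormat"})
    assert isinstance(roundtripped, AvroFormat)
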
:ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, } _subtype_map = { - 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 
'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'MongoDbAtlasSink': 'MongoDbAtlasSink', 'MongoDbV2Sink': 'MongoDbV2Sink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - **kwargs - ): - """ - :keyword additional_properties: Unmatched properties from the message are deserialized to this - collection. - :paramtype additional_properties: dict[str, any] + "type": { + "AvroSink": "AvroSink", + "AzureBlobFSSink": "AzureBlobFSSink", + "AzureDataExplorerSink": "AzureDataExplorerSink", + "AzureDataLakeStoreSink": "AzureDataLakeStoreSink", + "AzureDatabricksDeltaLakeSink": "AzureDatabricksDeltaLakeSink", + "AzureMySqlSink": "AzureMySqlSink", + "AzurePostgreSqlSink": "AzurePostgreSqlSink", + "AzureQueueSink": "AzureQueueSink", + "AzureSearchIndexSink": "AzureSearchIndexSink", + "AzureSqlSink": "AzureSqlSink", + "AzureTableSink": "AzureTableSink", + "BinarySink": "BinarySink", + "BlobSink": "BlobSink", + "CommonDataServiceForAppsSink": "CommonDataServiceForAppsSink", + "CosmosDbMongoDbApiSink": "CosmosDbMongoDbApiSink", + "CosmosDbSqlApiSink": "CosmosDbSqlApiSink", + "DelimitedTextSink": "DelimitedTextSink", + "DocumentDbCollectionSink": "DocumentDbCollectionSink", + "DynamicsCrmSink": "DynamicsCrmSink", + "DynamicsSink": "DynamicsSink", + "FileSystemSink": "FileSystemSink", + "InformixSink": "InformixSink", + "JsonSink": "JsonSink", + "MicrosoftAccessSink": "MicrosoftAccessSink", + "MongoDbAtlasSink": "MongoDbAtlasSink", + "MongoDbV2Sink": "MongoDbV2Sink", + "OdbcSink": "OdbcSink", + "OracleSink": "OracleSink", + "OrcSink": "OrcSink", + "ParquetSink": "ParquetSink", + "RestSink": "RestSink", + "SalesforceServiceCloudSink": "SalesforceServiceCloudSink", + "SalesforceSink": "SalesforceSink", + "SapCloudForCustomerSink": "SapCloudForCustomerSink", + "SnowflakeSink": "SnowflakeSink", + "SqlDWSink": "SqlDWSink", + "SqlMISink": "SqlMISink", + "SqlServerSink": "SqlServerSink", + "SqlSink": "SqlSink", + } + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + **kwargs + ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. 
- :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON """ - super(CopySink, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties - self.type = 'CopySink' # type: str + self.type = None # type: Optional[str] self.write_batch_size = write_batch_size self.write_batch_timeout = write_batch_timeout self.sink_retry_count = sink_retry_count @@ -3748,27 +4410,27 @@ class AvroSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar store_settings: Avro store settings. :vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :ivar format_settings: Avro format settings. 
@@ -3776,32 +4438,32 @@ class AvroSink(CopySink): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "store_settings": {"key": "storeSettings", "type": "StoreWriteSettings"}, + "format_settings": {"key": "formatSettings", "type": "AvroWriteSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, store_settings: Optional["_models.StoreWriteSettings"] = None, format_settings: Optional["_models.AvroWriteSettings"] = None, **kwargs @@ -3809,32 +4471,41 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword store_settings: Avro store settings. :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :keyword format_settings: Avro format settings. :paramtype format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings """ - super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AvroSink' # type: str + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AvroSink" # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -3846,125 +4517,134 @@ class AvroSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar store_settings: Avro store settings. :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "store_settings": {"key": "storeSettings", "type": "StoreReadSettings"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, store_settings: Optional["_models.StoreReadSettings"] = None, - additional_columns: Optional[Any] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword store_settings: Avro store settings. :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :paramtype additional_columns: any - """ - super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AvroSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AvroSource" # type: str self.store_settings = store_settings self.additional_columns = additional_columns -class FormatWriteSettings(msrest.serialization.Model): +class FormatWriteSettings(_serialization.Model): """Format write settings. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings, OrcWriteSettings, ParquetWriteSettings. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings, OrcWriteSettings, + ParquetWriteSettings All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The write setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The write setting type. Required. :vartype type: str """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, } _subtype_map = { - 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings', 'OrcWriteSettings': 'OrcWriteSettings', 'ParquetWriteSettings': 'ParquetWriteSettings'} + "type": { + "AvroWriteSettings": "AvroWriteSettings", + "DelimitedTextWriteSettings": "DelimitedTextWriteSettings", + "JsonWriteSettings": "JsonWriteSettings", + "OrcWriteSettings": "OrcWriteSettings", + "ParquetWriteSettings": "ParquetWriteSettings", + } } - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - **kwargs - ): + def __init__(self, *, additional_properties: Optional[Dict[str, JSON]] = None, **kwargs): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] """ - super(FormatWriteSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties - self.type = 'FormatWriteSettings' # type: str + self.type = None # type: Optional[str] class AvroWriteSettings(FormatWriteSettings): @@ -3974,8 +4654,8 @@ class AvroWriteSettings(FormatWriteSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The write setting type.Constant filled by server. 
+ :vartype additional_properties: dict[str, JSON] + :ivar type: The write setting type. Required. :vartype type: str :ivar record_name: Top level record name in write result, which is required in AVRO spec. :vartype record_name: str @@ -3983,91 +4663,92 @@ class AvroWriteSettings(FormatWriteSettings): :vartype record_namespace: str :ivar max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). - :vartype max_rows_per_file: any + :vartype max_rows_per_file: JSON :ivar file_name_prefix: Specifies the file name pattern :code:``_:code:``.:code:`` when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). - :vartype file_name_prefix: any + :vartype file_name_prefix: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'record_name': {'key': 'recordName', 'type': 'str'}, - 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, - 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, - 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "record_name": {"key": "recordName", "type": "str"}, + "record_namespace": {"key": "recordNamespace", "type": "str"}, + "max_rows_per_file": {"key": "maxRowsPerFile", "type": "object"}, + "file_name_prefix": {"key": "fileNamePrefix", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, record_name: Optional[str] = None, record_namespace: Optional[str] = None, - max_rows_per_file: Optional[Any] = None, - file_name_prefix: Optional[Any] = None, + max_rows_per_file: Optional[JSON] = None, + file_name_prefix: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword record_name: Top level record name in write result, which is required in AVRO spec. :paramtype record_name: str :keyword record_namespace: Record namespace in the write result. :paramtype record_namespace: str :keyword max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). - :paramtype max_rows_per_file: any + :paramtype max_rows_per_file: JSON :keyword file_name_prefix: Specifies the file name pattern :code:``_:code:``.:code:`` when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). 
- :paramtype file_name_prefix: any + :paramtype file_name_prefix: JSON """ - super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'AvroWriteSettings' # type: str + super().__init__(additional_properties=additional_properties, **kwargs) + self.type = "AvroWriteSettings" # type: str self.record_name = record_name self.record_namespace = record_namespace self.max_rows_per_file = max_rows_per_file self.file_name_prefix = file_name_prefix -class CustomSetupBase(msrest.serialization.Model): +class CustomSetupBase(_serialization.Model): """The base definition of the custom setup. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzPowerShellSetup, CmdkeySetup, ComponentSetup, EnvironmentVariableSetup. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AzPowerShellSetup, CmdkeySetup, ComponentSetup, EnvironmentVariableSetup All required parameters must be populated in order to send to Azure. - :ivar type: Required. The type of custom setup.Constant filled by server. + :ivar type: The type of custom setup. Required. :vartype type: str """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, + "type": {"key": "type", "type": "str"}, } _subtype_map = { - 'type': {'AzPowerShellSetup': 'AzPowerShellSetup', 'CmdkeySetup': 'CmdkeySetup', 'ComponentSetup': 'ComponentSetup', 'EnvironmentVariableSetup': 'EnvironmentVariableSetup'} + "type": { + "AzPowerShellSetup": "AzPowerShellSetup", + "CmdkeySetup": "CmdkeySetup", + "ComponentSetup": "ComponentSetup", + "EnvironmentVariableSetup": "EnvironmentVariableSetup", + } } - def __init__( - self, - **kwargs - ): - """ - """ - super(CustomSetupBase, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.type = None # type: Optional[str] @@ -4076,46 +4757,41 @@ class AzPowerShellSetup(CustomSetupBase): All required parameters must be populated in order to send to Azure. - :ivar type: Required. The type of custom setup.Constant filled by server. + :ivar type: The type of custom setup. Required. :vartype type: str - :ivar version: Required. The required version of Azure PowerShell to install. + :ivar version: The required version of Azure PowerShell to install. Required. :vartype version: str """ _validation = { - 'type': {'required': True}, - 'version': {'required': True}, + "type": {"required": True}, + "version": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'version': {'key': 'typeProperties.version', 'type': 'str'}, + "type": {"key": "type", "type": "str"}, + "version": {"key": "typeProperties.version", "type": "str"}, } - def __init__( - self, - *, - version: str, - **kwargs - ): + def __init__(self, *, version: str, **kwargs): """ - :keyword version: Required. The required version of Azure PowerShell to install. + :keyword version: The required version of Azure PowerShell to install. Required. :paramtype version: str """ - super(AzPowerShellSetup, self).__init__(**kwargs) - self.type = 'AzPowerShellSetup' # type: str + super().__init__(**kwargs) + self.type = "AzPowerShellSetup" # type: str self.version = version -class AzureBatchLinkedService(LinkedService): +class AzureBatchLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Azure Batch linked service. 
All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -4124,73 +4800,73 @@ class AzureBatchLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar account_name: Required. The Azure Batch account name. Type: string (or Expression with - resultType string). - :vartype account_name: any + :vartype annotations: list[JSON] + :ivar account_name: The Azure Batch account name. Type: string (or Expression with resultType + string). Required. + :vartype account_name: JSON :ivar access_key: The Azure Batch account access key. :vartype access_key: ~azure.mgmt.datafactory.models.SecretBase - :ivar batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType - string). - :vartype batch_uri: any - :ivar pool_name: Required. The Azure Batch pool name. Type: string (or Expression with - resultType string). - :vartype pool_name: any - :ivar linked_service_name: Required. The Azure Storage linked service reference. + :ivar batch_uri: The Azure Batch URI. Type: string (or Expression with resultType string). + Required. + :vartype batch_uri: JSON + :ivar pool_name: The Azure Batch pool name. Type: string (or Expression with resultType + string). Required. + :vartype pool_name: JSON + :ivar linked_service_name: The Azure Storage linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON :ivar credential: The credential reference containing authentication information. 
:vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'batch_uri': {'required': True}, - 'pool_name': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "account_name": {"required": True}, + "batch_uri": {"required": True}, + "pool_name": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, - 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, - 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "account_name": {"key": "typeProperties.accountName", "type": "object"}, + "access_key": {"key": "typeProperties.accessKey", "type": "SecretBase"}, + "batch_uri": {"key": "typeProperties.batchUri", "type": "object"}, + "pool_name": {"key": "typeProperties.poolName", "type": "object"}, + "linked_service_name": {"key": "typeProperties.linkedServiceName", "type": "LinkedServiceReference"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, } def __init__( self, *, - account_name: Any, - batch_uri: Any, - pool_name: Any, + account_name: JSON, + batch_uri: JSON, + pool_name: JSON, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, access_key: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, credential: Optional["_models.CredentialReference"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. 
:paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -4198,29 +4874,36 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword account_name: Required. The Azure Batch account name. Type: string (or Expression with - resultType string). - :paramtype account_name: any + :paramtype annotations: list[JSON] + :keyword account_name: The Azure Batch account name. Type: string (or Expression with + resultType string). Required. + :paramtype account_name: JSON :keyword access_key: The Azure Batch account access key. :paramtype access_key: ~azure.mgmt.datafactory.models.SecretBase - :keyword batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType - string). - :paramtype batch_uri: any - :keyword pool_name: Required. The Azure Batch pool name. Type: string (or Expression with - resultType string). - :paramtype pool_name: any - :keyword linked_service_name: Required. The Azure Storage linked service reference. + :keyword batch_uri: The Azure Batch URI. Type: string (or Expression with resultType string). + Required. + :paramtype batch_uri: JSON + :keyword pool_name: The Azure Batch pool name. Type: string (or Expression with resultType + string). Required. + :paramtype pool_name: JSON + :keyword linked_service_name: The Azure Storage linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any + :paramtype encrypted_credential: JSON :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ - super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureBatch' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureBatch" # type: str self.account_name = account_name self.access_key = access_key self.batch_uri = batch_uri @@ -4230,48 +4913,48 @@ def __init__( self.credential = credential -class AzureBlobDataset(Dataset): +class AzureBlobDataset(Dataset): # pylint: disable=too-many-instance-attributes """The Azure Blob storage. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar folder_path: The path of the Azure Blob storage. Type: string (or Expression with resultType string). - :vartype folder_path: any + :vartype folder_path: JSON :ivar table_root_location: The root of blob path. Type: string (or Expression with resultType string). - :vartype table_root_location: any + :vartype table_root_location: JSON :ivar file_name: The name of the Azure Blob. Type: string (or Expression with resultType string). - :vartype file_name: any + :vartype file_name: JSON :ivar modified_datetime_start: The start of Azure Blob's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_start: any + :vartype modified_datetime_start: JSON :ivar modified_datetime_end: The end of Azure Blob's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_end: any + :vartype modified_datetime_end: JSON :ivar format: The format of the Azure Blob storage. :vartype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :ivar compression: The data compression method used for the blob storage. 
@@ -4279,45 +4962,45 @@ class AzureBlobDataset(Dataset): """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "folder_path": {"key": "typeProperties.folderPath", "type": "object"}, + "table_root_location": {"key": "typeProperties.tableRootLocation", "type": "object"}, + "file_name": {"key": "typeProperties.fileName", "type": "object"}, + "modified_datetime_start": {"key": "typeProperties.modifiedDatetimeStart", "type": "object"}, + "modified_datetime_end": {"key": "typeProperties.modifiedDatetimeEnd", "type": "object"}, + "format": {"key": "typeProperties.format", "type": "DatasetStorageFormat"}, + "compression": {"key": "typeProperties.compression", "type": "DatasetCompression"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - folder_path: Optional[Any] = None, - table_root_location: Optional[Any] = None, - file_name: Optional[Any] = None, - modified_datetime_start: Optional[Any] = None, - modified_datetime_end: Optional[Any] = None, + folder_path: Optional[JSON] = None, + table_root_location: Optional[JSON] = None, + file_name: Optional[JSON] = None, + modified_datetime_start: Optional[JSON] = None, + modified_datetime_end: Optional[JSON] = None, format: 
Optional["_models.DatasetStorageFormat"] = None, compression: Optional["_models.DatasetCompression"] = None, **kwargs @@ -4325,46 +5008,56 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword folder_path: The path of the Azure Blob storage. Type: string (or Expression with resultType string). - :paramtype folder_path: any + :paramtype folder_path: JSON :keyword table_root_location: The root of blob path. Type: string (or Expression with resultType string). - :paramtype table_root_location: any + :paramtype table_root_location: JSON :keyword file_name: The name of the Azure Blob. Type: string (or Expression with resultType string). - :paramtype file_name: any + :paramtype file_name: JSON :keyword modified_datetime_start: The start of Azure Blob's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_start: any + :paramtype modified_datetime_start: JSON :keyword modified_datetime_end: The end of Azure Blob's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_end: any + :paramtype modified_datetime_end: JSON :keyword format: The format of the Azure Blob storage. :paramtype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :keyword compression: The data compression method used for the blob storage. 
:paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ - super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AzureBlob' # type: str + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "AzureBlob" # type: str self.folder_path = folder_path self.table_root_location = table_root_location self.file_name = file_name @@ -4374,39 +5067,39 @@ def __init__( self.compression = compression -class AzureBlobFSDataset(Dataset): +class AzureBlobFSDataset(Dataset): # pylint: disable=too-many-instance-attributes """The Azure Data Lake Storage Gen2 storage. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar folder_path: The path of the Azure Data Lake Storage Gen2 storage. Type: string (or Expression with resultType string). - :vartype folder_path: any + :vartype folder_path: JSON :ivar file_name: The name of the Azure Data Lake Storage Gen2. Type: string (or Expression with resultType string). - :vartype file_name: any + :vartype file_name: JSON :ivar format: The format of the Azure Data Lake Storage Gen2 storage. :vartype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :ivar compression: The data compression method used for the blob storage. 
@@ -4414,39 +5107,39 @@ class AzureBlobFSDataset(Dataset): """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "folder_path": {"key": "typeProperties.folderPath", "type": "object"}, + "file_name": {"key": "typeProperties.fileName", "type": "object"}, + "format": {"key": "typeProperties.format", "type": "DatasetStorageFormat"}, + "compression": {"key": "typeProperties.compression", "type": "DatasetCompression"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - folder_path: Optional[Any] = None, - file_name: Optional[Any] = None, + folder_path: Optional[JSON] = None, + file_name: Optional[JSON] = None, format: Optional["_models.DatasetStorageFormat"] = None, compression: Optional["_models.DatasetCompression"] = None, **kwargs @@ -4454,52 +5147,62 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
- :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword folder_path: The path of the Azure Data Lake Storage Gen2 storage. Type: string (or Expression with resultType string). - :paramtype folder_path: any + :paramtype folder_path: JSON :keyword file_name: The name of the Azure Data Lake Storage Gen2. Type: string (or Expression with resultType string). - :paramtype file_name: any + :paramtype file_name: JSON :keyword format: The format of the Azure Data Lake Storage Gen2 storage. :paramtype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :keyword compression: The data compression method used for the blob storage. :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ - super(AzureBlobFSDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AzureBlobFSFile' # type: str + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "AzureBlobFSFile" # type: str self.folder_path = folder_path self.file_name = file_name self.format = format self.compression = compression -class AzureBlobFSLinkedService(LinkedService): +class AzureBlobFSLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Azure Data Lake Storage Gen2 linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -4508,36 +5211,36 @@ class AzureBlobFSLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or - Expression with resultType string). - :vartype url: any + :vartype annotations: list[JSON] + :ivar url: Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or Expression + with resultType string). Required. 
+ :vartype url: JSON :ivar account_key: Account key for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). - :vartype account_key: any + :vartype account_key: JSON :ivar service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). - :vartype service_principal_id: any + :vartype service_principal_id: JSON :ivar service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Storage Gen2 account. :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :vartype tenant: any + :vartype tenant: JSON :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :vartype azure_cloud_type: any + :vartype azure_cloud_type: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON :ivar credential: The credential reference containing authentication information. :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference :ivar service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). - :vartype service_principal_credential_type: any + :vartype service_principal_credential_type: JSON :ivar service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. 
If @@ -4547,53 +5250,53 @@ class AzureBlobFSLinkedService(LinkedService): """ _validation = { - 'type': {'required': True}, - 'url': {'required': True}, + "type": {"required": True}, + "url": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, - 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "url": {"key": "typeProperties.url", "type": "object"}, + "account_key": {"key": "typeProperties.accountKey", "type": "object"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, + "tenant": {"key": "typeProperties.tenant", "type": "object"}, + "azure_cloud_type": {"key": "typeProperties.azureCloudType", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, + "service_principal_credential_type": {"key": "typeProperties.servicePrincipalCredentialType", "type": "object"}, + "service_principal_credential": {"key": "typeProperties.servicePrincipalCredential", "type": "SecretBase"}, } def __init__( self, *, - url: Any, - additional_properties: Optional[Dict[str, Any]] = None, + url: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - account_key: Optional[Any] = None, - service_principal_id: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + account_key: Optional[JSON] = None, + service_principal_id: Optional[JSON] = None, service_principal_key: Optional["_models.SecretBase"] = None, - tenant: Optional[Any] = None, - azure_cloud_type: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + tenant: Optional[JSON] = None, + azure_cloud_type: 
Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, credential: Optional["_models.CredentialReference"] = None, - service_principal_credential_type: Optional[Any] = None, + service_principal_credential_type: Optional[JSON] = None, service_principal_credential: Optional["_models.SecretBase"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -4601,36 +5304,36 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or - Expression with resultType string). - :paramtype url: any + :paramtype annotations: list[JSON] + :keyword url: Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or + Expression with resultType string). Required. + :paramtype url: JSON :keyword account_key: Account key for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). - :paramtype account_key: any + :paramtype account_key: JSON :keyword service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). - :paramtype service_principal_id: any + :paramtype service_principal_id: JSON :keyword service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Storage Gen2 account. :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :paramtype tenant: any + :paramtype tenant: JSON :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :paramtype azure_cloud_type: any + :paramtype azure_cloud_type: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any + :paramtype encrypted_credential: JSON :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference :keyword service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). - :paramtype service_principal_credential_type: any + :paramtype service_principal_credential_type: JSON :keyword service_principal_credential: The credential of the service principal object in Azure Active Directory. 
If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -4638,8 +5341,15 @@ def __init__( be AzureKeyVaultSecretReference. :paramtype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase """ - super(AzureBlobFSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureBlobFS' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureBlobFS" # type: str self.url = url self.account_key = account_key self.service_principal_id = service_principal_id @@ -4659,182 +5369,189 @@ class AzureBlobFSLocation(DatasetLocation): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage location. Required. :vartype type: str :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :vartype folder_path: any + :vartype folder_path: JSON :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :vartype file_name: any + :vartype file_name: JSON :ivar file_system: Specify the fileSystem of azure blobFS. Type: string (or Expression with resultType string). - :vartype file_system: any + :vartype file_system: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'file_system': {'key': 'fileSystem', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "folder_path": {"key": "folderPath", "type": "object"}, + "file_name": {"key": "fileName", "type": "object"}, + "file_system": {"key": "fileSystem", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - folder_path: Optional[Any] = None, - file_name: Optional[Any] = None, - file_system: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + folder_path: Optional[JSON] = None, + file_name: Optional[JSON] = None, + file_system: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :paramtype folder_path: any + :paramtype folder_path: JSON :keyword file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :paramtype file_name: any + :paramtype file_name: JSON :keyword file_system: Specify the fileSystem of azure blobFS. Type: string (or Expression with resultType string). 
- :paramtype file_system: any + :paramtype file_system: JSON """ - super(AzureBlobFSLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type = 'AzureBlobFSLocation' # type: str + super().__init__( + additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs + ) + self.type = "AzureBlobFSLocation" # type: str self.file_system = file_system -class AzureBlobFSReadSettings(StoreReadSettings): +class AzureBlobFSReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes """Azure blobFS read settings. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The read setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. :vartype type: str :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :vartype recursive: any + :vartype recursive: JSON :ivar wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string (or Expression with resultType string). - :vartype wildcard_folder_path: any + :vartype wildcard_folder_path: JSON :ivar wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or Expression with resultType string). - :vartype wildcard_file_name: any + :vartype wildcard_file_name: JSON :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :vartype file_list_path: any + :vartype file_list_path: JSON :ivar enable_partition_discovery: Indicates whether to enable partition discovery. :vartype enable_partition_discovery: bool :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :vartype partition_root_path: any + :vartype partition_root_path: JSON :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype delete_files_after_completion: any + :vartype delete_files_after_completion: JSON :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_start: any + :vartype modified_datetime_start: JSON :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
- :vartype modified_datetime_end: any + :vartype modified_datetime_end: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "recursive": {"key": "recursive", "type": "object"}, + "wildcard_folder_path": {"key": "wildcardFolderPath", "type": "object"}, + "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, + "file_list_path": {"key": "fileListPath", "type": "object"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "partition_root_path": {"key": "partitionRootPath", "type": "object"}, + "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, + "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, + "modified_datetime_end": {"key": "modifiedDatetimeEnd", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - recursive: Optional[Any] = None, - wildcard_folder_path: Optional[Any] = None, - wildcard_file_name: Optional[Any] = None, - file_list_path: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + recursive: Optional[JSON] = None, + wildcard_folder_path: Optional[JSON] = None, + wildcard_file_name: Optional[JSON] = None, + file_list_path: Optional[JSON] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[Any] = None, - delete_files_after_completion: Optional[Any] = None, - modified_datetime_start: Optional[Any] = None, - modified_datetime_end: Optional[Any] = None, + partition_root_path: Optional[JSON] = None, + delete_files_after_completion: Optional[JSON] = None, + modified_datetime_start: Optional[JSON] = None, + modified_datetime_end: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :paramtype recursive: any + :paramtype recursive: JSON :keyword wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string (or Expression with resultType string). - :paramtype wildcard_folder_path: any + :paramtype wildcard_folder_path: JSON :keyword wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or Expression with resultType string). - :paramtype wildcard_file_name: any + :paramtype wildcard_file_name: JSON :keyword file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :paramtype file_list_path: any + :paramtype file_list_path: JSON :keyword enable_partition_discovery: Indicates whether to enable partition discovery. :paramtype enable_partition_discovery: bool :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :paramtype partition_root_path: any + :paramtype partition_root_path: JSON :keyword delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype delete_files_after_completion: any + :paramtype delete_files_after_completion: JSON :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_start: any + :paramtype modified_datetime_start: JSON :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_end: any - """ - super(AzureBlobFSReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AzureBlobFSReadSettings' # type: str + :paramtype modified_datetime_end: JSON + """ + super().__init__( + additional_properties=additional_properties, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AzureBlobFSReadSettings" # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -4853,95 +5570,104 @@ class AzureBlobFSSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. 
- :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar copy_behavior: The type of copy behavior for copy sink. - :vartype copy_behavior: any + :vartype copy_behavior: JSON :ivar metadata: Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). :vartype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "copy_behavior": {"key": "copyBehavior", "type": "object"}, + "metadata": {"key": "metadata", "type": "[MetadataItem]"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - copy_behavior: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: 
Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + copy_behavior: Optional[JSON] = None, metadata: Optional[List["_models.MetadataItem"]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword copy_behavior: The type of copy behavior for copy sink. - :paramtype copy_behavior: any + :paramtype copy_behavior: JSON :keyword metadata: Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). :paramtype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ - super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AzureBlobFSSink' # type: str + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AzureBlobFSSink" # type: str self.copy_behavior = copy_behavior self.metadata = metadata @@ -4953,158 +5679,173 @@ class AzureBlobFSSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). 
- :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). - :vartype treat_empty_as_null: any + :vartype treat_empty_as_null: JSON :ivar skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). - :vartype skip_header_line_count: any + :vartype skip_header_line_count: JSON :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :vartype recursive: any + :vartype recursive: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, - 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "treat_empty_as_null": {"key": "treatEmptyAsNull", "type": "object"}, + "skip_header_line_count": {"key": "skipHeaderLineCount", "type": "object"}, + "recursive": {"key": "recursive", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - treat_empty_as_null: Optional[Any] = None, - skip_header_line_count: Optional[Any] = None, - recursive: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + treat_empty_as_null: Optional[JSON] = None, + skip_header_line_count: Optional[JSON] = None, + recursive: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: 
Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). - :paramtype treat_empty_as_null: any + :paramtype treat_empty_as_null: JSON :keyword skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). - :paramtype skip_header_line_count: any + :paramtype skip_header_line_count: JSON :keyword recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :paramtype recursive: any - """ - super(AzureBlobFSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AzureBlobFSSource' # type: str + :paramtype recursive: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AzureBlobFSSource" # type: str self.treat_empty_as_null = treat_empty_as_null self.skip_header_line_count = skip_header_line_count self.recursive = recursive -class StoreWriteSettings(msrest.serialization.Model): +class StoreWriteSettings(_serialization.Model): """Connector write settings. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings, AzureDataLakeStoreWriteSettings, AzureFileStorageWriteSettings, FileServerWriteSettings, SftpWriteSettings. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings, AzureDataLakeStoreWriteSettings, + AzureFileStorageWriteSettings, FileServerWriteSettings, SftpWriteSettings All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The write setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The write setting type. Required. 
:vartype type: str :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar copy_behavior: The type of copy behavior for copy sink. - :vartype copy_behavior: any + :vartype copy_behavior: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "copy_behavior": {"key": "copyBehavior", "type": "object"}, } _subtype_map = { - 'type': {'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureFileStorageWriteSettings': 'AzureFileStorageWriteSettings', 'FileServerWriteSettings': 'FileServerWriteSettings', 'SftpWriteSettings': 'SftpWriteSettings'} + "type": { + "AzureBlobFSWriteSettings": "AzureBlobFSWriteSettings", + "AzureBlobStorageWriteSettings": "AzureBlobStorageWriteSettings", + "AzureDataLakeStoreWriteSettings": "AzureDataLakeStoreWriteSettings", + "AzureFileStorageWriteSettings": "AzureFileStorageWriteSettings", + "FileServerWriteSettings": "FileServerWriteSettings", + "SftpWriteSettings": "SftpWriteSettings", + } } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - copy_behavior: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + copy_behavior: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword copy_behavior: The type of copy behavior for copy sink. 
- :paramtype copy_behavior: any + :paramtype copy_behavior: JSON """ - super(StoreWriteSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties - self.type = 'StoreWriteSettings' # type: str + self.type = None # type: Optional[str] self.max_concurrent_connections = max_concurrent_connections self.disable_metrics_collection = disable_metrics_collection self.copy_behavior = copy_behavior @@ -5117,75 +5858,81 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The write setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The write setting type. Required. :vartype type: str :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar copy_behavior: The type of copy behavior for copy sink. - :vartype copy_behavior: any + :vartype copy_behavior: JSON :ivar block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). - :vartype block_size_in_mb: any + :vartype block_size_in_mb: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "copy_behavior": {"key": "copyBehavior", "type": "object"}, + "block_size_in_mb": {"key": "blockSizeInMB", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - copy_behavior: Optional[Any] = None, - block_size_in_mb: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + copy_behavior: Optional[JSON] = None, + block_size_in_mb: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword copy_behavior: The type of copy behavior for copy sink. - :paramtype copy_behavior: any + :paramtype copy_behavior: JSON :keyword block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). - :paramtype block_size_in_mb: any - """ - super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) - self.type = 'AzureBlobFSWriteSettings' # type: str + :paramtype block_size_in_mb: JSON + """ + super().__init__( + additional_properties=additional_properties, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + copy_behavior=copy_behavior, + **kwargs + ) + self.type = "AzureBlobFSWriteSettings" # type: str self.block_size_in_mb = block_size_in_mb -class AzureBlobStorageLinkedService(LinkedService): +class AzureBlobStorageLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """The azure blob storage linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -5194,16 +5941,16 @@ class AzureBlobStorageLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_string: The connection string. It is mutually exclusive with sasUri, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype connection_string: JSON :ivar account_key: The Azure key vault secret reference of accountKey in connection string. :vartype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually exclusive with connectionString, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. - :vartype sas_uri: any + :vartype sas_uri: JSON :ivar sas_token: The Azure key vault secret reference of sasToken in sas uri. :vartype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is @@ -5211,17 +5958,17 @@ class AzureBlobStorageLinkedService(LinkedService): :vartype service_endpoint: str :ivar service_principal_id: The ID of the service principal used to authenticate against Azure SQL Data Warehouse. 
Type: string (or Expression with resultType string). - :vartype service_principal_id: any + :vartype service_principal_id: JSON :ivar service_principal_key: The key of the service principal used to authenticate against Azure SQL Data Warehouse. :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :vartype tenant: any + :vartype tenant: JSON :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :vartype azure_cloud_type: any + :vartype azure_cloud_type: JSON :ivar account_kind: Specify the kind of your storage account. Allowed values are: Storage (general purpose v1), StorageV2 (general purpose v2), BlobStorage, or BlockBlobStorage. Type: string (or Expression with resultType string). @@ -5235,47 +5982,47 @@ class AzureBlobStorageLinkedService(LinkedService): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, - 'account_kind': {'key': 'typeProperties.accountKind', 'type': 'str'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "account_key": {"key": "typeProperties.accountKey", "type": "AzureKeyVaultSecretReference"}, + "sas_uri": {"key": "typeProperties.sasUri", "type": "object"}, + "sas_token": {"key": "typeProperties.sasToken", "type": "AzureKeyVaultSecretReference"}, + "service_endpoint": {"key": "typeProperties.serviceEndpoint", "type": "str"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "service_principal_key": {"key": 
"typeProperties.servicePrincipalKey", "type": "SecretBase"}, + "tenant": {"key": "typeProperties.tenant", "type": "object"}, + "azure_cloud_type": {"key": "typeProperties.azureCloudType", "type": "object"}, + "account_kind": {"key": "typeProperties.accountKind", "type": "str"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, + "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_string: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_string: Optional[JSON] = None, account_key: Optional["_models.AzureKeyVaultSecretReference"] = None, - sas_uri: Optional[Any] = None, + sas_uri: Optional[JSON] = None, sas_token: Optional["_models.AzureKeyVaultSecretReference"] = None, service_endpoint: Optional[str] = None, - service_principal_id: Optional[Any] = None, + service_principal_id: Optional[JSON] = None, service_principal_key: Optional["_models.SecretBase"] = None, - tenant: Optional[Any] = None, - azure_cloud_type: Optional[Any] = None, + tenant: Optional[JSON] = None, + azure_cloud_type: Optional[JSON] = None, account_kind: Optional[str] = None, encrypted_credential: Optional[str] = None, credential: Optional["_models.CredentialReference"] = None, @@ -5284,7 +6031,7 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -5292,16 +6039,16 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_string: The connection string. It is mutually exclusive with sasUri, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype connection_string: JSON :keyword account_key: The Azure key vault secret reference of accountKey in connection string. :paramtype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually exclusive with connectionString, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype sas_uri: any + :paramtype sas_uri: JSON :keyword sas_token: The Azure key vault secret reference of sasToken in sas uri. :paramtype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is @@ -5309,17 +6056,17 @@ def __init__( :paramtype service_endpoint: str :keyword service_principal_id: The ID of the service principal used to authenticate against Azure SQL Data Warehouse. 
Type: string (or Expression with resultType string). - :paramtype service_principal_id: any + :paramtype service_principal_id: JSON :keyword service_principal_key: The key of the service principal used to authenticate against Azure SQL Data Warehouse. :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :paramtype tenant: any + :paramtype tenant: JSON :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :paramtype azure_cloud_type: any + :paramtype azure_cloud_type: JSON :keyword account_kind: Specify the kind of your storage account. Allowed values are: Storage (general purpose v1), StorageV2 (general purpose v2), BlobStorage, or BlockBlobStorage. Type: string (or Expression with resultType string). @@ -5331,8 +6078,15 @@ def __init__( :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ - super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureBlobStorage' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureBlobStorage" # type: str self.connection_string = connection_string self.account_key = account_key self.sas_uri = sas_uri @@ -5354,190 +6108,197 @@ class AzureBlobStorageLocation(DatasetLocation): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage location. Required. :vartype type: str :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :vartype folder_path: any + :vartype folder_path: JSON :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :vartype file_name: any + :vartype file_name: JSON :ivar container: Specify the container of azure blob. Type: string (or Expression with resultType string). 
- :vartype container: any + :vartype container: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'container': {'key': 'container', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "folder_path": {"key": "folderPath", "type": "object"}, + "file_name": {"key": "fileName", "type": "object"}, + "container": {"key": "container", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - folder_path: Optional[Any] = None, - file_name: Optional[Any] = None, - container: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + folder_path: Optional[JSON] = None, + file_name: Optional[JSON] = None, + container: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :paramtype folder_path: any + :paramtype folder_path: JSON :keyword file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :paramtype file_name: any + :paramtype file_name: JSON :keyword container: Specify the container of azure blob. Type: string (or Expression with resultType string). - :paramtype container: any + :paramtype container: JSON """ - super(AzureBlobStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type = 'AzureBlobStorageLocation' # type: str + super().__init__( + additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs + ) + self.type = "AzureBlobStorageLocation" # type: str self.container = container -class AzureBlobStorageReadSettings(StoreReadSettings): +class AzureBlobStorageReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes """Azure blob read settings. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The read setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. :vartype type: str :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :vartype recursive: any + :vartype recursive: JSON :ivar wildcard_folder_path: Azure blob wildcardFolderPath. 
Type: string (or Expression with resultType string). - :vartype wildcard_folder_path: any + :vartype wildcard_folder_path: JSON :ivar wildcard_file_name: Azure blob wildcardFileName. Type: string (or Expression with resultType string). - :vartype wildcard_file_name: any + :vartype wildcard_file_name: JSON :ivar prefix: The prefix filter for the Azure Blob name. Type: string (or Expression with resultType string). - :vartype prefix: any + :vartype prefix: JSON :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :vartype file_list_path: any + :vartype file_list_path: JSON :ivar enable_partition_discovery: Indicates whether to enable partition discovery. :vartype enable_partition_discovery: bool :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :vartype partition_root_path: any + :vartype partition_root_path: JSON :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype delete_files_after_completion: any + :vartype delete_files_after_completion: JSON :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_start: any + :vartype modified_datetime_start: JSON :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_end: any + :vartype modified_datetime_end: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'prefix': {'key': 'prefix', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "recursive": {"key": "recursive", "type": "object"}, + "wildcard_folder_path": {"key": "wildcardFolderPath", "type": "object"}, + "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, + "prefix": {"key": "prefix", "type": "object"}, + "file_list_path": {"key": "fileListPath", "type": "object"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "partition_root_path": {"key": 
"partitionRootPath", "type": "object"}, + "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, + "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, + "modified_datetime_end": {"key": "modifiedDatetimeEnd", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - recursive: Optional[Any] = None, - wildcard_folder_path: Optional[Any] = None, - wildcard_file_name: Optional[Any] = None, - prefix: Optional[Any] = None, - file_list_path: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + recursive: Optional[JSON] = None, + wildcard_folder_path: Optional[JSON] = None, + wildcard_file_name: Optional[JSON] = None, + prefix: Optional[JSON] = None, + file_list_path: Optional[JSON] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[Any] = None, - delete_files_after_completion: Optional[Any] = None, - modified_datetime_start: Optional[Any] = None, - modified_datetime_end: Optional[Any] = None, + partition_root_path: Optional[JSON] = None, + delete_files_after_completion: Optional[JSON] = None, + modified_datetime_start: Optional[JSON] = None, + modified_datetime_end: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :paramtype recursive: any + :paramtype recursive: JSON :keyword wildcard_folder_path: Azure blob wildcardFolderPath. Type: string (or Expression with resultType string). - :paramtype wildcard_folder_path: any + :paramtype wildcard_folder_path: JSON :keyword wildcard_file_name: Azure blob wildcardFileName. Type: string (or Expression with resultType string). - :paramtype wildcard_file_name: any + :paramtype wildcard_file_name: JSON :keyword prefix: The prefix filter for the Azure Blob name. Type: string (or Expression with resultType string). - :paramtype prefix: any + :paramtype prefix: JSON :keyword file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :paramtype file_list_path: any + :paramtype file_list_path: JSON :keyword enable_partition_discovery: Indicates whether to enable partition discovery. :paramtype enable_partition_discovery: bool :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). 
- :paramtype partition_root_path: any + :paramtype partition_root_path: JSON :keyword delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype delete_files_after_completion: any + :paramtype delete_files_after_completion: JSON :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_start: any + :paramtype modified_datetime_start: JSON :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_end: any - """ - super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AzureBlobStorageReadSettings' # type: str + :paramtype modified_datetime_end: JSON + """ + super().__init__( + additional_properties=additional_properties, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AzureBlobStorageReadSettings" # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -5557,209 +6318,223 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The write setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The write setting type. Required. :vartype type: str :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar copy_behavior: The type of copy behavior for copy sink. - :vartype copy_behavior: any + :vartype copy_behavior: JSON :ivar block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). 
- :vartype block_size_in_mb: any + :vartype block_size_in_mb: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "copy_behavior": {"key": "copyBehavior", "type": "object"}, + "block_size_in_mb": {"key": "blockSizeInMB", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - copy_behavior: Optional[Any] = None, - block_size_in_mb: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + copy_behavior: Optional[JSON] = None, + block_size_in_mb: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword copy_behavior: The type of copy behavior for copy sink. - :paramtype copy_behavior: any + :paramtype copy_behavior: JSON :keyword block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). - :paramtype block_size_in_mb: any - """ - super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) - self.type = 'AzureBlobStorageWriteSettings' # type: str + :paramtype block_size_in_mb: JSON + """ + super().__init__( + additional_properties=additional_properties, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + copy_behavior=copy_behavior, + **kwargs + ) + self.type = "AzureBlobStorageWriteSettings" # type: str self.block_size_in_mb = block_size_in_mb -class AzureDatabricksDeltaLakeDataset(Dataset): +class AzureDatabricksDeltaLakeDataset(Dataset): # pylint: disable=too-many-instance-attributes """Azure Databricks Delta Lake dataset. All required parameters must be populated in order to send to Azure. 
:ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table: The name of delta table. Type: string (or Expression with resultType string). - :vartype table: any + :vartype table: JSON :ivar database: The database name of delta table. Type: string (or Expression with resultType string). - :vartype database: any + :vartype database: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table": {"key": "typeProperties.table", "type": "object"}, + "database": {"key": "typeProperties.database", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = 
None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table: Optional[Any] = None, - database: Optional[Any] = None, + table: Optional[JSON] = None, + database: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table: The name of delta table. Type: string (or Expression with resultType string). - :paramtype table: any + :paramtype table: JSON :keyword database: The database name of delta table. Type: string (or Expression with resultType string). - :paramtype database: any - """ - super(AzureDatabricksDeltaLakeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AzureDatabricksDeltaLakeDataset' # type: str + :paramtype database: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "AzureDatabricksDeltaLakeDataset" # type: str self.table = table self.database = database -class ExportSettings(msrest.serialization.Model): +class ExportSettings(_serialization.Model): """Export command settings. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureDatabricksDeltaLakeExportCommand, SnowflakeExportCopyCommand. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AzureDatabricksDeltaLakeExportCommand, SnowflakeExportCopyCommand All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :vartype additional_properties: dict[str, any] - :ivar type: Required. The export setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The export setting type. Required. :vartype type: str """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, } _subtype_map = { - 'type': {'AzureDatabricksDeltaLakeExportCommand': 'AzureDatabricksDeltaLakeExportCommand', 'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} + "type": { + "AzureDatabricksDeltaLakeExportCommand": "AzureDatabricksDeltaLakeExportCommand", + "SnowflakeExportCopyCommand": "SnowflakeExportCopyCommand", + } } - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - **kwargs - ): + def __init__(self, *, additional_properties: Optional[Dict[str, JSON]] = None, **kwargs): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] """ - super(ExportSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties - self.type = 'ExportSettings' # type: str + self.type = None # type: Optional[str] class AzureDatabricksDeltaLakeExportCommand(ExportSettings): @@ -5769,95 +6544,93 @@ class AzureDatabricksDeltaLakeExportCommand(ExportSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The export setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The export setting type. Required. :vartype type: str :ivar date_format: Specify the date format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). - :vartype date_format: any + :vartype date_format: JSON :ivar timestamp_format: Specify the timestamp format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). - :vartype timestamp_format: any + :vartype timestamp_format: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'date_format': {'key': 'dateFormat', 'type': 'object'}, - 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "date_format": {"key": "dateFormat", "type": "object"}, + "timestamp_format": {"key": "timestampFormat", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - date_format: Optional[Any] = None, - timestamp_format: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + date_format: Optional[JSON] = None, + timestamp_format: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword date_format: Specify the date format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). - :paramtype date_format: any + :paramtype date_format: JSON :keyword timestamp_format: Specify the timestamp format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). - :paramtype timestamp_format: any + :paramtype timestamp_format: JSON """ - super(AzureDatabricksDeltaLakeExportCommand, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'AzureDatabricksDeltaLakeExportCommand' # type: str + super().__init__(additional_properties=additional_properties, **kwargs) + self.type = "AzureDatabricksDeltaLakeExportCommand" # type: str self.date_format = date_format self.timestamp_format = timestamp_format -class ImportSettings(msrest.serialization.Model): +class ImportSettings(_serialization.Model): """Import command settings. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureDatabricksDeltaLakeImportCommand, SnowflakeImportCopyCommand. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AzureDatabricksDeltaLakeImportCommand, SnowflakeImportCopyCommand All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The import setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The import setting type. Required. :vartype type: str """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, } _subtype_map = { - 'type': {'AzureDatabricksDeltaLakeImportCommand': 'AzureDatabricksDeltaLakeImportCommand', 'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} + "type": { + "AzureDatabricksDeltaLakeImportCommand": "AzureDatabricksDeltaLakeImportCommand", + "SnowflakeImportCopyCommand": "SnowflakeImportCopyCommand", + } } - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - **kwargs - ): + def __init__(self, *, additional_properties: Optional[Dict[str, JSON]] = None, **kwargs): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] """ - super(ImportSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties - self.type = 'ImportSettings' # type: str + self.type = None # type: Optional[str] class AzureDatabricksDeltaLakeImportCommand(ImportSettings): @@ -5867,62 +6640,62 @@ class AzureDatabricksDeltaLakeImportCommand(ImportSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The import setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The import setting type. Required. 
:vartype type: str :ivar date_format: Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). - :vartype date_format: any + :vartype date_format: JSON :ivar timestamp_format: Specify the timestamp format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). - :vartype timestamp_format: any + :vartype timestamp_format: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'date_format': {'key': 'dateFormat', 'type': 'object'}, - 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "date_format": {"key": "dateFormat", "type": "object"}, + "timestamp_format": {"key": "timestampFormat", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - date_format: Optional[Any] = None, - timestamp_format: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + date_format: Optional[JSON] = None, + timestamp_format: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword date_format: Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). - :paramtype date_format: any + :paramtype date_format: JSON :keyword timestamp_format: Specify the timestamp format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). - :paramtype timestamp_format: any + :paramtype timestamp_format: JSON """ - super(AzureDatabricksDeltaLakeImportCommand, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'AzureDatabricksDeltaLakeImportCommand' # type: str + super().__init__(additional_properties=additional_properties, **kwargs) + self.type = "AzureDatabricksDeltaLakeImportCommand" # type: str self.date_format = date_format self.timestamp_format = timestamp_format -class AzureDatabricksDeltaLakeLinkedService(LinkedService): +class AzureDatabricksDeltaLakeLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Azure Databricks Delta Lake linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -5931,68 +6704,68 @@ class AzureDatabricksDeltaLakeLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks - deployment. 
Type: string (or Expression with resultType string). - :vartype domain: any + :vartype annotations: list[JSON] + :ivar domain: :code:``.azuredatabricks.net, domain name of your Databricks deployment. + Type: string (or Expression with resultType string). Required. + :vartype domain: JSON :ivar access_token: Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or AzureKeyVaultSecretReference. :vartype access_token: ~azure.mgmt.datafactory.models.SecretBase :ivar cluster_id: The id of an existing interactive cluster that will be used for all runs of this job. Type: string (or Expression with resultType string). - :vartype cluster_id: any + :vartype cluster_id: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON :ivar credential: The credential reference containing authentication information. :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference :ivar workspace_resource_id: Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). - :vartype workspace_resource_id: any + :vartype workspace_resource_id: JSON """ _validation = { - 'type': {'required': True}, - 'domain': {'required': True}, + "type": {"required": True}, + "domain": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'cluster_id': {'key': 'typeProperties.clusterId', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, - 'workspace_resource_id': {'key': 'typeProperties.workspaceResourceId', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "domain": {"key": "typeProperties.domain", "type": "object"}, + "access_token": {"key": "typeProperties.accessToken", "type": "SecretBase"}, + "cluster_id": {"key": "typeProperties.clusterId", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, + "workspace_resource_id": {"key": "typeProperties.workspaceResourceId", "type": "object"}, } def __init__( self, *, - domain: Any, - additional_properties: Optional[Dict[str, Any]] = None, + domain: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, 
parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, access_token: Optional["_models.SecretBase"] = None, - cluster_id: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + cluster_id: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, credential: Optional["_models.CredentialReference"] = None, - workspace_resource_id: Optional[Any] = None, + workspace_resource_id: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -6000,29 +6773,36 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks - deployment. Type: string (or Expression with resultType string). - :paramtype domain: any + :paramtype annotations: list[JSON] + :keyword domain: :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). Required. + :paramtype domain: JSON :keyword access_token: Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or AzureKeyVaultSecretReference. :paramtype access_token: ~azure.mgmt.datafactory.models.SecretBase :keyword cluster_id: The id of an existing interactive cluster that will be used for all runs of this job. Type: string (or Expression with resultType string). - :paramtype cluster_id: any + :paramtype cluster_id: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any + :paramtype encrypted_credential: JSON :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference :keyword workspace_resource_id: Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). 
- :paramtype workspace_resource_id: any - """ - super(AzureDatabricksDeltaLakeLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureDatabricksDeltaLake' # type: str + :paramtype workspace_resource_id: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureDatabricksDeltaLake" # type: str self.domain = domain self.access_token = access_token self.cluster_id = cluster_id @@ -6038,96 +6818,105 @@ class AzureDatabricksDeltaLakeSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :vartype pre_copy_script: any + :vartype pre_copy_script: JSON :ivar import_settings: Azure Databricks Delta Lake import settings. 
:vartype import_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeImportCommand """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "pre_copy_script": {"key": "preCopyScript", "type": "object"}, + "import_settings": {"key": "importSettings", "type": "AzureDatabricksDeltaLakeImportCommand"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - pre_copy_script: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + pre_copy_script: Optional[JSON] = None, import_settings: Optional["_models.AzureDatabricksDeltaLakeImportCommand"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :paramtype pre_copy_script: any + :paramtype pre_copy_script: JSON :keyword import_settings: Azure Databricks Delta Lake import settings. :paramtype import_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeImportCommand """ - super(AzureDatabricksDeltaLakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AzureDatabricksDeltaLakeSink' # type: str + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AzureDatabricksDeltaLakeSink" # type: str self.pre_copy_script = pre_copy_script self.import_settings = import_settings @@ -6139,93 +6928,100 @@ class AzureDatabricksDeltaLakeSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON :ivar export_settings: Azure Databricks Delta Lake export settings. 
:vartype export_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeExportCommand """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "export_settings": {"key": "exportSettings", "type": "AzureDatabricksDeltaLakeExportCommand"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query: Optional[JSON] = None, export_settings: Optional["_models.AzureDatabricksDeltaLakeExportCommand"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with resultType string). - :paramtype query: any + :paramtype query: JSON :keyword export_settings: Azure Databricks Delta Lake export settings. 
:paramtype export_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeExportCommand """ - super(AzureDatabricksDeltaLakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AzureDatabricksDeltaLakeSource' # type: str + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AzureDatabricksDeltaLakeSource" # type: str self.query = query self.export_settings = export_settings -class AzureDatabricksLinkedService(LinkedService): +class AzureDatabricksLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Azure Databricks linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -6234,142 +7030,142 @@ class AzureDatabricksLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks - deployment. Type: string (or Expression with resultType string). - :vartype domain: any + :vartype annotations: list[JSON] + :ivar domain: :code:``.azuredatabricks.net, domain name of your Databricks deployment. + Type: string (or Expression with resultType string). Required. + :vartype domain: JSON :ivar access_token: Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). :vartype access_token: ~azure.mgmt.datafactory.models.SecretBase :ivar authentication: Required to specify MSI, if using Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). - :vartype authentication: any + :vartype authentication: JSON :ivar workspace_resource_id: Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). - :vartype workspace_resource_id: any + :vartype workspace_resource_id: JSON :ivar existing_cluster_id: The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string). - :vartype existing_cluster_id: any + :vartype existing_cluster_id: JSON :ivar instance_pool_id: The id of an existing instance pool that will be used for all runs of this activity. Type: string (or Expression with resultType string). 
- :vartype instance_pool_id: any + :vartype instance_pool_id: JSON :ivar new_cluster_version: If not using an existing interactive cluster, this specifies the Spark version of a new job cluster or instance pool nodes created for each run of this activity. Required if instancePoolId is specified. Type: string (or Expression with resultType string). - :vartype new_cluster_version: any + :vartype new_cluster_version: JSON :ivar new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies the number of worker nodes to use for the new job cluster or instance pool. For new job clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is specified. Type: string (or Expression with resultType string). - :vartype new_cluster_num_of_worker: any + :vartype new_cluster_num_of_worker: JSON :ivar new_cluster_node_type: The node type of the new job cluster. This property is required if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this property is ignored. Type: string (or Expression with resultType string). - :vartype new_cluster_node_type: any + :vartype new_cluster_node_type: JSON :ivar new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value pairs. - :vartype new_cluster_spark_conf: dict[str, any] + :vartype new_cluster_spark_conf: dict[str, JSON] :ivar new_cluster_spark_env_vars: A set of optional, user-specified Spark environment variables key-value pairs. - :vartype new_cluster_spark_env_vars: dict[str, any] + :vartype new_cluster_spark_env_vars: dict[str, JSON] :ivar new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored in instance pool configurations. - :vartype new_cluster_custom_tags: dict[str, any] + :vartype new_cluster_custom_tags: dict[str, JSON] :ivar new_cluster_log_destination: Specify a location to deliver Spark driver, worker, and event logs. Type: string (or Expression with resultType string). - :vartype new_cluster_log_destination: any + :vartype new_cluster_log_destination: JSON :ivar new_cluster_driver_node_type: The driver node type for the new job cluster. This property is ignored in instance pool configurations. Type: string (or Expression with resultType string). - :vartype new_cluster_driver_node_type: any + :vartype new_cluster_driver_node_type: JSON :ivar new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: array of strings (or Expression with resultType array of strings). - :vartype new_cluster_init_scripts: any + :vartype new_cluster_init_scripts: JSON :ivar new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This property is now ignored, and takes the default elastic disk behavior in Databricks (elastic disks are always enabled). Type: boolean (or Expression with resultType boolean). - :vartype new_cluster_enable_elastic_disk: any + :vartype new_cluster_enable_elastic_disk: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON :ivar policy_id: The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string (or Expression with resultType string). - :vartype policy_id: any + :vartype policy_id: JSON :ivar credential: The credential reference containing authentication information. :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { - 'type': {'required': True}, - 'domain': {'required': True}, + "type": {"required": True}, + "domain": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, - 'workspace_resource_id': {'key': 'typeProperties.workspaceResourceId', 'type': 'object'}, - 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, - 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, - 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, - 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, - 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, - 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, - 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, - 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, - 'new_cluster_log_destination': {'key': 'typeProperties.newClusterLogDestination', 'type': 'object'}, - 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, - 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, - 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'policy_id': {'key': 'typeProperties.policyId', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "domain": {"key": "typeProperties.domain", "type": "object"}, + "access_token": {"key": "typeProperties.accessToken", "type": "SecretBase"}, + "authentication": {"key": "typeProperties.authentication", "type": "object"}, + "workspace_resource_id": {"key": "typeProperties.workspaceResourceId", "type": "object"}, + "existing_cluster_id": {"key": "typeProperties.existingClusterId", "type": "object"}, + "instance_pool_id": {"key": "typeProperties.instancePoolId", "type": 
"object"}, + "new_cluster_version": {"key": "typeProperties.newClusterVersion", "type": "object"}, + "new_cluster_num_of_worker": {"key": "typeProperties.newClusterNumOfWorker", "type": "object"}, + "new_cluster_node_type": {"key": "typeProperties.newClusterNodeType", "type": "object"}, + "new_cluster_spark_conf": {"key": "typeProperties.newClusterSparkConf", "type": "{object}"}, + "new_cluster_spark_env_vars": {"key": "typeProperties.newClusterSparkEnvVars", "type": "{object}"}, + "new_cluster_custom_tags": {"key": "typeProperties.newClusterCustomTags", "type": "{object}"}, + "new_cluster_log_destination": {"key": "typeProperties.newClusterLogDestination", "type": "object"}, + "new_cluster_driver_node_type": {"key": "typeProperties.newClusterDriverNodeType", "type": "object"}, + "new_cluster_init_scripts": {"key": "typeProperties.newClusterInitScripts", "type": "object"}, + "new_cluster_enable_elastic_disk": {"key": "typeProperties.newClusterEnableElasticDisk", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "policy_id": {"key": "typeProperties.policyId", "type": "object"}, + "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, } - def __init__( + def __init__( # pylint: disable=too-many-locals self, *, - domain: Any, - additional_properties: Optional[Dict[str, Any]] = None, + domain: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, access_token: Optional["_models.SecretBase"] = None, - authentication: Optional[Any] = None, - workspace_resource_id: Optional[Any] = None, - existing_cluster_id: Optional[Any] = None, - instance_pool_id: Optional[Any] = None, - new_cluster_version: Optional[Any] = None, - new_cluster_num_of_worker: Optional[Any] = None, - new_cluster_node_type: Optional[Any] = None, - new_cluster_spark_conf: Optional[Dict[str, Any]] = None, - new_cluster_spark_env_vars: Optional[Dict[str, Any]] = None, - new_cluster_custom_tags: Optional[Dict[str, Any]] = None, - new_cluster_log_destination: Optional[Any] = None, - new_cluster_driver_node_type: Optional[Any] = None, - new_cluster_init_scripts: Optional[Any] = None, - new_cluster_enable_elastic_disk: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, - policy_id: Optional[Any] = None, + authentication: Optional[JSON] = None, + workspace_resource_id: Optional[JSON] = None, + existing_cluster_id: Optional[JSON] = None, + instance_pool_id: Optional[JSON] = None, + new_cluster_version: Optional[JSON] = None, + new_cluster_num_of_worker: Optional[JSON] = None, + new_cluster_node_type: Optional[JSON] = None, + new_cluster_spark_conf: Optional[Dict[str, JSON]] = None, + new_cluster_spark_env_vars: Optional[Dict[str, JSON]] = None, + new_cluster_custom_tags: Optional[Dict[str, JSON]] = None, + new_cluster_log_destination: Optional[JSON] = None, + new_cluster_driver_node_type: Optional[JSON] = None, + new_cluster_init_scripts: Optional[JSON] = None, + new_cluster_enable_elastic_disk: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, + policy_id: Optional[JSON] = None, credential: Optional["_models.CredentialReference"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to 
this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -6377,77 +7173,84 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks - deployment. Type: string (or Expression with resultType string). - :paramtype domain: any + :paramtype annotations: list[JSON] + :keyword domain: :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). Required. + :paramtype domain: JSON :keyword access_token: Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). :paramtype access_token: ~azure.mgmt.datafactory.models.SecretBase :keyword authentication: Required to specify MSI, if using Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). - :paramtype authentication: any + :paramtype authentication: JSON :keyword workspace_resource_id: Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). - :paramtype workspace_resource_id: any + :paramtype workspace_resource_id: JSON :keyword existing_cluster_id: The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string). - :paramtype existing_cluster_id: any + :paramtype existing_cluster_id: JSON :keyword instance_pool_id: The id of an existing instance pool that will be used for all runs of this activity. Type: string (or Expression with resultType string). - :paramtype instance_pool_id: any + :paramtype instance_pool_id: JSON :keyword new_cluster_version: If not using an existing interactive cluster, this specifies the Spark version of a new job cluster or instance pool nodes created for each run of this activity. Required if instancePoolId is specified. Type: string (or Expression with resultType string). - :paramtype new_cluster_version: any + :paramtype new_cluster_version: JSON :keyword new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies the number of worker nodes to use for the new job cluster or instance pool. For new job clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is specified. Type: string (or Expression with resultType string). - :paramtype new_cluster_num_of_worker: any + :paramtype new_cluster_num_of_worker: JSON :keyword new_cluster_node_type: The node type of the new job cluster. This property is required if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this property is ignored. Type: string (or Expression with resultType string). 
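# --- Illustrative sketch, not part of the patch ---------------------------------
# A minimal construction of the Databricks linked service whose fields are
# documented around this hunk. Every value is hypothetical, and SecureString is
# used only as one possible SecretBase implementation for the access token.
from azure.mgmt.datafactory.models import AzureDatabricksLinkedService, SecureString

databricks_ls = AzureDatabricksLinkedService(
    domain="https://adb-1234567890123456.7.azuredatabricks.net",
    access_token=SecureString(value="<databricks-personal-access-token>"),
    new_cluster_version="10.4.x-scala2.12",
    new_cluster_num_of_worker="1:4",          # autoscale between 1 and 4 workers
    new_cluster_node_type="Standard_DS3_v2",
)
# ---------------------------------------------------------------------------------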
- :paramtype new_cluster_node_type: any + :paramtype new_cluster_node_type: JSON :keyword new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value pairs. - :paramtype new_cluster_spark_conf: dict[str, any] + :paramtype new_cluster_spark_conf: dict[str, JSON] :keyword new_cluster_spark_env_vars: A set of optional, user-specified Spark environment variables key-value pairs. - :paramtype new_cluster_spark_env_vars: dict[str, any] + :paramtype new_cluster_spark_env_vars: dict[str, JSON] :keyword new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored in instance pool configurations. - :paramtype new_cluster_custom_tags: dict[str, any] + :paramtype new_cluster_custom_tags: dict[str, JSON] :keyword new_cluster_log_destination: Specify a location to deliver Spark driver, worker, and event logs. Type: string (or Expression with resultType string). - :paramtype new_cluster_log_destination: any + :paramtype new_cluster_log_destination: JSON :keyword new_cluster_driver_node_type: The driver node type for the new job cluster. This property is ignored in instance pool configurations. Type: string (or Expression with resultType string). - :paramtype new_cluster_driver_node_type: any + :paramtype new_cluster_driver_node_type: JSON :keyword new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: array of strings (or Expression with resultType array of strings). - :paramtype new_cluster_init_scripts: any + :paramtype new_cluster_init_scripts: JSON :keyword new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This property is now ignored, and takes the default elastic disk behavior in Databricks (elastic disks are always enabled). Type: boolean (or Expression with resultType boolean). - :paramtype new_cluster_enable_elastic_disk: any + :paramtype new_cluster_enable_elastic_disk: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any + :paramtype encrypted_credential: JSON :keyword policy_id: The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string (or Expression with resultType string). - :paramtype policy_id: any + :paramtype policy_id: JSON :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ - super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureDatabricks' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureDatabricks" # type: str self.domain = domain self.access_token = access_token self.authentication = authentication @@ -6472,17 +7275,24 @@ def __init__( class ExecutionActivity(Activity): """Base class for all execution activities. - You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMLBatchExecutionActivity, AzureMLExecutePipelineActivity, AzureMLUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUSQLActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSSISPackageActivity, GetMetadataActivity, HDInsightHiveActivity, HDInsightMapReduceActivity, HDInsightPigActivity, HDInsightSparkActivity, HDInsightStreamingActivity, LookupActivity, ScriptActivity, SqlServerStoredProcedureActivity, WebActivity. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMLBatchExecutionActivity, + AzureMLExecutePipelineActivity, AzureMLUpdateResourceActivity, CopyActivity, CustomActivity, + DataLakeAnalyticsUSQLActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, + DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, + ExecuteSSISPackageActivity, GetMetadataActivity, HDInsightHiveActivity, + HDInsightMapReduceActivity, HDInsightPigActivity, HDInsightSparkActivity, + HDInsightStreamingActivity, LookupActivity, ScriptActivity, SynapseSparkJobDefinitionActivity, + SqlServerStoredProcedureActivity, SynapseNotebookActivity, WebActivity All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. 
:vartype description: str @@ -6497,30 +7307,57 @@ class ExecutionActivity(Activity): """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, } _subtype_map = { - 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMLExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'Lookup': 'LookupActivity', 'Script': 'ScriptActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} + "type": { + "AzureDataExplorerCommand": "AzureDataExplorerCommandActivity", + "AzureFunctionActivity": "AzureFunctionActivity", + "AzureMLBatchExecution": "AzureMLBatchExecutionActivity", + "AzureMLExecutePipeline": "AzureMLExecutePipelineActivity", + "AzureMLUpdateResource": "AzureMLUpdateResourceActivity", + "Copy": "CopyActivity", + "Custom": "CustomActivity", + "DataLakeAnalyticsU-SQL": "DataLakeAnalyticsUSQLActivity", + "DatabricksNotebook": "DatabricksNotebookActivity", + "DatabricksSparkJar": "DatabricksSparkJarActivity", + "DatabricksSparkPython": "DatabricksSparkPythonActivity", + "Delete": "DeleteActivity", + "ExecuteDataFlow": "ExecuteDataFlowActivity", + "ExecuteSSISPackage": "ExecuteSSISPackageActivity", + "GetMetadata": "GetMetadataActivity", + "HDInsightHive": "HDInsightHiveActivity", + "HDInsightMapReduce": "HDInsightMapReduceActivity", + "HDInsightPig": "HDInsightPigActivity", + "HDInsightSpark": "HDInsightSparkActivity", + "HDInsightStreaming": "HDInsightStreamingActivity", + "Lookup": "LookupActivity", + "Script": "ScriptActivity", + "SparkJob": "SynapseSparkJobDefinitionActivity", + "SqlServerStoredProcedure": "SqlServerStoredProcedureActivity", + 
"SynapseNotebook": "SynapseNotebookActivity", + "WebActivity": "WebActivity", + } } def __init__( self, *, name: str, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, @@ -6531,8 +7368,8 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -6545,8 +7382,15 @@ def __init__( :keyword policy: Activity policy. :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy """ - super(ExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'Execution' # type: str + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + **kwargs + ) + self.type = "Execution" # type: str self.linked_service_name = linked_service_name self.policy = policy @@ -6558,10 +7402,10 @@ class AzureDataExplorerCommandActivity(ExecutionActivity): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -6573,52 +7417,52 @@ class AzureDataExplorerCommandActivity(ExecutionActivity): :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar policy: Activity policy. :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :ivar command: Required. A control command, according to the Azure Data Explorer command - syntax. Type: string (or Expression with resultType string). - :vartype command: any + :ivar command: A control command, according to the Azure Data Explorer command syntax. Type: + string (or Expression with resultType string). Required. + :vartype command: JSON :ivar command_timeout: Control command timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). 
- :vartype command_timeout: any + :vartype command_timeout: JSON """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'command': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "command": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "command": {"key": "typeProperties.command", "type": "object"}, + "command_timeout": {"key": "typeProperties.commandTimeout", "type": "object"}, } def __init__( self, *, name: str, - command: Any, - additional_properties: Optional[Dict[str, Any]] = None, + command: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, - command_timeout: Optional[Any] = None, + command_timeout: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -6630,28 +7474,37 @@ def __init__( :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword policy: Activity policy. :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :keyword command: Required. A control command, according to the Azure Data Explorer command - syntax. Type: string (or Expression with resultType string). - :paramtype command: any + :keyword command: A control command, according to the Azure Data Explorer command syntax. Type: + string (or Expression with resultType string). Required. + :paramtype command: JSON :keyword command_timeout: Control command timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). 
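# --- Illustrative sketch, not part of the patch ---------------------------------
# Direct construction of the command activity documented above; the control
# command, timeout and description are hypothetical. The timeout follows the
# optional-days d.hh:mm:ss pattern given in the docstring.
from azure.mgmt.datafactory.models import AzureDataExplorerCommandActivity

adx_command = AzureDataExplorerCommandActivity(
    name="CreateStagingTable",
    command=".create table Staging (Id: string, Value: real)",
    command_timeout="00:10:00",
    description="Creates the staging table before ingestion.",
)
# ---------------------------------------------------------------------------------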
- :paramtype command_timeout: any - """ - super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'AzureDataExplorerCommand' # type: str + :paramtype command_timeout: JSON + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "AzureDataExplorerCommand" # type: str self.command = command self.command_timeout = command_timeout -class AzureDataExplorerLinkedService(LinkedService): +class AzureDataExplorerLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Azure Data Explorer (Kusto) linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -6660,68 +7513,68 @@ class AzureDataExplorerLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL will - be in the format https://:code:``.:code:``.kusto.windows.net. Type: - string (or Expression with resultType string). - :vartype endpoint: any + :vartype annotations: list[JSON] + :ivar endpoint: The endpoint of Azure Data Explorer (the engine's endpoint). URL will be in the + format https://:code:``.:code:``.kusto.windows.net. Type: string (or + Expression with resultType string). Required. + :vartype endpoint: JSON :ivar service_principal_id: The ID of the service principal used to authenticate against Azure Data Explorer. Type: string (or Expression with resultType string). - :vartype service_principal_id: any + :vartype service_principal_id: JSON :ivar service_principal_key: The key of the service principal used to authenticate against Kusto. :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :ivar database: Required. Database name for connection. Type: string (or Expression with - resultType string). - :vartype database: any + :ivar database: Database name for connection. Type: string (or Expression with resultType + string). Required. + :vartype database: JSON :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :vartype tenant: any + :vartype tenant: JSON :ivar credential: The credential reference containing authentication information. 
:vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'database': {'required': True}, + "type": {"required": True}, + "endpoint": {"required": True}, + "database": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "endpoint": {"key": "typeProperties.endpoint", "type": "object"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, + "database": {"key": "typeProperties.database", "type": "object"}, + "tenant": {"key": "typeProperties.tenant", "type": "object"}, + "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, } def __init__( self, *, - endpoint: Any, - database: Any, - additional_properties: Optional[Dict[str, Any]] = None, + endpoint: JSON, + database: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - service_principal_id: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + service_principal_id: Optional[JSON] = None, service_principal_key: Optional["_models.SecretBase"] = None, - tenant: Optional[Any] = None, + tenant: Optional[JSON] = None, credential: Optional["_models.CredentialReference"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -6729,28 +7582,35 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword endpoint: Required. 
The endpoint of Azure Data Explorer (the engine's endpoint). URL - will be in the format https://:code:``.:code:``.kusto.windows.net. - Type: string (or Expression with resultType string). - :paramtype endpoint: any + :paramtype annotations: list[JSON] + :keyword endpoint: The endpoint of Azure Data Explorer (the engine's endpoint). URL will be in + the format https://:code:``.:code:``.kusto.windows.net. Type: string + (or Expression with resultType string). Required. + :paramtype endpoint: JSON :keyword service_principal_id: The ID of the service principal used to authenticate against Azure Data Explorer. Type: string (or Expression with resultType string). - :paramtype service_principal_id: any + :paramtype service_principal_id: JSON :keyword service_principal_key: The key of the service principal used to authenticate against Kusto. :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :keyword database: Required. Database name for connection. Type: string (or Expression with - resultType string). - :paramtype database: any + :keyword database: Database name for connection. Type: string (or Expression with resultType + string). Required. + :paramtype database: JSON :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :paramtype tenant: any + :paramtype tenant: JSON :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ - super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureDataExplorer' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureDataExplorer" # type: str self.endpoint = endpoint self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key @@ -6759,112 +7619,121 @@ def __init__( self.credential = credential -class AzureDataExplorerSink(CopySink): +class AzureDataExplorerSink(CopySink): # pylint: disable=too-many-instance-attributes """A copy activity Azure Data Explorer sink. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
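# --- Illustrative sketch, not part of the patch ---------------------------------
# A minimal Azure Data Explorer (Kusto) linked service as documented above; the
# cluster endpoint, database and service principal values are hypothetical.
from azure.mgmt.datafactory.models import AzureDataExplorerLinkedService, SecureString

adx_ls = AzureDataExplorerLinkedService(
    endpoint="https://mycluster.westus2.kusto.windows.net",
    database="telemetry",
    service_principal_id="00000000-0000-0000-0000-000000000000",
    service_principal_key=SecureString(value="<service-principal-secret>"),
    tenant="contoso.onmicrosoft.com",
)
# ---------------------------------------------------------------------------------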
- :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the target Kusto table. Type: string. - :vartype ingestion_mapping_name: any + :vartype ingestion_mapping_name: JSON :ivar ingestion_mapping_as_json: An explicit column mapping description provided in a json format. Type: string. - :vartype ingestion_mapping_as_json: any + :vartype ingestion_mapping_as_json: JSON :ivar flush_immediately: If set to true, any aggregation will be skipped. Default is false. Type: boolean. - :vartype flush_immediately: any + :vartype flush_immediately: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, - 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, - 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "ingestion_mapping_name": {"key": "ingestionMappingName", "type": "object"}, + "ingestion_mapping_as_json": {"key": "ingestionMappingAsJson", "type": "object"}, + "flush_immediately": {"key": "flushImmediately", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - ingestion_mapping_name: Optional[Any] = None, - ingestion_mapping_as_json: Optional[Any] = None, - flush_immediately: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: 
Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + ingestion_mapping_name: Optional[JSON] = None, + ingestion_mapping_as_json: Optional[JSON] = None, + flush_immediately: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the target Kusto table. Type: string. - :paramtype ingestion_mapping_name: any + :paramtype ingestion_mapping_name: JSON :keyword ingestion_mapping_as_json: An explicit column mapping description provided in a json format. Type: string. - :paramtype ingestion_mapping_as_json: any + :paramtype ingestion_mapping_as_json: JSON :keyword flush_immediately: If set to true, any aggregation will be skipped. Default is false. Type: boolean. - :paramtype flush_immediately: any - """ - super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AzureDataExplorerSink' # type: str + :paramtype flush_immediately: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AzureDataExplorerSink" # type: str self.ingestion_mapping_name = ingestion_mapping_name self.ingestion_mapping_as_json = ingestion_mapping_as_json self.flush_immediately = flush_immediately @@ -6877,98 +7746,105 @@ class AzureDataExplorerSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
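# --- Illustrative sketch, not part of the patch ---------------------------------
# A copy-activity sink targeting Data Explorer, as documented above; the mapping
# name and batch settings are hypothetical.
from azure.mgmt.datafactory.models import AzureDataExplorerSink

adx_sink = AzureDataExplorerSink(
    ingestion_mapping_name="TelemetryCsvMapping",
    flush_immediately=False,
    write_batch_timeout="00:05:00",
)
# ---------------------------------------------------------------------------------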
- :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any - :ivar query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type: - string (or Expression with resultType string). - :vartype query: any + :vartype disable_metrics_collection: JSON + :ivar query: Database query. Should be a Kusto Query Language (KQL) query. Type: string (or + Expression with resultType string). Required. + :vartype query: JSON :ivar no_truncation: The name of the Boolean option that controls whether truncation is applied to result-sets that go beyond a certain row-count limit. - :vartype no_truncation: any + :vartype no_truncation: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, - 'query': {'required': True}, + "type": {"required": True}, + "query": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "no_truncation": {"key": "noTruncation", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - query: Any, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - no_truncation: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, + query: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + no_truncation: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any - :keyword query: Required. Database query. Should be a Kusto Query Language (KQL) query. 
Type: - string (or Expression with resultType string). - :paramtype query: any + :paramtype disable_metrics_collection: JSON + :keyword query: Database query. Should be a Kusto Query Language (KQL) query. Type: string (or + Expression with resultType string). Required. + :paramtype query: JSON :keyword no_truncation: The name of the Boolean option that controls whether truncation is applied to result-sets that go beyond a certain row-count limit. - :paramtype no_truncation: any + :paramtype no_truncation: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any - """ - super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AzureDataExplorerSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AzureDataExplorerSource" # type: str self.query = query self.no_truncation = no_truncation self.query_timeout = query_timeout @@ -6982,102 +7858,112 @@ class AzureDataExplorerTableDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table: The table name of the Azure Data Explorer database. Type: string (or Expression with resultType string). 
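# --- Illustrative sketch, not part of the patch ---------------------------------
# A copy-activity source that issues a KQL query, as documented above; the query
# text and timeout are hypothetical.
from azure.mgmt.datafactory.models import AzureDataExplorerSource

adx_source = AzureDataExplorerSource(
    query="Telemetry | where Timestamp > ago(1d) | project Id, Value",
    query_timeout="00:10:00",
    no_truncation=True,
)
# ---------------------------------------------------------------------------------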
- :vartype table: any + :vartype table: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table": {"key": "typeProperties.table", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table: Optional[Any] = None, + table: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table: The table name of the Azure Data Explorer database. Type: string (or Expression with resultType string). - :paramtype table: any - """ - super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AzureDataExplorerTable' # type: str + :paramtype table: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "AzureDataExplorerTable" # type: str self.table = table -class AzureDataLakeAnalyticsLinkedService(LinkedService): +class AzureDataLakeAnalyticsLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Azure Data Lake Analytics linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -7086,79 +7972,79 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar account_name: Required. The Azure Data Lake Analytics account name. Type: string (or - Expression with resultType string). - :vartype account_name: any + :vartype annotations: list[JSON] + :ivar account_name: The Azure Data Lake Analytics account name. Type: string (or Expression + with resultType string). Required. + :vartype account_name: JSON :ivar service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Analytics account. Type: string (or Expression with resultType string). - :vartype service_principal_id: any + :vartype service_principal_id: JSON :ivar service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Analytics account. :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :ivar tenant: Required. The name or ID of the tenant to which the service principal belongs. - Type: string (or Expression with resultType string). - :vartype tenant: any + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). Required. + :vartype tenant: JSON :ivar subscription_id: Data Lake Analytics account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). - :vartype subscription_id: any + :vartype subscription_id: JSON :ivar resource_group_name: Data Lake Analytics account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). 
- :vartype resource_group_name: any + :vartype resource_group_name: JSON :ivar data_lake_analytics_uri: Azure Data Lake Analytics URI Type: string (or Expression with resultType string). - :vartype data_lake_analytics_uri: any + :vartype data_lake_analytics_uri: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'tenant': {'required': True}, + "type": {"required": True}, + "account_name": {"required": True}, + "tenant": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "account_name": {"key": "typeProperties.accountName", "type": "object"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, + "tenant": {"key": "typeProperties.tenant", "type": "object"}, + "subscription_id": {"key": "typeProperties.subscriptionId", "type": "object"}, + "resource_group_name": {"key": "typeProperties.resourceGroupName", "type": "object"}, + "data_lake_analytics_uri": {"key": "typeProperties.dataLakeAnalyticsUri", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - account_name: Any, - tenant: Any, - additional_properties: Optional[Dict[str, Any]] = None, + account_name: JSON, + tenant: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - service_principal_id: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + service_principal_id: Optional[JSON] = None, service_principal_key: Optional["_models.SecretBase"] = None, - subscription_id: 
Optional[Any] = None, - resource_group_name: Optional[Any] = None, - data_lake_analytics_uri: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + subscription_id: Optional[JSON] = None, + resource_group_name: Optional[JSON] = None, + data_lake_analytics_uri: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -7166,35 +8052,42 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword account_name: Required. The Azure Data Lake Analytics account name. Type: string (or - Expression with resultType string). - :paramtype account_name: any + :paramtype annotations: list[JSON] + :keyword account_name: The Azure Data Lake Analytics account name. Type: string (or Expression + with resultType string). Required. + :paramtype account_name: JSON :keyword service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Analytics account. Type: string (or Expression with resultType string). - :paramtype service_principal_id: any + :paramtype service_principal_id: JSON :keyword service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Analytics account. :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :keyword tenant: Required. The name or ID of the tenant to which the service principal belongs. - Type: string (or Expression with resultType string). - :paramtype tenant: any + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). Required. + :paramtype tenant: JSON :keyword subscription_id: Data Lake Analytics account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). - :paramtype subscription_id: any + :paramtype subscription_id: JSON :keyword resource_group_name: Data Lake Analytics account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). - :paramtype resource_group_name: any + :paramtype resource_group_name: JSON :keyword data_lake_analytics_uri: Azure Data Lake Analytics URI Type: string (or Expression with resultType string). - :paramtype data_lake_analytics_uri: any + :paramtype data_lake_analytics_uri: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
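# --- Illustrative sketch, not part of the patch ---------------------------------
# A minimal Data Lake Analytics linked service as documented above; the account,
# tenant, subscription and service principal values are hypothetical.
from azure.mgmt.datafactory.models import AzureDataLakeAnalyticsLinkedService, SecureString

adla_ls = AzureDataLakeAnalyticsLinkedService(
    account_name="contosoadla",
    tenant="72f988bf-0000-0000-0000-000000000000",
    service_principal_id="00000000-0000-0000-0000-000000000000",
    service_principal_key=SecureString(value="<service-principal-secret>"),
    subscription_id="11111111-2222-3333-4444-555555555555",
)
# ---------------------------------------------------------------------------------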
- :paramtype encrypted_credential: any - """ - super(AzureDataLakeAnalyticsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureDataLakeAnalytics' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureDataLakeAnalytics" # type: str self.account_name = account_name self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key @@ -7205,39 +8098,39 @@ def __init__( self.encrypted_credential = encrypted_credential -class AzureDataLakeStoreDataset(Dataset): +class AzureDataLakeStoreDataset(Dataset): # pylint: disable=too-many-instance-attributes """Azure Data Lake Store dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar folder_path: Path to the folder in the Azure Data Lake Store. Type: string (or Expression with resultType string). - :vartype folder_path: any + :vartype folder_path: JSON :ivar file_name: The name of the file in the Azure Data Lake Store. Type: string (or Expression with resultType string). - :vartype file_name: any + :vartype file_name: JSON :ivar format: The format of the Data Lake Store. 
:vartype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :ivar compression: The data compression method used for the item(s) in the Azure Data Lake @@ -7246,39 +8139,39 @@ class AzureDataLakeStoreDataset(Dataset): """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "folder_path": {"key": "typeProperties.folderPath", "type": "object"}, + "file_name": {"key": "typeProperties.fileName", "type": "object"}, + "format": {"key": "typeProperties.format", "type": "DatasetStorageFormat"}, + "compression": {"key": "typeProperties.compression", "type": "DatasetCompression"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - folder_path: Optional[Any] = None, - file_name: Optional[Any] = None, + folder_path: Optional[JSON] = None, + file_name: Optional[JSON] = None, format: Optional["_models.DatasetStorageFormat"] = None, compression: Optional["_models.DatasetCompression"] = None, **kwargs @@ -7286,53 +8179,63 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword folder_path: Path to the folder in the Azure Data Lake Store. Type: string (or Expression with resultType string). - :paramtype folder_path: any + :paramtype folder_path: JSON :keyword file_name: The name of the file in the Azure Data Lake Store. Type: string (or Expression with resultType string). - :paramtype file_name: any + :paramtype file_name: JSON :keyword format: The format of the Data Lake Store. :paramtype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :keyword compression: The data compression method used for the item(s) in the Azure Data Lake Store. :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ - super(AzureDataLakeStoreDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AzureDataLakeStoreFile' # type: str + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "AzureDataLakeStoreFile" # type: str self.folder_path = folder_path self.file_name = file_name self.format = format self.compression = compression -class AzureDataLakeStoreLinkedService(LinkedService): +class AzureDataLakeStoreLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Azure Data Lake Store linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -7341,88 +8244,88 @@ class AzureDataLakeStoreLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar data_lake_store_uri: Required. Data Lake Store service URI. 
Type: string (or Expression - with resultType string). - :vartype data_lake_store_uri: any + :vartype annotations: list[JSON] + :ivar data_lake_store_uri: Data Lake Store service URI. Type: string (or Expression with + resultType string). Required. + :vartype data_lake_store_uri: JSON :ivar service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Store account. Type: string (or Expression with resultType string). - :vartype service_principal_id: any + :vartype service_principal_id: JSON :ivar service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Store account. :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :vartype tenant: any + :vartype tenant: JSON :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :vartype azure_cloud_type: any + :vartype azure_cloud_type: JSON :ivar account_name: Data Lake Store account name. Type: string (or Expression with resultType string). - :vartype account_name: any + :vartype account_name: JSON :ivar subscription_id: Data Lake Store account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). - :vartype subscription_id: any + :vartype subscription_id: JSON :ivar resource_group_name: Data Lake Store account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). - :vartype resource_group_name: any + :vartype resource_group_name: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON :ivar credential: The credential reference containing authentication information. 
:vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { - 'type': {'required': True}, - 'data_lake_store_uri': {'required': True}, + "type": {"required": True}, + "data_lake_store_uri": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "data_lake_store_uri": {"key": "typeProperties.dataLakeStoreUri", "type": "object"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, + "tenant": {"key": "typeProperties.tenant", "type": "object"}, + "azure_cloud_type": {"key": "typeProperties.azureCloudType", "type": "object"}, + "account_name": {"key": "typeProperties.accountName", "type": "object"}, + "subscription_id": {"key": "typeProperties.subscriptionId", "type": "object"}, + "resource_group_name": {"key": "typeProperties.resourceGroupName", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, } def __init__( self, *, - data_lake_store_uri: Any, - additional_properties: Optional[Dict[str, Any]] = None, + data_lake_store_uri: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - service_principal_id: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + service_principal_id: Optional[JSON] = None, service_principal_key: Optional["_models.SecretBase"] = None, - tenant: Optional[Any] = None, - azure_cloud_type: Optional[Any] = None, - account_name: Optional[Any] = None, - subscription_id: Optional[Any] = None, - resource_group_name: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + 
tenant: Optional[JSON] = None, + azure_cloud_type: Optional[JSON] = None, + account_name: Optional[JSON] = None, + subscription_id: Optional[JSON] = None, + resource_group_name: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, credential: Optional["_models.CredentialReference"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -7430,41 +8333,48 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword data_lake_store_uri: Required. Data Lake Store service URI. Type: string (or - Expression with resultType string). - :paramtype data_lake_store_uri: any + :paramtype annotations: list[JSON] + :keyword data_lake_store_uri: Data Lake Store service URI. Type: string (or Expression with + resultType string). Required. + :paramtype data_lake_store_uri: JSON :keyword service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Store account. Type: string (or Expression with resultType string). - :paramtype service_principal_id: any + :paramtype service_principal_id: JSON :keyword service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Store account. :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :paramtype tenant: any + :paramtype tenant: JSON :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :paramtype azure_cloud_type: any + :paramtype azure_cloud_type: JSON :keyword account_name: Data Lake Store account name. Type: string (or Expression with resultType string). - :paramtype account_name: any + :paramtype account_name: JSON :keyword subscription_id: Data Lake Store account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). - :paramtype subscription_id: any + :paramtype subscription_id: JSON :keyword resource_group_name: Data Lake Store account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). - :paramtype resource_group_name: any + :paramtype resource_group_name: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any + :paramtype encrypted_credential: JSON :keyword credential: The credential reference containing authentication information. 
:paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ - super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureDataLakeStore' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureDataLakeStore" # type: str self.data_lake_store_uri = data_lake_store_uri self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key @@ -7484,193 +8394,200 @@ class AzureDataLakeStoreLocation(DatasetLocation): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage location. Required. :vartype type: str :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :vartype folder_path: any + :vartype folder_path: JSON :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :vartype file_name: any + :vartype file_name: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "folder_path": {"key": "folderPath", "type": "object"}, + "file_name": {"key": "fileName", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - folder_path: Optional[Any] = None, - file_name: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + folder_path: Optional[JSON] = None, + file_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :paramtype folder_path: any + :paramtype folder_path: JSON :keyword file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :paramtype file_name: any + :paramtype file_name: JSON """ - super(AzureDataLakeStoreLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type = 'AzureDataLakeStoreLocation' # type: str + super().__init__( + additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs + ) + self.type = "AzureDataLakeStoreLocation" # type: str -class AzureDataLakeStoreReadSettings(StoreReadSettings): +class AzureDataLakeStoreReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes """Azure data lake store read settings. All required parameters must be populated in order to send to Azure. 
:ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The read setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. :vartype type: str :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :vartype recursive: any + :vartype recursive: JSON :ivar wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or Expression with resultType string). - :vartype wildcard_folder_path: any + :vartype wildcard_folder_path: JSON :ivar wildcard_file_name: ADLS wildcardFileName. Type: string (or Expression with resultType string). - :vartype wildcard_file_name: any + :vartype wildcard_file_name: JSON :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :vartype file_list_path: any + :vartype file_list_path: JSON :ivar list_after: Lists files after the value (exclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). - :vartype list_after: any + :vartype list_after: JSON :ivar list_before: Lists files before the value (inclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). - :vartype list_before: any + :vartype list_before: JSON :ivar enable_partition_discovery: Indicates whether to enable partition discovery. :vartype enable_partition_discovery: bool :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :vartype partition_root_path: any + :vartype partition_root_path: JSON :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype delete_files_after_completion: any + :vartype delete_files_after_completion: JSON :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_start: any + :vartype modified_datetime_start: JSON :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
- :vartype modified_datetime_end: any + :vartype modified_datetime_end: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'list_after': {'key': 'listAfter', 'type': 'object'}, - 'list_before': {'key': 'listBefore', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "recursive": {"key": "recursive", "type": "object"}, + "wildcard_folder_path": {"key": "wildcardFolderPath", "type": "object"}, + "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, + "file_list_path": {"key": "fileListPath", "type": "object"}, + "list_after": {"key": "listAfter", "type": "object"}, + "list_before": {"key": "listBefore", "type": "object"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "partition_root_path": {"key": "partitionRootPath", "type": "object"}, + "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, + "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, + "modified_datetime_end": {"key": "modifiedDatetimeEnd", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - recursive: Optional[Any] = None, - wildcard_folder_path: Optional[Any] = None, - wildcard_file_name: Optional[Any] = None, - file_list_path: Optional[Any] = None, - list_after: Optional[Any] = None, - list_before: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + recursive: Optional[JSON] = None, + wildcard_folder_path: Optional[JSON] = None, + wildcard_file_name: Optional[JSON] = None, + file_list_path: Optional[JSON] = None, + list_after: Optional[JSON] = None, + list_before: Optional[JSON] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[Any] = None, - delete_files_after_completion: Optional[Any] = None, - modified_datetime_start: Optional[Any] = None, - modified_datetime_end: Optional[Any] = None, + partition_root_path: Optional[JSON] = None, + delete_files_after_completion: Optional[JSON] = None, + modified_datetime_start: Optional[JSON] = None, + 
modified_datetime_end: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :paramtype recursive: any + :paramtype recursive: JSON :keyword wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or Expression with resultType string). - :paramtype wildcard_folder_path: any + :paramtype wildcard_folder_path: JSON :keyword wildcard_file_name: ADLS wildcardFileName. Type: string (or Expression with resultType string). - :paramtype wildcard_file_name: any + :paramtype wildcard_file_name: JSON :keyword file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :paramtype file_list_path: any + :paramtype file_list_path: JSON :keyword list_after: Lists files after the value (exclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). - :paramtype list_after: any + :paramtype list_after: JSON :keyword list_before: Lists files before the value (inclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). - :paramtype list_before: any + :paramtype list_before: JSON :keyword enable_partition_discovery: Indicates whether to enable partition discovery. :paramtype enable_partition_discovery: bool :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :paramtype partition_root_path: any + :paramtype partition_root_path: JSON :keyword delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype delete_files_after_completion: any + :paramtype delete_files_after_completion: JSON :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_start: any + :paramtype modified_datetime_start: JSON :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
- :paramtype modified_datetime_end: any - """ - super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AzureDataLakeStoreReadSettings' # type: str + :paramtype modified_datetime_end: JSON + """ + super().__init__( + additional_properties=additional_properties, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AzureDataLakeStoreReadSettings" # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -7691,93 +8608,102 @@ class AzureDataLakeStoreSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar copy_behavior: The type of copy behavior for copy sink. - :vartype copy_behavior: any + :vartype copy_behavior: JSON :ivar enable_adls_single_file_parallel: Single File Parallel. 
- :vartype enable_adls_single_file_parallel: any + :vartype enable_adls_single_file_parallel: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "copy_behavior": {"key": "copyBehavior", "type": "object"}, + "enable_adls_single_file_parallel": {"key": "enableAdlsSingleFileParallel", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - copy_behavior: Optional[Any] = None, - enable_adls_single_file_parallel: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + copy_behavior: Optional[JSON] = None, + enable_adls_single_file_parallel: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword copy_behavior: The type of copy behavior for copy sink. - :paramtype copy_behavior: any + :paramtype copy_behavior: JSON :keyword enable_adls_single_file_parallel: Single File Parallel. - :paramtype enable_adls_single_file_parallel: any - """ - super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AzureDataLakeStoreSink' # type: str + :paramtype enable_adls_single_file_parallel: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AzureDataLakeStoreSink" # type: str self.copy_behavior = copy_behavior self.enable_adls_single_file_parallel = enable_adls_single_file_parallel @@ -7789,73 +8715,80 @@ class AzureDataLakeStoreSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
- :vartype recursive: any + :vartype recursive: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "recursive": {"key": "recursive", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - recursive: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + recursive: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
- :paramtype recursive: any - """ - super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AzureDataLakeStoreSource' # type: str + :paramtype recursive: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AzureDataLakeStoreSource" # type: str self.recursive = recursive @@ -7866,77 +8799,83 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The write setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The write setting type. Required. :vartype type: str :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar copy_behavior: The type of copy behavior for copy sink. - :vartype copy_behavior: any + :vartype copy_behavior: JSON :ivar expiry_date_time: Specifies the expiry time of the written files. The time is applied to the UTC time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. Type: integer (or Expression with resultType integer). 
- :vartype expiry_date_time: any + :vartype expiry_date_time: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'expiry_date_time': {'key': 'expiryDateTime', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "copy_behavior": {"key": "copyBehavior", "type": "object"}, + "expiry_date_time": {"key": "expiryDateTime", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - copy_behavior: Optional[Any] = None, - expiry_date_time: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + copy_behavior: Optional[JSON] = None, + expiry_date_time: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword copy_behavior: The type of copy behavior for copy sink. - :paramtype copy_behavior: any + :paramtype copy_behavior: JSON :keyword expiry_date_time: Specifies the expiry time of the written files. The time is applied to the UTC time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. Type: integer (or Expression with resultType integer). - :paramtype expiry_date_time: any - """ - super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) - self.type = 'AzureDataLakeStoreWriteSettings' # type: str + :paramtype expiry_date_time: JSON + """ + super().__init__( + additional_properties=additional_properties, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + copy_behavior=copy_behavior, + **kwargs + ) + self.type = "AzureDataLakeStoreWriteSettings" # type: str self.expiry_date_time = expiry_date_time -class AzureFileStorageLinkedService(LinkedService): +class AzureFileStorageLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Azure File Storage linked service. 
All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -7945,83 +8884,83 @@ class AzureFileStorageLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar host: Host name of the server. Type: string (or Expression with resultType string). - :vartype host: any + :vartype host: JSON :ivar user_id: User ID to logon the server. Type: string (or Expression with resultType string). - :vartype user_id: any + :vartype user_id: JSON :ivar password: Password to logon the server. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype connection_string: JSON :ivar account_key: The Azure key vault secret reference of accountKey in connection string. :vartype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar sas_uri: SAS URI of the Azure File resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :vartype sas_uri: any + :vartype sas_uri: JSON :ivar sas_token: The Azure key vault secret reference of sasToken in sas uri. :vartype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar file_share: The azure file share name. It is required when auth with accountKey/sasToken. Type: string (or Expression with resultType string). - :vartype file_share: any + :vartype file_share: JSON :ivar snapshot: The azure file share snapshot version. Type: string (or Expression with resultType string). - :vartype snapshot: any + :vartype snapshot: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'file_share': {'key': 'typeProperties.fileShare', 'type': 'object'}, - 'snapshot': {'key': 'typeProperties.snapshot', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "host": {"key": "typeProperties.host", "type": "object"}, + "user_id": {"key": "typeProperties.userId", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "account_key": {"key": "typeProperties.accountKey", "type": "AzureKeyVaultSecretReference"}, + "sas_uri": {"key": "typeProperties.sasUri", "type": "object"}, + "sas_token": {"key": "typeProperties.sasToken", "type": "AzureKeyVaultSecretReference"}, + "file_share": {"key": "typeProperties.fileShare", "type": "object"}, + "snapshot": {"key": "typeProperties.snapshot", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - host: Optional[Any] = None, - user_id: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + host: Optional[JSON] = None, + user_id: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - connection_string: Optional[Any] = None, + connection_string: Optional[JSON] = None, account_key: Optional["_models.AzureKeyVaultSecretReference"] = None, - sas_uri: Optional[Any] = None, + sas_uri: Optional[JSON] = None, sas_token: Optional["_models.AzureKeyVaultSecretReference"] = None, - file_share: Optional[Any] = None, - snapshot: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + file_share: Optional[JSON] = None, + snapshot: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = 
None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -8029,37 +8968,44 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword host: Host name of the server. Type: string (or Expression with resultType string). - :paramtype host: any + :paramtype host: JSON :keyword user_id: User ID to logon the server. Type: string (or Expression with resultType string). - :paramtype user_id: any + :paramtype user_id: JSON :keyword password: Password to logon the server. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype connection_string: JSON :keyword account_key: The Azure key vault secret reference of accountKey in connection string. :paramtype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword sas_uri: SAS URI of the Azure File resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype sas_uri: any + :paramtype sas_uri: JSON :keyword sas_token: The Azure key vault secret reference of sasToken in sas uri. :paramtype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword file_share: The azure file share name. It is required when auth with accountKey/sasToken. Type: string (or Expression with resultType string). - :paramtype file_share: any + :paramtype file_share: JSON :keyword snapshot: The azure file share snapshot version. Type: string (or Expression with resultType string). - :paramtype snapshot: any + :paramtype snapshot: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(AzureFileStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureFileStorage' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureFileStorage" # type: str self.host = host self.user_id = user_id self.password = password @@ -8079,181 +9025,188 @@ class AzureFileStorageLocation(DatasetLocation): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset storage location.Constant filled by server. 
+ :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage location. Required. :vartype type: str :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :vartype folder_path: any + :vartype folder_path: JSON :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :vartype file_name: any + :vartype file_name: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "folder_path": {"key": "folderPath", "type": "object"}, + "file_name": {"key": "fileName", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - folder_path: Optional[Any] = None, - file_name: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + folder_path: Optional[JSON] = None, + file_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :paramtype folder_path: any + :paramtype folder_path: JSON :keyword file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :paramtype file_name: any + :paramtype file_name: JSON """ - super(AzureFileStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type = 'AzureFileStorageLocation' # type: str + super().__init__( + additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs + ) + self.type = "AzureFileStorageLocation" # type: str -class AzureFileStorageReadSettings(StoreReadSettings): +class AzureFileStorageReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes """Azure File Storage read settings. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The read setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. :vartype type: str :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
- :vartype recursive: any + :vartype recursive: JSON :ivar wildcard_folder_path: Azure File Storage wildcardFolderPath. Type: string (or Expression with resultType string). - :vartype wildcard_folder_path: any + :vartype wildcard_folder_path: JSON :ivar wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression with resultType string). - :vartype wildcard_file_name: any + :vartype wildcard_file_name: JSON :ivar prefix: The prefix filter for the Azure File name starting from root path. Type: string (or Expression with resultType string). - :vartype prefix: any + :vartype prefix: JSON :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :vartype file_list_path: any + :vartype file_list_path: JSON :ivar enable_partition_discovery: Indicates whether to enable partition discovery. :vartype enable_partition_discovery: bool :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :vartype partition_root_path: any + :vartype partition_root_path: JSON :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype delete_files_after_completion: any + :vartype delete_files_after_completion: JSON :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_start: any + :vartype modified_datetime_start: JSON :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
- :vartype modified_datetime_end: any + :vartype modified_datetime_end: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'prefix': {'key': 'prefix', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "recursive": {"key": "recursive", "type": "object"}, + "wildcard_folder_path": {"key": "wildcardFolderPath", "type": "object"}, + "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, + "prefix": {"key": "prefix", "type": "object"}, + "file_list_path": {"key": "fileListPath", "type": "object"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "partition_root_path": {"key": "partitionRootPath", "type": "object"}, + "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, + "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, + "modified_datetime_end": {"key": "modifiedDatetimeEnd", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - recursive: Optional[Any] = None, - wildcard_folder_path: Optional[Any] = None, - wildcard_file_name: Optional[Any] = None, - prefix: Optional[Any] = None, - file_list_path: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + recursive: Optional[JSON] = None, + wildcard_folder_path: Optional[JSON] = None, + wildcard_file_name: Optional[JSON] = None, + prefix: Optional[JSON] = None, + file_list_path: Optional[JSON] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[Any] = None, - delete_files_after_completion: Optional[Any] = None, - modified_datetime_start: Optional[Any] = None, - modified_datetime_end: Optional[Any] = None, + partition_root_path: Optional[JSON] = None, + delete_files_after_completion: Optional[JSON] = None, + modified_datetime_start: Optional[JSON] = None, + modified_datetime_end: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :paramtype recursive: any + :paramtype recursive: JSON :keyword wildcard_folder_path: Azure File Storage wildcardFolderPath. Type: string (or Expression with resultType string). - :paramtype wildcard_folder_path: any + :paramtype wildcard_folder_path: JSON :keyword wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression with resultType string). - :paramtype wildcard_file_name: any + :paramtype wildcard_file_name: JSON :keyword prefix: The prefix filter for the Azure File name starting from root path. Type: string (or Expression with resultType string). - :paramtype prefix: any + :paramtype prefix: JSON :keyword file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :paramtype file_list_path: any + :paramtype file_list_path: JSON :keyword enable_partition_discovery: Indicates whether to enable partition discovery. :paramtype enable_partition_discovery: bool :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :paramtype partition_root_path: any + :paramtype partition_root_path: JSON :keyword delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype delete_files_after_completion: any + :paramtype delete_files_after_completion: JSON :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_start: any + :paramtype modified_datetime_start: JSON :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
- :paramtype modified_datetime_end: any - """ - super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AzureFileStorageReadSettings' # type: str + :paramtype modified_datetime_end: JSON + """ + super().__init__( + additional_properties=additional_properties, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AzureFileStorageReadSettings" # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -8273,68 +9226,74 @@ class AzureFileStorageWriteSettings(StoreWriteSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The write setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The write setting type. Required. :vartype type: str :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar copy_behavior: The type of copy behavior for copy sink. - :vartype copy_behavior: any + :vartype copy_behavior: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "copy_behavior": {"key": "copyBehavior", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - copy_behavior: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + copy_behavior: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. 
Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword copy_behavior: The type of copy behavior for copy sink. - :paramtype copy_behavior: any + :paramtype copy_behavior: JSON """ - super(AzureFileStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) - self.type = 'AzureFileStorageWriteSettings' # type: str + super().__init__( + additional_properties=additional_properties, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + copy_behavior=copy_behavior, + **kwargs + ) + self.type = "AzureFileStorageWriteSettings" # type: str -class AzureFunctionActivity(ExecutionActivity): +class AzureFunctionActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """Azure Function activity. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -8346,41 +9305,41 @@ class AzureFunctionActivity(ExecutionActivity): :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar policy: Activity policy. :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :ivar method: Required. Rest API method for target endpoint. Known values are: "GET", "POST", - "PUT", "DELETE", "OPTIONS", "HEAD", "TRACE". + :ivar method: Rest API method for target endpoint. Required. Known values are: "GET", "POST", + "PUT", "DELETE", "OPTIONS", "HEAD", and "TRACE". :vartype method: str or ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod - :ivar function_name: Required. Name of the Function that the Azure Function Activity will call. - Type: string (or Expression with resultType string). - :vartype function_name: any + :ivar function_name: Name of the Function that the Azure Function Activity will call. Type: + string (or Expression with resultType string). Required. + :vartype function_name: JSON :ivar headers: Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :vartype headers: any + :vartype headers: JSON :ivar body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). 
- :vartype body: any + :vartype body: JSON """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'method': {'required': True}, - 'function_name': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "method": {"required": True}, + "function_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'method': {'key': 'typeProperties.method', 'type': 'str'}, - 'function_name': {'key': 'typeProperties.functionName', 'type': 'object'}, - 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, - 'body': {'key': 'typeProperties.body', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "method": {"key": "typeProperties.method", "type": "str"}, + "function_name": {"key": "typeProperties.functionName", "type": "object"}, + "headers": {"key": "typeProperties.headers", "type": "object"}, + "body": {"key": "typeProperties.body", "type": "object"}, } def __init__( @@ -8388,22 +9347,22 @@ def __init__( *, name: str, method: Union[str, "_models.AzureFunctionActivityMethod"], - function_name: Any, - additional_properties: Optional[Dict[str, Any]] = None, + function_name: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, - headers: Optional[Any] = None, - body: Optional[Any] = None, + headers: Optional[JSON] = None, + body: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -8415,37 +9374,46 @@ def __init__( :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword policy: Activity policy. :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :keyword method: Required. Rest API method for target endpoint. Known values are: "GET", - "POST", "PUT", "DELETE", "OPTIONS", "HEAD", "TRACE". + :keyword method: Rest API method for target endpoint. Required. Known values are: "GET", + "POST", "PUT", "DELETE", "OPTIONS", "HEAD", and "TRACE". 
:paramtype method: str or ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod - :keyword function_name: Required. Name of the Function that the Azure Function Activity will - call. Type: string (or Expression with resultType string). - :paramtype function_name: any + :keyword function_name: Name of the Function that the Azure Function Activity will call. Type: + string (or Expression with resultType string). Required. + :paramtype function_name: JSON :keyword headers: Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :paramtype headers: any + :paramtype headers: JSON :keyword body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). - :paramtype body: any - """ - super(AzureFunctionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'AzureFunctionActivity' # type: str + :paramtype body: JSON + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "AzureFunctionActivity" # type: str self.method = method self.function_name = function_name self.headers = headers self.body = body -class AzureFunctionLinkedService(LinkedService): +class AzureFunctionLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Azure Function linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -8454,65 +9422,65 @@ class AzureFunctionLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar function_app_url: Required. The endpoint of the Azure Function App. URL will be in the - format https://:code:``.azurewebsites.net. - :vartype function_app_url: any + :vartype annotations: list[JSON] + :ivar function_app_url: The endpoint of the Azure Function App. URL will be in the format + https://:code:``.azurewebsites.net. Required. + :vartype function_app_url: JSON :ivar function_key: Function or Host key for Azure Function App. :vartype function_key: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON :ivar credential: The credential reference containing authentication information. :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference :ivar resource_id: Allowed token audiences for azure function. - :vartype resource_id: any + :vartype resource_id: JSON :ivar authentication: Type of authentication (Required to specify MSI) used to connect to AzureFunction. Type: string (or Expression with resultType string). - :vartype authentication: any + :vartype authentication: JSON """ _validation = { - 'type': {'required': True}, - 'function_app_url': {'required': True}, + "type": {"required": True}, + "function_app_url": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, - 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, - 'resource_id': {'key': 'typeProperties.resourceId', 'type': 'object'}, - 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "function_app_url": {"key": "typeProperties.functionAppUrl", "type": "object"}, + "function_key": {"key": "typeProperties.functionKey", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, + "resource_id": {"key": "typeProperties.resourceId", "type": "object"}, + "authentication": {"key": "typeProperties.authentication", "type": "object"}, } def __init__( self, *, - function_app_url: Any, - additional_properties: Optional[Dict[str, Any]] = None, + function_app_url: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, function_key: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, credential: Optional["_models.CredentialReference"] = None, - resource_id: Optional[Any] = None, - authentication: Optional[Any] = None, + resource_id: Optional[JSON] = None, + authentication: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -8520,26 +9488,33 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword function_app_url: Required. The endpoint of the Azure Function App. URL will be in the - format https://:code:``.azurewebsites.net. - :paramtype function_app_url: any + :paramtype annotations: list[JSON] + :keyword function_app_url: The endpoint of the Azure Function App. URL will be in the format + https://:code:``.azurewebsites.net. Required. + :paramtype function_app_url: JSON :keyword function_key: Function or Host key for Azure Function App. :paramtype function_key: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any + :paramtype encrypted_credential: JSON :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference :keyword resource_id: Allowed token audiences for azure function. - :paramtype resource_id: any + :paramtype resource_id: JSON :keyword authentication: Type of authentication (Required to specify MSI) used to connect to AzureFunction. Type: string (or Expression with resultType string). - :paramtype authentication: any - """ - super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureFunction' # type: str + :paramtype authentication: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureFunction" # type: str self.function_app_url = function_app_url self.function_key = function_key self.encrypted_credential = encrypted_credential @@ -8555,8 +9530,8 @@ class AzureKeyVaultLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -8565,46 +9540,46 @@ class AzureKeyVaultLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar base_url: Required. The base URL of the Azure Key Vault. e.g. 
- https://myakv.vault.azure.net Type: string (or Expression with resultType string). - :vartype base_url: any + :vartype annotations: list[JSON] + :ivar base_url: The base URL of the Azure Key Vault. e.g. https://myakv.vault.azure.net Type: + string (or Expression with resultType string). Required. + :vartype base_url: JSON :ivar credential: The credential reference containing authentication information. :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { - 'type': {'required': True}, - 'base_url': {'required': True}, + "type": {"required": True}, + "base_url": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "base_url": {"key": "typeProperties.baseUrl", "type": "object"}, + "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, } def __init__( self, *, - base_url: Any, - additional_properties: Optional[Dict[str, Any]] = None, + base_url: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, credential: Optional["_models.CredentialReference"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -8612,50 +9587,51 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword base_url: Required. The base URL of the Azure Key Vault. e.g. - https://myakv.vault.azure.net Type: string (or Expression with resultType string). - :paramtype base_url: any + :paramtype annotations: list[JSON] + :keyword base_url: The base URL of the Azure Key Vault. e.g. https://myakv.vault.azure.net + Type: string (or Expression with resultType string). Required. + :paramtype base_url: JSON :keyword credential: The credential reference containing authentication information. 
:paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ - super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureKeyVault' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureKeyVault" # type: str self.base_url = base_url self.credential = credential -class SecretBase(msrest.serialization.Model): +class SecretBase(_serialization.Model): """The base definition of a secret type. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureKeyVaultSecretReference, SecureString. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AzureKeyVaultSecretReference, SecureString All required parameters must be populated in order to send to Azure. - :ivar type: Required. Type of the secret.Constant filled by server. + :ivar type: Type of the secret. Required. :vartype type: str """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, + "type": {"key": "type", "type": "str"}, } - _subtype_map = { - 'type': {'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference', 'SecureString': 'SecureString'} - } + _subtype_map = {"type": {"AzureKeyVaultSecret": "AzureKeyVaultSecretReference", "SecureString": "SecureString"}} - def __init__( - self, - **kwargs - ): - """ - """ - super(SecretBase, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.type = None # type: Optional[str] @@ -8664,51 +9640,51 @@ class AzureKeyVaultSecretReference(SecretBase): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Type of the secret.Constant filled by server. + :ivar type: Type of the secret. Required. :vartype type: str - :ivar store: Required. The Azure Key Vault linked service reference. + :ivar store: The Azure Key Vault linked service reference. Required. :vartype store: ~azure.mgmt.datafactory.models.LinkedServiceReference - :ivar secret_name: Required. The name of the secret in Azure Key Vault. Type: string (or - Expression with resultType string). - :vartype secret_name: any + :ivar secret_name: The name of the secret in Azure Key Vault. Type: string (or Expression with + resultType string). Required. + :vartype secret_name: JSON :ivar secret_version: The version of the secret in Azure Key Vault. The default value is the latest version of the secret. Type: string (or Expression with resultType string). 
- :vartype secret_version: any + :vartype secret_version: JSON """ _validation = { - 'type': {'required': True}, - 'store': {'required': True}, - 'secret_name': {'required': True}, + "type": {"required": True}, + "store": {"required": True}, + "secret_name": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, - 'secret_name': {'key': 'secretName', 'type': 'object'}, - 'secret_version': {'key': 'secretVersion', 'type': 'object'}, + "type": {"key": "type", "type": "str"}, + "store": {"key": "store", "type": "LinkedServiceReference"}, + "secret_name": {"key": "secretName", "type": "object"}, + "secret_version": {"key": "secretVersion", "type": "object"}, } def __init__( self, *, store: "_models.LinkedServiceReference", - secret_name: Any, - secret_version: Optional[Any] = None, + secret_name: JSON, + secret_version: Optional[JSON] = None, **kwargs ): """ - :keyword store: Required. The Azure Key Vault linked service reference. + :keyword store: The Azure Key Vault linked service reference. Required. :paramtype store: ~azure.mgmt.datafactory.models.LinkedServiceReference - :keyword secret_name: Required. The name of the secret in Azure Key Vault. Type: string (or - Expression with resultType string). - :paramtype secret_name: any + :keyword secret_name: The name of the secret in Azure Key Vault. Type: string (or Expression + with resultType string). Required. + :paramtype secret_name: JSON :keyword secret_version: The version of the secret in Azure Key Vault. The default value is the latest version of the secret. Type: string (or Expression with resultType string). - :paramtype secret_version: any + :paramtype secret_version: JSON """ - super(AzureKeyVaultSecretReference, self).__init__(**kwargs) - self.type = 'AzureKeyVaultSecret' # type: str + super().__init__(**kwargs) + self.type = "AzureKeyVaultSecret" # type: str self.store = store self.secret_name = secret_name self.secret_version = secret_version @@ -8721,8 +9697,8 @@ class AzureMariaDBLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -8731,51 +9707,51 @@ class AzureMariaDBLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype connection_string: JSON :ivar pwd: The Azure key vault secret reference of password in connection string. :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "pwd": {"key": "typeProperties.pwd", "type": "AzureKeyVaultSecretReference"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_string: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_string: Optional[JSON] = None, pwd: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -8783,19 +9759,26 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype connection_string: JSON :keyword pwd: The Azure key vault secret reference of password in connection string. :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :paramtype encrypted_credential: any - """ - super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureMariaDB' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureMariaDB" # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential @@ -8808,89 +9791,98 @@ class AzureMariaDBSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'AzureMariaDBSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "AzureMariaDBSource" # type: str self.query = query @@ -8901,102 +9893,112 @@ class AzureMariaDBTableDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. Type: string (or Expression with resultType string). 
- :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AzureMariaDBTable' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "AzureMariaDBTable" # type: str self.table_name = table_name -class AzureMLBatchExecutionActivity(ExecutionActivity): +class AzureMLBatchExecutionActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """Azure ML Batch Execution activity. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -9012,7 +10014,7 @@ class AzureMLBatchExecutionActivity(ExecutionActivity): endpoint. Keys must match the names of web service parameters defined in the published Azure ML web service. Values will be passed in the GlobalParameters property of the Azure ML batch execution request. - :vartype global_parameters: dict[str, any] + :vartype global_parameters: dict[str, JSON] :ivar web_service_outputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. 
This information will be passed in the WebServiceOutputs property of the Azure ML batch execution @@ -9025,35 +10027,35 @@ class AzureMLBatchExecutionActivity(ExecutionActivity): """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'}, - 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'}, - 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "global_parameters": {"key": "typeProperties.globalParameters", "type": "{object}"}, + "web_service_outputs": {"key": "typeProperties.webServiceOutputs", "type": "{AzureMLWebServiceFile}"}, + "web_service_inputs": {"key": "typeProperties.webServiceInputs", "type": "{AzureMLWebServiceFile}"}, } def __init__( self, *, name: str, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, - global_parameters: Optional[Dict[str, Any]] = None, + global_parameters: Optional[Dict[str, JSON]] = None, web_service_outputs: Optional[Dict[str, "_models.AzureMLWebServiceFile"]] = None, web_service_inputs: Optional[Dict[str, "_models.AzureMLWebServiceFile"]] = None, **kwargs @@ -9061,8 +10063,8 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -9078,7 +10080,7 @@ def __init__( Service endpoint. Keys must match the names of web service parameters defined in the published Azure ML web service. Values will be passed in the GlobalParameters property of the Azure ML batch execution request. 
- :paramtype global_parameters: dict[str, any] + :paramtype global_parameters: dict[str, JSON] :keyword web_service_outputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This information will be passed in the WebServiceOutputs property of the Azure ML batch execution @@ -9090,24 +10092,33 @@ def __init__( request. :paramtype web_service_inputs: dict[str, ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] """ - super(AzureMLBatchExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'AzureMLBatchExecution' # type: str + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "AzureMLBatchExecution" # type: str self.global_parameters = global_parameters self.web_service_outputs = web_service_outputs self.web_service_inputs = web_service_inputs -class AzureMLExecutePipelineActivity(ExecutionActivity): +class AzureMLExecutePipelineActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """Azure ML Execute Pipeline activity. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -9121,85 +10132,85 @@ class AzureMLExecutePipelineActivity(ExecutionActivity): :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy :ivar ml_pipeline_id: ID of the published Azure ML pipeline. Type: string (or Expression with resultType string). - :vartype ml_pipeline_id: any + :vartype ml_pipeline_id: JSON :ivar ml_pipeline_endpoint_id: ID of the published Azure ML pipeline endpoint. Type: string (or Expression with resultType string). - :vartype ml_pipeline_endpoint_id: any + :vartype ml_pipeline_endpoint_id: JSON :ivar version: Version of the published Azure ML pipeline endpoint. Type: string (or Expression with resultType string). - :vartype version: any + :vartype version: JSON :ivar experiment_name: Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with resultType string). - :vartype experiment_name: any + :vartype experiment_name: JSON :ivar ml_pipeline_parameters: Key,Value pairs to be passed to the published Azure ML pipeline endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. Values will be passed in the ParameterAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). 
- :vartype ml_pipeline_parameters: any + :vartype ml_pipeline_parameters: JSON :ivar data_path_assignments: Dictionary used for changing data path assignments without retraining. Values will be passed in the dataPathAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). - :vartype data_path_assignments: any + :vartype data_path_assignments: JSON :ivar ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType string). - :vartype ml_parent_run_id: any + :vartype ml_parent_run_id: JSON :ivar continue_on_step_failure: Whether to continue execution of other steps in the PipelineRun if a step fails. This information will be passed in the continueOnStepFailure property of the published pipeline execution request. Type: boolean (or Expression with resultType boolean). - :vartype continue_on_step_failure: any + :vartype continue_on_step_failure: JSON """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'ml_pipeline_id': {'key': 'typeProperties.mlPipelineId', 'type': 'object'}, - 'ml_pipeline_endpoint_id': {'key': 'typeProperties.mlPipelineEndpointId', 'type': 'object'}, - 'version': {'key': 'typeProperties.version', 'type': 'object'}, - 'experiment_name': {'key': 'typeProperties.experimentName', 'type': 'object'}, - 'ml_pipeline_parameters': {'key': 'typeProperties.mlPipelineParameters', 'type': 'object'}, - 'data_path_assignments': {'key': 'typeProperties.dataPathAssignments', 'type': 'object'}, - 'ml_parent_run_id': {'key': 'typeProperties.mlParentRunId', 'type': 'object'}, - 'continue_on_step_failure': {'key': 'typeProperties.continueOnStepFailure', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "ml_pipeline_id": {"key": "typeProperties.mlPipelineId", "type": "object"}, + "ml_pipeline_endpoint_id": {"key": "typeProperties.mlPipelineEndpointId", "type": "object"}, + "version": {"key": "typeProperties.version", "type": "object"}, + "experiment_name": {"key": "typeProperties.experimentName", "type": "object"}, + "ml_pipeline_parameters": {"key": "typeProperties.mlPipelineParameters", "type": "object"}, + "data_path_assignments": {"key": "typeProperties.dataPathAssignments", "type": "object"}, + "ml_parent_run_id": {"key": "typeProperties.mlParentRunId", "type": "object"}, + "continue_on_step_failure": {"key": 
"typeProperties.continueOnStepFailure", "type": "object"}, } def __init__( self, *, name: str, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, - ml_pipeline_id: Optional[Any] = None, - ml_pipeline_endpoint_id: Optional[Any] = None, - version: Optional[Any] = None, - experiment_name: Optional[Any] = None, - ml_pipeline_parameters: Optional[Any] = None, - data_path_assignments: Optional[Any] = None, - ml_parent_run_id: Optional[Any] = None, - continue_on_step_failure: Optional[Any] = None, + ml_pipeline_id: Optional[JSON] = None, + ml_pipeline_endpoint_id: Optional[JSON] = None, + version: Optional[JSON] = None, + experiment_name: Optional[JSON] = None, + ml_pipeline_parameters: Optional[JSON] = None, + data_path_assignments: Optional[JSON] = None, + ml_parent_run_id: Optional[JSON] = None, + continue_on_step_failure: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -9213,38 +10224,47 @@ def __init__( :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy :keyword ml_pipeline_id: ID of the published Azure ML pipeline. Type: string (or Expression with resultType string). - :paramtype ml_pipeline_id: any + :paramtype ml_pipeline_id: JSON :keyword ml_pipeline_endpoint_id: ID of the published Azure ML pipeline endpoint. Type: string (or Expression with resultType string). - :paramtype ml_pipeline_endpoint_id: any + :paramtype ml_pipeline_endpoint_id: JSON :keyword version: Version of the published Azure ML pipeline endpoint. Type: string (or Expression with resultType string). - :paramtype version: any + :paramtype version: JSON :keyword experiment_name: Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with resultType string). - :paramtype experiment_name: any + :paramtype experiment_name: JSON :keyword ml_pipeline_parameters: Key,Value pairs to be passed to the published Azure ML pipeline endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. Values will be passed in the ParameterAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). - :paramtype ml_pipeline_parameters: any + :paramtype ml_pipeline_parameters: JSON :keyword data_path_assignments: Dictionary used for changing data path assignments without retraining. Values will be passed in the dataPathAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). - :paramtype data_path_assignments: any + :paramtype data_path_assignments: JSON :keyword ml_parent_run_id: The parent Azure ML Service pipeline run id. 
This information will be passed in the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType string). - :paramtype ml_parent_run_id: any + :paramtype ml_parent_run_id: JSON :keyword continue_on_step_failure: Whether to continue execution of other steps in the PipelineRun if a step fails. This information will be passed in the continueOnStepFailure property of the published pipeline execution request. Type: boolean (or Expression with resultType boolean). - :paramtype continue_on_step_failure: any - """ - super(AzureMLExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'AzureMLExecutePipeline' # type: str + :paramtype continue_on_step_failure: JSON + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "AzureMLExecutePipeline" # type: str self.ml_pipeline_id = ml_pipeline_id self.ml_pipeline_endpoint_id = ml_pipeline_endpoint_id self.version = version @@ -9255,15 +10275,15 @@ def __init__( self.continue_on_step_failure = continue_on_step_failure -class AzureMLLinkedService(LinkedService): +class AzureMLLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Azure ML Studio Web Service linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -9272,79 +10292,79 @@ class AzureMLLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service - endpoint. Type: string (or Expression with resultType string). - :vartype ml_endpoint: any - :ivar api_key: Required. The API key for accessing the Azure ML model endpoint. + :vartype annotations: list[JSON] + :ivar ml_endpoint: The Batch Execution REST URL for an Azure ML Studio Web Service endpoint. + Type: string (or Expression with resultType string). Required. + :vartype ml_endpoint: JSON + :ivar api_key: The API key for accessing the Azure ML model endpoint. Required. :vartype api_key: ~azure.mgmt.datafactory.models.SecretBase :ivar update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). - :vartype update_resource_endpoint: any + :vartype update_resource_endpoint: JSON :ivar service_principal_id: The ID of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. 
Type: string (or Expression with resultType string). - :vartype service_principal_id: any + :vartype service_principal_id: JSON :ivar service_principal_key: The key of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :vartype tenant: any + :vartype tenant: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON :ivar authentication: Type of authentication (Required to specify MSI) used to connect to AzureML. Type: string (or Expression with resultType string). - :vartype authentication: any + :vartype authentication: JSON """ _validation = { - 'type': {'required': True}, - 'ml_endpoint': {'required': True}, - 'api_key': {'required': True}, + "type": {"required": True}, + "ml_endpoint": {"required": True}, + "api_key": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, - 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, - 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "ml_endpoint": {"key": "typeProperties.mlEndpoint", "type": "object"}, + "api_key": {"key": "typeProperties.apiKey", "type": "SecretBase"}, + "update_resource_endpoint": {"key": "typeProperties.updateResourceEndpoint", "type": "object"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, + "tenant": {"key": "typeProperties.tenant", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "authentication": {"key": "typeProperties.authentication", "type": "object"}, } def __init__( self, *, - ml_endpoint: Any, + ml_endpoint: JSON, api_key: "_models.SecretBase", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, 
JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - update_resource_endpoint: Optional[Any] = None, - service_principal_id: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + update_resource_endpoint: Optional[JSON] = None, + service_principal_id: Optional[JSON] = None, service_principal_key: Optional["_models.SecretBase"] = None, - tenant: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, - authentication: Optional[Any] = None, + tenant: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, + authentication: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -9352,35 +10372,42 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service - endpoint. Type: string (or Expression with resultType string). - :paramtype ml_endpoint: any - :keyword api_key: Required. The API key for accessing the Azure ML model endpoint. + :paramtype annotations: list[JSON] + :keyword ml_endpoint: The Batch Execution REST URL for an Azure ML Studio Web Service endpoint. + Type: string (or Expression with resultType string). Required. + :paramtype ml_endpoint: JSON + :keyword api_key: The API key for accessing the Azure ML model endpoint. Required. :paramtype api_key: ~azure.mgmt.datafactory.models.SecretBase :keyword update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). - :paramtype update_resource_endpoint: any + :paramtype update_resource_endpoint: JSON :keyword service_principal_id: The ID of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression with resultType string). - :paramtype service_principal_id: any + :paramtype service_principal_id: JSON :keyword service_principal_key: The key of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :paramtype tenant: any + :paramtype tenant: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any + :paramtype encrypted_credential: JSON :keyword authentication: Type of authentication (Required to specify MSI) used to connect to AzureML. 
Type: string (or Expression with resultType string). - :paramtype authentication: any - """ - super(AzureMLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureML' # type: str + :paramtype authentication: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureML" # type: str self.ml_endpoint = ml_endpoint self.api_key = api_key self.update_resource_endpoint = update_resource_endpoint @@ -9391,15 +10418,15 @@ def __init__( self.authentication = authentication -class AzureMLServiceLinkedService(LinkedService): +class AzureMLServiceLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Azure ML Service linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -9408,76 +10435,76 @@ class AzureMLServiceLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar subscription_id: Required. Azure ML Service workspace subscription ID. Type: string (or - Expression with resultType string). - :vartype subscription_id: any - :ivar resource_group_name: Required. Azure ML Service workspace resource group name. Type: - string (or Expression with resultType string). - :vartype resource_group_name: any - :ivar ml_workspace_name: Required. Azure ML Service workspace name. Type: string (or Expression - with resultType string). - :vartype ml_workspace_name: any + :vartype annotations: list[JSON] + :ivar subscription_id: Azure ML Service workspace subscription ID. Type: string (or Expression + with resultType string). Required. + :vartype subscription_id: JSON + :ivar resource_group_name: Azure ML Service workspace resource group name. Type: string (or + Expression with resultType string). Required. + :vartype resource_group_name: JSON + :ivar ml_workspace_name: Azure ML Service workspace name. Type: string (or Expression with + resultType string). Required. + :vartype ml_workspace_name: JSON :ivar service_principal_id: The ID of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType string). - :vartype service_principal_id: any + :vartype service_principal_id: JSON :ivar service_principal_key: The key of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). 
- :vartype tenant: any + :vartype tenant: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'subscription_id': {'required': True}, - 'resource_group_name': {'required': True}, - 'ml_workspace_name': {'required': True}, + "type": {"required": True}, + "subscription_id": {"required": True}, + "resource_group_name": {"required": True}, + "ml_workspace_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'ml_workspace_name': {'key': 'typeProperties.mlWorkspaceName', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "subscription_id": {"key": "typeProperties.subscriptionId", "type": "object"}, + "resource_group_name": {"key": "typeProperties.resourceGroupName", "type": "object"}, + "ml_workspace_name": {"key": "typeProperties.mlWorkspaceName", "type": "object"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, + "tenant": {"key": "typeProperties.tenant", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - subscription_id: Any, - resource_group_name: Any, - ml_workspace_name: Any, - additional_properties: Optional[Dict[str, Any]] = None, + subscription_id: JSON, + resource_group_name: JSON, + ml_workspace_name: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - service_principal_id: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + service_principal_id: Optional[JSON] = None, service_principal_key: Optional["_models.SecretBase"] = None, - tenant: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + tenant: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: 
Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -9485,33 +10512,40 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword subscription_id: Required. Azure ML Service workspace subscription ID. Type: string - (or Expression with resultType string). - :paramtype subscription_id: any - :keyword resource_group_name: Required. Azure ML Service workspace resource group name. Type: - string (or Expression with resultType string). - :paramtype resource_group_name: any - :keyword ml_workspace_name: Required. Azure ML Service workspace name. Type: string (or - Expression with resultType string). - :paramtype ml_workspace_name: any + :paramtype annotations: list[JSON] + :keyword subscription_id: Azure ML Service workspace subscription ID. Type: string (or + Expression with resultType string). Required. + :paramtype subscription_id: JSON + :keyword resource_group_name: Azure ML Service workspace resource group name. Type: string (or + Expression with resultType string). Required. + :paramtype resource_group_name: JSON + :keyword ml_workspace_name: Azure ML Service workspace name. Type: string (or Expression with + resultType string). Required. + :paramtype ml_workspace_name: JSON :keyword service_principal_id: The ID of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType string). - :paramtype service_principal_id: any + :paramtype service_principal_id: JSON :keyword service_principal_key: The key of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :paramtype tenant: any + :paramtype tenant: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :paramtype encrypted_credential: any - """ - super(AzureMLServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureMLService' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureMLService" # type: str self.subscription_id = subscription_id self.resource_group_name = resource_group_name self.ml_workspace_name = ml_workspace_name @@ -9521,17 +10555,17 @@ def __init__( self.encrypted_credential = encrypted_credential -class AzureMLUpdateResourceActivity(ExecutionActivity): +class AzureMLUpdateResourceActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """Azure ML Update Resource management activity. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -9543,49 +10577,52 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar policy: Activity policy. :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :ivar trained_model_name: Required. Name of the Trained Model module in the Web Service - experiment to be updated. Type: string (or Expression with resultType string). - :vartype trained_model_name: any - :ivar trained_model_linked_service_name: Required. Name of Azure Storage linked service holding - the .ilearner file that will be uploaded by the update operation. + :ivar trained_model_name: Name of the Trained Model module in the Web Service experiment to be + updated. Type: string (or Expression with resultType string). Required. + :vartype trained_model_name: JSON + :ivar trained_model_linked_service_name: Name of Azure Storage linked service holding the + .ilearner file that will be uploaded by the update operation. Required. :vartype trained_model_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :ivar trained_model_file_path: Required. The relative file path in trainedModelLinkedService to - represent the .ilearner file that will be uploaded by the update operation. Type: string (or - Expression with resultType string). - :vartype trained_model_file_path: any + :ivar trained_model_file_path: The relative file path in trainedModelLinkedService to represent + the .ilearner file that will be uploaded by the update operation. Type: string (or Expression + with resultType string). Required. 
+ :vartype trained_model_file_path: JSON """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'trained_model_name': {'required': True}, - 'trained_model_linked_service_name': {'required': True}, - 'trained_model_file_path': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "trained_model_name": {"required": True}, + "trained_model_linked_service_name": {"required": True}, + "trained_model_file_path": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'}, - 'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "trained_model_name": {"key": "typeProperties.trainedModelName", "type": "object"}, + "trained_model_linked_service_name": { + "key": "typeProperties.trainedModelLinkedServiceName", + "type": "LinkedServiceReference", + }, + "trained_model_file_path": {"key": "typeProperties.trainedModelFilePath", "type": "object"}, } def __init__( self, *, name: str, - trained_model_name: Any, + trained_model_name: JSON, trained_model_linked_service_name: "_models.LinkedServiceReference", - trained_model_file_path: Any, - additional_properties: Optional[Dict[str, Any]] = None, + trained_model_file_path: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, @@ -9596,8 +10633,8 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -9609,65 +10646,67 @@ def __init__( :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword policy: Activity policy. :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :keyword trained_model_name: Required. Name of the Trained Model module in the Web Service - experiment to be updated. Type: string (or Expression with resultType string). - :paramtype trained_model_name: any - :keyword trained_model_linked_service_name: Required. 
Name of Azure Storage linked service - holding the .ilearner file that will be uploaded by the update operation. + :keyword trained_model_name: Name of the Trained Model module in the Web Service experiment to + be updated. Type: string (or Expression with resultType string). Required. + :paramtype trained_model_name: JSON + :keyword trained_model_linked_service_name: Name of Azure Storage linked service holding the + .ilearner file that will be uploaded by the update operation. Required. :paramtype trained_model_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :keyword trained_model_file_path: Required. The relative file path in trainedModelLinkedService - to represent the .ilearner file that will be uploaded by the update operation. Type: string - (or Expression with resultType string). - :paramtype trained_model_file_path: any - """ - super(AzureMLUpdateResourceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'AzureMLUpdateResource' # type: str + :keyword trained_model_file_path: The relative file path in trainedModelLinkedService to + represent the .ilearner file that will be uploaded by the update operation. Type: string (or + Expression with resultType string). Required. + :paramtype trained_model_file_path: JSON + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "AzureMLUpdateResource" # type: str self.trained_model_name = trained_model_name self.trained_model_linked_service_name = trained_model_linked_service_name self.trained_model_file_path = trained_model_file_path -class AzureMLWebServiceFile(msrest.serialization.Model): +class AzureMLWebServiceFile(_serialization.Model): """Azure ML WebService Input/Output file. All required parameters must be populated in order to send to Azure. - :ivar file_path: Required. The relative file path, including container name, in the Azure Blob - Storage specified by the LinkedService. Type: string (or Expression with resultType string). - :vartype file_path: any - :ivar linked_service_name: Required. Reference to an Azure Storage LinkedService, where Azure - ML WebService Input/Output file located. + :ivar file_path: The relative file path, including container name, in the Azure Blob Storage + specified by the LinkedService. Type: string (or Expression with resultType string). Required. + :vartype file_path: JSON + :ivar linked_service_name: Reference to an Azure Storage LinkedService, where Azure ML + WebService Input/Output file located. Required. 
:vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { - 'file_path': {'required': True}, - 'linked_service_name': {'required': True}, + "file_path": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'file_path': {'key': 'filePath', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + "file_path": {"key": "filePath", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, } - def __init__( - self, - *, - file_path: Any, - linked_service_name: "_models.LinkedServiceReference", - **kwargs - ): + def __init__(self, *, file_path: JSON, linked_service_name: "_models.LinkedServiceReference", **kwargs): """ - :keyword file_path: Required. The relative file path, including container name, in the Azure - Blob Storage specified by the LinkedService. Type: string (or Expression with resultType - string). - :paramtype file_path: any - :keyword linked_service_name: Required. Reference to an Azure Storage LinkedService, where - Azure ML WebService Input/Output file located. + :keyword file_path: The relative file path, including container name, in the Azure Blob Storage + specified by the LinkedService. Type: string (or Expression with resultType string). Required. + :paramtype file_path: JSON + :keyword linked_service_name: Reference to an Azure Storage LinkedService, where Azure ML + WebService Input/Output file located. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference """ - super(AzureMLWebServiceFile, self).__init__(**kwargs) + super().__init__(**kwargs) self.file_path = file_path self.linked_service_name = linked_service_name @@ -9679,8 +10718,8 @@ class AzureMySqlLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -9689,52 +10728,52 @@ class AzureMySqlLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype annotations: list[JSON] + :ivar connection_string: The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Required. + :vartype connection_string: JSON :ivar password: The Azure key vault secret reference of password in connection string. :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, + "type": {"required": True}, + "connection_string": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "AzureKeyVaultSecretReference"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - connection_string: Any, - additional_properties: Optional[Dict[str, Any]] = None, + connection_string: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, password: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -9742,19 +10781,26 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype annotations: list[JSON] + :keyword connection_string: The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Required. + :paramtype connection_string: JSON :keyword password: The Azure key vault secret reference of password in connection string. :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(AzureMySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureMySql' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureMySql" # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential @@ -9767,89 +10813,98 @@ class AzureMySqlSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
- :vartype pre_copy_script: any + :vartype pre_copy_script: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "pre_copy_script": {"key": "preCopyScript", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - pre_copy_script: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + pre_copy_script: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. 
Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). - :paramtype pre_copy_script: any - """ - super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AzureMySqlSink' # type: str + :paramtype pre_copy_script: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AzureMySqlSink" # type: str self.pre_copy_script = pre_copy_script @@ -9860,189 +10915,208 @@ class AzureMySqlSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: Database query. Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: Database query. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'AzureMySqlSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "AzureMySqlSource" # type: str self.query = query -class AzureMySqlTableDataset(Dataset): +class AzureMySqlTableDataset(Dataset): # pylint: disable=too-many-instance-attributes """The Azure MySQL database dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The Azure MySQL database table name. Type: string (or Expression with resultType string). - :vartype table_name: any + :vartype table_name: JSON :ivar table: The name of Azure MySQL database table. Type: string (or Expression with resultType string). 
- :vartype table: any + :vartype table: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, - table: Optional[Any] = None, + table_name: Optional[JSON] = None, + table: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. 
- :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The Azure MySQL database table name. Type: string (or Expression with resultType string). - :paramtype table_name: any + :paramtype table_name: JSON :keyword table: The name of Azure MySQL database table. Type: string (or Expression with resultType string). - :paramtype table: any - """ - super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AzureMySqlTable' # type: str + :paramtype table: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "AzureMySqlTable" # type: str self.table_name = table_name self.table = table @@ -10054,8 +11128,8 @@ class AzurePostgreSqlLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -10064,51 +11138,51 @@ class AzurePostgreSqlLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype connection_string: JSON :ivar password: The Azure key vault secret reference of password in connection string. :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "AzureKeyVaultSecretReference"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_string: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_string: Optional[JSON] = None, password: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -10116,19 +11190,26 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype connection_string: JSON :keyword password: The Azure key vault secret reference of password in connection string. :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :paramtype encrypted_credential: any - """ - super(AzurePostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzurePostgreSql' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzurePostgreSql" # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential @@ -10141,89 +11222,98 @@ class AzurePostgreSqlSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
- :vartype pre_copy_script: any + :vartype pre_copy_script: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "pre_copy_script": {"key": "preCopyScript", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - pre_copy_script: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + pre_copy_script: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. 
Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). - :paramtype pre_copy_script: any - """ - super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AzurePostgreSqlSink' # type: str + :paramtype pre_copy_script: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AzurePostgreSqlSink" # type: str self.pre_copy_script = pre_copy_script @@ -10234,199 +11324,218 @@ class AzurePostgreSqlSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'AzurePostgreSqlSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "AzurePostgreSqlSource" # type: str self.query = query -class AzurePostgreSqlTableDataset(Dataset): +class AzurePostgreSqlTableDataset(Dataset): # pylint: disable=too-many-instance-attributes """Azure PostgreSQL dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name of the Azure PostgreSQL database which includes both schema and table. Type: string (or Expression with resultType string). - :vartype table_name: any + :vartype table_name: JSON :ivar table: The table name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). - :vartype table: any + :vartype table: JSON :ivar schema_type_properties_schema: The schema name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). 
- :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, - table: Optional[Any] = None, - schema_type_properties_schema: Optional[Any] = None, + table_name: Optional[JSON] = None, + table: Optional[JSON] = None, + schema_type_properties_schema: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. 
:paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name of the Azure PostgreSQL database which includes both schema and table. Type: string (or Expression with resultType string). - :paramtype table_name: any + :paramtype table_name: JSON :keyword table: The table name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). - :paramtype table: any + :paramtype table: JSON :keyword schema_type_properties_schema: The schema name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). - :paramtype schema_type_properties_schema: any - """ - super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AzurePostgreSqlTable' # type: str + :paramtype schema_type_properties_schema: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "AzurePostgreSqlTable" # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -10439,81 +11548,90 @@ class AzureQueueSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON """ - super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AzureQueueSink' # type: str + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AzureQueueSink" # type: str class AzureSearchIndexDataset(Dataset): @@ -10523,91 +11641,101 @@ class AzureSearchIndexDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :ivar index_name: Required. The name of the Azure Search Index. Type: string (or Expression - with resultType string). - :vartype index_name: any + :ivar index_name: The name of the Azure Search Index. Type: string (or Expression with + resultType string). Required. 
+ :vartype index_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'index_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, + "index_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "index_name": {"key": "typeProperties.indexName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - index_name: Any, - additional_properties: Optional[Dict[str, Any]] = None, + index_name: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :keyword index_name: Required. The name of the Azure Search Index. Type: string (or Expression - with resultType string). - :paramtype index_name: any - """ - super(AzureSearchIndexDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AzureSearchIndex' # type: str + :keyword index_name: The name of the Azure Search Index. Type: string (or Expression with + resultType string). Required. + :paramtype index_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "AzureSearchIndex" # type: str self.index_name = index_name @@ -10618,91 +11746,100 @@ class AzureSearchIndexSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar write_behavior: Specify the write behavior when upserting documents into Azure Search - Index. Known values are: "Merge", "Upload". + Index. Known values are: "Merge" and "Upload". 
:vartype write_behavior: str or ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "write_behavior": {"key": "writeBehavior", "type": "str"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, write_behavior: Optional[Union[str, "_models.AzureSearchIndexWriteBehaviorType"]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). 
- :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword write_behavior: Specify the write behavior when upserting documents into Azure Search - Index. Known values are: "Merge", "Upload". + Index. Known values are: "Merge" and "Upload". :paramtype write_behavior: str or ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType """ - super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AzureSearchIndexSink' # type: str + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AzureSearchIndexSink" # type: str self.write_behavior = write_behavior @@ -10713,8 +11850,8 @@ class AzureSearchLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -10723,52 +11860,52 @@ class AzureSearchLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar url: Required. URL for Azure Search service. Type: string (or Expression with resultType - string). - :vartype url: any + :vartype annotations: list[JSON] + :ivar url: URL for Azure Search service. Type: string (or Expression with resultType string). + Required. + :vartype url: JSON :ivar key: Admin Key for Azure Search service. :vartype key: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'url': {'required': True}, + "type": {"required": True}, + "url": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'key': {'key': 'typeProperties.key', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "url": {"key": "typeProperties.url", "type": "object"}, + "key": {"key": "typeProperties.key", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - url: Any, - additional_properties: Optional[Dict[str, Any]] = None, + url: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, key: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -10776,33 +11913,40 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword url: Required. URL for Azure Search service. Type: string (or Expression with - resultType string). - :paramtype url: any + :paramtype annotations: list[JSON] + :keyword url: URL for Azure Search service. Type: string (or Expression with resultType + string). Required. + :paramtype url: JSON :keyword key: Admin Key for Azure Search service. :paramtype key: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :paramtype encrypted_credential: any - """ - super(AzureSearchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureSearch' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureSearch" # type: str self.url = url self.key = key self.encrypted_credential = encrypted_credential -class AzureSqlDatabaseLinkedService(LinkedService): +class AzureSqlDatabaseLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Microsoft Azure SQL Database linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -10811,29 +11955,29 @@ class AzureSqlDatabaseLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype annotations: list[JSON] + :ivar connection_string: The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Required. + :vartype connection_string: JSON :ivar password: The Azure key vault secret reference of password in connection string. :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar service_principal_id: The ID of the service principal used to authenticate against Azure SQL Database. Type: string (or Expression with resultType string). - :vartype service_principal_id: any + :vartype service_principal_id: JSON :ivar service_principal_key: The key of the service principal used to authenticate against Azure SQL Database. :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :vartype tenant: any + :vartype tenant: JSON :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :vartype azure_cloud_type: any + :vartype azure_cloud_type: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON :ivar always_encrypted_settings: Sql always encrypted properties. 
:vartype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties :ivar credential: The credential reference containing authentication information. @@ -10841,43 +11985,46 @@ class AzureSqlDatabaseLinkedService(LinkedService): """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, + "type": {"required": True}, + "connection_string": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "AzureKeyVaultSecretReference"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, + "tenant": {"key": "typeProperties.tenant", "type": "object"}, + "azure_cloud_type": {"key": "typeProperties.azureCloudType", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "always_encrypted_settings": { + "key": "typeProperties.alwaysEncryptedSettings", + "type": "SqlAlwaysEncryptedProperties", + }, + "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, } def __init__( self, *, - connection_string: Any, - additional_properties: Optional[Dict[str, Any]] = None, + connection_string: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, password: Optional["_models.AzureKeyVaultSecretReference"] = None, - service_principal_id: Optional[Any] = None, + service_principal_id: Optional[JSON] = None, service_principal_key: Optional["_models.SecretBase"] = None, - tenant: Optional[Any] = None, - azure_cloud_type: 
Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + tenant: Optional[JSON] = None, + azure_cloud_type: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, always_encrypted_settings: Optional["_models.SqlAlwaysEncryptedProperties"] = None, credential: Optional["_models.CredentialReference"] = None, **kwargs @@ -10885,7 +12032,7 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -10893,37 +12040,44 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype annotations: list[JSON] + :keyword connection_string: The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Required. + :paramtype connection_string: JSON :keyword password: The Azure key vault secret reference of password in connection string. :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword service_principal_id: The ID of the service principal used to authenticate against Azure SQL Database. Type: string (or Expression with resultType string). - :paramtype service_principal_id: any + :paramtype service_principal_id: JSON :keyword service_principal_key: The key of the service principal used to authenticate against Azure SQL Database. :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :paramtype tenant: any + :paramtype tenant: JSON :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :paramtype azure_cloud_type: any + :paramtype azure_cloud_type: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any + :paramtype encrypted_credential: JSON :keyword always_encrypted_settings: Sql always encrypted properties. :paramtype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties :keyword credential: The credential reference containing authentication information. 
:paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ - super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureSqlDatabase' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureSqlDatabase" # type: str self.connection_string = connection_string self.password = password self.service_principal_id = service_principal_id @@ -10935,15 +12089,15 @@ def __init__( self.credential = credential -class AzureSqlDWLinkedService(LinkedService): +class AzureSqlDWLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Azure SQL Data Warehouse linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -10952,77 +12106,78 @@ class AzureSqlDWLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar connection_string: Required. The connection string. Type: string, SecureString or + :vartype annotations: list[JSON] + :ivar connection_string: The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :vartype connection_string: any + Required. + :vartype connection_string: JSON :ivar password: The Azure key vault secret reference of password in connection string. :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar service_principal_id: The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). - :vartype service_principal_id: any + :vartype service_principal_id: JSON :ivar service_principal_key: The key of the service principal used to authenticate against Azure SQL Data Warehouse. :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :vartype tenant: any + :vartype tenant: JSON :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :vartype azure_cloud_type: any + :vartype azure_cloud_type: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON :ivar credential: The credential reference containing authentication information. :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, + "type": {"required": True}, + "connection_string": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "AzureKeyVaultSecretReference"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, + "tenant": {"key": "typeProperties.tenant", "type": "object"}, + "azure_cloud_type": {"key": "typeProperties.azureCloudType", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, } def __init__( self, *, - connection_string: Any, - additional_properties: Optional[Dict[str, Any]] = None, + connection_string: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, password: Optional["_models.AzureKeyVaultSecretReference"] = None, - service_principal_id: Optional[Any] = None, + service_principal_id: Optional[JSON] = None, service_principal_key: Optional["_models.SecretBase"] = None, - tenant: Optional[Any] = None, - azure_cloud_type: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + tenant: Optional[JSON] = None, + azure_cloud_type: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, credential: Optional["_models.CredentialReference"] = None, **kwargs ): """ :keyword 
additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -11030,34 +12185,42 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword connection_string: Required. The connection string. Type: string, SecureString or + :paramtype annotations: list[JSON] + :keyword connection_string: The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype connection_string: any + Required. + :paramtype connection_string: JSON :keyword password: The Azure key vault secret reference of password in connection string. :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword service_principal_id: The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). - :paramtype service_principal_id: any + :paramtype service_principal_id: JSON :keyword service_principal_key: The key of the service principal used to authenticate against Azure SQL Data Warehouse. :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :paramtype tenant: any + :paramtype tenant: JSON :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :paramtype azure_cloud_type: any + :paramtype azure_cloud_type: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any + :paramtype encrypted_credential: JSON :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ - super(AzureSqlDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureSqlDW' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureSqlDW" # type: str self.connection_string = connection_string self.password = password self.service_principal_id = service_principal_id @@ -11068,127 +12231,137 @@ def __init__( self.credential = credential -class AzureSqlDWTableDataset(Dataset): +class AzureSqlDWTableDataset(Dataset): # pylint: disable=too-many-instance-attributes """The Azure SQL Data Warehouse dataset. 
All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :vartype table_name: any + :vartype table_name: JSON :ivar schema_type_properties_schema: The schema name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). - :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON :ivar table: The table name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). 
- :vartype table: any + :vartype table: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, - schema_type_properties_schema: Optional[Any] = None, - table: Optional[Any] = None, + table_name: Optional[JSON] = None, + schema_type_properties_schema: Optional[JSON] = None, + table: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. 
:paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: This property will be retired. Please consider using schema + table properties instead. - :paramtype table_name: any + :paramtype table_name: JSON :keyword schema_type_properties_schema: The schema name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). - :paramtype schema_type_properties_schema: any + :paramtype schema_type_properties_schema: JSON :keyword table: The table name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). - :paramtype table: any - """ - super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AzureSqlDWTable' # type: str + :paramtype table: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "AzureSqlDWTable" # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table -class AzureSqlMILinkedService(LinkedService): +class AzureSqlMILinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Azure SQL Managed Instance linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -11197,29 +12370,29 @@ class AzureSqlMILinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype annotations: list[JSON] + :ivar connection_string: The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Required. + :vartype connection_string: JSON :ivar password: The Azure key vault secret reference of password in connection string. :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar service_principal_id: The ID of the service principal used to authenticate against Azure SQL Managed Instance. 
Type: string (or Expression with resultType string). - :vartype service_principal_id: any + :vartype service_principal_id: JSON :ivar service_principal_key: The key of the service principal used to authenticate against Azure SQL Managed Instance. :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :vartype tenant: any + :vartype tenant: JSON :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :vartype azure_cloud_type: any + :vartype azure_cloud_type: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON :ivar always_encrypted_settings: Sql always encrypted properties. :vartype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties :ivar credential: The credential reference containing authentication information. @@ -11227,43 +12400,46 @@ class AzureSqlMILinkedService(LinkedService): """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, + "type": {"required": True}, + "connection_string": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "AzureKeyVaultSecretReference"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, + "tenant": 
{"key": "typeProperties.tenant", "type": "object"}, + "azure_cloud_type": {"key": "typeProperties.azureCloudType", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "always_encrypted_settings": { + "key": "typeProperties.alwaysEncryptedSettings", + "type": "SqlAlwaysEncryptedProperties", + }, + "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, } def __init__( self, *, - connection_string: Any, - additional_properties: Optional[Dict[str, Any]] = None, + connection_string: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, password: Optional["_models.AzureKeyVaultSecretReference"] = None, - service_principal_id: Optional[Any] = None, + service_principal_id: Optional[JSON] = None, service_principal_key: Optional["_models.SecretBase"] = None, - tenant: Optional[Any] = None, - azure_cloud_type: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + tenant: Optional[JSON] = None, + azure_cloud_type: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, always_encrypted_settings: Optional["_models.SqlAlwaysEncryptedProperties"] = None, credential: Optional["_models.CredentialReference"] = None, **kwargs @@ -11271,7 +12447,7 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -11279,37 +12455,44 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype annotations: list[JSON] + :keyword connection_string: The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Required. + :paramtype connection_string: JSON :keyword password: The Azure key vault secret reference of password in connection string. :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword service_principal_id: The ID of the service principal used to authenticate against Azure SQL Managed Instance. Type: string (or Expression with resultType string). - :paramtype service_principal_id: any + :paramtype service_principal_id: JSON :keyword service_principal_key: The key of the service principal used to authenticate against Azure SQL Managed Instance. :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :paramtype tenant: any + :paramtype tenant: JSON :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. 
Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :paramtype azure_cloud_type: any + :paramtype azure_cloud_type: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any + :paramtype encrypted_credential: JSON :keyword always_encrypted_settings: Sql always encrypted properties. :paramtype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ - super(AzureSqlMILinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureSqlMI' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureSqlMI" # type: str self.connection_string = connection_string self.password = password self.service_principal_id = service_principal_id @@ -11321,270 +12504,292 @@ def __init__( self.credential = credential -class AzureSqlMITableDataset(Dataset): +class AzureSqlMITableDataset(Dataset): # pylint: disable=too-many-instance-attributes """The Azure SQL Managed Instance dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :vartype table_name: any + :vartype table_name: JSON :ivar schema_type_properties_schema: The schema name of the Azure SQL Managed Instance. Type: string (or Expression with resultType string). 
- :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON :ivar table: The table name of the Azure SQL Managed Instance dataset. Type: string (or Expression with resultType string). - :vartype table: any + :vartype table: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, - schema_type_properties_schema: Optional[Any] = None, - table: Optional[Any] = None, + table_name: Optional[JSON] = None, + schema_type_properties_schema: Optional[JSON] = None, + table: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. 
Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: This property will be retired. Please consider using schema + table properties instead. - :paramtype table_name: any + :paramtype table_name: JSON :keyword schema_type_properties_schema: The schema name of the Azure SQL Managed Instance. Type: string (or Expression with resultType string). - :paramtype schema_type_properties_schema: any + :paramtype schema_type_properties_schema: JSON :keyword table: The table name of the Azure SQL Managed Instance dataset. Type: string (or Expression with resultType string). - :paramtype table: any - """ - super(AzureSqlMITableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AzureSqlMITable' # type: str + :paramtype table: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "AzureSqlMITable" # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table -class AzureSqlSink(CopySink): +class AzureSqlSink(CopySink): # pylint: disable=too-many-instance-attributes """A copy activity Azure SQL sink. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). 
- :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :vartype sql_writer_stored_procedure_name: any + :vartype sql_writer_stored_procedure_name: JSON :ivar sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType string). - :vartype sql_writer_table_type: any + :vartype sql_writer_table_type: JSON :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :vartype pre_copy_script: any + :vartype pre_copy_script: JSON :ivar stored_procedure_parameters: SQL stored procedure parameters. :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :ivar stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :vartype stored_procedure_table_type_parameter_name: any + :vartype stored_procedure_table_type_parameter_name: JSON :ivar table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :vartype table_option: any + :vartype table_option: JSON :ivar sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - :vartype sql_writer_use_table_lock: any + :vartype sql_writer_use_table_lock: JSON :ivar write_behavior: Write behavior when copying data into Azure SQL. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). - :vartype write_behavior: any + :vartype write_behavior: JSON :ivar upsert_settings: SQL upsert settings. 
:vartype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, - 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "sql_writer_stored_procedure_name": {"key": "sqlWriterStoredProcedureName", "type": "object"}, + "sql_writer_table_type": {"key": "sqlWriterTableType", "type": "object"}, + "pre_copy_script": {"key": "preCopyScript", "type": "object"}, + "stored_procedure_parameters": {"key": "storedProcedureParameters", "type": "{StoredProcedureParameter}"}, + "stored_procedure_table_type_parameter_name": { + "key": "storedProcedureTableTypeParameterName", + "type": "object", + }, + "table_option": {"key": "tableOption", "type": "object"}, + "sql_writer_use_table_lock": {"key": "sqlWriterUseTableLock", "type": "object"}, + "write_behavior": {"key": "writeBehavior", "type": "object"}, + "upsert_settings": {"key": "upsertSettings", "type": "SqlUpsertSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - sql_writer_stored_procedure_name: Optional[Any] = None, - sql_writer_table_type: Optional[Any] = None, - pre_copy_script: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: 
Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + sql_writer_stored_procedure_name: Optional[JSON] = None, + sql_writer_table_type: Optional[JSON] = None, + pre_copy_script: Optional[JSON] = None, stored_procedure_parameters: Optional[Dict[str, "_models.StoredProcedureParameter"]] = None, - stored_procedure_table_type_parameter_name: Optional[Any] = None, - table_option: Optional[Any] = None, - sql_writer_use_table_lock: Optional[Any] = None, - write_behavior: Optional[Any] = None, + stored_procedure_table_type_parameter_name: Optional[JSON] = None, + table_option: Optional[JSON] = None, + sql_writer_use_table_lock: Optional[JSON] = None, + write_behavior: Optional[JSON] = None, upsert_settings: Optional["_models.SqlUpsertSettings"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :paramtype sql_writer_stored_procedure_name: any + :paramtype sql_writer_stored_procedure_name: JSON :keyword sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType string). - :paramtype sql_writer_table_type: any + :paramtype sql_writer_table_type: JSON :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :paramtype pre_copy_script: any + :paramtype pre_copy_script: JSON :keyword stored_procedure_parameters: SQL stored procedure parameters. :paramtype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :keyword stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :paramtype stored_procedure_table_type_parameter_name: any + :paramtype stored_procedure_table_type_parameter_name: JSON :keyword table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). 
- :paramtype table_option: any + :paramtype table_option: JSON :keyword sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - :paramtype sql_writer_use_table_lock: any + :paramtype sql_writer_use_table_lock: JSON :keyword write_behavior: Write behavior when copying data into Azure SQL. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). - :paramtype write_behavior: any + :paramtype write_behavior: JSON :keyword upsert_settings: SQL upsert settings. :paramtype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings """ - super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AzureSqlSink' # type: str + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AzureSqlSink" # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script @@ -11596,135 +12801,144 @@ def __init__( self.upsert_settings = upsert_settings -class AzureSqlSource(TabularSource): +class AzureSqlSource(TabularSource): # pylint: disable=too-many-instance-attributes """A copy activity Azure SQL source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
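For orientation only: throughout this regeneration the former "any" annotations become the generated "JSON" alias (roughly a MutableMapping[str, Any] defined in the models module), so JSON-typed properties still accept either plain Python values or Data Factory expression objects. A minimal, non-authoritative sketch using the retyped AzureSqlSink, with a hypothetical script and pipeline parameter:

    from azure.mgmt.datafactory.models import AzureSqlSink

    # JSON-typed fields take literal values or ADF expression objects
    # (dicts with "value"/"type": "Expression"); both serialize as-is.
    sink = AzureSqlSink(
        pre_copy_script="TRUNCATE TABLE dbo.Staging",  # hypothetical script
        table_option="autoCreate",                     # only supported value per the docstring
        sql_writer_use_table_lock={
            "value": "@pipeline().parameters.useTableLock",  # hypothetical pipeline parameter
            "type": "Expression",
        },
    )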
- :vartype additional_columns: any + :vartype additional_columns: JSON :ivar sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :vartype sql_reader_query: any + :vartype sql_reader_query: JSON :ivar sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :vartype sql_reader_stored_procedure_name: any + :vartype sql_reader_stored_procedure_name: JSON :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :ivar produce_additional_types: Which additional types to produce. - :vartype produce_additional_types: any + :vartype produce_additional_types: JSON :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :vartype partition_option: any + :vartype partition_option: JSON :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. :vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "sql_reader_query": {"key": "sqlReaderQuery", "type": "object"}, + "sql_reader_stored_procedure_name": {"key": "sqlReaderStoredProcedureName", "type": "object"}, + "stored_procedure_parameters": {"key": "storedProcedureParameters", "type": "{StoredProcedureParameter}"}, + "produce_additional_types": {"key": "produceAdditionalTypes", "type": "object"}, + "partition_option": {"key": "partitionOption", "type": "object"}, + "partition_settings": {"key": "partitionSettings", "type": 
"SqlPartitionSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - sql_reader_query: Optional[Any] = None, - sql_reader_stored_procedure_name: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + sql_reader_query: Optional[JSON] = None, + sql_reader_stored_procedure_name: Optional[JSON] = None, stored_procedure_parameters: Optional[Dict[str, "_models.StoredProcedureParameter"]] = None, - produce_additional_types: Optional[Any] = None, - partition_option: Optional[Any] = None, + produce_additional_types: Optional[JSON] = None, + partition_option: Optional[JSON] = None, partition_settings: Optional["_models.SqlPartitionSettings"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :paramtype sql_reader_query: any + :paramtype sql_reader_query: JSON :keyword sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :paramtype sql_reader_stored_procedure_name: any + :paramtype sql_reader_stored_procedure_name: JSON :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
:paramtype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :keyword produce_additional_types: Which additional types to produce. - :paramtype produce_additional_types: any + :paramtype produce_additional_types: JSON :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :paramtype partition_option: any + :paramtype partition_option: JSON :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. :paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ - super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'AzureSqlSource' # type: str + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "AzureSqlSource" # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -11733,127 +12947,137 @@ def __init__( self.partition_settings = partition_settings -class AzureSqlTableDataset(Dataset): +class AzureSqlTableDataset(Dataset): # pylint: disable=too-many-instance-attributes """The Azure SQL Server database dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: This property will be retired. Please consider using schema + table properties instead. 
- :vartype table_name: any + :vartype table_name: JSON :ivar schema_type_properties_schema: The schema name of the Azure SQL database. Type: string (or Expression with resultType string). - :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON :ivar table: The table name of the Azure SQL database. Type: string (or Expression with resultType string). - :vartype table: any + :vartype table: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, - schema_type_properties_schema: Optional[Any] = None, - table: Optional[Any] = None, + table_name: Optional[JSON] = None, + schema_type_properties_schema: Optional[JSON] = None, + table: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: This property will be retired. Please consider using schema + table properties instead. - :paramtype table_name: any + :paramtype table_name: JSON :keyword schema_type_properties_schema: The schema name of the Azure SQL database. Type: string (or Expression with resultType string). - :paramtype schema_type_properties_schema: any + :paramtype schema_type_properties_schema: JSON :keyword table: The table name of the Azure SQL database. Type: string (or Expression with resultType string). - :paramtype table: any - """ - super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AzureSqlTable' # type: str + :paramtype table: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "AzureSqlTable" # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table -class AzureStorageLinkedService(LinkedService): +class AzureStorageLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """The storage account linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -11862,15 +13086,15 @@ class AzureStorageLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. 
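A similar hedged sketch for the retyped AzureSqlTableDataset; the linked service name and table identifiers below are hypothetical, and the LinkedServiceReference keyword arguments are assumed from this generation of the SDK rather than shown in this patch:

    from azure.mgmt.datafactory.models import AzureSqlTableDataset, LinkedServiceReference

    dataset = AzureSqlTableDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference",        # assumed required discriminator in this generation
            reference_name="AzureSqlDatabaseLS",  # hypothetical linked service
        ),
        schema_type_properties_schema="dbo",  # maps to typeProperties.schema
        table="Customers",                    # maps to typeProperties.table
    )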
- :vartype connection_string: any + :vartype connection_string: JSON :ivar account_key: The Azure key vault secret reference of accountKey in connection string. :vartype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :vartype sas_uri: any + :vartype sas_uri: JSON :ivar sas_token: The Azure key vault secret reference of sasToken in sas uri. :vartype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are @@ -11880,34 +13104,34 @@ class AzureStorageLinkedService(LinkedService): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "account_key": {"key": "typeProperties.accountKey", "type": "AzureKeyVaultSecretReference"}, + "sas_uri": {"key": "typeProperties.sasUri", "type": "object"}, + "sas_token": {"key": "typeProperties.sasToken", "type": "AzureKeyVaultSecretReference"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_string: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_string: Optional[JSON] = None, account_key: Optional["_models.AzureKeyVaultSecretReference"] = None, - sas_uri: Optional[Any] = None, + sas_uri: Optional[JSON] = None, sas_token: Optional["_models.AzureKeyVaultSecretReference"] = None, encrypted_credential: Optional[str] = None, **kwargs @@ -11915,7 +13139,7 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. 
:paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -11923,15 +13147,15 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype connection_string: JSON :keyword account_key: The Azure key vault secret reference of accountKey in connection string. :paramtype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype sas_uri: any + :paramtype sas_uri: JSON :keyword sas_token: The Azure key vault secret reference of sasToken in sas uri. :paramtype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials @@ -11939,8 +13163,15 @@ def __init__( with resultType string). :paramtype encrypted_credential: str """ - super(AzureStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureStorage' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureStorage" # type: str self.connection_string = connection_string self.account_key = account_key self.sas_uri = sas_uri @@ -11948,6 +13179,92 @@ def __init__( self.encrypted_credential = encrypted_credential +class AzureSynapseArtifactsLinkedService(LinkedService): + """Azure Synapse Analytics (Artifacts) linked service. + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[JSON] + :ivar endpoint: https://:code:``.dev.azuresynapse.net, Azure Synapse Analytics + workspace URL. Type: string (or Expression with resultType string). Required. + :vartype endpoint: JSON + :ivar authentication: Required to specify MSI, if using system assigned managed identity as + authentication method. Type: string (or Expression with resultType string). 
+ :vartype authentication: JSON + """ + + _validation = { + "type": {"required": True}, + "endpoint": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "endpoint": {"key": "typeProperties.endpoint", "type": "object"}, + "authentication": {"key": "typeProperties.authentication", "type": "object"}, + } + + def __init__( + self, + *, + endpoint: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, + connect_via: Optional["_models.IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, + annotations: Optional[List[JSON]] = None, + authentication: Optional[JSON] = None, + **kwargs + ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[JSON] + :keyword endpoint: https://:code:``.dev.azuresynapse.net, Azure Synapse + Analytics workspace URL. Type: string (or Expression with resultType string). Required. + :paramtype endpoint: JSON + :keyword authentication: Required to specify MSI, if using system assigned managed identity as + authentication method. Type: string (or Expression with resultType string). + :paramtype authentication: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureSynapseArtifacts" # type: str + self.endpoint = endpoint + self.authentication = authentication + + class AzureTableDataset(Dataset): """The Azure Table storage dataset. @@ -11955,208 +13272,227 @@ class AzureTableDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. 
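Since AzureSynapseArtifactsLinkedService is newly added in this patch, a brief usage sketch may help; the workspace URL is hypothetical, and the "MSI" value follows the docstring's wording about system-assigned managed identity:

    from azure.mgmt.datafactory.models import AzureSynapseArtifactsLinkedService

    synapse_ls = AzureSynapseArtifactsLinkedService(
        endpoint="https://contoso-ws.dev.azuresynapse.net",  # hypothetical workspace URL
        authentication="MSI",  # only needed when authenticating with system-assigned managed identity
    )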
:vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :ivar table_name: Required. The table name of the Azure Table storage. Type: string (or - Expression with resultType string). - :vartype table_name: any + :ivar table_name: The table name of the Azure Table storage. Type: string (or Expression with + resultType string). Required. + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'table_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, + "table_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - table_name: Any, - additional_properties: Optional[Dict[str, Any]] = None, + table_name: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :keyword table_name: Required. The table name of the Azure Table storage. Type: string (or - Expression with resultType string). - :paramtype table_name: any - """ - super(AzureTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AzureTable' # type: str + :keyword table_name: The table name of the Azure Table storage. Type: string (or Expression + with resultType string). Required. + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "AzureTable" # type: str self.table_name = table_name -class AzureTableSink(CopySink): +class AzureTableSink(CopySink): # pylint: disable=too-many-instance-attributes """A copy activity Azure Table sink. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar azure_table_default_partition_key_value: Azure Table default partition key value. Type: string (or Expression with resultType string). - :vartype azure_table_default_partition_key_value: any + :vartype azure_table_default_partition_key_value: JSON :ivar azure_table_partition_key_name: Azure Table partition key name. Type: string (or Expression with resultType string). - :vartype azure_table_partition_key_name: any + :vartype azure_table_partition_key_name: JSON :ivar azure_table_row_key_name: Azure Table row key name. Type: string (or Expression with resultType string). - :vartype azure_table_row_key_name: any + :vartype azure_table_row_key_name: JSON :ivar azure_table_insert_type: Azure Table insert type. Type: string (or Expression with resultType string). - :vartype azure_table_insert_type: any + :vartype azure_table_insert_type: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, - 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, - 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, - 'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "azure_table_default_partition_key_value": {"key": "azureTableDefaultPartitionKeyValue", "type": "object"}, + "azure_table_partition_key_name": {"key": "azureTablePartitionKeyName", "type": "object"}, + "azure_table_row_key_name": {"key": "azureTableRowKeyName", "type": "object"}, + "azure_table_insert_type": {"key": "azureTableInsertType", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - azure_table_default_partition_key_value: Optional[Any] = None, - azure_table_partition_key_name: Optional[Any] = None, - azure_table_row_key_name: Optional[Any] = None, - azure_table_insert_type: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + 
write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + azure_table_default_partition_key_value: Optional[JSON] = None, + azure_table_partition_key_name: Optional[JSON] = None, + azure_table_row_key_name: Optional[JSON] = None, + azure_table_insert_type: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword azure_table_default_partition_key_value: Azure Table default partition key value. Type: string (or Expression with resultType string). - :paramtype azure_table_default_partition_key_value: any + :paramtype azure_table_default_partition_key_value: JSON :keyword azure_table_partition_key_name: Azure Table partition key name. Type: string (or Expression with resultType string). - :paramtype azure_table_partition_key_name: any + :paramtype azure_table_partition_key_name: JSON :keyword azure_table_row_key_name: Azure Table row key name. Type: string (or Expression with resultType string). - :paramtype azure_table_row_key_name: any + :paramtype azure_table_row_key_name: JSON :keyword azure_table_insert_type: Azure Table insert type. Type: string (or Expression with resultType string). 
- :paramtype azure_table_insert_type: any - """ - super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'AzureTableSink' # type: str + :paramtype azure_table_insert_type: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "AzureTableSink" # type: str self.azure_table_default_partition_key_value = azure_table_default_partition_key_value self.azure_table_partition_key_name = azure_table_partition_key_name self.azure_table_row_key_name = azure_table_row_key_name @@ -12170,110 +13506,119 @@ class AzureTableSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar azure_table_source_query: Azure Table source query. Type: string (or Expression with resultType string). - :vartype azure_table_source_query: any + :vartype azure_table_source_query: JSON :ivar azure_table_source_ignore_table_not_found: Azure Table source ignore table not found. Type: boolean (or Expression with resultType boolean). 
- :vartype azure_table_source_ignore_table_not_found: any + :vartype azure_table_source_ignore_table_not_found: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, - 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "azure_table_source_query": {"key": "azureTableSourceQuery", "type": "object"}, + "azure_table_source_ignore_table_not_found": {"key": "azureTableSourceIgnoreTableNotFound", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - azure_table_source_query: Optional[Any] = None, - azure_table_source_ignore_table_not_found: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + azure_table_source_query: Optional[JSON] = None, + azure_table_source_ignore_table_not_found: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword azure_table_source_query: Azure Table source query. Type: string (or Expression with resultType string). - :paramtype azure_table_source_query: any + :paramtype azure_table_source_query: JSON :keyword azure_table_source_ignore_table_not_found: Azure Table source ignore table not found. Type: boolean (or Expression with resultType boolean). - :paramtype azure_table_source_ignore_table_not_found: any - """ - super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'AzureTableSource' # type: str + :paramtype azure_table_source_ignore_table_not_found: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "AzureTableSource" # type: str self.azure_table_source_query = azure_table_source_query self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found -class AzureTableStorageLinkedService(LinkedService): +class AzureTableStorageLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """The azure table storage linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -12282,15 +13627,15 @@ class AzureTableStorageLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. 
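A hedged sketch pairing the retyped AzureTableSource and AzureTableSink; the query and the "merge" insert type are illustrative assumptions, not values defined by this patch:

    from azure.mgmt.datafactory.models import AzureTableSink, AzureTableSource

    source = AzureTableSource(
        azure_table_source_query="PartitionKey eq 'orders'",  # hypothetical filter expression
        azure_table_source_ignore_table_not_found=False,
    )
    sink = AzureTableSink(
        azure_table_partition_key_name="PartitionKey",
        azure_table_row_key_name="RowKey",
        azure_table_insert_type="merge",  # assumed value; not validated client-side
    )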
- :vartype connection_string: any + :vartype connection_string: JSON :ivar account_key: The Azure key vault secret reference of accountKey in connection string. :vartype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :vartype sas_uri: any + :vartype sas_uri: JSON :ivar sas_token: The Azure key vault secret reference of sasToken in sas uri. :vartype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are @@ -12300,34 +13645,34 @@ class AzureTableStorageLinkedService(LinkedService): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "account_key": {"key": "typeProperties.accountKey", "type": "AzureKeyVaultSecretReference"}, + "sas_uri": {"key": "typeProperties.sasUri", "type": "object"}, + "sas_token": {"key": "typeProperties.sasToken", "type": "AzureKeyVaultSecretReference"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_string: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_string: Optional[JSON] = None, account_key: Optional["_models.AzureKeyVaultSecretReference"] = None, - sas_uri: Optional[Any] = None, + sas_uri: Optional[JSON] = None, sas_token: Optional["_models.AzureKeyVaultSecretReference"] = None, encrypted_credential: Optional[str] = None, **kwargs @@ -12335,7 +13680,7 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. 
:paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -12343,15 +13688,15 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype connection_string: JSON :keyword account_key: The Azure key vault secret reference of accountKey in connection string. :paramtype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype sas_uri: any + :paramtype sas_uri: JSON :keyword sas_token: The Azure key vault secret reference of sasToken in sas uri. :paramtype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials @@ -12359,8 +13704,15 @@ def __init__( with resultType string). :paramtype encrypted_credential: str """ - super(AzureTableStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureTableStorage' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "AzureTableStorage" # type: str self.connection_string = connection_string self.account_key = account_key self.sas_uri = sas_uri @@ -12368,30 +13720,65 @@ def __init__( self.encrypted_credential = encrypted_credential -class BinaryDataset(Dataset): +class BigDataPoolParametrizationReference(_serialization.Model): + """Big data pool reference type. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Big data pool reference type. Required. "BigDataPoolReference" + :vartype type: str or ~azure.mgmt.datafactory.models.BigDataPoolReferenceType + :ivar reference_name: Reference big data pool name. Type: string (or Expression with resultType + string). Required. + :vartype reference_name: JSON + """ + + _validation = { + "type": {"required": True}, + "reference_name": {"required": True}, + } + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "reference_name": {"key": "referenceName", "type": "object"}, + } + + def __init__(self, *, type: Union[str, "_models.BigDataPoolReferenceType"], reference_name: JSON, **kwargs): + """ + :keyword type: Big data pool reference type. Required. "BigDataPoolReference" + :paramtype type: str or ~azure.mgmt.datafactory.models.BigDataPoolReferenceType + :keyword reference_name: Reference big data pool name. Type: string (or Expression with + resultType string). Required. + :paramtype reference_name: JSON + """ + super().__init__(**kwargs) + self.type = type + self.reference_name = reference_name + + +class BinaryDataset(Dataset): # pylint: disable=too-many-instance-attributes """Binary dataset. 
All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -12402,34 +13789,34 @@ class BinaryDataset(Dataset): """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "location": {"key": "typeProperties.location", "type": "DatasetLocation"}, + "compression": {"key": "typeProperties.compression", "type": "DatasetCompression"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = 
None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, location: Optional["_models.DatasetLocation"] = None, compression: Optional["_models.DatasetCompression"] = None, @@ -12438,21 +13825,21 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -12461,54 +13848,64 @@ def __init__( :keyword compression: The data compression method used for the binary dataset. :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ - super(BinaryDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'Binary' # type: str + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "Binary" # type: str self.location = location self.compression = compression -class FormatReadSettings(msrest.serialization.Model): +class FormatReadSettings(_serialization.Model): """Format read settings. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BinaryReadSettings, DelimitedTextReadSettings, JsonReadSettings, XmlReadSettings. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + BinaryReadSettings, DelimitedTextReadSettings, JsonReadSettings, XmlReadSettings All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The read setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. 
:vartype type: str """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, } _subtype_map = { - 'type': {'BinaryReadSettings': 'BinaryReadSettings', 'DelimitedTextReadSettings': 'DelimitedTextReadSettings', 'JsonReadSettings': 'JsonReadSettings', 'XmlReadSettings': 'XmlReadSettings'} + "type": { + "BinaryReadSettings": "BinaryReadSettings", + "DelimitedTextReadSettings": "DelimitedTextReadSettings", + "JsonReadSettings": "JsonReadSettings", + "XmlReadSettings": "XmlReadSettings", + } } - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - **kwargs - ): + def __init__(self, *, additional_properties: Optional[Dict[str, JSON]] = None, **kwargs): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] """ - super(FormatReadSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties - self.type = 'FormatReadSettings' # type: str + self.type = None # type: Optional[str] class BinaryReadSettings(FormatReadSettings): @@ -12518,39 +13915,39 @@ class BinaryReadSettings(FormatReadSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The read setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. :vartype type: str :ivar compression_properties: Compression settings. :vartype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "compression_properties": {"key": "compressionProperties", "type": "CompressionReadSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, compression_properties: Optional["_models.CompressionReadSettings"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword compression_properties: Compression settings. :paramtype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings """ - super(BinaryReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'BinaryReadSettings' # type: str + super().__init__(additional_properties=additional_properties, **kwargs) + self.type = "BinaryReadSettings" # type: str self.compression_properties = compression_properties @@ -12561,87 +13958,96 @@ class BinarySink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar store_settings: Binary store settings. :vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "store_settings": {"key": "storeSettings", "type": "StoreWriteSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] 
= None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, store_settings: Optional["_models.StoreWriteSettings"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword store_settings: Binary store settings. :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings """ - super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'BinarySink' # type: str + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "BinarySink" # type: str self.store_settings = store_settings @@ -12652,21 +14058,21 @@ class BinarySource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar store_settings: Binary store settings. :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :ivar format_settings: Binary format settings. @@ -12674,28 +14080,28 @@ class BinarySource(CopySource): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'BinaryReadSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "store_settings": {"key": "storeSettings", "type": "StoreReadSettings"}, + "format_settings": {"key": "formatSettings", "type": "BinaryReadSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, store_settings: Optional["_models.StoreReadSettings"] = None, format_settings: Optional["_models.BinaryReadSettings"] = None, **kwargs @@ -12703,35 +14109,42 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. 
Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword store_settings: Binary store settings. :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :keyword format_settings: Binary format settings. :paramtype format_settings: ~azure.mgmt.datafactory.models.BinaryReadSettings """ - super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'BinarySource' # type: str + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "BinarySource" # type: str self.store_settings = store_settings self.format_settings = format_settings -class Trigger(msrest.serialization.Model): +class Trigger(_serialization.Model): """Azure data factory nested object which contains information about creating pipeline run. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ChainingTrigger, MultiplePipelineTrigger, RerunTumblingWindowTrigger, TumblingWindowTrigger. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ChainingTrigger, MultiplePipelineTrigger, RerunTumblingWindowTrigger, TumblingWindowTrigger Variables are only populated by the server, and will be ignored when sending a request. @@ -12739,55 +14152,60 @@ class Trigger(msrest.serialization.Model): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Trigger type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Trigger type. Required. :vartype type: str :ivar description: Trigger description. :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Known values are: "Started", "Stopped", "Disabled". + called on the Trigger. Known values are: "Started", "Stopped", and "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :ivar annotations: List of tags that can be used for describing the trigger. 
- :vartype annotations: list[any] + :vartype annotations: list[JSON] """ _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, + "type": {"required": True}, + "runtime_state": {"readonly": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "runtime_state": {"key": "runtimeState", "type": "str"}, + "annotations": {"key": "annotations", "type": "[object]"}, } _subtype_map = { - 'type': {'ChainingTrigger': 'ChainingTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger', 'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger'} + "type": { + "ChainingTrigger": "ChainingTrigger", + "MultiplePipelineTrigger": "MultiplePipelineTrigger", + "RerunTumblingWindowTrigger": "RerunTumblingWindowTrigger", + "TumblingWindowTrigger": "TumblingWindowTrigger", + } } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Trigger description. :paramtype description: str :keyword annotations: List of tags that can be used for describing the trigger. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] """ - super(Trigger, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties - self.type = 'Trigger' # type: str + self.type = None # type: Optional[str] self.description = description self.runtime_state = None self.annotations = annotations @@ -12796,8 +14214,8 @@ def __init__( class MultiplePipelineTrigger(Trigger): """Base class for all triggers that support one to many model for trigger to pipeline. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BlobEventsTrigger, BlobTrigger, CustomEventsTrigger, ScheduleTrigger. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + BlobEventsTrigger, BlobTrigger, CustomEventsTrigger, ScheduleTrigger Variables are only populated by the server, and will be ignored when sending a request. @@ -12805,64 +14223,71 @@ class MultiplePipelineTrigger(Trigger): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Trigger type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Trigger type. Required. :vartype type: str :ivar description: Trigger description. :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Known values are: "Started", "Stopped", "Disabled". + called on the Trigger. 
Known values are: "Started", "Stopped", and "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :ivar annotations: List of tags that can be used for describing the trigger. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar pipelines: Pipelines that need to be started. :vartype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] """ _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, + "type": {"required": True}, + "runtime_state": {"readonly": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "runtime_state": {"key": "runtimeState", "type": "str"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "pipelines": {"key": "pipelines", "type": "[TriggerPipelineReference]"}, } _subtype_map = { - 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'CustomEventsTrigger': 'CustomEventsTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} + "type": { + "BlobEventsTrigger": "BlobEventsTrigger", + "BlobTrigger": "BlobTrigger", + "CustomEventsTrigger": "CustomEventsTrigger", + "ScheduleTrigger": "ScheduleTrigger", + } } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, pipelines: Optional[List["_models.TriggerPipelineReference"]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Trigger description. :paramtype description: str :keyword annotations: List of tags that can be used for describing the trigger. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword pipelines: Pipelines that need to be started. :paramtype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] """ - super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.type = 'MultiplePipelineTrigger' # type: str + super().__init__( + additional_properties=additional_properties, description=description, annotations=annotations, **kwargs + ) + self.type = "MultiplePipelineTrigger" # type: str self.pipelines = pipelines -class BlobEventsTrigger(MultiplePipelineTrigger): +class BlobEventsTrigger(MultiplePipelineTrigger): # pylint: disable=too-many-instance-attributes """Trigger that runs every time a Blob event occurs. Variables are only populated by the server, and will be ignored when sending a request. @@ -12871,16 +14296,16 @@ class BlobEventsTrigger(MultiplePipelineTrigger): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. 
Trigger type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Trigger type. Required. :vartype type: str :ivar description: Trigger description. :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Known values are: "Started", "Stopped", "Disabled". + called on the Trigger. Known values are: "Started", "Stopped", and "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :ivar annotations: List of tags that can be used for describing the trigger. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar pipelines: Pipelines that need to be started. :vartype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :ivar blob_path_begins_with: The blob path must begin with the pattern provided for trigger to @@ -12894,31 +14319,31 @@ class BlobEventsTrigger(MultiplePipelineTrigger): :vartype blob_path_ends_with: str :ivar ignore_empty_blobs: If set to true, blobs with zero bytes will be ignored. :vartype ignore_empty_blobs: bool - :ivar events: Required. The type of events that cause this trigger to fire. + :ivar events: The type of events that cause this trigger to fire. Required. :vartype events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] - :ivar scope: Required. The ARM resource ID of the Storage Account. + :ivar scope: The ARM resource ID of the Storage Account. Required. :vartype scope: str """ _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, - 'events': {'required': True}, - 'scope': {'required': True}, + "type": {"required": True}, + "runtime_state": {"readonly": True}, + "events": {"required": True}, + "scope": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, - 'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'}, - 'ignore_empty_blobs': {'key': 'typeProperties.ignoreEmptyBlobs', 'type': 'bool'}, - 'events': {'key': 'typeProperties.events', 'type': '[str]'}, - 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "runtime_state": {"key": "runtimeState", "type": "str"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "pipelines": {"key": "pipelines", "type": "[TriggerPipelineReference]"}, + "blob_path_begins_with": {"key": "typeProperties.blobPathBeginsWith", "type": "str"}, + "blob_path_ends_with": {"key": "typeProperties.blobPathEndsWith", "type": "str"}, + "ignore_empty_blobs": {"key": "typeProperties.ignoreEmptyBlobs", "type": "bool"}, + "events": {"key": "typeProperties.events", "type": "[str]"}, + "scope": {"key": "typeProperties.scope", "type": "str"}, } def __init__( @@ -12926,9 +14351,9 @@ def __init__( *, events: List[Union[str, "_models.BlobEventTypes"]], scope: str, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: 
Optional[str] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, pipelines: Optional[List["_models.TriggerPipelineReference"]] = None, blob_path_begins_with: Optional[str] = None, blob_path_ends_with: Optional[str] = None, @@ -12938,11 +14363,11 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Trigger description. :paramtype description: str :keyword annotations: List of tags that can be used for describing the trigger. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword pipelines: Pipelines that need to be started. :paramtype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :keyword blob_path_begins_with: The blob path must begin with the pattern provided for trigger @@ -12956,13 +14381,19 @@ def __init__( :paramtype blob_path_ends_with: str :keyword ignore_empty_blobs: If set to true, blobs with zero bytes will be ignored. :paramtype ignore_empty_blobs: bool - :keyword events: Required. The type of events that cause this trigger to fire. + :keyword events: The type of events that cause this trigger to fire. Required. :paramtype events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] - :keyword scope: Required. The ARM resource ID of the Storage Account. + :keyword scope: The ARM resource ID of the Storage Account. Required. :paramtype scope: str """ - super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) - self.type = 'BlobEventsTrigger' # type: str + super().__init__( + additional_properties=additional_properties, + description=description, + annotations=annotations, + pipelines=pipelines, + **kwargs + ) + self.type = "BlobEventsTrigger" # type: str self.blob_path_begins_with = blob_path_begins_with self.blob_path_ends_with = blob_path_ends_with self.ignore_empty_blobs = ignore_empty_blobs @@ -12970,126 +14401,135 @@ def __init__( self.scope = scope -class BlobSink(CopySink): +class BlobSink(CopySink): # pylint: disable=too-many-instance-attributes """A copy activity Azure Blob sink. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). - :vartype blob_writer_overwrite_files: any + :vartype blob_writer_overwrite_files: JSON :ivar blob_writer_date_time_format: Blob writer date time format. Type: string (or Expression with resultType string). - :vartype blob_writer_date_time_format: any + :vartype blob_writer_date_time_format: JSON :ivar blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with resultType boolean). - :vartype blob_writer_add_header: any + :vartype blob_writer_add_header: JSON :ivar copy_behavior: The type of copy behavior for copy sink. - :vartype copy_behavior: any + :vartype copy_behavior: JSON :ivar metadata: Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). :vartype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, - 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, - 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "blob_writer_overwrite_files": {"key": "blobWriterOverwriteFiles", "type": "object"}, + "blob_writer_date_time_format": {"key": "blobWriterDateTimeFormat", "type": "object"}, + "blob_writer_add_header": {"key": "blobWriterAddHeader", "type": "object"}, + "copy_behavior": {"key": "copyBehavior", "type": "object"}, + "metadata": {"key": "metadata", "type": "[MetadataItem]"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: 
Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - blob_writer_overwrite_files: Optional[Any] = None, - blob_writer_date_time_format: Optional[Any] = None, - blob_writer_add_header: Optional[Any] = None, - copy_behavior: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + blob_writer_overwrite_files: Optional[JSON] = None, + blob_writer_date_time_format: Optional[JSON] = None, + blob_writer_add_header: Optional[JSON] = None, + copy_behavior: Optional[JSON] = None, metadata: Optional[List["_models.MetadataItem"]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). - :paramtype blob_writer_overwrite_files: any + :paramtype blob_writer_overwrite_files: JSON :keyword blob_writer_date_time_format: Blob writer date time format. Type: string (or Expression with resultType string). - :paramtype blob_writer_date_time_format: any + :paramtype blob_writer_date_time_format: JSON :keyword blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with resultType boolean). - :paramtype blob_writer_add_header: any + :paramtype blob_writer_add_header: JSON :keyword copy_behavior: The type of copy behavior for copy sink. - :paramtype copy_behavior: any + :paramtype copy_behavior: JSON :keyword metadata: Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). 
:paramtype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ - super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'BlobSink' # type: str + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "BlobSink" # type: str self.blob_writer_overwrite_files = blob_writer_overwrite_files self.blob_writer_date_time_format = blob_writer_date_time_format self.blob_writer_add_header = blob_writer_add_header @@ -13104,89 +14544,96 @@ class BlobSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). - :vartype treat_empty_as_null: any + :vartype treat_empty_as_null: JSON :ivar skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). - :vartype skip_header_line_count: any + :vartype skip_header_line_count: JSON :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
- :vartype recursive: any + :vartype recursive: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, - 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "treat_empty_as_null": {"key": "treatEmptyAsNull", "type": "object"}, + "skip_header_line_count": {"key": "skipHeaderLineCount", "type": "object"}, + "recursive": {"key": "recursive", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - treat_empty_as_null: Optional[Any] = None, - skip_header_line_count: Optional[Any] = None, - recursive: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + treat_empty_as_null: Optional[JSON] = None, + skip_header_line_count: Optional[JSON] = None, + recursive: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). 
- :paramtype treat_empty_as_null: any + :paramtype treat_empty_as_null: JSON :keyword skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). - :paramtype skip_header_line_count: any + :paramtype skip_header_line_count: JSON :keyword recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :paramtype recursive: any - """ - super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'BlobSource' # type: str + :paramtype recursive: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "BlobSource" # type: str self.treat_empty_as_null = treat_empty_as_null self.skip_header_line_count = skip_header_line_count self.recursive = recursive @@ -13201,45 +14648,45 @@ class BlobTrigger(MultiplePipelineTrigger): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Trigger type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Trigger type. Required. :vartype type: str :ivar description: Trigger description. :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Known values are: "Started", "Stopped", "Disabled". + called on the Trigger. Known values are: "Started", "Stopped", and "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :ivar annotations: List of tags that can be used for describing the trigger. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar pipelines: Pipelines that need to be started. :vartype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :ivar folder_path: Required. The path of the container/folder that will trigger the pipeline. + :ivar folder_path: The path of the container/folder that will trigger the pipeline. Required. :vartype folder_path: str - :ivar max_concurrency: Required. The max number of parallel files to handle when it is - triggered. + :ivar max_concurrency: The max number of parallel files to handle when it is triggered. + Required. :vartype max_concurrency: int - :ivar linked_service: Required. The Azure Storage linked service reference. + :ivar linked_service: The Azure Storage linked service reference. Required. 
:vartype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, - 'folder_path': {'required': True}, - 'max_concurrency': {'required': True}, - 'linked_service': {'required': True}, + "type": {"required": True}, + "runtime_state": {"readonly": True}, + "folder_path": {"required": True}, + "max_concurrency": {"required": True}, + "linked_service": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, - 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "runtime_state": {"key": "runtimeState", "type": "str"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "pipelines": {"key": "pipelines", "type": "[TriggerPipelineReference]"}, + "folder_path": {"key": "typeProperties.folderPath", "type": "str"}, + "max_concurrency": {"key": "typeProperties.maxConcurrency", "type": "int"}, + "linked_service": {"key": "typeProperties.linkedService", "type": "LinkedServiceReference"}, } def __init__( @@ -13248,47 +14695,53 @@ def __init__( folder_path: str, max_concurrency: int, linked_service: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, pipelines: Optional[List["_models.TriggerPipelineReference"]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Trigger description. :paramtype description: str :keyword annotations: List of tags that can be used for describing the trigger. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword pipelines: Pipelines that need to be started. :paramtype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :keyword folder_path: Required. The path of the container/folder that will trigger the - pipeline. + :keyword folder_path: The path of the container/folder that will trigger the pipeline. + Required. :paramtype folder_path: str - :keyword max_concurrency: Required. The max number of parallel files to handle when it is - triggered. + :keyword max_concurrency: The max number of parallel files to handle when it is triggered. + Required. :paramtype max_concurrency: int - :keyword linked_service: Required. The Azure Storage linked service reference. + :keyword linked_service: The Azure Storage linked service reference. Required. 
:paramtype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ - super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) - self.type = 'BlobTrigger' # type: str + super().__init__( + additional_properties=additional_properties, + description=description, + annotations=annotations, + pipelines=pipelines, + **kwargs + ) + self.type = "BlobTrigger" # type: str self.folder_path = folder_path self.max_concurrency = max_concurrency self.linked_service = linked_service -class CassandraLinkedService(LinkedService): +class CassandraLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Linked service for Cassandra data source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -13297,66 +14750,66 @@ class CassandraLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar host: Required. Host name for connection. Type: string (or Expression with resultType - string). - :vartype host: any + :vartype annotations: list[JSON] + :ivar host: Host name for connection. Type: string (or Expression with resultType string). + Required. + :vartype host: JSON :ivar authentication_type: AuthenticationType to be used for connection. Type: string (or Expression with resultType string). - :vartype authentication_type: any + :vartype authentication_type: JSON :ivar port: The port for the connection. Type: integer (or Expression with resultType integer). - :vartype port: any + :vartype port: JSON :ivar username: Username for authentication. Type: string (or Expression with resultType string). - :vartype username: any + :vartype username: JSON :ivar password: Password for authentication. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'host': {'required': True}, + "type": {"required": True}, + "host": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "host": {"key": "typeProperties.host", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "object"}, + "port": {"key": "typeProperties.port", "type": "object"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - host: Any, - additional_properties: Optional[Dict[str, Any]] = None, + host: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - authentication_type: Optional[Any] = None, - port: Optional[Any] = None, - username: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + authentication_type: Optional[JSON] = None, + port: Optional[JSON] = None, + username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -13364,28 +14817,35 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword host: Required. Host name for connection. Type: string (or Expression with resultType - string). 
- :paramtype host: any + :paramtype annotations: list[JSON] + :keyword host: Host name for connection. Type: string (or Expression with resultType string). + Required. + :paramtype host: JSON :keyword authentication_type: AuthenticationType to be used for connection. Type: string (or Expression with resultType string). - :paramtype authentication_type: any + :paramtype authentication_type: JSON :keyword port: The port for the connection. Type: integer (or Expression with resultType integer). - :paramtype port: any + :paramtype port: JSON :keyword username: Username for authentication. Type: string (or Expression with resultType string). - :paramtype username: any + :paramtype username: JSON :keyword password: Password for authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(CassandraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Cassandra' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Cassandra" # type: str self.host = host self.authentication_type = authentication_type self.port = port @@ -13401,210 +14861,229 @@ class CassandraSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. 
Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON :ivar consistency_level: The consistency level specifies how many Cassandra servers must respond to a read request before returning data to the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. Known values are: "ALL", "EACH_QUORUM", "QUORUM", "LOCAL_QUORUM", "ONE", "TWO", "THREE", "LOCAL_ONE", - "SERIAL", "LOCAL_SERIAL". + "SERIAL", and "LOCAL_SERIAL". :vartype consistency_level: str or ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "consistency_level": {"key": "consistencyLevel", "type": "str"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, consistency_level: Optional[Union[str, "_models.CassandraSourceReadConsistencyLevels"]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). - :paramtype query: any + :paramtype query: JSON :keyword consistency_level: The consistency level specifies how many Cassandra servers must respond to a read request before returning data to the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. Known values are: "ALL", "EACH_QUORUM", "QUORUM", "LOCAL_QUORUM", "ONE", "TWO", "THREE", "LOCAL_ONE", - "SERIAL", "LOCAL_SERIAL". + "SERIAL", and "LOCAL_SERIAL". :paramtype consistency_level: str or ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels """ - super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'CassandraSource' # type: str + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "CassandraSource" # type: str self.query = query self.consistency_level = consistency_level -class CassandraTableDataset(Dataset): +class CassandraTableDataset(Dataset): # pylint: disable=too-many-instance-attributes """The Cassandra database dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name of the Cassandra database. Type: string (or Expression with resultType string). - :vartype table_name: any + :vartype table_name: JSON :ivar keyspace: The keyspace of the Cassandra database. Type: string (or Expression with resultType string). - :vartype keyspace: any + :vartype keyspace: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "keyspace": {"key": "typeProperties.keyspace", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, - keyspace: Optional[Any] = None, + table_name: 
Optional[JSON] = None, + keyspace: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name of the Cassandra database. Type: string (or Expression with resultType string). - :paramtype table_name: any + :paramtype table_name: JSON :keyword keyspace: The keyspace of the Cassandra database. Type: string (or Expression with resultType string). - :paramtype keyspace: any - """ - super(CassandraTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'CassandraTable' # type: str + :paramtype keyspace: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "CassandraTable" # type: str self.table_name = table_name self.keyspace = keyspace @@ -13618,43 +15097,43 @@ class ChainingTrigger(Trigger): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Trigger type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Trigger type. Required. :vartype type: str :ivar description: Trigger description. :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Known values are: "Started", "Stopped", "Disabled". + called on the Trigger. Known values are: "Started", "Stopped", and "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :ivar annotations: List of tags that can be used for describing the trigger. - :vartype annotations: list[any] - :ivar pipeline: Required. Pipeline for which runs are created when all upstream pipelines - complete successfully. 
+ :vartype annotations: list[JSON] + :ivar pipeline: Pipeline for which runs are created when all upstream pipelines complete + successfully. Required. :vartype pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference - :ivar depends_on: Required. Upstream Pipelines. + :ivar depends_on: Upstream Pipelines. Required. :vartype depends_on: list[~azure.mgmt.datafactory.models.PipelineReference] - :ivar run_dimension: Required. Run Dimension property that needs to be emitted by upstream - pipelines. + :ivar run_dimension: Run Dimension property that needs to be emitted by upstream pipelines. + Required. :vartype run_dimension: str """ _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, - 'pipeline': {'required': True}, - 'depends_on': {'required': True}, - 'run_dimension': {'required': True}, + "type": {"required": True}, + "runtime_state": {"readonly": True}, + "pipeline": {"required": True}, + "depends_on": {"required": True}, + "run_dimension": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, - 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[PipelineReference]'}, - 'run_dimension': {'key': 'typeProperties.runDimension', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "runtime_state": {"key": "runtimeState", "type": "str"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "pipeline": {"key": "pipeline", "type": "TriggerPipelineReference"}, + "depends_on": {"key": "typeProperties.dependsOn", "type": "[PipelineReference]"}, + "run_dimension": {"key": "typeProperties.runDimension", "type": "str"}, } def __init__( @@ -13663,43 +15142,45 @@ def __init__( pipeline: "_models.TriggerPipelineReference", depends_on: List["_models.PipelineReference"], run_dimension: str, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Trigger description. :paramtype description: str :keyword annotations: List of tags that can be used for describing the trigger. - :paramtype annotations: list[any] - :keyword pipeline: Required. Pipeline for which runs are created when all upstream pipelines - complete successfully. + :paramtype annotations: list[JSON] + :keyword pipeline: Pipeline for which runs are created when all upstream pipelines complete + successfully. Required. :paramtype pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference - :keyword depends_on: Required. Upstream Pipelines. + :keyword depends_on: Upstream Pipelines. Required. :paramtype depends_on: list[~azure.mgmt.datafactory.models.PipelineReference] - :keyword run_dimension: Required. Run Dimension property that needs to be emitted by upstream - pipelines. 
+ :keyword run_dimension: Run Dimension property that needs to be emitted by upstream pipelines. + Required. :paramtype run_dimension: str """ - super(ChainingTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.type = 'ChainingTrigger' # type: str + super().__init__( + additional_properties=additional_properties, description=description, annotations=annotations, **kwargs + ) + self.type = "ChainingTrigger" # type: str self.pipeline = pipeline self.depends_on = depends_on self.run_dimension = run_dimension -class CloudError(msrest.serialization.Model): +class CloudError(_serialization.Model): """The object that defines the structure of an Azure Data Factory error response. All required parameters must be populated in order to send to Azure. - :ivar code: Required. Error code. + :ivar code: Error code. Required. :vartype code: str - :ivar message: Required. Error message. + :ivar message: Error message. Required. :vartype message: str :ivar target: Property name/path in request associated with error. :vartype target: str @@ -13708,15 +15189,15 @@ class CloudError(msrest.serialization.Model): """ _validation = { - 'code': {'required': True}, - 'message': {'required': True}, + "code": {"required": True}, + "message": {"required": True}, } _attribute_map = { - 'code': {'key': 'error.code', 'type': 'str'}, - 'message': {'key': 'error.message', 'type': 'str'}, - 'target': {'key': 'error.target', 'type': 'str'}, - 'details': {'key': 'error.details', 'type': '[CloudError]'}, + "code": {"key": "error.code", "type": "str"}, + "message": {"key": "error.message", "type": "str"}, + "target": {"key": "error.target", "type": "str"}, + "details": {"key": "error.details", "type": "[CloudError]"}, } def __init__( @@ -13729,16 +15210,16 @@ def __init__( **kwargs ): """ - :keyword code: Required. Error code. + :keyword code: Error code. Required. :paramtype code: str - :keyword message: Required. Error message. + :keyword message: Error message. Required. :paramtype message: str :keyword target: Property name/path in request associated with error. :paramtype target: str :keyword details: Array with additional error details. :paramtype details: list[~azure.mgmt.datafactory.models.CloudError] """ - super(CloudError, self).__init__(**kwargs) + super().__init__(**kwargs) self.code = code self.message = message self.target = target @@ -13750,54 +15231,47 @@ class CmdkeySetup(CustomSetupBase): All required parameters must be populated in order to send to Azure. - :ivar type: Required. The type of custom setup.Constant filled by server. + :ivar type: The type of custom setup. Required. :vartype type: str - :ivar target_name: Required. The server name of data source access. - :vartype target_name: any - :ivar user_name: Required. The user name of data source access. - :vartype user_name: any - :ivar password: Required. The password of data source access. + :ivar target_name: The server name of data source access. Required. + :vartype target_name: JSON + :ivar user_name: The user name of data source access. Required. + :vartype user_name: JSON + :ivar password: The password of data source access. Required. 
:vartype password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { - 'type': {'required': True}, - 'target_name': {'required': True}, - 'user_name': {'required': True}, - 'password': {'required': True}, + "type": {"required": True}, + "target_name": {"required": True}, + "user_name": {"required": True}, + "password": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'target_name': {'key': 'typeProperties.targetName', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + "type": {"key": "type", "type": "str"}, + "target_name": {"key": "typeProperties.targetName", "type": "object"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, } - def __init__( - self, - *, - target_name: Any, - user_name: Any, - password: "_models.SecretBase", - **kwargs - ): + def __init__(self, *, target_name: JSON, user_name: JSON, password: "_models.SecretBase", **kwargs): """ - :keyword target_name: Required. The server name of data source access. - :paramtype target_name: any - :keyword user_name: Required. The user name of data source access. - :paramtype user_name: any - :keyword password: Required. The password of data source access. + :keyword target_name: The server name of data source access. Required. + :paramtype target_name: JSON + :keyword user_name: The user name of data source access. Required. + :paramtype user_name: JSON + :keyword password: The password of data source access. Required. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase """ - super(CmdkeySetup, self).__init__(**kwargs) - self.type = 'CmdkeySetup' # type: str + super().__init__(**kwargs) + self.type = "CmdkeySetup" # type: str self.target_name = target_name self.user_name = user_name self.password = password -class CMKIdentityDefinition(msrest.serialization.Model): +class CMKIdentityDefinition(_serialization.Model): """Managed Identity used for CMK. :ivar user_assigned_identity: The resource id of the user assigned identity to authenticate to @@ -13806,21 +15280,16 @@ class CMKIdentityDefinition(msrest.serialization.Model): """ _attribute_map = { - 'user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'}, + "user_assigned_identity": {"key": "userAssignedIdentity", "type": "str"}, } - def __init__( - self, - *, - user_assigned_identity: Optional[str] = None, - **kwargs - ): + def __init__(self, *, user_assigned_identity: Optional[str] = None, **kwargs): """ :keyword user_assigned_identity: The resource id of the user assigned identity to authenticate to customer's key vault. :paramtype user_assigned_identity: str """ - super(CMKIdentityDefinition, self).__init__(**kwargs) + super().__init__(**kwargs) self.user_assigned_identity = user_assigned_identity @@ -13831,102 +15300,112 @@ class CommonDataServiceForAppsEntityDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar entity_name: The logical name of the entity. Type: string (or Expression with resultType string). - :vartype entity_name: any + :vartype entity_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "entity_name": {"key": "typeProperties.entityName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - entity_name: Optional[Any] = None, + entity_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. 
:paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword entity_name: The logical name of the entity. Type: string (or Expression with resultType string). - :paramtype entity_name: any - """ - super(CommonDataServiceForAppsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'CommonDataServiceForAppsEntity' # type: str + :paramtype entity_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "CommonDataServiceForAppsEntity" # type: str self.entity_name = entity_name -class CommonDataServiceForAppsLinkedService(LinkedService): +class CommonDataServiceForAppsLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Common Data Service for Apps linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -13935,46 +15414,46 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar deployment_type: Required. The deployment type of the Common Data Service for Apps - instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common - Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType - string). 
- :vartype deployment_type: any + :vartype annotations: list[JSON] + :ivar deployment_type: The deployment type of the Common Data Service for Apps instance. + 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common Data + Service for Apps on-premises with Ifd. Type: string (or Expression with resultType string). + Required. + :vartype deployment_type: JSON :ivar host_name: The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :vartype host_name: any + :vartype host_name: JSON :ivar port: The port of on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype port: any + :vartype port: JSON :ivar service_uri: The URL to the Microsoft Common Data Service for Apps server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). - :vartype service_uri: any + :vartype service_uri: JSON :ivar organization_name: The organization name of the Common Data Service for Apps instance. The property is required for on-prem and required for online when there are more than one Common Data Service for Apps instances associated with the user. Type: string (or Expression with resultType string). - :vartype organization_name: any - :ivar authentication_type: Required. The authentication type to connect to Common Data Service - for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. + :vartype organization_name: JSON + :ivar authentication_type: The authentication type to connect to Common Data Service for Apps + server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). - :vartype authentication_type: any + Expression with resultType string). Required. + :vartype authentication_type: JSON :ivar username: User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType string). - :vartype username: any + :vartype username: JSON :ivar password: Password to access the Common Data Service for Apps instance. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). - :vartype service_principal_id: any + :vartype service_principal_id: JSON :ivar service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). - :vartype service_principal_credential_type: any + :vartype service_principal_credential_type: JSON :ivar service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -13984,62 +15463,62 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :ivar encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, + "type": {"required": True}, + "deployment_type": {"required": True}, + "authentication_type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, - 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "deployment_type": {"key": "typeProperties.deploymentType", "type": "object"}, + "host_name": {"key": "typeProperties.hostName", "type": "object"}, + "port": {"key": "typeProperties.port", "type": "object"}, + "service_uri": {"key": "typeProperties.serviceUri", "type": "object"}, + "organization_name": {"key": "typeProperties.organizationName", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "object"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "service_principal_credential_type": {"key": "typeProperties.servicePrincipalCredentialType", "type": "object"}, + "service_principal_credential": {"key": "typeProperties.servicePrincipalCredential", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - deployment_type: Any, - authentication_type: Any, - additional_properties: Optional[Dict[str, Any]] = None, + deployment_type: JSON, + authentication_type: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, 
description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - host_name: Optional[Any] = None, - port: Optional[Any] = None, - service_uri: Optional[Any] = None, - organization_name: Optional[Any] = None, - username: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + host_name: Optional[JSON] = None, + port: Optional[JSON] = None, + service_uri: Optional[JSON] = None, + organization_name: Optional[JSON] = None, + username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - service_principal_id: Optional[Any] = None, - service_principal_credential_type: Optional[Any] = None, + service_principal_id: Optional[JSON] = None, + service_principal_credential_type: Optional[JSON] = None, service_principal_credential: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -14047,46 +15526,46 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword deployment_type: Required. The deployment type of the Common Data Service for Apps - instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common - Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType - string). - :paramtype deployment_type: any + :paramtype annotations: list[JSON] + :keyword deployment_type: The deployment type of the Common Data Service for Apps instance. + 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common Data + Service for Apps on-premises with Ifd. Type: string (or Expression with resultType string). + Required. + :paramtype deployment_type: JSON :keyword host_name: The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :paramtype host_name: any + :paramtype host_name: JSON :keyword port: The port of on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype port: any + :paramtype port: JSON :keyword service_uri: The URL to the Microsoft Common Data Service for Apps server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). - :paramtype service_uri: any + :paramtype service_uri: JSON :keyword organization_name: The organization name of the Common Data Service for Apps instance. The property is required for on-prem and required for online when there are more than one Common Data Service for Apps instances associated with the user. Type: string (or Expression with resultType string). 
- :paramtype organization_name: any - :keyword authentication_type: Required. The authentication type to connect to Common Data - Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd - scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: - string (or Expression with resultType string). - :paramtype authentication_type: any + :paramtype organization_name: JSON + :keyword authentication_type: The authentication type to connect to Common Data Service for + Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. + 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or + Expression with resultType string). Required. + :paramtype authentication_type: JSON :keyword username: User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType string). - :paramtype username: any + :paramtype username: JSON :keyword password: Password to access the Common Data Service for Apps instance. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). - :paramtype service_principal_id: any + :paramtype service_principal_id: JSON :keyword service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). - :paramtype service_principal_credential_type: any + :paramtype service_principal_credential_type: JSON :keyword service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -14096,10 +15575,17 @@ def __init__( :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'CommonDataServiceForApps' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "CommonDataServiceForApps" # type: str self.deployment_type = deployment_type self.host_name = host_name self.port = port @@ -14114,115 +15600,122 @@ def __init__( self.encrypted_credential = encrypted_credential -class CommonDataServiceForAppsSink(CopySink): +class CommonDataServiceForAppsSink(CopySink): # pylint: disable=too-many-instance-attributes """A copy activity Common Data Service for Apps sink. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. 
+ :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any - :ivar write_behavior: Required. The write behavior for the operation. Known values are: - "Upsert". + :vartype disable_metrics_collection: JSON + :ivar write_behavior: The write behavior for the operation. Required. "Upsert" :vartype write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior :ivar ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype ignore_null_values: any + :vartype ignore_null_values: JSON :ivar alternate_key_name: The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). 
- :vartype alternate_key_name: any + :vartype alternate_key_name: JSON """ _validation = { - 'type': {'required': True}, - 'write_behavior': {'required': True}, + "type": {"required": True}, + "write_behavior": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "write_behavior": {"key": "writeBehavior", "type": "str"}, + "ignore_null_values": {"key": "ignoreNullValues", "type": "object"}, + "alternate_key_name": {"key": "alternateKeyName", "type": "object"}, } def __init__( self, *, write_behavior: Union[str, "_models.DynamicsSinkWriteBehavior"], - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - ignore_null_values: Optional[Any] = None, - alternate_key_name: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + ignore_null_values: Optional[JSON] = None, + alternate_key_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). 
- :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any - :keyword write_behavior: Required. The write behavior for the operation. Known values are: - "Upsert". + :paramtype disable_metrics_collection: JSON + :keyword write_behavior: The write behavior for the operation. Required. "Upsert" :paramtype write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior :keyword ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype ignore_null_values: any + :paramtype ignore_null_values: JSON :keyword alternate_key_name: The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). - :paramtype alternate_key_name: any - """ - super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'CommonDataServiceForAppsSink' # type: str + :paramtype alternate_key_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "CommonDataServiceForAppsSink" # type: str self.write_behavior = write_behavior self.ignore_null_values = ignore_null_values self.alternate_key_name = alternate_key_name @@ -14235,81 +15728,88 @@ class CommonDataServiceForAppsSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query: FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query: Optional[Any] = None, - additional_columns: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query: FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). - :paramtype query: any + :paramtype query: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any - """ - super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'CommonDataServiceForAppsSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "CommonDataServiceForAppsSource" # type: str self.query = query self.additional_columns = additional_columns @@ -14319,97 +15819,90 @@ class ComponentSetup(CustomSetupBase): All required parameters must be populated in order to send to Azure. - :ivar type: Required. The type of custom setup.Constant filled by server. + :ivar type: The type of custom setup. Required. :vartype type: str - :ivar component_name: Required. The name of the 3rd party component. + :ivar component_name: The name of the 3rd party component. Required. :vartype component_name: str :ivar license_key: The license key to activate the component. :vartype license_key: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { - 'type': {'required': True}, - 'component_name': {'required': True}, + "type": {"required": True}, + "component_name": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'component_name': {'key': 'typeProperties.componentName', 'type': 'str'}, - 'license_key': {'key': 'typeProperties.licenseKey', 'type': 'SecretBase'}, + "type": {"key": "type", "type": "str"}, + "component_name": {"key": "typeProperties.componentName", "type": "str"}, + "license_key": {"key": "typeProperties.licenseKey", "type": "SecretBase"}, } - def __init__( - self, - *, - component_name: str, - license_key: Optional["_models.SecretBase"] = None, - **kwargs - ): + def __init__(self, *, component_name: str, license_key: Optional["_models.SecretBase"] = None, **kwargs): """ - :keyword component_name: Required. The name of the 3rd party component. + :keyword component_name: The name of the 3rd party component. Required. :paramtype component_name: str :keyword license_key: The license key to activate the component. 
:paramtype license_key: ~azure.mgmt.datafactory.models.SecretBase """ - super(ComponentSetup, self).__init__(**kwargs) - self.type = 'ComponentSetup' # type: str + super().__init__(**kwargs) + self.type = "ComponentSetup" # type: str self.component_name = component_name self.license_key = license_key -class CompressionReadSettings(msrest.serialization.Model): +class CompressionReadSettings(_serialization.Model): """Compression read settings. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: TarGZipReadSettings, TarReadSettings, ZipDeflateReadSettings. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + TarGZipReadSettings, TarReadSettings, ZipDeflateReadSettings All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The Compression setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The Compression setting type. Required. :vartype type: str """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, } _subtype_map = { - 'type': {'TarGZipReadSettings': 'TarGZipReadSettings', 'TarReadSettings': 'TarReadSettings', 'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} + "type": { + "TarGZipReadSettings": "TarGZipReadSettings", + "TarReadSettings": "TarReadSettings", + "ZipDeflateReadSettings": "ZipDeflateReadSettings", + } } - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - **kwargs - ): + def __init__(self, *, additional_properties: Optional[Dict[str, JSON]] = None, **kwargs): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] """ - super(CompressionReadSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties - self.type = 'CompressionReadSettings' # type: str + self.type = None # type: Optional[str] -class ConcurLinkedService(LinkedService): +class ConcurLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Concur Service linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -14418,78 +15911,78 @@ class ConcurLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. 
- :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_properties: Properties used to connect to Concur. It is mutually exclusive with any other properties in the linked service. Type: object. - :vartype connection_properties: any - :ivar client_id: Required. Application client_id supplied by Concur App Management. - :vartype client_id: any - :ivar username: Required. The user name that you use to access Concur Service. - :vartype username: any + :vartype connection_properties: JSON + :ivar client_id: Application client_id supplied by Concur App Management. Required. + :vartype client_id: JSON + :ivar username: The user name that you use to access Concur Service. Required. + :vartype username: JSON :ivar password: The password corresponding to the user name that you provided in the username field. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :vartype use_encrypted_endpoints: any + :vartype use_encrypted_endpoints: JSON :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :vartype use_host_verification: any + :vartype use_host_verification: JSON :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :vartype use_peer_verification: any + :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'client_id': {'required': True}, - 'username': {'required': True}, + "type": {"required": True}, + "client_id": {"required": True}, + "username": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_properties": {"key": "typeProperties.connectionProperties", "type": "object"}, + "client_id": {"key": "typeProperties.clientId", "type": "object"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, + "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, + "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - client_id: Any, - username: Any, - additional_properties: Optional[Dict[str, Any]] = None, + client_id: JSON, + username: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_properties: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_properties: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - use_encrypted_endpoints: Optional[Any] = None, - use_host_verification: Optional[Any] = None, - use_peer_verification: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + use_encrypted_endpoints: Optional[JSON] = None, + use_host_verification: Optional[JSON] = None, + use_peer_verification: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -14497,34 +15990,41 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_properties: Properties used to connect to Concur. It is mutually exclusive with any other properties in the linked service. Type: object. - :paramtype connection_properties: any - :keyword client_id: Required. Application client_id supplied by Concur App Management. - :paramtype client_id: any - :keyword username: Required. The user name that you use to access Concur Service. - :paramtype username: any + :paramtype connection_properties: JSON + :keyword client_id: Application client_id supplied by Concur App Management. Required. + :paramtype client_id: JSON + :keyword username: The user name that you use to access Concur Service. Required. + :paramtype username: JSON :keyword password: The password corresponding to the user name that you provided in the username field. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :paramtype use_encrypted_endpoints: any + :paramtype use_encrypted_endpoints: JSON :keyword use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :paramtype use_host_verification: any + :paramtype use_host_verification: JSON :keyword use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :paramtype use_peer_verification: any + :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Concur' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Concur" # type: str self.connection_properties = connection_properties self.client_id = client_id self.username = username @@ -14542,88 +16042,98 @@ class ConcurObjectDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. 
:vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. Type: string (or Expression with resultType string). - :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(ConcurObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'ConcurObject' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "ConcurObject" # type: str self.table_name = table_name @@ -14634,93 +16144,102 @@ class ConcurSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. 
Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'ConcurSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "ConcurSource" # type: str self.query = query -class ConnectionStateProperties(msrest.serialization.Model): +class ConnectionStateProperties(_serialization.Model): """The connection state of a managed private endpoint. Variables are only populated by the server, and will be ignored when sending a request. @@ -14734,40 +16253,36 @@ class ConnectionStateProperties(msrest.serialization.Model): """ _validation = { - 'actions_required': {'readonly': True}, - 'description': {'readonly': True}, - 'status': {'readonly': True}, + "actions_required": {"readonly": True}, + "description": {"readonly": True}, + "status": {"readonly": True}, } _attribute_map = { - 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, + "actions_required": {"key": "actionsRequired", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "status": {"key": "status", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(ConnectionStateProperties, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.actions_required = None self.description = None self.status = None -class CopyActivity(ExecutionActivity): +class CopyActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """Copy activity. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. 
:vartype description: str @@ -14783,27 +16298,27 @@ class CopyActivity(ExecutionActivity): :vartype inputs: list[~azure.mgmt.datafactory.models.DatasetReference] :ivar outputs: List of outputs for the activity. :vartype outputs: list[~azure.mgmt.datafactory.models.DatasetReference] - :ivar source: Required. Copy activity source. + :ivar source: Copy activity source. Required. :vartype source: ~azure.mgmt.datafactory.models.CopySource - :ivar sink: Required. Copy activity sink. + :ivar sink: Copy activity sink. Required. :vartype sink: ~azure.mgmt.datafactory.models.CopySink :ivar translator: Copy activity translator. If not specified, tabular translator is used. - :vartype translator: any + :vartype translator: JSON :ivar enable_staging: Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). - :vartype enable_staging: any + :vartype enable_staging: JSON :ivar staging_settings: Specifies interim staging settings when EnableStaging is true. :vartype staging_settings: ~azure.mgmt.datafactory.models.StagingSettings :ivar parallel_copies: Maximum number of concurrent sessions opened on the source or sink to avoid overloading the data store. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype parallel_copies: any + :vartype parallel_copies: JSON :ivar data_integration_units: Maximum number of data integration units that can be used to perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype data_integration_units: any + :vartype data_integration_units: JSON :ivar enable_skip_incompatible_row: Whether to skip incompatible row. Default value is false. Type: boolean (or Expression with resultType boolean). - :vartype enable_skip_incompatible_row: any + :vartype enable_skip_incompatible_row: JSON :ivar redirect_incompatible_row_settings: Redirect incompatible row settings when EnableSkipIncompatibleRow is true. :vartype redirect_incompatible_row_settings: @@ -14814,58 +16329,61 @@ class CopyActivity(ExecutionActivity): :ivar log_settings: Log settings customer needs provide when enabling log. :vartype log_settings: ~azure.mgmt.datafactory.models.LogSettings :ivar preserve_rules: Preserve Rules. - :vartype preserve_rules: list[any] + :vartype preserve_rules: list[JSON] :ivar preserve: Preserve rules. - :vartype preserve: list[any] + :vartype preserve: list[JSON] :ivar validate_data_consistency: Whether to enable Data Consistency validation. Type: boolean (or Expression with resultType boolean). - :vartype validate_data_consistency: any + :vartype validate_data_consistency: JSON :ivar skip_error_file: Specify the fault tolerance for data consistency. 
:vartype skip_error_file: ~azure.mgmt.datafactory.models.SkipErrorFile """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'source': {'required': True}, - 'sink': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "source": {"required": True}, + "sink": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, - 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, - 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, - 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, - 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, - 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, - 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, - 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, - 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, - 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, - 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, - 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, - 'log_settings': {'key': 'typeProperties.logSettings', 'type': 'LogSettings'}, - 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, - 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, - 'validate_data_consistency': {'key': 'typeProperties.validateDataConsistency', 'type': 'object'}, - 'skip_error_file': {'key': 'typeProperties.skipErrorFile', 'type': 'SkipErrorFile'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "inputs": {"key": "inputs", "type": "[DatasetReference]"}, + "outputs": {"key": "outputs", "type": "[DatasetReference]"}, + "source": {"key": "typeProperties.source", "type": "CopySource"}, + "sink": {"key": "typeProperties.sink", "type": "CopySink"}, + "translator": {"key": "typeProperties.translator", "type": "object"}, + "enable_staging": {"key": "typeProperties.enableStaging", "type": "object"}, + "staging_settings": {"key": "typeProperties.stagingSettings", "type": "StagingSettings"}, + "parallel_copies": {"key": "typeProperties.parallelCopies", "type": "object"}, + "data_integration_units": {"key": "typeProperties.dataIntegrationUnits", "type": "object"}, + "enable_skip_incompatible_row": {"key": "typeProperties.enableSkipIncompatibleRow", "type": "object"}, + 
"redirect_incompatible_row_settings": { + "key": "typeProperties.redirectIncompatibleRowSettings", + "type": "RedirectIncompatibleRowSettings", + }, + "log_storage_settings": {"key": "typeProperties.logStorageSettings", "type": "LogStorageSettings"}, + "log_settings": {"key": "typeProperties.logSettings", "type": "LogSettings"}, + "preserve_rules": {"key": "typeProperties.preserveRules", "type": "[object]"}, + "preserve": {"key": "typeProperties.preserve", "type": "[object]"}, + "validate_data_consistency": {"key": "typeProperties.validateDataConsistency", "type": "object"}, + "skip_error_file": {"key": "typeProperties.skipErrorFile", "type": "SkipErrorFile"}, } - def __init__( + def __init__( # pylint: disable=too-many-locals self, *, name: str, source: "_models.CopySource", sink: "_models.CopySink", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, @@ -14873,26 +16391,26 @@ def __init__( policy: Optional["_models.ActivityPolicy"] = None, inputs: Optional[List["_models.DatasetReference"]] = None, outputs: Optional[List["_models.DatasetReference"]] = None, - translator: Optional[Any] = None, - enable_staging: Optional[Any] = None, + translator: Optional[JSON] = None, + enable_staging: Optional[JSON] = None, staging_settings: Optional["_models.StagingSettings"] = None, - parallel_copies: Optional[Any] = None, - data_integration_units: Optional[Any] = None, - enable_skip_incompatible_row: Optional[Any] = None, + parallel_copies: Optional[JSON] = None, + data_integration_units: Optional[JSON] = None, + enable_skip_incompatible_row: Optional[JSON] = None, redirect_incompatible_row_settings: Optional["_models.RedirectIncompatibleRowSettings"] = None, log_storage_settings: Optional["_models.LogStorageSettings"] = None, log_settings: Optional["_models.LogSettings"] = None, - preserve_rules: Optional[List[Any]] = None, - preserve: Optional[List[Any]] = None, - validate_data_consistency: Optional[Any] = None, + preserve_rules: Optional[List[JSON]] = None, + preserve: Optional[List[JSON]] = None, + validate_data_consistency: Optional[JSON] = None, skip_error_file: Optional["_models.SkipErrorFile"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -14908,27 +16426,27 @@ def __init__( :paramtype inputs: list[~azure.mgmt.datafactory.models.DatasetReference] :keyword outputs: List of outputs for the activity. :paramtype outputs: list[~azure.mgmt.datafactory.models.DatasetReference] - :keyword source: Required. Copy activity source. + :keyword source: Copy activity source. Required. :paramtype source: ~azure.mgmt.datafactory.models.CopySource - :keyword sink: Required. Copy activity sink. + :keyword sink: Copy activity sink. Required. :paramtype sink: ~azure.mgmt.datafactory.models.CopySink :keyword translator: Copy activity translator. If not specified, tabular translator is used. - :paramtype translator: any + :paramtype translator: JSON :keyword enable_staging: Specifies whether to copy data via an interim staging. 
Default value is false. Type: boolean (or Expression with resultType boolean). - :paramtype enable_staging: any + :paramtype enable_staging: JSON :keyword staging_settings: Specifies interim staging settings when EnableStaging is true. :paramtype staging_settings: ~azure.mgmt.datafactory.models.StagingSettings :keyword parallel_copies: Maximum number of concurrent sessions opened on the source or sink to avoid overloading the data store. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype parallel_copies: any + :paramtype parallel_copies: JSON :keyword data_integration_units: Maximum number of data integration units that can be used to perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype data_integration_units: any + :paramtype data_integration_units: JSON :keyword enable_skip_incompatible_row: Whether to skip incompatible row. Default value is false. Type: boolean (or Expression with resultType boolean). - :paramtype enable_skip_incompatible_row: any + :paramtype enable_skip_incompatible_row: JSON :keyword redirect_incompatible_row_settings: Redirect incompatible row settings when EnableSkipIncompatibleRow is true. :paramtype redirect_incompatible_row_settings: @@ -14939,17 +16457,26 @@ def __init__( :keyword log_settings: Log settings customer needs provide when enabling log. :paramtype log_settings: ~azure.mgmt.datafactory.models.LogSettings :keyword preserve_rules: Preserve Rules. - :paramtype preserve_rules: list[any] + :paramtype preserve_rules: list[JSON] :keyword preserve: Preserve rules. - :paramtype preserve: list[any] + :paramtype preserve: list[JSON] :keyword validate_data_consistency: Whether to enable Data Consistency validation. Type: boolean (or Expression with resultType boolean). - :paramtype validate_data_consistency: any + :paramtype validate_data_consistency: JSON :keyword skip_error_file: Specify the fault tolerance for data consistency. :paramtype skip_error_file: ~azure.mgmt.datafactory.models.SkipErrorFile """ - super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'Copy' # type: str + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "Copy" # type: str self.inputs = inputs self.outputs = outputs self.source = source @@ -14969,95 +16496,82 @@ def __init__( self.skip_error_file = skip_error_file -class CopyActivityLogSettings(msrest.serialization.Model): +class CopyActivityLogSettings(_serialization.Model): """Settings for copy activity log. :ivar log_level: Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). - :vartype log_level: any + :vartype log_level: JSON :ivar enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). 
- :vartype enable_reliable_logging: any + :vartype enable_reliable_logging: JSON """ _attribute_map = { - 'log_level': {'key': 'logLevel', 'type': 'object'}, - 'enable_reliable_logging': {'key': 'enableReliableLogging', 'type': 'object'}, + "log_level": {"key": "logLevel", "type": "object"}, + "enable_reliable_logging": {"key": "enableReliableLogging", "type": "object"}, } - def __init__( - self, - *, - log_level: Optional[Any] = None, - enable_reliable_logging: Optional[Any] = None, - **kwargs - ): + def __init__(self, *, log_level: Optional[JSON] = None, enable_reliable_logging: Optional[JSON] = None, **kwargs): """ :keyword log_level: Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). - :paramtype log_level: any + :paramtype log_level: JSON :keyword enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). - :paramtype enable_reliable_logging: any + :paramtype enable_reliable_logging: JSON """ - super(CopyActivityLogSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.log_level = log_level self.enable_reliable_logging = enable_reliable_logging -class CopyTranslator(msrest.serialization.Model): +class CopyTranslator(_serialization.Model): """A copy activity translator. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: TabularTranslator. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + TabularTranslator All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy translator type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy translator type. Required. :vartype type: str """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, } - _subtype_map = { - 'type': {'TabularTranslator': 'TabularTranslator'} - } + _subtype_map = {"type": {"TabularTranslator": "TabularTranslator"}} - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - **kwargs - ): + def __init__(self, *, additional_properties: Optional[Dict[str, JSON]] = None, **kwargs): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] """ - super(CopyTranslator, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties - self.type = 'CopyTranslator' # type: str + self.type = None # type: Optional[str] -class CosmosDbLinkedService(LinkedService): +class CosmosDbLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Microsoft Azure Cosmos Database (CosmosDB) linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. 
+ :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -15066,25 +16580,25 @@ class CosmosDbLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_string: The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype connection_string: JSON :ivar account_endpoint: The endpoint of the Azure CosmosDB account. Type: string (or Expression with resultType string). - :vartype account_endpoint: any + :vartype account_endpoint: JSON :ivar database: The name of the database. Type: string (or Expression with resultType string). - :vartype database: any + :vartype database: JSON :ivar account_key: The account key of the Azure CosmosDB account. Type: SecureString or AzureKeyVaultSecretReference. :vartype account_key: ~azure.mgmt.datafactory.models.SecretBase :ivar service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). - :vartype service_principal_id: any + :vartype service_principal_id: JSON :ivar service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). Known values are: - "ServicePrincipalKey", "ServicePrincipalCert". + "ServicePrincipalKey" and "ServicePrincipalCert". :vartype service_principal_credential_type: str or ~azure.mgmt.datafactory.models.CosmosDbServicePrincipalCredentialType :ivar service_principal_credential: The credential of the service principal object in Azure @@ -15095,73 +16609,75 @@ class CosmosDbLinkedService(LinkedService): :vartype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :vartype tenant: any + :vartype tenant: JSON :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :vartype azure_cloud_type: any + :vartype azure_cloud_type: JSON :ivar connection_mode: The connection mode used to access CosmosDB account. Type: string (or - Expression with resultType string). Known values are: "Gateway", "Direct". + Expression with resultType string). Known values are: "Gateway" and "Direct". :vartype connection_mode: str or ~azure.mgmt.datafactory.models.CosmosDbConnectionMode :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON :ivar credential: The credential reference containing authentication information. 
:vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_endpoint': {'key': 'typeProperties.accountEndpoint', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, - 'connection_mode': {'key': 'typeProperties.connectionMode', 'type': 'str'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "account_endpoint": {"key": "typeProperties.accountEndpoint", "type": "object"}, + "database": {"key": "typeProperties.database", "type": "object"}, + "account_key": {"key": "typeProperties.accountKey", "type": "SecretBase"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "service_principal_credential_type": {"key": "typeProperties.servicePrincipalCredentialType", "type": "str"}, + "service_principal_credential": {"key": "typeProperties.servicePrincipalCredential", "type": "SecretBase"}, + "tenant": {"key": "typeProperties.tenant", "type": "object"}, + "azure_cloud_type": {"key": "typeProperties.azureCloudType", "type": "object"}, + "connection_mode": {"key": "typeProperties.connectionMode", "type": "str"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_string: Optional[Any] = None, - account_endpoint: Optional[Any] = None, - database: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_string: Optional[JSON] = None, + 
account_endpoint: Optional[JSON] = None, + database: Optional[JSON] = None, account_key: Optional["_models.SecretBase"] = None, - service_principal_id: Optional[Any] = None, - service_principal_credential_type: Optional[Union[str, "_models.CosmosDbServicePrincipalCredentialType"]] = None, + service_principal_id: Optional[JSON] = None, + service_principal_credential_type: Optional[ + Union[str, "_models.CosmosDbServicePrincipalCredentialType"] + ] = None, service_principal_credential: Optional["_models.SecretBase"] = None, - tenant: Optional[Any] = None, - azure_cloud_type: Optional[Any] = None, + tenant: Optional[JSON] = None, + azure_cloud_type: Optional[JSON] = None, connection_mode: Optional[Union[str, "_models.CosmosDbConnectionMode"]] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, credential: Optional["_models.CredentialReference"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -15169,26 +16685,26 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_string: The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype connection_string: JSON :keyword account_endpoint: The endpoint of the Azure CosmosDB account. Type: string (or Expression with resultType string). - :paramtype account_endpoint: any + :paramtype account_endpoint: JSON :keyword database: The name of the database. Type: string (or Expression with resultType string). - :paramtype database: any + :paramtype database: JSON :keyword account_key: The account key of the Azure CosmosDB account. Type: SecureString or AzureKeyVaultSecretReference. :paramtype account_key: ~azure.mgmt.datafactory.models.SecretBase :keyword service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). - :paramtype service_principal_id: any + :paramtype service_principal_id: JSON :keyword service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). Known values are: - "ServicePrincipalKey", "ServicePrincipalCert". + "ServicePrincipalKey" and "ServicePrincipalCert". :paramtype service_principal_credential_type: str or ~azure.mgmt.datafactory.models.CosmosDbServicePrincipalCredentialType :keyword service_principal_credential: The credential of the service principal object in Azure @@ -15199,23 +16715,30 @@ def __init__( :paramtype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). 
- :paramtype tenant: any + :paramtype tenant: JSON :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :paramtype azure_cloud_type: any + :paramtype azure_cloud_type: JSON :keyword connection_mode: The connection mode used to access CosmosDB account. Type: string (or - Expression with resultType string). Known values are: "Gateway", "Direct". + Expression with resultType string). Known values are: "Gateway" and "Direct". :paramtype connection_mode: str or ~azure.mgmt.datafactory.models.CosmosDbConnectionMode :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any + :paramtype encrypted_credential: JSON :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ - super(CosmosDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'CosmosDb' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "CosmosDb" # type: str self.connection_string = connection_string self.account_endpoint = account_endpoint self.database = database @@ -15237,91 +16760,101 @@ class CosmosDbMongoDbApiCollectionDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :ivar collection: Required. The collection name of the CosmosDB (MongoDB API) database. Type: - string (or Expression with resultType string). - :vartype collection: any + :ivar collection: The collection name of the CosmosDB (MongoDB API) database. 
Type: string (or + Expression with resultType string). Required. + :vartype collection: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, + "collection": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "collection": {"key": "typeProperties.collection", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - collection: Any, - additional_properties: Optional[Dict[str, Any]] = None, + collection: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :keyword collection: Required. The collection name of the CosmosDB (MongoDB API) database. - Type: string (or Expression with resultType string). - :paramtype collection: any - """ - super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'CosmosDbMongoDbApiCollection' # type: str + :keyword collection: The collection name of the CosmosDB (MongoDB API) database. Type: string + (or Expression with resultType string). Required. + :paramtype collection: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "CosmosDbMongoDbApiCollection" # type: str self.collection = collection @@ -15332,8 +16865,8 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -15342,54 +16875,54 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar is_server_version_above32: Whether the CosmosDB (MongoDB API) server version is higher than 3.2. The default value is false. Type: boolean (or Expression with resultType boolean). - :vartype is_server_version_above32: any - :ivar connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string, + :vartype is_server_version_above32: JSON + :ivar connection_string: The CosmosDB (MongoDB API) connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or - AzureKeyVaultSecretReference. - :vartype connection_string: any - :ivar database: Required. The name of the CosmosDB (MongoDB API) database that you want to - access. Type: string (or Expression with resultType string). - :vartype database: any + AzureKeyVaultSecretReference. Required. + :vartype connection_string: JSON + :ivar database: The name of the CosmosDB (MongoDB API) database that you want to access. Type: + string (or Expression with resultType string). Required. 
+ :vartype database: JSON """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - 'database': {'required': True}, + "type": {"required": True}, + "connection_string": {"required": True}, + "database": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'is_server_version_above32': {'key': 'typeProperties.isServerVersionAbove32', 'type': 'object'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "is_server_version_above32": {"key": "typeProperties.isServerVersionAbove32", "type": "object"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "database": {"key": "typeProperties.database", "type": "object"}, } def __init__( self, *, - connection_string: Any, - database: Any, - additional_properties: Optional[Dict[str, Any]] = None, + connection_string: JSON, + database: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - is_server_version_above32: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + is_server_version_above32: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -15397,20 +16930,27 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword is_server_version_above32: Whether the CosmosDB (MongoDB API) server version is higher than 3.2. The default value is false. Type: boolean (or Expression with resultType boolean). - :paramtype is_server_version_above32: any - :keyword connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: - string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or - AzureKeyVaultSecretReference. - :paramtype connection_string: any - :keyword database: Required. The name of the CosmosDB (MongoDB API) database that you want to - access. Type: string (or Expression with resultType string). 
- :paramtype database: any - """ - super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'CosmosDbMongoDbApi' # type: str + :paramtype is_server_version_above32: JSON + :keyword connection_string: The CosmosDB (MongoDB API) connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or + AzureKeyVaultSecretReference. Required. + :paramtype connection_string: JSON + :keyword database: The name of the CosmosDB (MongoDB API) database that you want to access. + Type: string (or Expression with resultType string). Required. + :paramtype database: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "CosmosDbMongoDbApi" # type: str self.is_server_version_above32 = is_server_version_above32 self.connection_string = connection_string self.database = database @@ -15423,204 +16963,220 @@ class CosmosDbMongoDbApiSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar write_behavior: Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). 
- :vartype write_behavior: any + :vartype write_behavior: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "write_behavior": {"key": "writeBehavior", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - write_behavior: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + write_behavior: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. 
Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword write_behavior: Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). - :paramtype write_behavior: any - """ - super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'CosmosDbMongoDbApiSink' # type: str + :paramtype write_behavior: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "CosmosDbMongoDbApiSink" # type: str self.write_behavior = write_behavior -class CosmosDbMongoDbApiSource(CopySource): +class CosmosDbMongoDbApiSource(CopySource): # pylint: disable=too-many-instance-attributes """A copy activity source for a CosmosDB (MongoDB API) database. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). - :vartype filter: any + :vartype filter: JSON :ivar cursor_methods: Cursor methods for Mongodb query. :vartype cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties :ivar batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. 
Type: integer (or Expression with resultType integer). - :vartype batch_size: any + :vartype batch_size: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'filter': {'key': 'filter', 'type': 'object'}, - 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, - 'batch_size': {'key': 'batchSize', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "filter": {"key": "filter", "type": "object"}, + "cursor_methods": {"key": "cursorMethods", "type": "MongoDbCursorMethodsProperties"}, + "batch_size": {"key": "batchSize", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - filter: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + filter: Optional[JSON] = None, # pylint: disable=redefined-builtin cursor_methods: Optional["_models.MongoDbCursorMethodsProperties"] = None, - batch_size: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, + batch_size: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). 
- :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). - :paramtype filter: any + :paramtype filter: JSON :keyword cursor_methods: Cursor methods for Mongodb query. :paramtype cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties :keyword batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). - :paramtype batch_size: any + :paramtype batch_size: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any - """ - super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'CosmosDbMongoDbApiSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "CosmosDbMongoDbApiSource" # type: str self.filter = filter self.cursor_methods = cursor_methods self.batch_size = batch_size @@ -15635,91 +17191,101 @@ class CosmosDbSqlApiCollectionDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :ivar collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or - Expression with resultType string). - :vartype collection_name: any + :ivar collection_name: CosmosDB (SQL API) collection name. Type: string (or Expression with + resultType string). Required. + :vartype collection_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, + "collection_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "collection_name": {"key": "typeProperties.collectionName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - collection_name: Any, - additional_properties: Optional[Dict[str, Any]] = None, + collection_name: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :keyword collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or - Expression with resultType string). - :paramtype collection_name: any - """ - super(CosmosDbSqlApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'CosmosDbSqlApiCollection' # type: str + :keyword collection_name: CosmosDB (SQL API) collection name. Type: string (or Expression with + resultType string). Required. + :paramtype collection_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "CosmosDbSqlApiCollection" # type: str self.collection_name = collection_name @@ -15730,196 +17296,212 @@ class CosmosDbSqlApiSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. - :vartype write_behavior: any + :vartype write_behavior: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "write_behavior": {"key": "writeBehavior", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - write_behavior: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + write_behavior: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. - :paramtype write_behavior: any - """ - super(CosmosDbSqlApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'CosmosDbSqlApiSink' # type: str + :paramtype write_behavior: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "CosmosDbSqlApiSink" # type: str self.write_behavior = write_behavior -class CosmosDbSqlApiSource(CopySource): +class CosmosDbSqlApiSource(CopySource): # pylint: disable=too-many-instance-attributes """A copy activity Azure CosmosDB (SQL API) Collection source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query: SQL API query. 
Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON :ivar page_size: Page size of the result. Type: integer (or Expression with resultType integer). - :vartype page_size: any + :vartype page_size: JSON :ivar preferred_regions: Preferred regions. Type: array of strings (or Expression with resultType array of strings). - :vartype preferred_regions: any + :vartype preferred_regions: JSON :ivar detect_datetime: Whether detect primitive values as datetime values. Type: boolean (or Expression with resultType boolean). - :vartype detect_datetime: any + :vartype detect_datetime: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'page_size': {'key': 'pageSize', 'type': 'object'}, - 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, - 'detect_datetime': {'key': 'detectDatetime', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "page_size": {"key": "pageSize", "type": "object"}, + "preferred_regions": {"key": "preferredRegions", "type": "object"}, + "detect_datetime": {"key": "detectDatetime", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query: Optional[Any] = None, - page_size: Optional[Any] = None, - preferred_regions: Optional[Any] = None, - detect_datetime: Optional[Any] = None, - additional_columns: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query: Optional[JSON] = None, + page_size: Optional[JSON] = None, + preferred_regions: Optional[JSON] = None, + detect_datetime: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query: SQL API query. Type: string (or Expression with resultType string). - :paramtype query: any + :paramtype query: JSON :keyword page_size: Page size of the result. Type: integer (or Expression with resultType integer). - :paramtype page_size: any + :paramtype page_size: JSON :keyword preferred_regions: Preferred regions. Type: array of strings (or Expression with resultType array of strings). - :paramtype preferred_regions: any + :paramtype preferred_regions: JSON :keyword detect_datetime: Whether detect primitive values as datetime values. Type: boolean (or Expression with resultType boolean). - :paramtype detect_datetime: any + :paramtype detect_datetime: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any - """ - super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'CosmosDbSqlApiSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "CosmosDbSqlApiSource" # type: str self.query = query self.page_size = page_size self.preferred_regions = preferred_regions @@ -15934,8 +17516,8 @@ class CouchbaseLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -15944,51 +17526,51 @@ class CouchbaseLinkedService(LinkedService): :ivar parameters: Parameters for linked service. 
:vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype connection_string: JSON :ivar cred_string: The Azure key vault secret reference of credString in connection string. :vartype cred_string: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "cred_string": {"key": "typeProperties.credString", "type": "AzureKeyVaultSecretReference"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_string: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_string: Optional[JSON] = None, cred_string: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -15996,19 +17578,26 @@ def __init__( :keyword parameters: Parameters for linked service. 
:paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype connection_string: JSON :keyword cred_string: The Azure key vault secret reference of credString in connection string. :paramtype cred_string: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(CouchbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Couchbase' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Couchbase" # type: str self.connection_string = connection_string self.cred_string = cred_string self.encrypted_credential = encrypted_credential @@ -16021,89 +17610,98 @@ class CouchbaseSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
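For the CouchbaseLinkedService model above, a hedged construction sketch; the connection-string value and description are placeholders, and the SecureString payload is shown only as one example of a JSON-typed secret value:

    from azure.mgmt.datafactory.models import CouchbaseLinkedService

    linked_service = CouchbaseLinkedService(
        connection_string={
            "type": "SecureString",
            "value": "Server=<host>;Port=8091;",  # placeholder, not a real server
        },
        description="Couchbase source (illustration only)",
    )
    assert linked_service.type == "Couchbase"  # discriminator filled in by the constructor
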
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'CouchbaseSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "CouchbaseSource" # type: str self.query = query @@ -16114,92 +17712,102 @@ class CouchbaseTableDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. Type: string (or Expression with resultType string). 
- :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(CouchbaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'CouchbaseTable' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "CouchbaseTable" # type: str self.table_name = table_name -class CreateDataFlowDebugSessionRequest(msrest.serialization.Model): +class CreateDataFlowDebugSessionRequest(_serialization.Model): """Request body structure for creating data flow debug session. :ivar compute_type: Compute type of the cluster. The value will be overwritten by the same @@ -16215,10 +17823,10 @@ class CreateDataFlowDebugSessionRequest(msrest.serialization.Model): """ _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'core_count': {'key': 'coreCount', 'type': 'int'}, - 'time_to_live': {'key': 'timeToLive', 'type': 'int'}, - 'integration_runtime': {'key': 'integrationRuntime', 'type': 'IntegrationRuntimeDebugResource'}, + "compute_type": {"key": "computeType", "type": "str"}, + "core_count": {"key": "coreCount", "type": "int"}, + "time_to_live": {"key": "timeToLive", "type": "int"}, + "integration_runtime": {"key": "integrationRuntime", "type": "IntegrationRuntimeDebugResource"}, } def __init__( @@ -16243,14 +17851,14 @@ def __init__( session. :paramtype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeDebugResource """ - super(CreateDataFlowDebugSessionRequest, self).__init__(**kwargs) + super().__init__(**kwargs) self.compute_type = compute_type self.core_count = core_count self.time_to_live = time_to_live self.integration_runtime = integration_runtime -class CreateDataFlowDebugSessionResponse(msrest.serialization.Model): +class CreateDataFlowDebugSessionResponse(_serialization.Model): """Response body structure for creating data flow debug session. :ivar status: The state of the debug session. @@ -16260,29 +17868,23 @@ class CreateDataFlowDebugSessionResponse(msrest.serialization.Model): """ _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'session_id': {'key': 'sessionId', 'type': 'str'}, + "status": {"key": "status", "type": "str"}, + "session_id": {"key": "sessionId", "type": "str"}, } - def __init__( - self, - *, - status: Optional[str] = None, - session_id: Optional[str] = None, - **kwargs - ): + def __init__(self, *, status: Optional[str] = None, session_id: Optional[str] = None, **kwargs): """ :keyword status: The state of the debug session. :paramtype status: str :keyword session_id: The ID of data flow debug session. :paramtype session_id: str """ - super(CreateDataFlowDebugSessionResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.status = status self.session_id = session_id -class CreateLinkedIntegrationRuntimeRequest(msrest.serialization.Model): +class CreateLinkedIntegrationRuntimeRequest(_serialization.Model): """The linked integration runtime information. :ivar name: The name of the linked integration runtime. 
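A sketch for the CreateDataFlowDebugSessionRequest model above; compute_type, core_count and time_to_live values are illustrative only, and serialize() is the base Model helper that emits the camelCase keys listed in the _attribute_map:

    from azure.mgmt.datafactory.models import CreateDataFlowDebugSessionRequest

    request = CreateDataFlowDebugSessionRequest(
        compute_type="General",  # illustrative value
        core_count=8,
        time_to_live=60,
    )
    body = request.serialize()  # e.g. {"computeType": "General", "coreCount": 8, "timeToLive": 60}
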
@@ -16299,10 +17901,10 @@ class CreateLinkedIntegrationRuntimeRequest(msrest.serialization.Model): """ _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, + "data_factory_name": {"key": "dataFactoryName", "type": "str"}, + "data_factory_location": {"key": "dataFactoryLocation", "type": "str"}, } def __init__( @@ -16327,125 +17929,120 @@ def __init__( runtime belongs to. :paramtype data_factory_location: str """ - super(CreateLinkedIntegrationRuntimeRequest, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name self.subscription_id = subscription_id self.data_factory_name = data_factory_name self.data_factory_location = data_factory_location -class CreateRunResponse(msrest.serialization.Model): +class CreateRunResponse(_serialization.Model): """Response body with a run identifier. All required parameters must be populated in order to send to Azure. - :ivar run_id: Required. Identifier of a run. + :ivar run_id: Identifier of a run. Required. :vartype run_id: str """ _validation = { - 'run_id': {'required': True}, + "run_id": {"required": True}, } _attribute_map = { - 'run_id': {'key': 'runId', 'type': 'str'}, + "run_id": {"key": "runId", "type": "str"}, } - def __init__( - self, - *, - run_id: str, - **kwargs - ): + def __init__(self, *, run_id: str, **kwargs): """ - :keyword run_id: Required. Identifier of a run. + :keyword run_id: Identifier of a run. Required. :paramtype run_id: str """ - super(CreateRunResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.run_id = run_id -class Credential(msrest.serialization.Model): +class Credential(_serialization.Model): """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ManagedIdentityCredential, ServicePrincipalCredential. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ManagedIdentityCredential, ServicePrincipalCredential All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of credential.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of credential. Required. :vartype type: str :ivar description: Credential description. :vartype description: str :ivar annotations: List of tags that can be used for describing the Credential. 
- :vartype annotations: list[any] + :vartype annotations: list[JSON] """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "annotations": {"key": "annotations", "type": "[object]"}, } _subtype_map = { - 'type': {'ManagedIdentity': 'ManagedIdentityCredential', 'ServicePrincipal': 'ServicePrincipalCredential'} + "type": {"ManagedIdentity": "ManagedIdentityCredential", "ServicePrincipal": "ServicePrincipalCredential"} } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Credential description. :paramtype description: str :keyword annotations: List of tags that can be used for describing the Credential. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] """ - super(Credential, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties - self.type = 'Credential' # type: str + self.type = None # type: Optional[str] self.description = description self.annotations = annotations -class CredentialReference(msrest.serialization.Model): +class CredentialReference(_serialization.Model): """Credential reference type. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Credential reference type. Known values are: "CredentialReference". + :vartype additional_properties: dict[str, JSON] + :ivar type: Credential reference type. Required. "CredentialReference" :vartype type: str or ~azure.mgmt.datafactory.models.CredentialReferenceType - :ivar reference_name: Required. Reference credential name. + :ivar reference_name: Reference credential name. Required. :vartype reference_name: str """ _validation = { - 'type': {'required': True}, - 'reference_name': {'required': True}, + "type": {"required": True}, + "reference_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "reference_name": {"key": "referenceName", "type": "str"}, } def __init__( @@ -16453,25 +18050,25 @@ def __init__( *, type: Union[str, "_models.CredentialReferenceType"], reference_name: str, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] - :keyword type: Required. Credential reference type. Known values are: "CredentialReference". + :paramtype additional_properties: dict[str, JSON] + :keyword type: Credential reference type. Required. "CredentialReference" :paramtype type: str or ~azure.mgmt.datafactory.models.CredentialReferenceType - :keyword reference_name: Required. Reference credential name. + :keyword reference_name: Reference credential name. Required. :paramtype reference_name: str """ - super(CredentialReference, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.type = type self.reference_name = reference_name -class SubResource(msrest.serialization.Model): +class SubResource(_serialization.Model): """Azure Data Factory nested resource, which belongs to a factory. Variables are only populated by the server, and will be ignored when sending a request. @@ -16487,26 +18084,22 @@ class SubResource(msrest.serialization.Model): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "etag": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "etag": {"key": "etag", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(SubResource, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.id = None self.name = None self.type = None @@ -16528,51 +18121,46 @@ class CredentialResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :ivar properties: Required. Properties of credentials. + :ivar properties: Properties of credentials. Required. :vartype properties: ~azure.mgmt.datafactory.models.Credential """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "etag": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Credential'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "etag": {"key": "etag", "type": "str"}, + "properties": {"key": "properties", "type": "Credential"}, } - def __init__( - self, - *, - properties: "_models.Credential", - **kwargs - ): + def __init__(self, *, properties: "_models.Credential", **kwargs): """ - :keyword properties: Required. Properties of credentials. + :keyword properties: Properties of credentials. Required. 
:paramtype properties: ~azure.mgmt.datafactory.models.Credential """ - super(CredentialResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class CustomActivity(ExecutionActivity): +class CustomActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """Custom activity type. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -16584,76 +18172,76 @@ class CustomActivity(ExecutionActivity): :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar policy: Activity policy. :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :ivar command: Required. Command for custom activity Type: string (or Expression with - resultType string). - :vartype command: any + :ivar command: Command for custom activity Type: string (or Expression with resultType string). + Required. + :vartype command: JSON :ivar resource_linked_service: Resource linked service reference. :vartype resource_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar folder_path: Folder path for resource files Type: string (or Expression with resultType string). - :vartype folder_path: any + :vartype folder_path: JSON :ivar reference_objects: Reference objects. :vartype reference_objects: ~azure.mgmt.datafactory.models.CustomActivityReferenceObject :ivar extended_properties: User defined property bag. There is no restriction on the keys or values that can be used. The user specified custom activity has the full responsibility to consume and interpret the content defined. - :vartype extended_properties: dict[str, any] + :vartype extended_properties: dict[str, JSON] :ivar retention_time_in_days: The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). - :vartype retention_time_in_days: any + :vartype retention_time_in_days: JSON :ivar auto_user_specification: Elevation level and scope for the user, default is nonadmin task. Type: string (or Expression with resultType double). 
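The CredentialReference model above keeps both required fields after the migration; a sketch using a hypothetical credential name:

    from azure.mgmt.datafactory.models import CredentialReference

    cred_ref = CredentialReference(
        type="CredentialReference",                  # the only documented discriminator value
        reference_name="my-user-assigned-identity",  # hypothetical credential name
    )
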
- :vartype auto_user_specification: any + :vartype auto_user_specification: JSON """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'command': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "command": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, - 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, - 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, - 'auto_user_specification': {'key': 'typeProperties.autoUserSpecification', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "command": {"key": "typeProperties.command", "type": "object"}, + "resource_linked_service": {"key": "typeProperties.resourceLinkedService", "type": "LinkedServiceReference"}, + "folder_path": {"key": "typeProperties.folderPath", "type": "object"}, + "reference_objects": {"key": "typeProperties.referenceObjects", "type": "CustomActivityReferenceObject"}, + "extended_properties": {"key": "typeProperties.extendedProperties", "type": "{object}"}, + "retention_time_in_days": {"key": "typeProperties.retentionTimeInDays", "type": "object"}, + "auto_user_specification": {"key": "typeProperties.autoUserSpecification", "type": "object"}, } def __init__( self, *, name: str, - command: Any, - additional_properties: Optional[Dict[str, Any]] = None, + command: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, resource_linked_service: Optional["_models.LinkedServiceReference"] = None, - folder_path: Optional[Any] = None, + folder_path: Optional[JSON] = None, reference_objects: Optional["_models.CustomActivityReferenceObject"] = None, - extended_properties: Optional[Dict[str, Any]] = None, - retention_time_in_days: Optional[Any] = None, - auto_user_specification: Optional[Any] = None, + extended_properties: Optional[Dict[str, JSON]] = None, + retention_time_in_days: Optional[JSON] = None, + auto_user_specification: 
Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -16665,29 +18253,38 @@ def __init__( :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword policy: Activity policy. :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :keyword command: Required. Command for custom activity Type: string (or Expression with - resultType string). - :paramtype command: any + :keyword command: Command for custom activity Type: string (or Expression with resultType + string). Required. + :paramtype command: JSON :keyword resource_linked_service: Resource linked service reference. :paramtype resource_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword folder_path: Folder path for resource files Type: string (or Expression with resultType string). - :paramtype folder_path: any + :paramtype folder_path: JSON :keyword reference_objects: Reference objects. :paramtype reference_objects: ~azure.mgmt.datafactory.models.CustomActivityReferenceObject :keyword extended_properties: User defined property bag. There is no restriction on the keys or values that can be used. The user specified custom activity has the full responsibility to consume and interpret the content defined. - :paramtype extended_properties: dict[str, any] + :paramtype extended_properties: dict[str, JSON] :keyword retention_time_in_days: The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). - :paramtype retention_time_in_days: any + :paramtype retention_time_in_days: JSON :keyword auto_user_specification: Elevation level and scope for the user, default is nonadmin task. Type: string (or Expression with resultType double). - :paramtype auto_user_specification: any - """ - super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'Custom' # type: str + :paramtype auto_user_specification: JSON + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "Custom" # type: str self.command = command self.resource_linked_service = resource_linked_service self.folder_path = folder_path @@ -16697,7 +18294,7 @@ def __init__( self.auto_user_specification = auto_user_specification -class CustomActivityReferenceObject(msrest.serialization.Model): +class CustomActivityReferenceObject(_serialization.Model): """Reference objects for custom activity. :ivar linked_services: Linked service references. 
@@ -16707,8 +18304,8 @@ class CustomActivityReferenceObject(msrest.serialization.Model): """ _attribute_map = { - 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'}, - 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'}, + "linked_services": {"key": "linkedServices", "type": "[LinkedServiceReference]"}, + "datasets": {"key": "datasets", "type": "[DatasetReference]"}, } def __init__( @@ -16724,7 +18321,7 @@ def __init__( :keyword datasets: Dataset references. :paramtype datasets: list[~azure.mgmt.datafactory.models.DatasetReference] """ - super(CustomActivityReferenceObject, self).__init__(**kwargs) + super().__init__(**kwargs) self.linked_services = linked_services self.datasets = datasets @@ -16736,88 +18333,98 @@ class CustomDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar type_properties: Custom dataset properties. 
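A construction sketch for the CustomActivity model above; the activity and linked-service names are hypothetical, and LinkedServiceReference is assumed to take type and reference_name keywords in the same way as the other reference models in this file:

    from azure.mgmt.datafactory.models import CustomActivity, LinkedServiceReference

    activity = CustomActivity(
        name="RunCustomTool",      # hypothetical activity name
        command="python run.py",   # command is JSON-typed (string or expression)
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference",               # assumed discriminator value
            reference_name="MyAzureBatchLinkedService",  # hypothetical linked service
        ),
        retention_time_in_days=30,
    )
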
- :vartype type_properties: any + :vartype type_properties: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type_properties': {'key': 'typeProperties', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "type_properties": {"key": "typeProperties", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - type_properties: Optional[Any] = None, + type_properties: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword type_properties: Custom dataset properties. - :paramtype type_properties: any - """ - super(CustomDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'CustomDataset' # type: str + :paramtype type_properties: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "CustomDataset" # type: str self.type_properties = type_properties @@ -16828,8 +18435,8 @@ class CustomDataSourceLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -16838,41 +18445,41 @@ class CustomDataSourceLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar type_properties: Required. Custom linked service properties. - :vartype type_properties: any + :vartype annotations: list[JSON] + :ivar type_properties: Custom linked service properties. Required. 
+ :vartype type_properties: JSON """ _validation = { - 'type': {'required': True}, - 'type_properties': {'required': True}, + "type": {"required": True}, + "type_properties": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type_properties': {'key': 'typeProperties', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "type_properties": {"key": "typeProperties", "type": "object"}, } def __init__( self, *, - type_properties: Any, - additional_properties: Optional[Dict[str, Any]] = None, + type_properties: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -16880,12 +18487,19 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword type_properties: Required. Custom linked service properties. - :paramtype type_properties: any - """ - super(CustomDataSourceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'CustomDataSource' # type: str + :paramtype annotations: list[JSON] + :keyword type_properties: Custom linked service properties. Required. + :paramtype type_properties: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "CustomDataSource" # type: str self.type_properties = type_properties @@ -16898,16 +18512,16 @@ class CustomEventsTrigger(MultiplePipelineTrigger): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Trigger type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Trigger type. Required. :vartype type: str :ivar description: Trigger description. 
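For the CustomDataSourceLinkedService model above, the required type_properties bag is an opaque JSON value; a sketch with an arbitrary placeholder payload:

    from azure.mgmt.datafactory.models import CustomDataSourceLinkedService

    custom_ls = CustomDataSourceLinkedService(
        type_properties={"endpoint": "https://example.invalid"},  # arbitrary JSON bag
        description="Custom connector (illustration only)",
    )
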
:vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Known values are: "Started", "Stopped", "Disabled". + called on the Trigger. Known values are: "Started", "Stopped", and "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :ivar annotations: List of tags that can be used for describing the trigger. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar pipelines: Pipelines that need to be started. :vartype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :ivar subject_begins_with: The event subject must begin with the pattern provided for trigger @@ -16916,40 +18530,40 @@ class CustomEventsTrigger(MultiplePipelineTrigger): :ivar subject_ends_with: The event subject must end with the pattern provided for trigger to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. :vartype subject_ends_with: str - :ivar events: Required. The list of event types that cause this trigger to fire. - :vartype events: list[any] - :ivar scope: Required. The ARM resource ID of the Azure Event Grid Topic. + :ivar events: The list of event types that cause this trigger to fire. Required. + :vartype events: list[JSON] + :ivar scope: The ARM resource ID of the Azure Event Grid Topic. Required. :vartype scope: str """ _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, - 'events': {'required': True}, - 'scope': {'required': True}, + "type": {"required": True}, + "runtime_state": {"readonly": True}, + "events": {"required": True}, + "scope": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - 'subject_begins_with': {'key': 'typeProperties.subjectBeginsWith', 'type': 'str'}, - 'subject_ends_with': {'key': 'typeProperties.subjectEndsWith', 'type': 'str'}, - 'events': {'key': 'typeProperties.events', 'type': '[object]'}, - 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "runtime_state": {"key": "runtimeState", "type": "str"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "pipelines": {"key": "pipelines", "type": "[TriggerPipelineReference]"}, + "subject_begins_with": {"key": "typeProperties.subjectBeginsWith", "type": "str"}, + "subject_ends_with": {"key": "typeProperties.subjectEndsWith", "type": "str"}, + "events": {"key": "typeProperties.events", "type": "[object]"}, + "scope": {"key": "typeProperties.scope", "type": "str"}, } def __init__( self, *, - events: List[Any], + events: List[JSON], scope: str, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, pipelines: Optional[List["_models.TriggerPipelineReference"]] = None, subject_begins_with: Optional[str] = None, subject_ends_with: Optional[str] = None, @@ -16958,11 +18572,11 @@ def __init__( """ :keyword 
additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Trigger description. :paramtype description: str :keyword annotations: List of tags that can be used for describing the trigger. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword pipelines: Pipelines that need to be started. :paramtype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :keyword subject_begins_with: The event subject must begin with the pattern provided for @@ -16971,30 +18585,36 @@ def __init__( :keyword subject_ends_with: The event subject must end with the pattern provided for trigger to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. :paramtype subject_ends_with: str - :keyword events: Required. The list of event types that cause this trigger to fire. - :paramtype events: list[any] - :keyword scope: Required. The ARM resource ID of the Azure Event Grid Topic. + :keyword events: The list of event types that cause this trigger to fire. Required. + :paramtype events: list[JSON] + :keyword scope: The ARM resource ID of the Azure Event Grid Topic. Required. :paramtype scope: str """ - super(CustomEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) - self.type = 'CustomEventsTrigger' # type: str + super().__init__( + additional_properties=additional_properties, + description=description, + annotations=annotations, + pipelines=pipelines, + **kwargs + ) + self.type = "CustomEventsTrigger" # type: str self.subject_begins_with = subject_begins_with self.subject_ends_with = subject_ends_with self.events = events self.scope = scope -class DatabricksNotebookActivity(ExecutionActivity): +class DatabricksNotebookActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """DatabricksNotebook activity. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -17006,57 +18626,57 @@ class DatabricksNotebookActivity(ExecutionActivity): :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar policy: Activity policy. :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :ivar notebook_path: Required. The absolute path of the notebook to be run in the Databricks - Workspace. This path must begin with a slash. Type: string (or Expression with resultType - string). - :vartype notebook_path: any + :ivar notebook_path: The absolute path of the notebook to be run in the Databricks Workspace. + This path must begin with a slash. Type: string (or Expression with resultType string). + Required. + :vartype notebook_path: JSON :ivar base_parameters: Base parameters to be used for each run of this job.If the notebook takes a parameter that is not specified, the default value from the notebook will be used. 
- :vartype base_parameters: dict[str, any] + :vartype base_parameters: dict[str, JSON] :ivar libraries: A list of libraries to be installed on the cluster that will execute the job. - :vartype libraries: list[dict[str, any]] + :vartype libraries: list[dict[str, JSON]] """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'notebook_path': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "notebook_path": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'}, - 'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "notebook_path": {"key": "typeProperties.notebookPath", "type": "object"}, + "base_parameters": {"key": "typeProperties.baseParameters", "type": "{object}"}, + "libraries": {"key": "typeProperties.libraries", "type": "[{object}]"}, } def __init__( self, *, name: str, - notebook_path: Any, - additional_properties: Optional[Dict[str, Any]] = None, + notebook_path: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, - base_parameters: Optional[Dict[str, Any]] = None, - libraries: Optional[List[Dict[str, Any]]] = None, + base_parameters: Optional[Dict[str, JSON]] = None, + libraries: Optional[List[Dict[str, JSON]]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -17068,35 +18688,44 @@ def __init__( :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword policy: Activity policy. :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :keyword notebook_path: Required. The absolute path of the notebook to be run in the Databricks + :keyword notebook_path: The absolute path of the notebook to be run in the Databricks Workspace. This path must begin with a slash. 
Type: string (or Expression with resultType - string). - :paramtype notebook_path: any + string). Required. + :paramtype notebook_path: JSON :keyword base_parameters: Base parameters to be used for each run of this job.If the notebook takes a parameter that is not specified, the default value from the notebook will be used. - :paramtype base_parameters: dict[str, any] + :paramtype base_parameters: dict[str, JSON] :keyword libraries: A list of libraries to be installed on the cluster that will execute the job. - :paramtype libraries: list[dict[str, any]] - """ - super(DatabricksNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'DatabricksNotebook' # type: str + :paramtype libraries: list[dict[str, JSON]] + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "DatabricksNotebook" # type: str self.notebook_path = notebook_path self.base_parameters = base_parameters self.libraries = libraries -class DatabricksSparkJarActivity(ExecutionActivity): +class DatabricksSparkJarActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """DatabricksSparkJar activity. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -17108,56 +18737,56 @@ class DatabricksSparkJarActivity(ExecutionActivity): :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar policy: Activity policy. :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :ivar main_class_name: Required. The full name of the class containing the main method to be - executed. This class must be contained in a JAR provided as a library. Type: string (or - Expression with resultType string). - :vartype main_class_name: any + :ivar main_class_name: The full name of the class containing the main method to be executed. + This class must be contained in a JAR provided as a library. Type: string (or Expression with + resultType string). Required. + :vartype main_class_name: JSON :ivar parameters: Parameters that will be passed to the main method. - :vartype parameters: list[any] + :vartype parameters: list[JSON] :ivar libraries: A list of libraries to be installed on the cluster that will execute the job. 
- :vartype libraries: list[dict[str, any]] + :vartype libraries: list[dict[str, JSON]] """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'main_class_name': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "main_class_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "main_class_name": {"key": "typeProperties.mainClassName", "type": "object"}, + "parameters": {"key": "typeProperties.parameters", "type": "[object]"}, + "libraries": {"key": "typeProperties.libraries", "type": "[{object}]"}, } def __init__( self, *, name: str, - main_class_name: Any, - additional_properties: Optional[Dict[str, Any]] = None, + main_class_name: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, - parameters: Optional[List[Any]] = None, - libraries: Optional[List[Dict[str, Any]]] = None, + parameters: Optional[List[JSON]] = None, + libraries: Optional[List[Dict[str, JSON]]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -17169,34 +18798,43 @@ def __init__( :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword policy: Activity policy. :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :keyword main_class_name: Required. The full name of the class containing the main method to be - executed. This class must be contained in a JAR provided as a library. Type: string (or - Expression with resultType string). - :paramtype main_class_name: any + :keyword main_class_name: The full name of the class containing the main method to be executed. + This class must be contained in a JAR provided as a library. Type: string (or Expression with + resultType string). 
Required. + :paramtype main_class_name: JSON :keyword parameters: Parameters that will be passed to the main method. - :paramtype parameters: list[any] + :paramtype parameters: list[JSON] :keyword libraries: A list of libraries to be installed on the cluster that will execute the job. - :paramtype libraries: list[dict[str, any]] - """ - super(DatabricksSparkJarActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'DatabricksSparkJar' # type: str + :paramtype libraries: list[dict[str, JSON]] + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "DatabricksSparkJar" # type: str self.main_class_name = main_class_name self.parameters = parameters self.libraries = libraries -class DatabricksSparkPythonActivity(ExecutionActivity): +class DatabricksSparkPythonActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """DatabricksSparkPython activity. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -17208,55 +18846,55 @@ class DatabricksSparkPythonActivity(ExecutionActivity): :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar policy: Activity policy. :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :ivar python_file: Required. The URI of the Python file to be executed. DBFS paths are - supported. Type: string (or Expression with resultType string). - :vartype python_file: any + :ivar python_file: The URI of the Python file to be executed. DBFS paths are supported. Type: + string (or Expression with resultType string). Required. + :vartype python_file: JSON :ivar parameters: Command line parameters that will be passed to the Python file. - :vartype parameters: list[any] + :vartype parameters: list[JSON] :ivar libraries: A list of libraries to be installed on the cluster that will execute the job. 
- :vartype libraries: list[dict[str, any]] + :vartype libraries: list[dict[str, JSON]] """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'python_file': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "python_file": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "python_file": {"key": "typeProperties.pythonFile", "type": "object"}, + "parameters": {"key": "typeProperties.parameters", "type": "[object]"}, + "libraries": {"key": "typeProperties.libraries", "type": "[{object}]"}, } def __init__( self, *, name: str, - python_file: Any, - additional_properties: Optional[Dict[str, Any]] = None, + python_file: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, - parameters: Optional[List[Any]] = None, - libraries: Optional[List[Dict[str, Any]]] = None, + parameters: Optional[List[JSON]] = None, + libraries: Optional[List[Dict[str, JSON]]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -17268,61 +18906,70 @@ def __init__( :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword policy: Activity policy. :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :keyword python_file: Required. The URI of the Python file to be executed. DBFS paths are - supported. Type: string (or Expression with resultType string). - :paramtype python_file: any + :keyword python_file: The URI of the Python file to be executed. DBFS paths are supported. + Type: string (or Expression with resultType string). Required. + :paramtype python_file: JSON :keyword parameters: Command line parameters that will be passed to the Python file. 
- :paramtype parameters: list[any] + :paramtype parameters: list[JSON] :keyword libraries: A list of libraries to be installed on the cluster that will execute the job. - :paramtype libraries: list[dict[str, any]] - """ - super(DatabricksSparkPythonActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'DatabricksSparkPython' # type: str + :paramtype libraries: list[dict[str, JSON]] + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "DatabricksSparkPython" # type: str self.python_file = python_file self.parameters = parameters self.libraries = libraries -class DataFlow(msrest.serialization.Model): +class DataFlow(_serialization.Model): """Azure Data Factory nested object which contains a flow with data movements and transformations. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: Flowlet, MappingDataFlow, WranglingDataFlow. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + Flowlet, MappingDataFlow, WranglingDataFlow All required parameters must be populated in order to send to Azure. - :ivar type: Required. Type of data flow.Constant filled by server. + :ivar type: Type of data flow. Required. :vartype type: str :ivar description: The description of the data flow. :vartype description: str :ivar annotations: List of tags that can be used for describing the data flow. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DataFlowFolder """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DataFlowFolder"}, } _subtype_map = { - 'type': {'Flowlet': 'Flowlet', 'MappingDataFlow': 'MappingDataFlow', 'WranglingDataFlow': 'WranglingDataFlow'} + "type": {"Flowlet": "Flowlet", "MappingDataFlow": "MappingDataFlow", "WranglingDataFlow": "WranglingDataFlow"} } def __init__( self, *, description: Optional[str] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DataFlowFolder"] = None, **kwargs ): @@ -17330,24 +18977,24 @@ def __init__( :keyword description: The description of the data flow. :paramtype description: str :keyword annotations: List of tags that can be used for describing the data flow. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DataFlowFolder """ - super(DataFlow, self).__init__(**kwargs) + super().__init__(**kwargs) self.type = None # type: Optional[str] self.description = description self.annotations = annotations self.folder = folder -class DataFlowDebugCommandPayload(msrest.serialization.Model): +class DataFlowDebugCommandPayload(_serialization.Model): """Structure of command payload. All required parameters must be populated in order to send to Azure. - :ivar stream_name: Required. The stream name which is used for preview. + :ivar stream_name: The stream name which is used for preview. Required. :vartype stream_name: str :ivar row_limits: Row limits for preview response. :vartype row_limits: int @@ -17358,14 +19005,14 @@ class DataFlowDebugCommandPayload(msrest.serialization.Model): """ _validation = { - 'stream_name': {'required': True}, + "stream_name": {"required": True}, } _attribute_map = { - 'stream_name': {'key': 'streamName', 'type': 'str'}, - 'row_limits': {'key': 'rowLimits', 'type': 'int'}, - 'columns': {'key': 'columns', 'type': '[str]'}, - 'expression': {'key': 'expression', 'type': 'str'}, + "stream_name": {"key": "streamName", "type": "str"}, + "row_limits": {"key": "rowLimits", "type": "int"}, + "columns": {"key": "columns", "type": "[str]"}, + "expression": {"key": "expression", "type": "str"}, } def __init__( @@ -17378,7 +19025,7 @@ def __init__( **kwargs ): """ - :keyword stream_name: Required. The stream name which is used for preview. + :keyword stream_name: The stream name which is used for preview. Required. :paramtype stream_name: str :keyword row_limits: Row limits for preview response. :paramtype row_limits: int @@ -17387,29 +19034,29 @@ def __init__( :keyword expression: The expression which is used for preview. :paramtype expression: str """ - super(DataFlowDebugCommandPayload, self).__init__(**kwargs) + super().__init__(**kwargs) self.stream_name = stream_name self.row_limits = row_limits self.columns = columns self.expression = expression -class DataFlowDebugCommandRequest(msrest.serialization.Model): +class DataFlowDebugCommandRequest(_serialization.Model): """Request body structure for data flow debug command. :ivar session_id: The ID of data flow debug session. :vartype session_id: str :ivar command: The command type. Known values are: "executePreviewQuery", - "executeStatisticsQuery", "executeExpressionQuery". + "executeStatisticsQuery", and "executeExpressionQuery". :vartype command: str or ~azure.mgmt.datafactory.models.DataFlowDebugCommandType :ivar command_payload: The command payload object. :vartype command_payload: ~azure.mgmt.datafactory.models.DataFlowDebugCommandPayload """ _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'command': {'key': 'command', 'type': 'str'}, - 'command_payload': {'key': 'commandPayload', 'type': 'DataFlowDebugCommandPayload'}, + "session_id": {"key": "sessionId", "type": "str"}, + "command": {"key": "command", "type": "str"}, + "command_payload": {"key": "commandPayload", "type": "DataFlowDebugCommandPayload"}, } def __init__( @@ -17424,18 +19071,18 @@ def __init__( :keyword session_id: The ID of data flow debug session. :paramtype session_id: str :keyword command: The command type. Known values are: "executePreviewQuery", - "executeStatisticsQuery", "executeExpressionQuery". + "executeStatisticsQuery", and "executeExpressionQuery". 
:paramtype command: str or ~azure.mgmt.datafactory.models.DataFlowDebugCommandType :keyword command_payload: The command payload object. :paramtype command_payload: ~azure.mgmt.datafactory.models.DataFlowDebugCommandPayload """ - super(DataFlowDebugCommandRequest, self).__init__(**kwargs) + super().__init__(**kwargs) self.session_id = session_id self.command = command self.command_payload = command_payload -class DataFlowDebugCommandResponse(msrest.serialization.Model): +class DataFlowDebugCommandResponse(_serialization.Model): """Response body structure of data flow result for data preview, statistics or expression preview. :ivar status: The run status of data preview, statistics or expression preview. @@ -17445,34 +19092,28 @@ class DataFlowDebugCommandResponse(msrest.serialization.Model): """ _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'data': {'key': 'data', 'type': 'str'}, + "status": {"key": "status", "type": "str"}, + "data": {"key": "data", "type": "str"}, } - def __init__( - self, - *, - status: Optional[str] = None, - data: Optional[str] = None, - **kwargs - ): + def __init__(self, *, status: Optional[str] = None, data: Optional[str] = None, **kwargs): """ :keyword status: The run status of data preview, statistics or expression preview. :paramtype status: str :keyword data: The result data of data preview, statistics or expression preview. :paramtype data: str """ - super(DataFlowDebugCommandResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.status = status self.data = data -class DataFlowDebugPackage(msrest.serialization.Model): +class DataFlowDebugPackage(_serialization.Model): """Request body structure for starting data flow debug session. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar session_id: The ID of data flow debug session. :vartype session_id: str :ivar data_flow: Data flow instance. 
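# --- Editor's note (illustrative only, not part of this patch) ----------------
# A minimal sketch of how the regenerated models shown above are constructed
# after this change. The keyword arguments mirror the __init__ signatures in
# the diff (DataFlowDebugCommandPayload / DataFlowDebugCommandRequest), and the
# import path follows the ~azure.mgmt.datafactory.models.* references in the
# docstrings; the concrete values below are made-up placeholders.
from azure.mgmt.datafactory.models import (
    DataFlowDebugCommandPayload,
    DataFlowDebugCommandRequest,
)

# Preview at most 100 rows from the debug source stream named "source1".
payload = DataFlowDebugCommandPayload(stream_name="source1", row_limits=100)

# Wrap the payload in a debug command request; "executePreviewQuery" is one of
# the known command values listed in the DataFlowDebugCommandRequest docstring.
request = DataFlowDebugCommandRequest(
    session_id="<debug-session-id>",
    command="executePreviewQuery",
    command_payload=payload,
)
# ------------------------------------------------------------------------------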
@@ -17490,20 +19131,20 @@ class DataFlowDebugPackage(msrest.serialization.Model): """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow': {'key': 'dataFlow', 'type': 'DataFlowDebugResource'}, - 'data_flows': {'key': 'dataFlows', 'type': '[DataFlowDebugResource]'}, - 'datasets': {'key': 'datasets', 'type': '[DatasetDebugResource]'}, - 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceDebugResource]'}, - 'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'}, - 'debug_settings': {'key': 'debugSettings', 'type': 'DataFlowDebugPackageDebugSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "session_id": {"key": "sessionId", "type": "str"}, + "data_flow": {"key": "dataFlow", "type": "DataFlowDebugResource"}, + "data_flows": {"key": "dataFlows", "type": "[DataFlowDebugResource]"}, + "datasets": {"key": "datasets", "type": "[DatasetDebugResource]"}, + "linked_services": {"key": "linkedServices", "type": "[LinkedServiceDebugResource]"}, + "staging": {"key": "staging", "type": "DataFlowStagingInfo"}, + "debug_settings": {"key": "debugSettings", "type": "DataFlowDebugPackageDebugSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, session_id: Optional[str] = None, data_flow: Optional["_models.DataFlowDebugResource"] = None, data_flows: Optional[List["_models.DataFlowDebugResource"]] = None, @@ -17516,7 +19157,7 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword session_id: The ID of data flow debug session. :paramtype session_id: str :keyword data_flow: Data flow instance. @@ -17532,7 +19173,7 @@ def __init__( :keyword debug_settings: Data flow debug settings. :paramtype debug_settings: ~azure.mgmt.datafactory.models.DataFlowDebugPackageDebugSettings """ - super(DataFlowDebugPackage, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.session_id = session_id self.data_flow = data_flow @@ -17543,46 +19184,46 @@ def __init__( self.debug_settings = debug_settings -class DataFlowDebugPackageDebugSettings(msrest.serialization.Model): +class DataFlowDebugPackageDebugSettings(_serialization.Model): """Data flow debug settings. :ivar source_settings: Source setting for data flow debug. :vartype source_settings: list[~azure.mgmt.datafactory.models.DataFlowSourceSetting] :ivar parameters: Data flow parameters. - :vartype parameters: dict[str, any] + :vartype parameters: dict[str, JSON] :ivar dataset_parameters: Parameters for dataset. 
- :vartype dataset_parameters: any + :vartype dataset_parameters: JSON """ _attribute_map = { - 'source_settings': {'key': 'sourceSettings', 'type': '[DataFlowSourceSetting]'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, + "source_settings": {"key": "sourceSettings", "type": "[DataFlowSourceSetting]"}, + "parameters": {"key": "parameters", "type": "{object}"}, + "dataset_parameters": {"key": "datasetParameters", "type": "object"}, } def __init__( self, *, source_settings: Optional[List["_models.DataFlowSourceSetting"]] = None, - parameters: Optional[Dict[str, Any]] = None, - dataset_parameters: Optional[Any] = None, + parameters: Optional[Dict[str, JSON]] = None, + dataset_parameters: Optional[JSON] = None, **kwargs ): """ :keyword source_settings: Source setting for data flow debug. :paramtype source_settings: list[~azure.mgmt.datafactory.models.DataFlowSourceSetting] :keyword parameters: Data flow parameters. - :paramtype parameters: dict[str, any] + :paramtype parameters: dict[str, JSON] :keyword dataset_parameters: Parameters for dataset. - :paramtype dataset_parameters: any + :paramtype dataset_parameters: JSON """ - super(DataFlowDebugPackageDebugSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.source_settings = source_settings self.parameters = parameters self.dataset_parameters = dataset_parameters -class SubResourceDebugResource(msrest.serialization.Model): +class SubResourceDebugResource(_serialization.Model): """Azure Data Factory nested debug resource. :ivar name: The resource name. @@ -17590,20 +19231,15 @@ class SubResourceDebugResource(msrest.serialization.Model): """ _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, } - def __init__( - self, - *, - name: Optional[str] = None, - **kwargs - ): + def __init__(self, *, name: Optional[str] = None, **kwargs): """ :keyword name: The resource name. :paramtype name: str """ - super(SubResourceDebugResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name @@ -17614,42 +19250,36 @@ class DataFlowDebugResource(SubResourceDebugResource): :ivar name: The resource name. :vartype name: str - :ivar properties: Required. Data flow properties. + :ivar properties: Data flow properties. Required. :vartype properties: ~azure.mgmt.datafactory.models.DataFlow """ _validation = { - 'properties': {'required': True}, + "properties": {"required": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'DataFlow'}, + "name": {"key": "name", "type": "str"}, + "properties": {"key": "properties", "type": "DataFlow"}, } - def __init__( - self, - *, - properties: "_models.DataFlow", - name: Optional[str] = None, - **kwargs - ): + def __init__(self, *, properties: "_models.DataFlow", name: Optional[str] = None, **kwargs): """ :keyword name: The resource name. :paramtype name: str - :keyword properties: Required. Data flow properties. + :keyword properties: Data flow properties. Required. :paramtype properties: ~azure.mgmt.datafactory.models.DataFlow """ - super(DataFlowDebugResource, self).__init__(name=name, **kwargs) + super().__init__(name=name, **kwargs) self.properties = properties -class DataFlowDebugSessionInfo(msrest.serialization.Model): +class DataFlowDebugSessionInfo(_serialization.Model): """Data flow debug session info. 
:ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar data_flow_name: The name of the data flow. :vartype data_flow_name: str :ivar compute_type: Compute type of the cluster. @@ -17671,22 +19301,22 @@ class DataFlowDebugSessionInfo(msrest.serialization.Model): """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'core_count': {'key': 'coreCount', 'type': 'int'}, - 'node_count': {'key': 'nodeCount', 'type': 'int'}, - 'integration_runtime_name': {'key': 'integrationRuntimeName', 'type': 'str'}, - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'time_to_live_in_minutes': {'key': 'timeToLiveInMinutes', 'type': 'int'}, - 'last_activity_time': {'key': 'lastActivityTime', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "data_flow_name": {"key": "dataFlowName", "type": "str"}, + "compute_type": {"key": "computeType", "type": "str"}, + "core_count": {"key": "coreCount", "type": "int"}, + "node_count": {"key": "nodeCount", "type": "int"}, + "integration_runtime_name": {"key": "integrationRuntimeName", "type": "str"}, + "session_id": {"key": "sessionId", "type": "str"}, + "start_time": {"key": "startTime", "type": "str"}, + "time_to_live_in_minutes": {"key": "timeToLiveInMinutes", "type": "int"}, + "last_activity_time": {"key": "lastActivityTime", "type": "str"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, data_flow_name: Optional[str] = None, compute_type: Optional[str] = None, core_count: Optional[int] = None, @@ -17701,7 +19331,7 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword data_flow_name: The name of the data flow. :paramtype data_flow_name: str :keyword compute_type: Compute type of the cluster. @@ -17722,7 +19352,7 @@ def __init__( :keyword last_activity_time: Last activity time of data flow debug session. :paramtype last_activity_time: str """ - super(DataFlowDebugSessionInfo, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.data_flow_name = data_flow_name self.compute_type = compute_type @@ -17735,7 +19365,7 @@ def __init__( self.last_activity_time = last_activity_time -class DataFlowFolder(msrest.serialization.Model): +class DataFlowFolder(_serialization.Model): """The folder that this data flow is in. If not specified, Data flow will appear at the root level. :ivar name: The name of the folder that this data flow is in. @@ -17743,90 +19373,79 @@ class DataFlowFolder(msrest.serialization.Model): """ _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, } - def __init__( - self, - *, - name: Optional[str] = None, - **kwargs - ): + def __init__(self, *, name: Optional[str] = None, **kwargs): """ :keyword name: The name of the folder that this data flow is in. 
:paramtype name: str """ - super(DataFlowFolder, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name -class DataFlowListResponse(msrest.serialization.Model): +class DataFlowListResponse(_serialization.Model): """A list of data flow resources. All required parameters must be populated in order to send to Azure. - :ivar value: Required. List of data flows. + :ivar value: List of data flows. Required. :vartype value: list[~azure.mgmt.datafactory.models.DataFlowResource] :ivar next_link: The link to the next page of results, if any remaining results exist. :vartype next_link: str """ _validation = { - 'value': {'required': True}, + "value": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[DataFlowResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[DataFlowResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - *, - value: List["_models.DataFlowResource"], - next_link: Optional[str] = None, - **kwargs - ): + def __init__(self, *, value: List["_models.DataFlowResource"], next_link: Optional[str] = None, **kwargs): """ - :keyword value: Required. List of data flows. + :keyword value: List of data flows. Required. :paramtype value: list[~azure.mgmt.datafactory.models.DataFlowResource] :keyword next_link: The link to the next page of results, if any remaining results exist. :paramtype next_link: str """ - super(DataFlowListResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.next_link = next_link -class DataFlowReference(msrest.serialization.Model): +class DataFlowReference(_serialization.Model): """Data flow reference type. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Data flow reference type. Known values are: "DataFlowReference". + :vartype additional_properties: dict[str, JSON] + :ivar type: Data flow reference type. Required. "DataFlowReference" :vartype type: str or ~azure.mgmt.datafactory.models.DataFlowReferenceType - :ivar reference_name: Required. Reference data flow name. + :ivar reference_name: Reference data flow name. Required. :vartype reference_name: str :ivar dataset_parameters: Reference data flow parameters from dataset. - :vartype dataset_parameters: any + :vartype dataset_parameters: JSON :ivar parameters: Data flow parameters. 
- :vartype parameters: dict[str, any] + :vartype parameters: dict[str, JSON] """ _validation = { - 'type': {'required': True}, - 'reference_name': {'required': True}, + "type": {"required": True}, + "reference_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "reference_name": {"key": "referenceName", "type": "str"}, + "dataset_parameters": {"key": "datasetParameters", "type": "object"}, + "parameters": {"key": "parameters", "type": "{object}"}, } def __init__( @@ -17834,25 +19453,25 @@ def __init__( *, type: Union[str, "_models.DataFlowReferenceType"], reference_name: str, - additional_properties: Optional[Dict[str, Any]] = None, - dataset_parameters: Optional[Any] = None, - parameters: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + dataset_parameters: Optional[JSON] = None, + parameters: Optional[Dict[str, JSON]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword type: Required. Data flow reference type. Known values are: "DataFlowReference". + :paramtype additional_properties: dict[str, JSON] + :keyword type: Data flow reference type. Required. "DataFlowReference" :paramtype type: str or ~azure.mgmt.datafactory.models.DataFlowReferenceType - :keyword reference_name: Required. Reference data flow name. + :keyword reference_name: Reference data flow name. Required. :paramtype reference_name: str :keyword dataset_parameters: Reference data flow parameters from dataset. - :paramtype dataset_parameters: any + :paramtype dataset_parameters: JSON :keyword parameters: Data flow parameters. - :paramtype parameters: dict[str, any] + :paramtype parameters: dict[str, JSON] """ - super(DataFlowReference, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.type = type self.reference_name = reference_name @@ -17875,46 +19494,41 @@ class DataFlowResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :ivar properties: Required. Data flow properties. + :ivar properties: Data flow properties. Required. 
:vartype properties: ~azure.mgmt.datafactory.models.DataFlow """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "etag": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'DataFlow'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "etag": {"key": "etag", "type": "str"}, + "properties": {"key": "properties", "type": "DataFlow"}, } - def __init__( - self, - *, - properties: "_models.DataFlow", - **kwargs - ): + def __init__(self, *, properties: "_models.DataFlow", **kwargs): """ - :keyword properties: Required. Data flow properties. + :keyword properties: Data flow properties. Required. :paramtype properties: ~azure.mgmt.datafactory.models.DataFlow """ - super(DataFlowResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class Transformation(msrest.serialization.Model): +class Transformation(_serialization.Model): """A data flow transformation. All required parameters must be populated in order to send to Azure. - :ivar name: Required. Transformation name. + :ivar name: Transformation name. Required. :vartype name: str :ivar description: Transformation description. :vartype description: str @@ -17927,15 +19541,15 @@ class Transformation(msrest.serialization.Model): """ _validation = { - 'name': {'required': True}, + "name": {"required": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, - 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, - 'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'}, + "name": {"key": "name", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "dataset": {"key": "dataset", "type": "DatasetReference"}, + "linked_service": {"key": "linkedService", "type": "LinkedServiceReference"}, + "flowlet": {"key": "flowlet", "type": "DataFlowReference"}, } def __init__( @@ -17949,7 +19563,7 @@ def __init__( **kwargs ): """ - :keyword name: Required. Transformation name. + :keyword name: Transformation name. Required. :paramtype name: str :keyword description: Transformation description. :paramtype description: str @@ -17960,7 +19574,7 @@ def __init__( :keyword flowlet: Flowlet Reference. :paramtype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference """ - super(Transformation, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name self.description = description self.dataset = dataset @@ -17973,7 +19587,7 @@ class DataFlowSink(Transformation): All required parameters must be populated in order to send to Azure. - :ivar name: Required. Transformation name. + :ivar name: Transformation name. Required. :vartype name: str :ivar description: Transformation description. 
:vartype description: str @@ -17990,17 +19604,17 @@ class DataFlowSink(Transformation): """ _validation = { - 'name': {'required': True}, + "name": {"required": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, - 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, - 'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'}, - 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, - 'rejected_data_linked_service': {'key': 'rejectedDataLinkedService', 'type': 'LinkedServiceReference'}, + "name": {"key": "name", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "dataset": {"key": "dataset", "type": "DatasetReference"}, + "linked_service": {"key": "linkedService", "type": "LinkedServiceReference"}, + "flowlet": {"key": "flowlet", "type": "DataFlowReference"}, + "schema_linked_service": {"key": "schemaLinkedService", "type": "LinkedServiceReference"}, + "rejected_data_linked_service": {"key": "rejectedDataLinkedService", "type": "LinkedServiceReference"}, } def __init__( @@ -18016,7 +19630,7 @@ def __init__( **kwargs ): """ - :keyword name: Required. Transformation name. + :keyword name: Transformation name. Required. :paramtype name: str :keyword description: Transformation description. :paramtype description: str @@ -18031,7 +19645,14 @@ def __init__( :keyword rejected_data_linked_service: Rejected data linked service reference. :paramtype rejected_data_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ - super(DataFlowSink, self).__init__(name=name, description=description, dataset=dataset, linked_service=linked_service, flowlet=flowlet, **kwargs) + super().__init__( + name=name, + description=description, + dataset=dataset, + linked_service=linked_service, + flowlet=flowlet, + **kwargs + ) self.schema_linked_service = schema_linked_service self.rejected_data_linked_service = rejected_data_linked_service @@ -18041,7 +19662,7 @@ class DataFlowSource(Transformation): All required parameters must be populated in order to send to Azure. - :ivar name: Required. Transformation name. + :ivar name: Transformation name. Required. :vartype name: str :ivar description: Transformation description. :vartype description: str @@ -18056,16 +19677,16 @@ class DataFlowSource(Transformation): """ _validation = { - 'name': {'required': True}, + "name": {"required": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, - 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, - 'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'}, - 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, + "name": {"key": "name", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "dataset": {"key": "dataset", "type": "DatasetReference"}, + "linked_service": {"key": "linkedService", "type": "LinkedServiceReference"}, + "flowlet": {"key": "flowlet", "type": "DataFlowReference"}, + "schema_linked_service": {"key": "schemaLinkedService", "type": "LinkedServiceReference"}, } def __init__( @@ -18080,7 +19701,7 @@ def __init__( **kwargs ): """ - :keyword name: Required. Transformation name. + :keyword name: Transformation name. Required. 
:paramtype name: str :keyword description: Transformation description. :paramtype description: str @@ -18093,16 +19714,23 @@ def __init__( :keyword schema_linked_service: Schema linked service reference. :paramtype schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ - super(DataFlowSource, self).__init__(name=name, description=description, dataset=dataset, linked_service=linked_service, flowlet=flowlet, **kwargs) + super().__init__( + name=name, + description=description, + dataset=dataset, + linked_service=linked_service, + flowlet=flowlet, + **kwargs + ) self.schema_linked_service = schema_linked_service -class DataFlowSourceSetting(msrest.serialization.Model): +class DataFlowSourceSetting(_serialization.Model): """Definition of data flow source setting for debug. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar source_name: The data flow source name. :vartype source_name: str :ivar row_limit: Defines the row limit of data flow source in debug. @@ -18110,15 +19738,15 @@ class DataFlowSourceSetting(msrest.serialization.Model): """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_name': {'key': 'sourceName', 'type': 'str'}, - 'row_limit': {'key': 'rowLimit', 'type': 'int'}, + "additional_properties": {"key": "", "type": "{object}"}, + "source_name": {"key": "sourceName", "type": "str"}, + "row_limit": {"key": "rowLimit", "type": "int"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, source_name: Optional[str] = None, row_limit: Optional[int] = None, **kwargs @@ -18126,38 +19754,38 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_name: The data flow source name. :paramtype source_name: str :keyword row_limit: Defines the row limit of data flow source in debug. :paramtype row_limit: int """ - super(DataFlowSourceSetting, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.source_name = source_name self.row_limit = row_limit -class DataFlowStagingInfo(msrest.serialization.Model): +class DataFlowStagingInfo(_serialization.Model): """Staging info for execute data flow activity. :ivar linked_service: Staging linked service reference. :vartype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar folder_path: Folder path for staging blob. Type: string (or Expression with resultType string). - :vartype folder_path: any + :vartype folder_path: JSON """ _attribute_map = { - 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, + "linked_service": {"key": "linkedService", "type": "LinkedServiceReference"}, + "folder_path": {"key": "folderPath", "type": "object"}, } def __init__( self, *, linked_service: Optional["_models.LinkedServiceReference"] = None, - folder_path: Optional[Any] = None, + folder_path: Optional[JSON] = None, **kwargs ): """ @@ -18165,24 +19793,24 @@ def __init__( :paramtype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword folder_path: Folder path for staging blob. 
Type: string (or Expression with resultType string). - :paramtype folder_path: any + :paramtype folder_path: JSON """ - super(DataFlowStagingInfo, self).__init__(**kwargs) + super().__init__(**kwargs) self.linked_service = linked_service self.folder_path = folder_path -class DataLakeAnalyticsUSQLActivity(ExecutionActivity): +class DataLakeAnalyticsUSQLActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """Data Lake Analytics U-SQL activity. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -18194,77 +19822,77 @@ class DataLakeAnalyticsUSQLActivity(ExecutionActivity): :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar policy: Activity policy. :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :ivar script_path: Required. Case-sensitive path to folder that contains the U-SQL script. - Type: string (or Expression with resultType string). - :vartype script_path: any - :ivar script_linked_service: Required. Script linked service reference. + :ivar script_path: Case-sensitive path to folder that contains the U-SQL script. Type: string + (or Expression with resultType string). Required. + :vartype script_path: JSON + :ivar script_linked_service: Script linked service reference. Required. :vartype script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar degree_of_parallelism: The maximum number of nodes simultaneously used to run the job. Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. - :vartype degree_of_parallelism: any + :vartype degree_of_parallelism: JSON :ivar priority: Determines which jobs out of all that are queued should be selected to run first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or Expression with resultType integer), minimum: 1. - :vartype priority: any + :vartype priority: JSON :ivar parameters: Parameters for U-SQL job request. - :vartype parameters: dict[str, any] + :vartype parameters: dict[str, JSON] :ivar runtime_version: Runtime version of the U-SQL engine to use. Type: string (or Expression with resultType string). - :vartype runtime_version: any + :vartype runtime_version: JSON :ivar compilation_mode: Compilation mode of U-SQL. Must be one of these values : Semantic, Full and SingleBox. Type: string (or Expression with resultType string). 
- :vartype compilation_mode: any + :vartype compilation_mode: JSON """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'script_path': {'required': True}, - 'script_linked_service': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "script_path": {"required": True}, + "script_linked_service": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'}, - 'priority': {'key': 'typeProperties.priority', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, - 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'}, - 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "script_path": {"key": "typeProperties.scriptPath", "type": "object"}, + "script_linked_service": {"key": "typeProperties.scriptLinkedService", "type": "LinkedServiceReference"}, + "degree_of_parallelism": {"key": "typeProperties.degreeOfParallelism", "type": "object"}, + "priority": {"key": "typeProperties.priority", "type": "object"}, + "parameters": {"key": "typeProperties.parameters", "type": "{object}"}, + "runtime_version": {"key": "typeProperties.runtimeVersion", "type": "object"}, + "compilation_mode": {"key": "typeProperties.compilationMode", "type": "object"}, } def __init__( self, *, name: str, - script_path: Any, + script_path: JSON, script_linked_service: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, - degree_of_parallelism: Optional[Any] = None, - priority: Optional[Any] = None, - parameters: Optional[Dict[str, Any]] = None, - runtime_version: Optional[Any] = None, - compilation_mode: Optional[Any] = None, + degree_of_parallelism: Optional[JSON] = None, + priority: Optional[JSON] = None, + parameters: Optional[Dict[str, JSON]] = None, + runtime_version: Optional[JSON] = None, + compilation_mode: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: 
Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -18276,29 +19904,38 @@ def __init__( :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword policy: Activity policy. :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :keyword script_path: Required. Case-sensitive path to folder that contains the U-SQL script. - Type: string (or Expression with resultType string). - :paramtype script_path: any - :keyword script_linked_service: Required. Script linked service reference. + :keyword script_path: Case-sensitive path to folder that contains the U-SQL script. Type: + string (or Expression with resultType string). Required. + :paramtype script_path: JSON + :keyword script_linked_service: Script linked service reference. Required. :paramtype script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword degree_of_parallelism: The maximum number of nodes simultaneously used to run the job. Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. - :paramtype degree_of_parallelism: any + :paramtype degree_of_parallelism: JSON :keyword priority: Determines which jobs out of all that are queued should be selected to run first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or Expression with resultType integer), minimum: 1. - :paramtype priority: any + :paramtype priority: JSON :keyword parameters: Parameters for U-SQL job request. - :paramtype parameters: dict[str, any] + :paramtype parameters: dict[str, JSON] :keyword runtime_version: Runtime version of the U-SQL engine to use. Type: string (or Expression with resultType string). - :paramtype runtime_version: any + :paramtype runtime_version: JSON :keyword compilation_mode: Compilation mode of U-SQL. Must be one of these values : Semantic, Full and SingleBox. Type: string (or Expression with resultType string). - :paramtype compilation_mode: any - """ - super(DataLakeAnalyticsUSQLActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'DataLakeAnalyticsU-SQL' # type: str + :paramtype compilation_mode: JSON + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "DataLakeAnalyticsU-SQL" # type: str self.script_path = script_path self.script_linked_service = script_linked_service self.degree_of_parallelism = degree_of_parallelism @@ -18308,85 +19945,79 @@ def __init__( self.compilation_mode = compilation_mode -class DatasetCompression(msrest.serialization.Model): +class DatasetCompression(_serialization.Model): """The compression method used on a dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. 
Type of dataset compression. Type: string (or Expression with resultType - string). - :vartype type: any + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset compression. Type: string (or Expression with resultType string). + Required. + :vartype type: JSON :ivar level: The dataset compression level. Type: string (or Expression with resultType string). - :vartype level: any + :vartype level: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'object'}, - 'level': {'key': 'level', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "object"}, + "level": {"key": "level", "type": "object"}, } def __init__( self, *, - type: Any, - additional_properties: Optional[Dict[str, Any]] = None, - level: Optional[Any] = None, + type: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, + level: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword type: Required. Type of dataset compression. Type: string (or Expression with - resultType string). - :paramtype type: any + :paramtype additional_properties: dict[str, JSON] + :keyword type: Type of dataset compression. Type: string (or Expression with resultType + string). Required. + :paramtype type: JSON :keyword level: The dataset compression level. Type: string (or Expression with resultType string). - :paramtype level: any + :paramtype level: JSON """ - super(DatasetCompression, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.type = type self.level = level -class DatasetDataElement(msrest.serialization.Model): +class DatasetDataElement(_serialization.Model): """Columns that define the structure of the dataset. :ivar name: Name of the column. Type: string (or Expression with resultType string). - :vartype name: any + :vartype name: JSON :ivar type: Type of the column. Type: string (or Expression with resultType string). - :vartype type: any + :vartype type: JSON """ _attribute_map = { - 'name': {'key': 'name', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'object'}, + "name": {"key": "name", "type": "object"}, + "type": {"key": "type", "type": "object"}, } - def __init__( - self, - *, - name: Optional[Any] = None, - type: Optional[Any] = None, - **kwargs - ): + def __init__(self, *, name: Optional[JSON] = None, type: Optional[JSON] = None, **kwargs): """ :keyword name: Name of the column. Type: string (or Expression with resultType string). - :paramtype name: any + :paramtype name: JSON :keyword type: Type of the column. Type: string (or Expression with resultType string). - :paramtype type: any + :paramtype type: JSON """ - super(DatasetDataElement, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name self.type = type @@ -18398,37 +20029,31 @@ class DatasetDebugResource(SubResourceDebugResource): :ivar name: The resource name. :vartype name: str - :ivar properties: Required. Dataset properties. + :ivar properties: Dataset properties. Required. 
:vartype properties: ~azure.mgmt.datafactory.models.Dataset """ _validation = { - 'properties': {'required': True}, + "properties": {"required": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Dataset'}, + "name": {"key": "name", "type": "str"}, + "properties": {"key": "properties", "type": "Dataset"}, } - def __init__( - self, - *, - properties: "_models.Dataset", - name: Optional[str] = None, - **kwargs - ): + def __init__(self, *, properties: "_models.Dataset", name: Optional[str] = None, **kwargs): """ :keyword name: The resource name. :paramtype name: str - :keyword properties: Required. Dataset properties. + :keyword properties: Dataset properties. Required. :paramtype properties: ~azure.mgmt.datafactory.models.Dataset """ - super(DatasetDebugResource, self).__init__(name=name, **kwargs) + super().__init__(name=name, **kwargs) self.properties = properties -class DatasetFolder(msrest.serialization.Model): +class DatasetFolder(_serialization.Model): """The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :ivar name: The name of the folder that this Dataset is in. @@ -18436,103 +20061,92 @@ class DatasetFolder(msrest.serialization.Model): """ _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, } - def __init__( - self, - *, - name: Optional[str] = None, - **kwargs - ): + def __init__(self, *, name: Optional[str] = None, **kwargs): """ :keyword name: The name of the folder that this Dataset is in. :paramtype name: str """ - super(DatasetFolder, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name -class DatasetListResponse(msrest.serialization.Model): +class DatasetListResponse(_serialization.Model): """A list of dataset resources. All required parameters must be populated in order to send to Azure. - :ivar value: Required. List of datasets. + :ivar value: List of datasets. Required. :vartype value: list[~azure.mgmt.datafactory.models.DatasetResource] :ivar next_link: The link to the next page of results, if any remaining results exist. :vartype next_link: str """ _validation = { - 'value': {'required': True}, + "value": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[DatasetResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[DatasetResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - *, - value: List["_models.DatasetResource"], - next_link: Optional[str] = None, - **kwargs - ): + def __init__(self, *, value: List["_models.DatasetResource"], next_link: Optional[str] = None, **kwargs): """ - :keyword value: Required. List of datasets. + :keyword value: List of datasets. Required. :paramtype value: list[~azure.mgmt.datafactory.models.DatasetResource] :keyword next_link: The link to the next page of results, if any remaining results exist. :paramtype next_link: str """ - super(DatasetListResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.next_link = next_link -class DatasetReference(msrest.serialization.Model): +class DatasetReference(_serialization.Model): """Dataset reference type. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar type: Dataset reference type. Has constant value: "DatasetReference". 
- :vartype type: str - :ivar reference_name: Required. Reference dataset name. + :ivar type: Dataset reference type. Required. "DatasetReference" + :vartype type: str or ~azure.mgmt.datafactory.models.DatasetReferenceType + :ivar reference_name: Reference dataset name. Required. :vartype reference_name: str :ivar parameters: Arguments for dataset. - :vartype parameters: dict[str, any] + :vartype parameters: dict[str, JSON] """ _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, + "type": {"required": True}, + "reference_name": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, + "type": {"key": "type", "type": "str"}, + "reference_name": {"key": "referenceName", "type": "str"}, + "parameters": {"key": "parameters", "type": "{object}"}, } - type = "DatasetReference" - def __init__( self, *, + type: Union[str, "_models.DatasetReferenceType"], reference_name: str, - parameters: Optional[Dict[str, Any]] = None, + parameters: Optional[Dict[str, JSON]] = None, **kwargs ): """ - :keyword reference_name: Required. Reference dataset name. + :keyword type: Dataset reference type. Required. "DatasetReference" + :paramtype type: str or ~azure.mgmt.datafactory.models.DatasetReferenceType + :keyword reference_name: Reference dataset name. Required. :paramtype reference_name: str :keyword parameters: Arguments for dataset. - :paramtype parameters: dict[str, any] + :paramtype parameters: dict[str, JSON] """ - super(DatasetReference, self).__init__(**kwargs) + super().__init__(**kwargs) + self.type = type self.reference_name = reference_name self.parameters = parameters @@ -18552,76 +20166,71 @@ class DatasetResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :ivar properties: Required. Dataset properties. + :ivar properties: Dataset properties. Required. :vartype properties: ~azure.mgmt.datafactory.models.Dataset """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "etag": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Dataset'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "etag": {"key": "etag", "type": "str"}, + "properties": {"key": "properties", "type": "Dataset"}, } - def __init__( - self, - *, - properties: "_models.Dataset", - **kwargs - ): + def __init__(self, *, properties: "_models.Dataset", **kwargs): """ - :keyword properties: Required. Dataset properties. + :keyword properties: Dataset properties. Required. :paramtype properties: ~azure.mgmt.datafactory.models.Dataset """ - super(DatasetResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class DatasetSchemaDataElement(msrest.serialization.Model): +class DatasetSchemaDataElement(_serialization.Model): """Columns that define the physical type schema of the dataset. 
:ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar name: Name of the schema column. Type: string (or Expression with resultType string). - :vartype name: any + :vartype name: JSON :ivar type: Type of the schema column. Type: string (or Expression with resultType string). - :vartype type: any + :vartype type: JSON """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "object"}, + "type": {"key": "type", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - name: Optional[Any] = None, - type: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + name: Optional[JSON] = None, + type: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword name: Name of the schema column. Type: string (or Expression with resultType string). - :paramtype name: any + :paramtype name: JSON :keyword type: Type of the schema column. Type: string (or Expression with resultType string). - :paramtype type: any + :paramtype type: JSON """ - super(DatasetSchemaDataElement, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.name = name self.type = type @@ -18634,8 +20243,8 @@ class DataworldLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -18644,47 +20253,47 @@ class DataworldLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar api_token: Required. The api token for the Dataworld source. + :vartype annotations: list[JSON] + :ivar api_token: The api token for the Dataworld source. Required. :vartype api_token: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'api_token': {'required': True}, + "type": {"required": True}, + "api_token": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'api_token': {'key': 'typeProperties.apiToken', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "api_token": {"key": "typeProperties.apiToken", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, api_token: "_models.SecretBase", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - encrypted_credential: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -18692,29 +20301,36 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword api_token: Required. The api token for the Dataworld source. + :paramtype annotations: list[JSON] + :keyword api_token: The api token for the Dataworld source. Required. :paramtype api_token: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :paramtype encrypted_credential: any - """ - super(DataworldLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Dataworld' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Dataworld" # type: str self.api_token = api_token self.encrypted_credential = encrypted_credential -class Db2LinkedService(LinkedService): +class Db2LinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Linked service for DB2 data source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -18723,83 +20339,83 @@ class Db2LinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_string: The connection string. It is mutually exclusive with server, database, authenticationType, userName, packageCollection and certificateCommonName property. Type: string, SecureString or AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype connection_string: JSON :ivar server: Server name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :vartype server: any + :vartype server: JSON :ivar database: Database name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :vartype database: any + :vartype database: JSON :ivar authentication_type: AuthenticationType to be used for connection. It is mutually - exclusive with connectionString property. Known values are: "Basic". + exclusive with connectionString property. "Basic" :vartype authentication_type: str or ~azure.mgmt.datafactory.models.Db2AuthenticationType :ivar username: Username for authentication. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :vartype username: any + :vartype username: JSON :ivar password: Password for authentication. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar package_collection: Under where packages are created when querying database. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :vartype package_collection: any + :vartype package_collection: JSON :ivar certificate_common_name: Certificate Common Name when TLS is enabled. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). 
- :vartype certificate_common_name: any + :vartype certificate_common_name: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'package_collection': {'key': 'typeProperties.packageCollection', 'type': 'object'}, - 'certificate_common_name': {'key': 'typeProperties.certificateCommonName', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "server": {"key": "typeProperties.server", "type": "object"}, + "database": {"key": "typeProperties.database", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "package_collection": {"key": "typeProperties.packageCollection", "type": "object"}, + "certificate_common_name": {"key": "typeProperties.certificateCommonName", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_string: Optional[Any] = None, - server: Optional[Any] = None, - database: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_string: Optional[JSON] = None, + server: Optional[JSON] = None, + database: Optional[JSON] = None, authentication_type: Optional[Union[str, "_models.Db2AuthenticationType"]] = None, - username: Optional[Any] = None, + username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - package_collection: 
Optional[Any] = None, - certificate_common_name: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + package_collection: Optional[JSON] = None, + certificate_common_name: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -18807,39 +20423,46 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_string: The connection string. It is mutually exclusive with server, database, authenticationType, userName, packageCollection and certificateCommonName property. Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype connection_string: JSON :keyword server: Server name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :paramtype server: any + :paramtype server: JSON :keyword database: Database name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :paramtype database: any + :paramtype database: JSON :keyword authentication_type: AuthenticationType to be used for connection. It is mutually - exclusive with connectionString property. Known values are: "Basic". + exclusive with connectionString property. "Basic" :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.Db2AuthenticationType :keyword username: Username for authentication. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :paramtype username: any + :paramtype username: JSON :keyword password: Password for authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword package_collection: Under where packages are created when querying database. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :paramtype package_collection: any + :paramtype package_collection: JSON :keyword certificate_common_name: Certificate Common Name when TLS is enabled. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :paramtype certificate_common_name: any + :paramtype certificate_common_name: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). 
- :paramtype encrypted_credential: any - """ - super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Db2' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Db2" # type: str self.connection_string = connection_string self.server = server self.database = database @@ -18858,211 +20481,230 @@ class Db2Source(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: Database query. Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: Database query. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'Db2Source' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "Db2Source" # type: str self.query = query -class Db2TableDataset(Dataset): +class Db2TableDataset(Dataset): # pylint: disable=too-many-instance-attributes """The Db2 table dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :vartype table_name: any + :vartype table_name: JSON :ivar schema_type_properties_schema: The Db2 schema name. Type: string (or Expression with resultType string). - :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON :ivar table: The Db2 table name. Type: string (or Expression with resultType string). 
- :vartype table: any + :vartype table: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, - schema_type_properties_schema: Optional[Any] = None, - table: Optional[Any] = None, + table_name: Optional[JSON] = None, + schema_type_properties_schema: Optional[JSON] = None, + table: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. 
:paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: This property will be retired. Please consider using schema + table properties instead. - :paramtype table_name: any + :paramtype table_name: JSON :keyword schema_type_properties_schema: The Db2 schema name. Type: string (or Expression with resultType string). - :paramtype schema_type_properties_schema: any + :paramtype schema_type_properties_schema: JSON :keyword table: The Db2 table name. Type: string (or Expression with resultType string). - :paramtype table: any - """ - super(Db2TableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'Db2Table' # type: str + :paramtype table: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "Db2Table" # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table -class DeleteActivity(ExecutionActivity): +class DeleteActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """Delete activity. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -19076,44 +20718,44 @@ class DeleteActivity(ExecutionActivity): :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy :ivar recursive: If true, files or sub-folders under current folder path will be deleted recursively. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype recursive: any + :vartype recursive: JSON :ivar max_concurrent_connections: The max concurrent connections to connect data source at the same time. :vartype max_concurrent_connections: int :ivar enable_logging: Whether to record detailed logs of delete-activity execution. Default value is false. Type: boolean (or Expression with resultType boolean). - :vartype enable_logging: any + :vartype enable_logging: JSON :ivar log_storage_settings: Log storage settings customer need to provide when enableLogging is true. :vartype log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings - :ivar dataset: Required. Delete activity dataset reference. + :ivar dataset: Delete activity dataset reference. Required. 
:vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference :ivar store_settings: Delete activity store settings. :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'max_concurrent_connections': {'minimum': 1}, - 'dataset': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "max_concurrent_connections": {"minimum": 1}, + "dataset": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'recursive': {'key': 'typeProperties.recursive', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'}, - 'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'}, - 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - 'store_settings': {'key': 'typeProperties.storeSettings', 'type': 'StoreReadSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "recursive": {"key": "typeProperties.recursive", "type": "object"}, + "max_concurrent_connections": {"key": "typeProperties.maxConcurrentConnections", "type": "int"}, + "enable_logging": {"key": "typeProperties.enableLogging", "type": "object"}, + "log_storage_settings": {"key": "typeProperties.logStorageSettings", "type": "LogStorageSettings"}, + "dataset": {"key": "typeProperties.dataset", "type": "DatasetReference"}, + "store_settings": {"key": "typeProperties.storeSettings", "type": "StoreReadSettings"}, } def __init__( @@ -19121,15 +20763,15 @@ def __init__( *, name: str, dataset: "_models.DatasetReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, - recursive: Optional[Any] = None, + recursive: Optional[JSON] = None, max_concurrent_connections: Optional[int] = None, - enable_logging: Optional[Any] = None, + enable_logging: Optional[JSON] = None, log_storage_settings: Optional["_models.LogStorageSettings"] = None, store_settings: Optional["_models.StoreReadSettings"] = None, **kwargs @@ -19137,8 +20779,8 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -19152,23 +20794,32 @@ def __init__( :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy :keyword recursive: If true, files or sub-folders under current folder path will be deleted recursively. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype recursive: any + :paramtype recursive: JSON :keyword max_concurrent_connections: The max concurrent connections to connect data source at the same time. :paramtype max_concurrent_connections: int :keyword enable_logging: Whether to record detailed logs of delete-activity execution. Default value is false. Type: boolean (or Expression with resultType boolean). - :paramtype enable_logging: any + :paramtype enable_logging: JSON :keyword log_storage_settings: Log storage settings customer need to provide when enableLogging is true. :paramtype log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings - :keyword dataset: Required. Delete activity dataset reference. + :keyword dataset: Delete activity dataset reference. Required. :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference :keyword store_settings: Delete activity store settings. :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings """ - super(DeleteActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'Delete' # type: str + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "Delete" # type: str self.recursive = recursive self.max_concurrent_connections = max_concurrent_connections self.enable_logging = enable_logging @@ -19177,7 +20828,7 @@ def __init__( self.store_settings = store_settings -class DeleteDataFlowDebugSessionRequest(msrest.serialization.Model): +class DeleteDataFlowDebugSessionRequest(_serialization.Model): """Request body structure for deleting data flow debug session. :ivar session_id: The ID of data flow debug session. @@ -19185,47 +20836,42 @@ class DeleteDataFlowDebugSessionRequest(msrest.serialization.Model): """ _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, + "session_id": {"key": "sessionId", "type": "str"}, } - def __init__( - self, - *, - session_id: Optional[str] = None, - **kwargs - ): + def __init__(self, *, session_id: Optional[str] = None, **kwargs): """ :keyword session_id: The ID of data flow debug session. :paramtype session_id: str """ - super(DeleteDataFlowDebugSessionRequest, self).__init__(**kwargs) + super().__init__(**kwargs) self.session_id = session_id -class DelimitedTextDataset(Dataset): +class DelimitedTextDataset(Dataset): # pylint: disable=too-many-instance-attributes """Delimited text dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. 
Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -19233,100 +20879,100 @@ class DelimitedTextDataset(Dataset): :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation :ivar column_delimiter: The column delimiter. Type: string (or Expression with resultType string). - :vartype column_delimiter: any + :vartype column_delimiter: JSON :ivar row_delimiter: The row delimiter. Type: string (or Expression with resultType string). - :vartype row_delimiter: any + :vartype row_delimiter: JSON :ivar encoding_name: The code page name of the preferred encoding. If miss, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :vartype encoding_name: any + :vartype encoding_name: JSON :ivar compression_codec: The data compressionCodec. Type: string (or Expression with resultType string). - :vartype compression_codec: any + :vartype compression_codec: JSON :ivar compression_level: The data compression method used for DelimitedText. - :vartype compression_level: any + :vartype compression_level: JSON :ivar quote_char: The quote character. Type: string (or Expression with resultType string). - :vartype quote_char: any + :vartype quote_char: JSON :ivar escape_char: The escape character. Type: string (or Expression with resultType string). - :vartype escape_char: any + :vartype escape_char: JSON :ivar first_row_as_header: When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). - :vartype first_row_as_header: any + :vartype first_row_as_header: JSON :ivar null_value: The null value string. Type: string (or Expression with resultType string). 
- :vartype null_value: any + :vartype null_value: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, - 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, - 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, - 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, - 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, - 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, - 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, - 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "location": {"key": "typeProperties.location", "type": "DatasetLocation"}, + "column_delimiter": {"key": "typeProperties.columnDelimiter", "type": "object"}, + "row_delimiter": {"key": "typeProperties.rowDelimiter", "type": "object"}, + "encoding_name": {"key": "typeProperties.encodingName", "type": "object"}, + "compression_codec": {"key": "typeProperties.compressionCodec", "type": "object"}, + "compression_level": {"key": "typeProperties.compressionLevel", "type": "object"}, + "quote_char": {"key": "typeProperties.quoteChar", "type": "object"}, + "escape_char": {"key": "typeProperties.escapeChar", "type": "object"}, + "first_row_as_header": {"key": "typeProperties.firstRowAsHeader", "type": "object"}, + "null_value": {"key": "typeProperties.nullValue", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, location: Optional["_models.DatasetLocation"] = None, - 
column_delimiter: Optional[Any] = None, - row_delimiter: Optional[Any] = None, - encoding_name: Optional[Any] = None, - compression_codec: Optional[Any] = None, - compression_level: Optional[Any] = None, - quote_char: Optional[Any] = None, - escape_char: Optional[Any] = None, - first_row_as_header: Optional[Any] = None, - null_value: Optional[Any] = None, + column_delimiter: Optional[JSON] = None, + row_delimiter: Optional[JSON] = None, + encoding_name: Optional[JSON] = None, + compression_codec: Optional[JSON] = None, + compression_level: Optional[JSON] = None, + quote_char: Optional[JSON] = None, + escape_char: Optional[JSON] = None, + first_row_as_header: Optional[JSON] = None, + null_value: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -19334,35 +20980,45 @@ def __init__( :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation :keyword column_delimiter: The column delimiter. Type: string (or Expression with resultType string). - :paramtype column_delimiter: any + :paramtype column_delimiter: JSON :keyword row_delimiter: The row delimiter. Type: string (or Expression with resultType string). - :paramtype row_delimiter: any + :paramtype row_delimiter: JSON :keyword encoding_name: The code page name of the preferred encoding. If miss, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :paramtype encoding_name: any + :paramtype encoding_name: JSON :keyword compression_codec: The data compressionCodec. Type: string (or Expression with resultType string). - :paramtype compression_codec: any + :paramtype compression_codec: JSON :keyword compression_level: The data compression method used for DelimitedText. - :paramtype compression_level: any + :paramtype compression_level: JSON :keyword quote_char: The quote character. Type: string (or Expression with resultType string). - :paramtype quote_char: any + :paramtype quote_char: JSON :keyword escape_char: The escape character. 
Type: string (or Expression with resultType string). - :paramtype escape_char: any + :paramtype escape_char: JSON :keyword first_row_as_header: When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). - :paramtype first_row_as_header: any + :paramtype first_row_as_header: JSON :keyword null_value: The null value string. Type: string (or Expression with resultType string). - :paramtype null_value: any - """ - super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'DelimitedText' # type: str + :paramtype null_value: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "DelimitedText" # type: str self.location = location self.column_delimiter = column_delimiter self.row_delimiter = row_delimiter @@ -19382,47 +21038,47 @@ class DelimitedTextReadSettings(FormatReadSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The read setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. :vartype type: str :ivar skip_line_count: Indicates the number of non-empty rows to skip when reading data from input files. Type: integer (or Expression with resultType integer). - :vartype skip_line_count: any + :vartype skip_line_count: JSON :ivar compression_properties: Compression settings. :vartype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, - 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "skip_line_count": {"key": "skipLineCount", "type": "object"}, + "compression_properties": {"key": "compressionProperties", "type": "CompressionReadSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - skip_line_count: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + skip_line_count: Optional[JSON] = None, compression_properties: Optional["_models.CompressionReadSettings"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword skip_line_count: Indicates the number of non-empty rows to skip when reading data from input files. Type: integer (or Expression with resultType integer). - :paramtype skip_line_count: any + :paramtype skip_line_count: JSON :keyword compression_properties: Compression settings. 
:paramtype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings """ - super(DelimitedTextReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'DelimitedTextReadSettings' # type: str + super().__init__(additional_properties=additional_properties, **kwargs) + self.type = "DelimitedTextReadSettings" # type: str self.skip_line_count = skip_line_count self.compression_properties = compression_properties @@ -19434,27 +21090,27 @@ class DelimitedTextSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar store_settings: DelimitedText store settings. :vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :ivar format_settings: DelimitedText format settings. 
@@ -19462,32 +21118,32 @@ class DelimitedTextSink(CopySink): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "store_settings": {"key": "storeSettings", "type": "StoreWriteSettings"}, + "format_settings": {"key": "formatSettings", "type": "DelimitedTextWriteSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, store_settings: Optional["_models.StoreWriteSettings"] = None, format_settings: Optional["_models.DelimitedTextWriteSettings"] = None, **kwargs @@ -19495,32 +21151,41 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword store_settings: DelimitedText store settings. :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :keyword format_settings: DelimitedText format settings. :paramtype format_settings: ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings """ - super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'DelimitedTextSink' # type: str + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "DelimitedTextSink" # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -19532,85 +21197,92 @@ class DelimitedTextSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar store_settings: DelimitedText store settings. :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :ivar format_settings: DelimitedText format settings. :vartype format_settings: ~azure.mgmt.datafactory.models.DelimitedTextReadSettings :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "store_settings": {"key": "storeSettings", "type": "StoreReadSettings"}, + "format_settings": {"key": "formatSettings", "type": "DelimitedTextReadSettings"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, store_settings: Optional["_models.StoreReadSettings"] = None, format_settings: Optional["_models.DelimitedTextReadSettings"] = None, - additional_columns: Optional[Any] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword store_settings: DelimitedText store settings. :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :keyword format_settings: DelimitedText format settings. 
:paramtype format_settings: ~azure.mgmt.datafactory.models.DelimitedTextReadSettings :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any - """ - super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'DelimitedTextSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "DelimitedTextSource" # type: str self.store_settings = store_settings self.format_settings = format_settings self.additional_columns = additional_columns @@ -19623,157 +21295,156 @@ class DelimitedTextWriteSettings(FormatWriteSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The write setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The write setting type. Required. :vartype type: str :ivar quote_all_text: Indicates whether string values should always be enclosed with quotes. Type: boolean (or Expression with resultType boolean). - :vartype quote_all_text: any - :ivar file_extension: Required. The file extension used to create the files. Type: string (or - Expression with resultType string). - :vartype file_extension: any + :vartype quote_all_text: JSON + :ivar file_extension: The file extension used to create the files. Type: string (or Expression + with resultType string). Required. + :vartype file_extension: JSON :ivar max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). - :vartype max_rows_per_file: any + :vartype max_rows_per_file: JSON :ivar file_name_prefix: Specifies the file name pattern :code:``_:code:``.:code:`` when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). 
- :vartype file_name_prefix: any + :vartype file_name_prefix: JSON """ _validation = { - 'type': {'required': True}, - 'file_extension': {'required': True}, + "type": {"required": True}, + "file_extension": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, - 'file_extension': {'key': 'fileExtension', 'type': 'object'}, - 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, - 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "quote_all_text": {"key": "quoteAllText", "type": "object"}, + "file_extension": {"key": "fileExtension", "type": "object"}, + "max_rows_per_file": {"key": "maxRowsPerFile", "type": "object"}, + "file_name_prefix": {"key": "fileNamePrefix", "type": "object"}, } def __init__( self, *, - file_extension: Any, - additional_properties: Optional[Dict[str, Any]] = None, - quote_all_text: Optional[Any] = None, - max_rows_per_file: Optional[Any] = None, - file_name_prefix: Optional[Any] = None, + file_extension: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, + quote_all_text: Optional[JSON] = None, + max_rows_per_file: Optional[JSON] = None, + file_name_prefix: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword quote_all_text: Indicates whether string values should always be enclosed with quotes. Type: boolean (or Expression with resultType boolean). - :paramtype quote_all_text: any - :keyword file_extension: Required. The file extension used to create the files. Type: string - (or Expression with resultType string). - :paramtype file_extension: any + :paramtype quote_all_text: JSON + :keyword file_extension: The file extension used to create the files. Type: string (or + Expression with resultType string). Required. + :paramtype file_extension: JSON :keyword max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). - :paramtype max_rows_per_file: any + :paramtype max_rows_per_file: JSON :keyword file_name_prefix: Specifies the file name pattern :code:``_:code:``.:code:`` when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). - :paramtype file_name_prefix: any + :paramtype file_name_prefix: JSON """ - super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'DelimitedTextWriteSettings' # type: str + super().__init__(additional_properties=additional_properties, **kwargs) + self.type = "DelimitedTextWriteSettings" # type: str self.quote_all_text = quote_all_text self.file_extension = file_extension self.max_rows_per_file = max_rows_per_file self.file_name_prefix = file_name_prefix -class DependencyReference(msrest.serialization.Model): +class DependencyReference(_serialization.Model): """Referenced dependency. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SelfDependencyTumblingWindowTriggerReference, TriggerDependencyReference. + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: + SelfDependencyTumblingWindowTriggerReference, TriggerDependencyReference All required parameters must be populated in order to send to Azure. - :ivar type: Required. The type of dependency reference.Constant filled by server. + :ivar type: The type of dependency reference. Required. :vartype type: str """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, + "type": {"key": "type", "type": "str"}, } _subtype_map = { - 'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'} + "type": { + "SelfDependencyTumblingWindowTriggerReference": "SelfDependencyTumblingWindowTriggerReference", + "TriggerDependencyReference": "TriggerDependencyReference", + } } - def __init__( - self, - **kwargs - ): - """ - """ - super(DependencyReference, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.type = None # type: Optional[str] -class DistcpSettings(msrest.serialization.Model): +class DistcpSettings(_serialization.Model): """Distcp settings. All required parameters must be populated in order to send to Azure. - :ivar resource_manager_endpoint: Required. Specifies the Yarn ResourceManager endpoint. Type: - string (or Expression with resultType string). - :vartype resource_manager_endpoint: any - :ivar temp_script_path: Required. Specifies an existing folder path which will be used to store - temp Distcp command script. The script file is generated by ADF and will be removed after Copy - job finished. Type: string (or Expression with resultType string). - :vartype temp_script_path: any + :ivar resource_manager_endpoint: Specifies the Yarn ResourceManager endpoint. Type: string (or + Expression with resultType string). Required. + :vartype resource_manager_endpoint: JSON + :ivar temp_script_path: Specifies an existing folder path which will be used to store temp + Distcp command script. The script file is generated by ADF and will be removed after Copy job + finished. Type: string (or Expression with resultType string). Required. + :vartype temp_script_path: JSON :ivar distcp_options: Specifies the Distcp options. Type: string (or Expression with resultType string). - :vartype distcp_options: any + :vartype distcp_options: JSON """ _validation = { - 'resource_manager_endpoint': {'required': True}, - 'temp_script_path': {'required': True}, + "resource_manager_endpoint": {"required": True}, + "temp_script_path": {"required": True}, } _attribute_map = { - 'resource_manager_endpoint': {'key': 'resourceManagerEndpoint', 'type': 'object'}, - 'temp_script_path': {'key': 'tempScriptPath', 'type': 'object'}, - 'distcp_options': {'key': 'distcpOptions', 'type': 'object'}, + "resource_manager_endpoint": {"key": "resourceManagerEndpoint", "type": "object"}, + "temp_script_path": {"key": "tempScriptPath", "type": "object"}, + "distcp_options": {"key": "distcpOptions", "type": "object"}, } def __init__( self, *, - resource_manager_endpoint: Any, - temp_script_path: Any, - distcp_options: Optional[Any] = None, + resource_manager_endpoint: JSON, + temp_script_path: JSON, + distcp_options: Optional[JSON] = None, **kwargs ): """ - :keyword resource_manager_endpoint: Required. Specifies the Yarn ResourceManager endpoint. - Type: string (or Expression with resultType string). - :paramtype resource_manager_endpoint: any - :keyword temp_script_path: Required. 
Specifies an existing folder path which will be used to - store temp Distcp command script. The script file is generated by ADF and will be removed after - Copy job finished. Type: string (or Expression with resultType string). - :paramtype temp_script_path: any + :keyword resource_manager_endpoint: Specifies the Yarn ResourceManager endpoint. Type: string + (or Expression with resultType string). Required. + :paramtype resource_manager_endpoint: JSON + :keyword temp_script_path: Specifies an existing folder path which will be used to store temp + Distcp command script. The script file is generated by ADF and will be removed after Copy job + finished. Type: string (or Expression with resultType string). Required. + :paramtype temp_script_path: JSON :keyword distcp_options: Specifies the Distcp options. Type: string (or Expression with resultType string). - :paramtype distcp_options: any + :paramtype distcp_options: JSON """ - super(DistcpSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.resource_manager_endpoint = resource_manager_endpoint self.temp_script_path = temp_script_path self.distcp_options = distcp_options @@ -19786,91 +21457,101 @@ class DocumentDbCollectionDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :ivar collection_name: Required. Document Database collection name. Type: string (or Expression - with resultType string). - :vartype collection_name: any + :ivar collection_name: Document Database collection name. Type: string (or Expression with + resultType string). Required. 
+ :vartype collection_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, + "collection_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "collection_name": {"key": "typeProperties.collectionName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - collection_name: Any, - additional_properties: Optional[Dict[str, Any]] = None, + collection_name: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :keyword collection_name: Required. Document Database collection name. Type: string (or - Expression with resultType string). - :paramtype collection_name: any - """ - super(DocumentDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'DocumentDbCollection' # type: str + :keyword collection_name: Document Database collection name. Type: string (or Expression with + resultType string). Required. + :paramtype collection_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "DocumentDbCollection" # type: str self.collection_name = collection_name @@ -19881,97 +21562,106 @@ class DocumentDbCollectionSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar nesting_separator: Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). - :vartype nesting_separator: any + :vartype nesting_separator: JSON :ivar write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. 
- :vartype write_behavior: any + :vartype write_behavior: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "nesting_separator": {"key": "nestingSeparator", "type": "object"}, + "write_behavior": {"key": "writeBehavior", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - nesting_separator: Optional[Any] = None, - write_behavior: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + nesting_separator: Optional[JSON] = None, + write_behavior: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. 
Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword nesting_separator: Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). - :paramtype nesting_separator: any + :paramtype nesting_separator: JSON :keyword write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. - :paramtype write_behavior: any - """ - super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'DocumentDbCollectionSink' # type: str + :paramtype write_behavior: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "DocumentDbCollectionSink" # type: str self.nesting_separator = nesting_separator self.write_behavior = write_behavior @@ -19983,95 +21673,102 @@ class DocumentDbCollectionSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query: Documents query. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON :ivar nesting_separator: Nested properties separator. Type: string (or Expression with resultType string). - :vartype nesting_separator: any + :vartype nesting_separator: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "nesting_separator": {"key": "nestingSeparator", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query: Optional[Any] = None, - nesting_separator: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query: Optional[JSON] = None, + nesting_separator: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query: Documents query. Type: string (or Expression with resultType string). - :paramtype query: any + :paramtype query: JSON :keyword nesting_separator: Nested properties separator. Type: string (or Expression with resultType string). - :paramtype nesting_separator: any + :paramtype nesting_separator: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any - """ - super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'DocumentDbCollectionSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "DocumentDbCollectionSource" # type: str self.query = query self.nesting_separator = nesting_separator self.query_timeout = query_timeout @@ -20085,8 +21782,8 @@ class DrillLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -20095,51 +21792,51 @@ class DrillLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype connection_string: JSON :ivar pwd: The Azure key vault secret reference of password in connection string. :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "pwd": {"key": "typeProperties.pwd", "type": "AzureKeyVaultSecretReference"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_string: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_string: Optional[JSON] = None, pwd: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -20147,19 +21844,26 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype connection_string: JSON :keyword pwd: The Azure key vault secret reference of password in connection string. :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :paramtype encrypted_credential: any - """ - super(DrillLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Drill' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Drill" # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential @@ -20172,238 +21876,251 @@ class DrillSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'DrillSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "DrillSource" # type: str self.query = query -class DrillTableDataset(Dataset): +class DrillTableDataset(Dataset): # pylint: disable=too-many-instance-attributes """Drill server dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :vartype table_name: any + :vartype table_name: JSON :ivar table: The table name of the Drill. Type: string (or Expression with resultType string). - :vartype table: any + :vartype table: JSON :ivar schema_type_properties_schema: The schema name of the Drill. Type: string (or Expression with resultType string). 
- :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, - table: Optional[Any] = None, - schema_type_properties_schema: Optional[Any] = None, + table_name: Optional[JSON] = None, + table: Optional[JSON] = None, + schema_type_properties_schema: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. 
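# Editorial sketch, not part of the patch: the DrillTableDataset docstring above
# notes that `table_name` will be retired in favour of the split schema + table
# properties. A hedged example of the preferred shape; the LinkedServiceReference
# construction (including its `type` discriminator) is assumed from its definition
# elsewhere in this module, and all values are illustrative.
from azure.mgmt.datafactory.models import DrillTableDataset, LinkedServiceReference

drill_dataset = DrillTableDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",      # assumed required discriminator
        reference_name="DrillLinkedService",
    ),
    schema_type_properties_schema="dfs",    # maps to typeProperties.schema
    table="employees",                      # maps to typeProperties.table
)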
:paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: This property will be retired. Please consider using schema + table properties instead. - :paramtype table_name: any + :paramtype table_name: JSON :keyword table: The table name of the Drill. Type: string (or Expression with resultType string). - :paramtype table: any + :paramtype table: JSON :keyword schema_type_properties_schema: The schema name of the Drill. Type: string (or Expression with resultType string). - :paramtype schema_type_properties_schema: any - """ - super(DrillTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'DrillTable' # type: str + :paramtype schema_type_properties_schema: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "DrillTable" # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema -class DWCopyCommandDefaultValue(msrest.serialization.Model): +class DWCopyCommandDefaultValue(_serialization.Model): """Default value. :ivar column_name: Column name. Type: object (or Expression with resultType string). - :vartype column_name: any + :vartype column_name: JSON :ivar default_value: The default value of the column. Type: object (or Expression with resultType string). - :vartype default_value: any + :vartype default_value: JSON """ _attribute_map = { - 'column_name': {'key': 'columnName', 'type': 'object'}, - 'default_value': {'key': 'defaultValue', 'type': 'object'}, + "column_name": {"key": "columnName", "type": "object"}, + "default_value": {"key": "defaultValue", "type": "object"}, } - def __init__( - self, - *, - column_name: Optional[Any] = None, - default_value: Optional[Any] = None, - **kwargs - ): + def __init__(self, *, column_name: Optional[JSON] = None, default_value: Optional[JSON] = None, **kwargs): """ :keyword column_name: Column name. Type: object (or Expression with resultType string). - :paramtype column_name: any + :paramtype column_name: JSON :keyword default_value: The default value of the column. Type: object (or Expression with resultType string). - :paramtype default_value: any + :paramtype default_value: JSON """ - super(DWCopyCommandDefaultValue, self).__init__(**kwargs) + super().__init__(**kwargs) self.column_name = column_name self.default_value = default_value -class DWCopyCommandSettings(msrest.serialization.Model): +class DWCopyCommandSettings(_serialization.Model): """DW Copy Command settings. :ivar default_values: Specifies the default values for each target column in SQL DW. 
The @@ -20418,8 +22135,8 @@ class DWCopyCommandSettings(msrest.serialization.Model): """ _attribute_map = { - 'default_values': {'key': 'defaultValues', 'type': '[DWCopyCommandDefaultValue]'}, - 'additional_options': {'key': 'additionalOptions', 'type': '{str}'}, + "default_values": {"key": "defaultValues", "type": "[DWCopyCommandDefaultValue]"}, + "additional_options": {"key": "additionalOptions", "type": "{str}"}, } def __init__( @@ -20440,20 +22157,20 @@ def __init__( Example: "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" }. :paramtype additional_options: dict[str, str] """ - super(DWCopyCommandSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.default_values = default_values self.additional_options = additional_options -class DynamicsAXLinkedService(LinkedService): +class DynamicsAXLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Dynamics AX linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -20462,74 +22179,74 @@ class DynamicsAXLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar url: Required. The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData - endpoint. - :vartype url: any - :ivar service_principal_id: Required. Specify the application's client ID. Type: string (or - Expression with resultType string). - :vartype service_principal_id: any - :ivar service_principal_key: Required. Specify the application's key. Mark this field as a - SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key - Vault. Type: string (or Expression with resultType string). + :vartype annotations: list[JSON] + :ivar url: The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData endpoint. + Required. + :vartype url: JSON + :ivar service_principal_id: Specify the application's client ID. Type: string (or Expression + with resultType string). Required. + :vartype service_principal_id: JSON + :ivar service_principal_key: Specify the application's key. Mark this field as a SecureString + to store it securely in Data Factory, or reference a secret stored in Azure Key Vault. Type: + string (or Expression with resultType string). Required. :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :ivar tenant: Required. Specify the tenant information (domain name or tenant ID) under which - your application resides. Retrieve it by hovering the mouse in the top-right corner of the - Azure portal. Type: string (or Expression with resultType string). - :vartype tenant: any - :ivar aad_resource_id: Required. Specify the resource you are requesting authorization. Type: - string (or Expression with resultType string). 
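# Editorial sketch, not part of the patch: DWCopyCommandSettings and
# DWCopyCommandDefaultValue (defined just above) carry per-column defaults and
# extra copy-command options for SQL DW; the additional_options values below are
# taken from the docstring example in this hunk, the column values are placeholders.
from azure.mgmt.datafactory.models import DWCopyCommandDefaultValue, DWCopyCommandSettings

copy_command_settings = DWCopyCommandSettings(
    default_values=[
        DWCopyCommandDefaultValue(column_name="LoadDate", default_value="1900-01-01"),
    ],
    additional_options={"MAXERRORS": "1000", "DATEFORMAT": "'ymd'"},
)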
- :vartype aad_resource_id: any + :ivar tenant: Specify the tenant information (domain name or tenant ID) under which your + application resides. Retrieve it by hovering the mouse in the top-right corner of the Azure + portal. Type: string (or Expression with resultType string). Required. + :vartype tenant: JSON + :ivar aad_resource_id: Specify the resource you are requesting authorization. Type: string (or + Expression with resultType string). Required. + :vartype aad_resource_id: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'tenant': {'required': True}, - 'aad_resource_id': {'required': True}, + "type": {"required": True}, + "url": {"required": True}, + "service_principal_id": {"required": True}, + "service_principal_key": {"required": True}, + "tenant": {"required": True}, + "aad_resource_id": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "url": {"key": "typeProperties.url", "type": "object"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, + "tenant": {"key": "typeProperties.tenant", "type": "object"}, + "aad_resource_id": {"key": "typeProperties.aadResourceId", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - url: Any, - service_principal_id: Any, + url: JSON, + service_principal_id: JSON, service_principal_key: "_models.SecretBase", - tenant: Any, - aad_resource_id: Any, - additional_properties: Optional[Dict[str, Any]] = None, + tenant: JSON, + aad_resource_id: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - 
encrypted_credential: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -20537,31 +22254,38 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword url: Required. The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData - endpoint. - :paramtype url: any - :keyword service_principal_id: Required. Specify the application's client ID. Type: string (or - Expression with resultType string). - :paramtype service_principal_id: any - :keyword service_principal_key: Required. Specify the application's key. Mark this field as a + :paramtype annotations: list[JSON] + :keyword url: The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData endpoint. + Required. + :paramtype url: JSON + :keyword service_principal_id: Specify the application's client ID. Type: string (or Expression + with resultType string). Required. + :paramtype service_principal_id: JSON + :keyword service_principal_key: Specify the application's key. Mark this field as a SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key - Vault. Type: string (or Expression with resultType string). + Vault. Type: string (or Expression with resultType string). Required. :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :keyword tenant: Required. Specify the tenant information (domain name or tenant ID) under - which your application resides. Retrieve it by hovering the mouse in the top-right corner of - the Azure portal. Type: string (or Expression with resultType string). - :paramtype tenant: any - :keyword aad_resource_id: Required. Specify the resource you are requesting authorization. - Type: string (or Expression with resultType string). - :paramtype aad_resource_id: any + :keyword tenant: Specify the tenant information (domain name or tenant ID) under which your + application resides. Retrieve it by hovering the mouse in the top-right corner of the Azure + portal. Type: string (or Expression with resultType string). Required. + :paramtype tenant: JSON + :keyword aad_resource_id: Specify the resource you are requesting authorization. Type: string + (or Expression with resultType string). Required. + :paramtype aad_resource_id: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :paramtype encrypted_credential: any - """ - super(DynamicsAXLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'DynamicsAX' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "DynamicsAX" # type: str self.url = url self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key @@ -20577,91 +22301,101 @@ class DynamicsAXResourceDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :ivar path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression - with resultType string). - :vartype path: any + :ivar path: The path of the Dynamics AX OData entity. Type: string (or Expression with + resultType string). Required. 
+ :vartype path: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'path': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, + "path": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "path": {"key": "typeProperties.path", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - path: Any, - additional_properties: Optional[Dict[str, Any]] = None, + path: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :keyword path: Required. 
The path of the Dynamics AX OData entity. Type: string (or Expression - with resultType string). - :paramtype path: any - """ - super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'DynamicsAXResource' # type: str + :keyword path: The path of the Dynamics AX OData entity. Type: string (or Expression with + resultType string). Required. + :paramtype path: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "DynamicsAXResource" # type: str self.path = path @@ -20672,101 +22406,110 @@ class DynamicsAXSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
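# Editorial sketch, not part of the patch: the Dynamics AX linked service above
# requires a service-principal secret plus tenant/resource information, and the
# resource dataset points at an OData entity path. All values are placeholders;
# the LinkedServiceReference kwargs are assumed from its definition elsewhere
# in this module.
from azure.mgmt.datafactory.models import (
    DynamicsAXLinkedService,
    DynamicsAXResourceDataset,
    LinkedServiceReference,
    SecureString,
)

ax_linked_service = DynamicsAXLinkedService(
    url="https://contoso.operations.dynamics.com/data",
    service_principal_id="<app-client-id>",
    service_principal_key=SecureString(value="<app-secret>"),
    tenant="contoso.onmicrosoft.com",
    aad_resource_id="https://contoso.operations.dynamics.com",
)

ax_entity = DynamicsAXResourceDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",  # assumed required discriminator
        reference_name="DynamicsAXLinkedService",
    ),
    path="Customers",  # the OData entity path required by this dataset
)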
- :vartype http_request_timeout: any + :vartype http_request_timeout: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "http_request_timeout": {"key": "httpRequestTimeout", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, - http_request_timeout: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, + http_request_timeout: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any + :paramtype query: JSON :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype http_request_timeout: any - """ - super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'DynamicsAXSource' # type: str + :paramtype http_request_timeout: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "DynamicsAXSource" # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -20778,102 +22521,112 @@ class DynamicsCrmEntityDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar entity_name: The logical name of the entity. Type: string (or Expression with resultType string). 
- :vartype entity_name: any + :vartype entity_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "entity_name": {"key": "typeProperties.entityName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - entity_name: Optional[Any] = None, + entity_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword entity_name: The logical name of the entity. Type: string (or Expression with resultType string). - :paramtype entity_name: any - """ - super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'DynamicsCrmEntity' # type: str + :paramtype entity_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "DynamicsCrmEntity" # type: str self.entity_name = entity_name -class DynamicsCrmLinkedService(LinkedService): +class DynamicsCrmLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Dynamics CRM linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -20882,42 +22635,42 @@ class DynamicsCrmLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' for - Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: string - (or Expression with resultType string). - :vartype deployment_type: any + :vartype annotations: list[JSON] + :ivar deployment_type: The deployment type of the Dynamics CRM instance. 'Online' for Dynamics + CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: string (or + Expression with resultType string). Required. + :vartype deployment_type: JSON :ivar host_name: The host name of the on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :vartype host_name: any + :vartype host_name: JSON :ivar port: The port of on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype port: any + :vartype port: JSON :ivar service_uri: The URL to the Microsoft Dynamics CRM server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). - :vartype service_uri: any + :vartype service_uri: JSON :ivar organization_name: The organization name of the Dynamics CRM instance. The property is required for on-prem and required for online when there are more than one Dynamics CRM instances associated with the user. Type: string (or Expression with resultType string). 
- :vartype organization_name: any - :ivar authentication_type: Required. The authentication type to connect to Dynamics CRM server. + :vartype organization_name: JSON + :ivar authentication_type: The authentication type to connect to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with - resultType string). - :vartype authentication_type: any + resultType string). Required. + :vartype authentication_type: JSON :ivar username: User name to access the Dynamics CRM instance. Type: string (or Expression with resultType string). - :vartype username: any + :vartype username: JSON :ivar password: Password to access the Dynamics CRM instance. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). - :vartype service_principal_id: any + :vartype service_principal_id: JSON :ivar service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). - :vartype service_principal_credential_type: any + :vartype service_principal_credential_type: JSON :ivar service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -20927,62 +22680,62 @@ class DynamicsCrmLinkedService(LinkedService): :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, + "type": {"required": True}, + "deployment_type": {"required": True}, + "authentication_type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, - 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "deployment_type": {"key": "typeProperties.deploymentType", "type": "object"}, + "host_name": {"key": "typeProperties.hostName", "type": "object"}, + "port": {"key": "typeProperties.port", "type": "object"}, + "service_uri": {"key": "typeProperties.serviceUri", "type": "object"}, + "organization_name": {"key": "typeProperties.organizationName", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "object"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "service_principal_credential_type": {"key": "typeProperties.servicePrincipalCredentialType", "type": "object"}, + "service_principal_credential": {"key": "typeProperties.servicePrincipalCredential", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - deployment_type: Any, - authentication_type: Any, - additional_properties: Optional[Dict[str, Any]] = None, + deployment_type: JSON, + authentication_type: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: 
Optional[List[Any]] = None, - host_name: Optional[Any] = None, - port: Optional[Any] = None, - service_uri: Optional[Any] = None, - organization_name: Optional[Any] = None, - username: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + host_name: Optional[JSON] = None, + port: Optional[JSON] = None, + service_uri: Optional[JSON] = None, + organization_name: Optional[JSON] = None, + username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - service_principal_id: Optional[Any] = None, - service_principal_credential_type: Optional[Any] = None, + service_principal_id: Optional[JSON] = None, + service_principal_credential_type: Optional[JSON] = None, service_principal_credential: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -20990,43 +22743,43 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' - for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: - string (or Expression with resultType string). - :paramtype deployment_type: any + :paramtype annotations: list[JSON] + :keyword deployment_type: The deployment type of the Dynamics CRM instance. 'Online' for + Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: string + (or Expression with resultType string). Required. + :paramtype deployment_type: JSON :keyword host_name: The host name of the on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :paramtype host_name: any + :paramtype host_name: JSON :keyword port: The port of on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype port: any + :paramtype port: JSON :keyword service_uri: The URL to the Microsoft Dynamics CRM server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). - :paramtype service_uri: any + :paramtype service_uri: JSON :keyword organization_name: The organization name of the Dynamics CRM instance. The property is required for on-prem and required for online when there are more than one Dynamics CRM instances associated with the user. Type: string (or Expression with resultType string). - :paramtype organization_name: any - :keyword authentication_type: Required. The authentication type to connect to Dynamics CRM - server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, - 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). 
- :paramtype authentication_type: any + :paramtype organization_name: JSON + :keyword authentication_type: The authentication type to connect to Dynamics CRM server. + 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' + for Server-To-Server authentication in online scenario. Type: string (or Expression with + resultType string). Required. + :paramtype authentication_type: JSON :keyword username: User name to access the Dynamics CRM instance. Type: string (or Expression with resultType string). - :paramtype username: any + :paramtype username: JSON :keyword password: Password to access the Dynamics CRM instance. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). - :paramtype service_principal_id: any + :paramtype service_principal_id: JSON :keyword service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). - :paramtype service_principal_credential_type: any + :paramtype service_principal_credential_type: JSON :keyword service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -21036,10 +22789,17 @@ def __init__( :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(DynamicsCrmLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'DynamicsCrm' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "DynamicsCrm" # type: str self.deployment_type = deployment_type self.host_name = host_name self.port = port @@ -21054,115 +22814,122 @@ def __init__( self.encrypted_credential = encrypted_credential -class DynamicsCrmSink(CopySink): +class DynamicsCrmSink(CopySink): # pylint: disable=too-many-instance-attributes """A copy activity Dynamics CRM sink. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any - :ivar write_behavior: Required. The write behavior for the operation. Known values are: - "Upsert". + :vartype disable_metrics_collection: JSON + :ivar write_behavior: The write behavior for the operation. Required. "Upsert" :vartype write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior :ivar ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype ignore_null_values: any + :vartype ignore_null_values: JSON :ivar alternate_key_name: The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). - :vartype alternate_key_name: any + :vartype alternate_key_name: JSON """ _validation = { - 'type': {'required': True}, - 'write_behavior': {'required': True}, + "type": {"required": True}, + "write_behavior": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "write_behavior": {"key": "writeBehavior", "type": "str"}, + "ignore_null_values": {"key": "ignoreNullValues", "type": "object"}, + "alternate_key_name": {"key": "alternateKeyName", "type": "object"}, } def __init__( self, *, write_behavior: Union[str, "_models.DynamicsSinkWriteBehavior"], - additional_properties: Optional[Dict[str, Any]] = 
None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - ignore_null_values: Optional[Any] = None, - alternate_key_name: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + ignore_null_values: Optional[JSON] = None, + alternate_key_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any - :keyword write_behavior: Required. The write behavior for the operation. Known values are: - "Upsert". + :paramtype disable_metrics_collection: JSON + :keyword write_behavior: The write behavior for the operation. Required. "Upsert" :paramtype write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior :keyword ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype ignore_null_values: any + :paramtype ignore_null_values: JSON :keyword alternate_key_name: The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). 
- :paramtype alternate_key_name: any - """ - super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'DynamicsCrmSink' # type: str + :paramtype alternate_key_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "DynamicsCrmSink" # type: str self.write_behavior = write_behavior self.ignore_null_values = ignore_null_values self.alternate_key_name = alternate_key_name @@ -21175,81 +22942,88 @@ class DynamicsCrmSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query: Optional[Any] = None, - additional_columns: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). - :paramtype query: any + :paramtype query: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :paramtype additional_columns: any - """ - super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'DynamicsCrmSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "DynamicsCrmSource" # type: str self.query = query self.additional_columns = additional_columns @@ -21261,102 +23035,112 @@ class DynamicsEntityDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar entity_name: The logical name of the entity. Type: string (or Expression with resultType string). 
- :vartype entity_name: any + :vartype entity_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "entity_name": {"key": "typeProperties.entityName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - entity_name: Optional[Any] = None, + entity_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword entity_name: The logical name of the entity. Type: string (or Expression with resultType string). - :paramtype entity_name: any - """ - super(DynamicsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'DynamicsEntity' # type: str + :paramtype entity_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "DynamicsEntity" # type: str self.entity_name = entity_name -class DynamicsLinkedService(LinkedService): +class DynamicsLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Dynamics linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -21365,42 +23149,42 @@ class DynamicsLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for - Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or - Expression with resultType string). - :vartype deployment_type: any + :vartype annotations: list[JSON] + :ivar deployment_type: The deployment type of the Dynamics instance. 'Online' for Dynamics + Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or Expression + with resultType string). Required. + :vartype deployment_type: JSON :ivar host_name: The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :vartype host_name: any + :vartype host_name: JSON :ivar port: The port of on-premises Dynamics server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype port: any + :vartype port: JSON :ivar service_uri: The URL to the Microsoft Dynamics server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). - :vartype service_uri: any + :vartype service_uri: JSON :ivar organization_name: The organization name of the Dynamics instance. The property is required for on-prem and required for online when there are more than one Dynamics instances associated with the user. Type: string (or Expression with resultType string). - :vartype organization_name: any - :ivar authentication_type: Required. 
The authentication type to connect to Dynamics server. - 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' - for Server-To-Server authentication in online scenario. Type: string (or Expression with - resultType string). - :vartype authentication_type: any + :vartype organization_name: JSON + :ivar authentication_type: The authentication type to connect to Dynamics server. 'Office365' + for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for + Server-To-Server authentication in online scenario. Type: string (or Expression with resultType + string). Required. + :vartype authentication_type: JSON :ivar username: User name to access the Dynamics instance. Type: string (or Expression with resultType string). - :vartype username: any + :vartype username: JSON :ivar password: Password to access the Dynamics instance. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). - :vartype service_principal_id: any + :vartype service_principal_id: JSON :ivar service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). - :vartype service_principal_credential_type: any + :vartype service_principal_credential_type: JSON :ivar service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -21410,66 +23194,66 @@ class DynamicsLinkedService(LinkedService): :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON :ivar credential: The credential reference containing authentication information. 
:vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { - 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, + "type": {"required": True}, + "deployment_type": {"required": True}, + "authentication_type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, - 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "deployment_type": {"key": "typeProperties.deploymentType", "type": "object"}, + "host_name": {"key": "typeProperties.hostName", "type": "object"}, + "port": {"key": "typeProperties.port", "type": "object"}, + "service_uri": {"key": "typeProperties.serviceUri", "type": "object"}, + "organization_name": {"key": "typeProperties.organizationName", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "object"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "service_principal_credential_type": {"key": "typeProperties.servicePrincipalCredentialType", "type": "object"}, + "service_principal_credential": {"key": "typeProperties.servicePrincipalCredential", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, } def __init__( self, *, - deployment_type: Any, - authentication_type: Any, - additional_properties: Optional[Dict[str, Any]] = None, + deployment_type: JSON, + authentication_type: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: 
Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - host_name: Optional[Any] = None, - port: Optional[Any] = None, - service_uri: Optional[Any] = None, - organization_name: Optional[Any] = None, - username: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + host_name: Optional[JSON] = None, + port: Optional[JSON] = None, + service_uri: Optional[JSON] = None, + organization_name: Optional[JSON] = None, + username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - service_principal_id: Optional[Any] = None, - service_principal_credential_type: Optional[Any] = None, + service_principal_id: Optional[JSON] = None, + service_principal_credential_type: Optional[JSON] = None, service_principal_credential: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, credential: Optional["_models.CredentialReference"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -21477,42 +23261,42 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for - Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or - Expression with resultType string). - :paramtype deployment_type: any + :paramtype annotations: list[JSON] + :keyword deployment_type: The deployment type of the Dynamics instance. 'Online' for Dynamics + Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or Expression + with resultType string). Required. + :paramtype deployment_type: JSON :keyword host_name: The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :paramtype host_name: any + :paramtype host_name: JSON :keyword port: The port of on-premises Dynamics server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype port: any + :paramtype port: JSON :keyword service_uri: The URL to the Microsoft Dynamics server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). - :paramtype service_uri: any + :paramtype service_uri: JSON :keyword organization_name: The organization name of the Dynamics instance. The property is required for on-prem and required for online when there are more than one Dynamics instances associated with the user. Type: string (or Expression with resultType string). - :paramtype organization_name: any - :keyword authentication_type: Required. The authentication type to connect to Dynamics server. 
+ :paramtype organization_name: JSON + :keyword authentication_type: The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with - resultType string). - :paramtype authentication_type: any + resultType string). Required. + :paramtype authentication_type: JSON :keyword username: User name to access the Dynamics instance. Type: string (or Expression with resultType string). - :paramtype username: any + :paramtype username: JSON :keyword password: Password to access the Dynamics instance. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). - :paramtype service_principal_id: any + :paramtype service_principal_id: JSON :keyword service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). - :paramtype service_principal_credential_type: any + :paramtype service_principal_credential_type: JSON :keyword service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -21522,12 +23306,19 @@ def __init__( :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any + :paramtype encrypted_credential: JSON :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ - super(DynamicsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Dynamics' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Dynamics" # type: str self.deployment_type = deployment_type self.host_name = host_name self.port = port @@ -21543,115 +23334,122 @@ def __init__( self.credential = credential -class DynamicsSink(CopySink): +class DynamicsSink(CopySink): # pylint: disable=too-many-instance-attributes """A copy activity Dynamics sink. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any - :ivar write_behavior: Required. The write behavior for the operation. Known values are: - "Upsert". + :vartype disable_metrics_collection: JSON + :ivar write_behavior: The write behavior for the operation. Required. "Upsert" :vartype write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior :ivar ignore_null_values: The flag indicating whether ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype ignore_null_values: any + :vartype ignore_null_values: JSON :ivar alternate_key_name: The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). - :vartype alternate_key_name: any + :vartype alternate_key_name: JSON """ _validation = { - 'type': {'required': True}, - 'write_behavior': {'required': True}, + "type": {"required": True}, + "write_behavior": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "write_behavior": {"key": "writeBehavior", "type": "str"}, + "ignore_null_values": {"key": "ignoreNullValues", "type": "object"}, + "alternate_key_name": {"key": "alternateKeyName", "type": "object"}, } def __init__( self, *, 
write_behavior: Union[str, "_models.DynamicsSinkWriteBehavior"], - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - ignore_null_values: Optional[Any] = None, - alternate_key_name: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + ignore_null_values: Optional[JSON] = None, + alternate_key_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any - :keyword write_behavior: Required. The write behavior for the operation. Known values are: - "Upsert". + :paramtype disable_metrics_collection: JSON + :keyword write_behavior: The write behavior for the operation. Required. "Upsert" :paramtype write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior :keyword ignore_null_values: The flag indicating whether ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype ignore_null_values: any + :paramtype ignore_null_values: JSON :keyword alternate_key_name: The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). 
- :paramtype alternate_key_name: any - """ - super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'DynamicsSink' # type: str + :paramtype alternate_key_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "DynamicsSink" # type: str self.write_behavior = write_behavior self.ignore_null_values = ignore_null_values self.alternate_key_name = alternate_key_name @@ -21664,94 +23462,101 @@ class DynamicsSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query: FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query: Optional[Any] = None, - additional_columns: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query: FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). - :paramtype query: any + :paramtype query: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :paramtype additional_columns: any - """ - super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'DynamicsSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "DynamicsSource" # type: str self.query = query self.additional_columns = additional_columns -class EloquaLinkedService(LinkedService): +class EloquaLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Eloqua server linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -21760,73 +23565,73 @@ class EloquaLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar endpoint: Required. The endpoint of the Eloqua server. (i.e. eloqua.example.com). - :vartype endpoint: any - :ivar username: Required. The site name and user name of your Eloqua account in the form: - sitename/username. (i.e. Eloqua/Alice). - :vartype username: any + :vartype annotations: list[JSON] + :ivar endpoint: The endpoint of the Eloqua server. (i.e. eloqua.example.com). Required. + :vartype endpoint: JSON + :ivar username: The site name and user name of your Eloqua account in the form: + sitename/username. (i.e. Eloqua/Alice). Required. + :vartype username: JSON :ivar password: The password corresponding to the user name. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :vartype use_encrypted_endpoints: any + :vartype use_encrypted_endpoints: JSON :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :vartype use_host_verification: any + :vartype use_host_verification: JSON :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :vartype use_peer_verification: any + :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'username': {'required': True}, + "type": {"required": True}, + "endpoint": {"required": True}, + "username": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "endpoint": {"key": "typeProperties.endpoint", "type": "object"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, + "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, + "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - endpoint: Any, - username: Any, - additional_properties: Optional[Dict[str, Any]] = None, + endpoint: JSON, + username: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, password: Optional["_models.SecretBase"] = None, - use_encrypted_endpoints: Optional[Any] = None, - use_host_verification: Optional[Any] = None, - use_peer_verification: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + use_encrypted_endpoints: Optional[JSON] = None, + use_host_verification: Optional[JSON] = None, + use_peer_verification: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
@@ -21834,31 +23639,38 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword endpoint: Required. The endpoint of the Eloqua server. (i.e. eloqua.example.com). - :paramtype endpoint: any - :keyword username: Required. The site name and user name of your Eloqua account in the form: - sitename/username. (i.e. Eloqua/Alice). - :paramtype username: any + :paramtype annotations: list[JSON] + :keyword endpoint: The endpoint of the Eloqua server. (i.e. eloqua.example.com). Required. + :paramtype endpoint: JSON + :keyword username: The site name and user name of your Eloqua account in the form: + sitename/username. (i.e. Eloqua/Alice). Required. + :paramtype username: JSON :keyword password: The password corresponding to the user name. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :paramtype use_encrypted_endpoints: any + :paramtype use_encrypted_endpoints: JSON :keyword use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :paramtype use_host_verification: any + :paramtype use_host_verification: JSON :keyword use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :paramtype use_peer_verification: any + :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(EloquaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Eloqua' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Eloqua" # type: str self.endpoint = endpoint self.username = username self.password = password @@ -21875,88 +23687,98 @@ class EloquaObjectDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. 
+ :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. Type: string (or Expression with resultType string). - :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
- :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(EloquaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'EloquaObject' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "EloquaObject" # type: str self.table_name = table_name @@ -21967,101 +23789,110 @@ class EloquaSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'EloquaSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "EloquaSource" # type: str self.query = query -class EncryptionConfiguration(msrest.serialization.Model): +class EncryptionConfiguration(_serialization.Model): """Definition of CMK for the factory. All required parameters must be populated in order to send to Azure. - :ivar key_name: Required. The name of the key in Azure Key Vault to use as Customer Managed - Key. + :ivar key_name: The name of the key in Azure Key Vault to use as Customer Managed Key. + Required. :vartype key_name: str - :ivar vault_base_url: Required. The url of the Azure Key Vault used for CMK. + :ivar vault_base_url: The url of the Azure Key Vault used for CMK. Required. :vartype vault_base_url: str :ivar key_version: The version of the key used for CMK. If not provided, latest version will be used. @@ -22072,15 +23903,15 @@ class EncryptionConfiguration(msrest.serialization.Model): """ _validation = { - 'key_name': {'required': True}, - 'vault_base_url': {'required': True}, + "key_name": {"required": True}, + "vault_base_url": {"required": True}, } _attribute_map = { - 'key_name': {'key': 'keyName', 'type': 'str'}, - 'vault_base_url': {'key': 'vaultBaseUrl', 'type': 'str'}, - 'key_version': {'key': 'keyVersion', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'CMKIdentityDefinition'}, + "key_name": {"key": "keyName", "type": "str"}, + "vault_base_url": {"key": "vaultBaseUrl", "type": "str"}, + "key_version": {"key": "keyVersion", "type": "str"}, + "identity": {"key": "identity", "type": "CMKIdentityDefinition"}, } def __init__( @@ -22093,10 +23924,10 @@ def __init__( **kwargs ): """ - :keyword key_name: Required. The name of the key in Azure Key Vault to use as Customer Managed - Key. + :keyword key_name: The name of the key in Azure Key Vault to use as Customer Managed Key. + Required. :paramtype key_name: str - :keyword vault_base_url: Required. The url of the Azure Key Vault used for CMK. + :keyword vault_base_url: The url of the Azure Key Vault used for CMK. Required. :paramtype vault_base_url: str :keyword key_version: The version of the key used for CMK. If not provided, latest version will be used. @@ -22105,26 +23936,26 @@ def __init__( not provided Managed Service Identity will be used. 
:paramtype identity: ~azure.mgmt.datafactory.models.CMKIdentityDefinition """ - super(EncryptionConfiguration, self).__init__(**kwargs) + super().__init__(**kwargs) self.key_name = key_name self.vault_base_url = vault_base_url self.key_version = key_version self.identity = identity -class EntityReference(msrest.serialization.Model): +class EntityReference(_serialization.Model): """The entity reference. - :ivar type: The type of this referenced entity. Known values are: - "IntegrationRuntimeReference", "LinkedServiceReference". + :ivar type: The type of this referenced entity. Known values are: "IntegrationRuntimeReference" + and "LinkedServiceReference". :vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType :ivar reference_name: The name of this referenced entity. :vartype reference_name: str """ _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, + "type": {"key": "type", "type": "str"}, + "reference_name": {"key": "referenceName", "type": "str"}, } def __init__( @@ -22136,12 +23967,12 @@ def __init__( ): """ :keyword type: The type of this referenced entity. Known values are: - "IntegrationRuntimeReference", "LinkedServiceReference". + "IntegrationRuntimeReference" and "LinkedServiceReference". :paramtype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType :keyword reference_name: The name of this referenced entity. :paramtype reference_name: str """ - super(EntityReference, self).__init__(**kwargs) + super().__init__(**kwargs) self.type = type self.reference_name = reference_name @@ -22151,69 +23982,63 @@ class EnvironmentVariableSetup(CustomSetupBase): All required parameters must be populated in order to send to Azure. - :ivar type: Required. The type of custom setup.Constant filled by server. + :ivar type: The type of custom setup. Required. :vartype type: str - :ivar variable_name: Required. The name of the environment variable. + :ivar variable_name: The name of the environment variable. Required. :vartype variable_name: str - :ivar variable_value: Required. The value of the environment variable. + :ivar variable_value: The value of the environment variable. Required. :vartype variable_value: str """ _validation = { - 'type': {'required': True}, - 'variable_name': {'required': True}, - 'variable_value': {'required': True}, + "type": {"required": True}, + "variable_name": {"required": True}, + "variable_value": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'variable_value': {'key': 'typeProperties.variableValue', 'type': 'str'}, + "type": {"key": "type", "type": "str"}, + "variable_name": {"key": "typeProperties.variableName", "type": "str"}, + "variable_value": {"key": "typeProperties.variableValue", "type": "str"}, } - def __init__( - self, - *, - variable_name: str, - variable_value: str, - **kwargs - ): + def __init__(self, *, variable_name: str, variable_value: str, **kwargs): """ - :keyword variable_name: Required. The name of the environment variable. + :keyword variable_name: The name of the environment variable. Required. :paramtype variable_name: str - :keyword variable_value: Required. The value of the environment variable. + :keyword variable_value: The value of the environment variable. Required. 
:paramtype variable_value: str """ - super(EnvironmentVariableSetup, self).__init__(**kwargs) - self.type = 'EnvironmentVariableSetup' # type: str + super().__init__(**kwargs) + self.type = "EnvironmentVariableSetup" # type: str self.variable_name = variable_name self.variable_value = variable_value -class ExcelDataset(Dataset): +class ExcelDataset(Dataset): # pylint: disable=too-many-instance-attributes """Excel dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -22221,85 +24046,85 @@ class ExcelDataset(Dataset): :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation :ivar sheet_name: The sheet name of excel file. Type: string (or Expression with resultType string). - :vartype sheet_name: any + :vartype sheet_name: JSON :ivar sheet_index: The sheet index of excel file and default value is 0. Type: integer (or Expression with resultType integer). - :vartype sheet_index: any + :vartype sheet_index: JSON :ivar range: The partial data of one sheet. Type: string (or Expression with resultType string). - :vartype range: any + :vartype range: JSON :ivar first_row_as_header: When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). - :vartype first_row_as_header: any + :vartype first_row_as_header: JSON :ivar compression: The data compression method used for the json dataset. :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression :ivar null_value: The null value string. Type: string (or Expression with resultType string). 
- :vartype null_value: any + :vartype null_value: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'sheet_name': {'key': 'typeProperties.sheetName', 'type': 'object'}, - 'sheet_index': {'key': 'typeProperties.sheetIndex', 'type': 'object'}, - 'range': {'key': 'typeProperties.range', 'type': 'object'}, - 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "location": {"key": "typeProperties.location", "type": "DatasetLocation"}, + "sheet_name": {"key": "typeProperties.sheetName", "type": "object"}, + "sheet_index": {"key": "typeProperties.sheetIndex", "type": "object"}, + "range": {"key": "typeProperties.range", "type": "object"}, + "first_row_as_header": {"key": "typeProperties.firstRowAsHeader", "type": "object"}, + "compression": {"key": "typeProperties.compression", "type": "DatasetCompression"}, + "null_value": {"key": "typeProperties.nullValue", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, location: Optional["_models.DatasetLocation"] = None, - sheet_name: Optional[Any] = None, - sheet_index: Optional[Any] = None, - range: Optional[Any] = None, - first_row_as_header: Optional[Any] = None, + sheet_name: Optional[JSON] = None, + sheet_index: Optional[JSON] = None, + range: Optional[JSON] = None, + first_row_as_header: Optional[JSON] = None, compression: Optional["_models.DatasetCompression"] = None, - null_value: Optional[Any] = None, + null_value: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are 
deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -22307,25 +24132,35 @@ def __init__( :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation :keyword sheet_name: The sheet name of excel file. Type: string (or Expression with resultType string). - :paramtype sheet_name: any + :paramtype sheet_name: JSON :keyword sheet_index: The sheet index of excel file and default value is 0. Type: integer (or Expression with resultType integer). - :paramtype sheet_index: any + :paramtype sheet_index: JSON :keyword range: The partial data of one sheet. Type: string (or Expression with resultType string). - :paramtype range: any + :paramtype range: JSON :keyword first_row_as_header: When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). - :paramtype first_row_as_header: any + :paramtype first_row_as_header: JSON :keyword compression: The data compression method used for the json dataset. :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression :keyword null_value: The null value string. Type: string (or Expression with resultType string). - :paramtype null_value: any - """ - super(ExcelDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'Excel' # type: str + :paramtype null_value: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "Excel" # type: str self.location = location self.sheet_name = sheet_name self.sheet_index = sheet_index @@ -22342,94 +24177,101 @@ class ExcelSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. 
+ :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar store_settings: Excel store settings. :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "store_settings": {"key": "storeSettings", "type": "StoreReadSettings"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, store_settings: Optional["_models.StoreReadSettings"] = None, - additional_columns: Optional[Any] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. 
Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword store_settings: Excel store settings. :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any - """ - super(ExcelSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'ExcelSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "ExcelSource" # type: str self.store_settings = store_settings self.additional_columns = additional_columns -class ExecuteDataFlowActivity(ExecutionActivity): +class ExecuteDataFlowActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """Execute data flow activity. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -22441,7 +24283,7 @@ class ExecuteDataFlowActivity(ExecutionActivity): :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar policy: Activity policy. :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :ivar data_flow: Required. Data flow reference. + :ivar data_flow: Data flow reference. Required. :vartype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference :ivar staging: Staging info for execute data flow activity. :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo @@ -22451,42 +24293,42 @@ class ExecuteDataFlowActivity(ExecutionActivity): :vartype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :ivar trace_level: Trace level setting used for data flow monitoring output. 
Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). - :vartype trace_level: any + :vartype trace_level: JSON :ivar continue_on_error: Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). - :vartype continue_on_error: any + :vartype continue_on_error: JSON :ivar run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). - :vartype run_concurrently: any + :vartype run_concurrently: JSON :ivar source_staging_concurrency: Specify number of parallel staging for sources applicable to the sink. Type: integer (or Expression with resultType integer). - :vartype source_staging_concurrency: any + :vartype source_staging_concurrency: JSON """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'data_flow': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "data_flow": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'data_flow': {'key': 'typeProperties.dataFlow', 'type': 'DataFlowReference'}, - 'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'}, - 'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'}, - 'compute': {'key': 'typeProperties.compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'}, - 'trace_level': {'key': 'typeProperties.traceLevel', 'type': 'object'}, - 'continue_on_error': {'key': 'typeProperties.continueOnError', 'type': 'object'}, - 'run_concurrently': {'key': 'typeProperties.runConcurrently', 'type': 'object'}, - 'source_staging_concurrency': {'key': 'typeProperties.sourceStagingConcurrency', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "data_flow": {"key": "typeProperties.dataFlow", "type": "DataFlowReference"}, + "staging": {"key": "typeProperties.staging", "type": "DataFlowStagingInfo"}, + "integration_runtime": {"key": "typeProperties.integrationRuntime", "type": "IntegrationRuntimeReference"}, + "compute": {"key": "typeProperties.compute", "type": "ExecuteDataFlowActivityTypePropertiesCompute"}, + "trace_level": {"key": "typeProperties.traceLevel", "type": "object"}, + "continue_on_error": {"key": "typeProperties.continueOnError", "type": "object"}, + "run_concurrently": {"key": "typeProperties.runConcurrently", "type": "object"}, + "source_staging_concurrency": {"key": "typeProperties.sourceStagingConcurrency", 
"type": "object"}, } def __init__( @@ -22494,7 +24336,7 @@ def __init__( *, name: str, data_flow: "_models.DataFlowReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, @@ -22503,17 +24345,17 @@ def __init__( staging: Optional["_models.DataFlowStagingInfo"] = None, integration_runtime: Optional["_models.IntegrationRuntimeReference"] = None, compute: Optional["_models.ExecuteDataFlowActivityTypePropertiesCompute"] = None, - trace_level: Optional[Any] = None, - continue_on_error: Optional[Any] = None, - run_concurrently: Optional[Any] = None, - source_staging_concurrency: Optional[Any] = None, + trace_level: Optional[JSON] = None, + continue_on_error: Optional[JSON] = None, + run_concurrently: Optional[JSON] = None, + source_staging_concurrency: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -22525,7 +24367,7 @@ def __init__( :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword policy: Activity policy. :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :keyword data_flow: Required. Data flow reference. + :keyword data_flow: Data flow reference. Required. :paramtype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference :keyword staging: Staging info for execute data flow activity. :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo @@ -22535,20 +24377,29 @@ def __init__( :paramtype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :keyword trace_level: Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). - :paramtype trace_level: any + :paramtype trace_level: JSON :keyword continue_on_error: Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). - :paramtype continue_on_error: any + :paramtype continue_on_error: JSON :keyword run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). - :paramtype run_concurrently: any + :paramtype run_concurrently: JSON :keyword source_staging_concurrency: Specify number of parallel staging for sources applicable to the sink. Type: integer (or Expression with resultType integer). 
- :paramtype source_staging_concurrency: any - """ - super(ExecuteDataFlowActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'ExecuteDataFlow' # type: str + :paramtype source_staging_concurrency: JSON + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "ExecuteDataFlow" # type: str self.data_flow = data_flow self.staging = staging self.integration_runtime = integration_runtime @@ -22559,12 +24410,12 @@ def __init__( self.source_staging_concurrency = source_staging_concurrency -class ExecuteDataFlowActivityTypeProperties(msrest.serialization.Model): +class ExecuteDataFlowActivityTypeProperties(_serialization.Model): """Execute data flow activity properties. All required parameters must be populated in order to send to Azure. - :ivar data_flow: Required. Data flow reference. + :ivar data_flow: Data flow reference. Required. :vartype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference :ivar staging: Staging info for execute data flow activity. :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo @@ -22574,32 +24425,32 @@ class ExecuteDataFlowActivityTypeProperties(msrest.serialization.Model): :vartype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :ivar trace_level: Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). - :vartype trace_level: any + :vartype trace_level: JSON :ivar continue_on_error: Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). - :vartype continue_on_error: any + :vartype continue_on_error: JSON :ivar run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). - :vartype run_concurrently: any + :vartype run_concurrently: JSON :ivar source_staging_concurrency: Specify number of parallel staging for sources applicable to the sink. Type: integer (or Expression with resultType integer). 
- :vartype source_staging_concurrency: any + :vartype source_staging_concurrency: JSON """ _validation = { - 'data_flow': {'required': True}, + "data_flow": {"required": True}, } _attribute_map = { - 'data_flow': {'key': 'dataFlow', 'type': 'DataFlowReference'}, - 'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'}, - 'integration_runtime': {'key': 'integrationRuntime', 'type': 'IntegrationRuntimeReference'}, - 'compute': {'key': 'compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'}, - 'trace_level': {'key': 'traceLevel', 'type': 'object'}, - 'continue_on_error': {'key': 'continueOnError', 'type': 'object'}, - 'run_concurrently': {'key': 'runConcurrently', 'type': 'object'}, - 'source_staging_concurrency': {'key': 'sourceStagingConcurrency', 'type': 'object'}, + "data_flow": {"key": "dataFlow", "type": "DataFlowReference"}, + "staging": {"key": "staging", "type": "DataFlowStagingInfo"}, + "integration_runtime": {"key": "integrationRuntime", "type": "IntegrationRuntimeReference"}, + "compute": {"key": "compute", "type": "ExecuteDataFlowActivityTypePropertiesCompute"}, + "trace_level": {"key": "traceLevel", "type": "object"}, + "continue_on_error": {"key": "continueOnError", "type": "object"}, + "run_concurrently": {"key": "runConcurrently", "type": "object"}, + "source_staging_concurrency": {"key": "sourceStagingConcurrency", "type": "object"}, } def __init__( @@ -22609,14 +24460,14 @@ def __init__( staging: Optional["_models.DataFlowStagingInfo"] = None, integration_runtime: Optional["_models.IntegrationRuntimeReference"] = None, compute: Optional["_models.ExecuteDataFlowActivityTypePropertiesCompute"] = None, - trace_level: Optional[Any] = None, - continue_on_error: Optional[Any] = None, - run_concurrently: Optional[Any] = None, - source_staging_concurrency: Optional[Any] = None, + trace_level: Optional[JSON] = None, + continue_on_error: Optional[JSON] = None, + run_concurrently: Optional[JSON] = None, + source_staging_concurrency: Optional[JSON] = None, **kwargs ): """ - :keyword data_flow: Required. Data flow reference. + :keyword data_flow: Data flow reference. Required. :paramtype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference :keyword staging: Staging info for execute data flow activity. :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo @@ -22626,19 +24477,19 @@ def __init__( :paramtype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :keyword trace_level: Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). - :paramtype trace_level: any + :paramtype trace_level: JSON :keyword continue_on_error: Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). - :paramtype continue_on_error: any + :paramtype continue_on_error: JSON :keyword run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). - :paramtype run_concurrently: any + :paramtype run_concurrently: JSON :keyword source_staging_concurrency: Specify number of parallel staging for sources applicable to the sink. Type: integer (or Expression with resultType integer). 
- :paramtype source_staging_concurrency: any + :paramtype source_staging_concurrency: JSON """ - super(ExecuteDataFlowActivityTypeProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.data_flow = data_flow self.staging = staging self.integration_runtime = integration_runtime @@ -22649,41 +24500,35 @@ def __init__( self.source_staging_concurrency = source_staging_concurrency -class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): +class ExecuteDataFlowActivityTypePropertiesCompute(_serialization.Model): """Compute properties for data flow activity. :ivar compute_type: Compute type of the cluster which will execute data flow job. Possible values include: 'General', 'MemoryOptimized', 'ComputeOptimized'. Type: string (or Expression with resultType string). - :vartype compute_type: any + :vartype compute_type: JSON :ivar core_count: Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. Type: integer (or Expression with resultType integer). - :vartype core_count: any + :vartype core_count: JSON """ _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'object'}, - 'core_count': {'key': 'coreCount', 'type': 'object'}, + "compute_type": {"key": "computeType", "type": "object"}, + "core_count": {"key": "coreCount", "type": "object"}, } - def __init__( - self, - *, - compute_type: Optional[Any] = None, - core_count: Optional[Any] = None, - **kwargs - ): + def __init__(self, *, compute_type: Optional[JSON] = None, core_count: Optional[JSON] = None, **kwargs): """ :keyword compute_type: Compute type of the cluster which will execute data flow job. Possible values include: 'General', 'MemoryOptimized', 'ComputeOptimized'. Type: string (or Expression with resultType string). - :paramtype compute_type: any + :paramtype compute_type: JSON :keyword core_count: Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. Type: integer (or Expression with resultType integer). - :paramtype core_count: any + :paramtype core_count: JSON """ - super(ExecuteDataFlowActivityTypePropertiesCompute, self).__init__(**kwargs) + super().__init__(**kwargs) self.compute_type = compute_type self.core_count = core_count @@ -22695,10 +24540,10 @@ class ExecutePipelineActivity(ControlActivity): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -22708,32 +24553,32 @@ class ExecutePipelineActivity(ControlActivity): :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :ivar policy: Execute pipeline activity policy. :vartype policy: ~azure.mgmt.datafactory.models.ExecutePipelineActivityPolicy - :ivar pipeline: Required. Pipeline reference. + :ivar pipeline: Pipeline reference. Required. :vartype pipeline: ~azure.mgmt.datafactory.models.PipelineReference :ivar parameters: Pipeline parameters. - :vartype parameters: dict[str, any] + :vartype parameters: dict[str, JSON] :ivar wait_on_completion: Defines whether activity execution will wait for the dependent pipeline execution to finish. 
Default is false. :vartype wait_on_completion: bool """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'pipeline': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "pipeline": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'policy': {'key': 'policy', 'type': 'ExecutePipelineActivityPolicy'}, - 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, - 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "policy": {"key": "policy", "type": "ExecutePipelineActivityPolicy"}, + "pipeline": {"key": "typeProperties.pipeline", "type": "PipelineReference"}, + "parameters": {"key": "typeProperties.parameters", "type": "{object}"}, + "wait_on_completion": {"key": "typeProperties.waitOnCompletion", "type": "bool"}, } def __init__( @@ -22741,20 +24586,20 @@ def __init__( *, name: str, pipeline: "_models.PipelineReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, policy: Optional["_models.ExecutePipelineActivityPolicy"] = None, - parameters: Optional[Dict[str, Any]] = None, + parameters: Optional[Dict[str, JSON]] = None, wait_on_completion: Optional[bool] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -22764,54 +24609,57 @@ def __init__( :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :keyword policy: Execute pipeline activity policy. :paramtype policy: ~azure.mgmt.datafactory.models.ExecutePipelineActivityPolicy - :keyword pipeline: Required. Pipeline reference. + :keyword pipeline: Pipeline reference. Required. :paramtype pipeline: ~azure.mgmt.datafactory.models.PipelineReference :keyword parameters: Pipeline parameters. - :paramtype parameters: dict[str, any] + :paramtype parameters: dict[str, JSON] :keyword wait_on_completion: Defines whether activity execution will wait for the dependent pipeline execution to finish. Default is false. 
:paramtype wait_on_completion: bool """ - super(ExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'ExecutePipeline' # type: str + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + **kwargs + ) + self.type = "ExecutePipeline" # type: str self.policy = policy self.pipeline = pipeline self.parameters = parameters self.wait_on_completion = wait_on_completion -class ExecutePipelineActivityPolicy(msrest.serialization.Model): +class ExecutePipelineActivityPolicy(_serialization.Model): """Execution policy for an execute pipeline activity. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar secure_input: When set to true, Input from activity is considered as secure and will not be logged to monitoring. :vartype secure_input: bool """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'secure_input': {'key': 'secureInput', 'type': 'bool'}, + "additional_properties": {"key": "", "type": "{object}"}, + "secure_input": {"key": "secureInput", "type": "bool"}, } def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - secure_input: Optional[bool] = None, - **kwargs + self, *, additional_properties: Optional[Dict[str, JSON]] = None, secure_input: Optional[bool] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword secure_input: When set to true, Input from activity is considered as secure and will not be logged to monitoring. :paramtype secure_input: bool """ - super(ExecutePipelineActivityPolicy, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.secure_input = secure_input @@ -22821,7 +24669,7 @@ class ExecutePowerQueryActivityTypeProperties(ExecuteDataFlowActivityTypePropert All required parameters must be populated in order to send to Azure. - :ivar data_flow: Required. Data flow reference. + :ivar data_flow: Data flow reference. Required. :vartype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference :ivar staging: Staging info for execute data flow activity. :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo @@ -22831,17 +24679,17 @@ class ExecutePowerQueryActivityTypeProperties(ExecuteDataFlowActivityTypePropert :vartype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :ivar trace_level: Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). - :vartype trace_level: any + :vartype trace_level: JSON :ivar continue_on_error: Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). - :vartype continue_on_error: any + :vartype continue_on_error: JSON :ivar run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. 
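[Editor's aside, not part of the patch] Since both `ExecutePipelineActivity` and `ExecutePipelineActivityPolicy` are regenerated above, a short sketch of how a caller might build them against the new `Dict[str, JSON]`-typed `parameters`; `pipeline_ref` and the parameter name/expression are placeholders for illustration.

    from azure.mgmt.datafactory import models as _models

    pipeline_ref: _models.PipelineReference = ...  # an existing PipelineReference (construction elided)

    run_child = _models.ExecutePipelineActivity(
        name="RunChildPipeline",
        pipeline=pipeline_ref,
        parameters={"windowStart": "@trigger().outputs.windowStartTime"},  # Dict[str, JSON] after this change
        wait_on_completion=True,
        policy=_models.ExecutePipelineActivityPolicy(secure_input=True),   # hide input from monitoring
    )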
Type: boolean (or Expression with resultType boolean). - :vartype run_concurrently: any + :vartype run_concurrently: JSON :ivar source_staging_concurrency: Specify number of parallel staging for sources applicable to the sink. Type: integer (or Expression with resultType integer). - :vartype source_staging_concurrency: any + :vartype source_staging_concurrency: JSON :ivar sinks: (Deprecated. Please use Queries). List of Power Query activity sinks mapped to a queryName. :vartype sinks: dict[str, ~azure.mgmt.datafactory.models.PowerQuerySink] @@ -22850,20 +24698,20 @@ class ExecutePowerQueryActivityTypeProperties(ExecuteDataFlowActivityTypePropert """ _validation = { - 'data_flow': {'required': True}, + "data_flow": {"required": True}, } _attribute_map = { - 'data_flow': {'key': 'dataFlow', 'type': 'DataFlowReference'}, - 'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'}, - 'integration_runtime': {'key': 'integrationRuntime', 'type': 'IntegrationRuntimeReference'}, - 'compute': {'key': 'compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'}, - 'trace_level': {'key': 'traceLevel', 'type': 'object'}, - 'continue_on_error': {'key': 'continueOnError', 'type': 'object'}, - 'run_concurrently': {'key': 'runConcurrently', 'type': 'object'}, - 'source_staging_concurrency': {'key': 'sourceStagingConcurrency', 'type': 'object'}, - 'sinks': {'key': 'sinks', 'type': '{PowerQuerySink}'}, - 'queries': {'key': 'queries', 'type': '[PowerQuerySinkMapping]'}, + "data_flow": {"key": "dataFlow", "type": "DataFlowReference"}, + "staging": {"key": "staging", "type": "DataFlowStagingInfo"}, + "integration_runtime": {"key": "integrationRuntime", "type": "IntegrationRuntimeReference"}, + "compute": {"key": "compute", "type": "ExecuteDataFlowActivityTypePropertiesCompute"}, + "trace_level": {"key": "traceLevel", "type": "object"}, + "continue_on_error": {"key": "continueOnError", "type": "object"}, + "run_concurrently": {"key": "runConcurrently", "type": "object"}, + "source_staging_concurrency": {"key": "sourceStagingConcurrency", "type": "object"}, + "sinks": {"key": "sinks", "type": "{PowerQuerySink}"}, + "queries": {"key": "queries", "type": "[PowerQuerySinkMapping]"}, } def __init__( @@ -22873,16 +24721,16 @@ def __init__( staging: Optional["_models.DataFlowStagingInfo"] = None, integration_runtime: Optional["_models.IntegrationRuntimeReference"] = None, compute: Optional["_models.ExecuteDataFlowActivityTypePropertiesCompute"] = None, - trace_level: Optional[Any] = None, - continue_on_error: Optional[Any] = None, - run_concurrently: Optional[Any] = None, - source_staging_concurrency: Optional[Any] = None, + trace_level: Optional[JSON] = None, + continue_on_error: Optional[JSON] = None, + run_concurrently: Optional[JSON] = None, + source_staging_concurrency: Optional[JSON] = None, sinks: Optional[Dict[str, "_models.PowerQuerySink"]] = None, queries: Optional[List["_models.PowerQuerySinkMapping"]] = None, **kwargs ): """ - :keyword data_flow: Required. Data flow reference. + :keyword data_flow: Data flow reference. Required. :paramtype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference :keyword staging: Staging info for execute data flow activity. :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo @@ -22892,39 +24740,49 @@ def __init__( :paramtype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :keyword trace_level: Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. 
Type: string (or Expression with resultType string). - :paramtype trace_level: any + :paramtype trace_level: JSON :keyword continue_on_error: Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). - :paramtype continue_on_error: any + :paramtype continue_on_error: JSON :keyword run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). - :paramtype run_concurrently: any + :paramtype run_concurrently: JSON :keyword source_staging_concurrency: Specify number of parallel staging for sources applicable to the sink. Type: integer (or Expression with resultType integer). - :paramtype source_staging_concurrency: any + :paramtype source_staging_concurrency: JSON :keyword sinks: (Deprecated. Please use Queries). List of Power Query activity sinks mapped to a queryName. :paramtype sinks: dict[str, ~azure.mgmt.datafactory.models.PowerQuerySink] :keyword queries: List of mapping for Power Query mashup query to sink dataset(s). :paramtype queries: list[~azure.mgmt.datafactory.models.PowerQuerySinkMapping] """ - super(ExecutePowerQueryActivityTypeProperties, self).__init__(data_flow=data_flow, staging=staging, integration_runtime=integration_runtime, compute=compute, trace_level=trace_level, continue_on_error=continue_on_error, run_concurrently=run_concurrently, source_staging_concurrency=source_staging_concurrency, **kwargs) + super().__init__( + data_flow=data_flow, + staging=staging, + integration_runtime=integration_runtime, + compute=compute, + trace_level=trace_level, + continue_on_error=continue_on_error, + run_concurrently=run_concurrently, + source_staging_concurrency=source_staging_concurrency, + **kwargs + ) self.sinks = sinks self.queries = queries -class ExecuteSSISPackageActivity(ExecutionActivity): +class ExecuteSSISPackageActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """Execute SSIS package activity. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -22936,20 +24794,20 @@ class ExecuteSSISPackageActivity(ExecutionActivity): :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar policy: Activity policy. :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :ivar package_location: Required. SSIS package location. + :ivar package_location: SSIS package location. Required. :vartype package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation :ivar runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or "x64". Type: string (or Expression with resultType string). - :vartype runtime: any + :vartype runtime: JSON :ivar logging_level: The logging level of SSIS package execution. Type: string (or Expression with resultType string). 
- :vartype logging_level: any + :vartype logging_level: JSON :ivar environment_path: The environment path to execute the SSIS package. Type: string (or Expression with resultType string). - :vartype environment_path: any + :vartype environment_path: JSON :ivar execution_credential: The package execution credential. :vartype execution_credential: ~azure.mgmt.datafactory.models.SSISExecutionCredential - :ivar connect_via: Required. The integration runtime reference. + :ivar connect_via: The integration runtime reference. Required. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar project_parameters: The project level parameters to execute the SSIS package. :vartype project_parameters: dict[str, ~azure.mgmt.datafactory.models.SSISExecutionParameter] @@ -22970,33 +24828,39 @@ class ExecuteSSISPackageActivity(ExecutionActivity): """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'package_location': {'required': True}, - 'connect_via': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "package_location": {"required": True}, + "connect_via": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'}, - 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'}, - 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'}, - 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'}, - 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'}, - 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, - 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, - 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, - 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, - 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, - 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, - 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "package_location": {"key": "typeProperties.packageLocation", "type": "SSISPackageLocation"}, + "runtime": {"key": "typeProperties.runtime", "type": "object"}, + "logging_level": {"key": 
"typeProperties.loggingLevel", "type": "object"}, + "environment_path": {"key": "typeProperties.environmentPath", "type": "object"}, + "execution_credential": {"key": "typeProperties.executionCredential", "type": "SSISExecutionCredential"}, + "connect_via": {"key": "typeProperties.connectVia", "type": "IntegrationRuntimeReference"}, + "project_parameters": {"key": "typeProperties.projectParameters", "type": "{SSISExecutionParameter}"}, + "package_parameters": {"key": "typeProperties.packageParameters", "type": "{SSISExecutionParameter}"}, + "project_connection_managers": { + "key": "typeProperties.projectConnectionManagers", + "type": "{{SSISExecutionParameter}}", + }, + "package_connection_managers": { + "key": "typeProperties.packageConnectionManagers", + "type": "{{SSISExecutionParameter}}", + }, + "property_overrides": {"key": "typeProperties.propertyOverrides", "type": "{SSISPropertyOverride}"}, + "log_location": {"key": "typeProperties.logLocation", "type": "SSISLogLocation"}, } def __init__( @@ -23005,15 +24869,15 @@ def __init__( name: str, package_location: "_models.SSISPackageLocation", connect_via: "_models.IntegrationRuntimeReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, - runtime: Optional[Any] = None, - logging_level: Optional[Any] = None, - environment_path: Optional[Any] = None, + runtime: Optional[JSON] = None, + logging_level: Optional[JSON] = None, + environment_path: Optional[JSON] = None, execution_credential: Optional["_models.SSISExecutionCredential"] = None, project_parameters: Optional[Dict[str, "_models.SSISExecutionParameter"]] = None, package_parameters: Optional[Dict[str, "_models.SSISExecutionParameter"]] = None, @@ -23026,8 +24890,8 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -23039,20 +24903,20 @@ def __init__( :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword policy: Activity policy. :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :keyword package_location: Required. SSIS package location. + :keyword package_location: SSIS package location. Required. :paramtype package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation :keyword runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or "x64". Type: string (or Expression with resultType string). - :paramtype runtime: any + :paramtype runtime: JSON :keyword logging_level: The logging level of SSIS package execution. Type: string (or Expression with resultType string). - :paramtype logging_level: any + :paramtype logging_level: JSON :keyword environment_path: The environment path to execute the SSIS package. Type: string (or Expression with resultType string). 
- :paramtype environment_path: any + :paramtype environment_path: JSON :keyword execution_credential: The package execution credential. :paramtype execution_credential: ~azure.mgmt.datafactory.models.SSISExecutionCredential - :keyword connect_via: Required. The integration runtime reference. + :keyword connect_via: The integration runtime reference. Required. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword project_parameters: The project level parameters to execute the SSIS package. :paramtype project_parameters: dict[str, ~azure.mgmt.datafactory.models.SSISExecutionParameter] @@ -23071,8 +24935,17 @@ def __init__( :keyword log_location: SSIS package execution log location. :paramtype log_location: ~azure.mgmt.datafactory.models.SSISLogLocation """ - super(ExecuteSSISPackageActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'ExecuteSSISPackage' # type: str + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "ExecuteSSISPackage" # type: str self.package_location = package_location self.runtime = runtime self.logging_level = logging_level @@ -23087,17 +24960,17 @@ def __init__( self.log_location = log_location -class ExecuteWranglingDataflowActivity(Activity): +class ExecuteWranglingDataflowActivity(Activity): # pylint: disable=too-many-instance-attributes """Execute power query activity. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -23107,7 +24980,7 @@ class ExecuteWranglingDataflowActivity(Activity): :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :ivar policy: Activity policy. :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :ivar data_flow: Required. Data flow reference. + :ivar data_flow: Data flow reference. Required. :vartype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference :ivar staging: Staging info for execute data flow activity. :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo @@ -23117,17 +24990,17 @@ class ExecuteWranglingDataflowActivity(Activity): :vartype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :ivar trace_level: Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). - :vartype trace_level: any + :vartype trace_level: JSON :ivar continue_on_error: Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). 
- :vartype continue_on_error: any + :vartype continue_on_error: JSON :ivar run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). - :vartype run_concurrently: any + :vartype run_concurrently: JSON :ivar source_staging_concurrency: Specify number of parallel staging for sources applicable to the sink. Type: integer (or Expression with resultType integer). - :vartype source_staging_concurrency: any + :vartype source_staging_concurrency: JSON :ivar sinks: (Deprecated. Please use Queries). List of Power Query activity sinks mapped to a queryName. :vartype sinks: dict[str, ~azure.mgmt.datafactory.models.PowerQuerySink] @@ -23136,29 +25009,29 @@ class ExecuteWranglingDataflowActivity(Activity): """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'data_flow': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "data_flow": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'data_flow': {'key': 'typeProperties.dataFlow', 'type': 'DataFlowReference'}, - 'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'}, - 'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'}, - 'compute': {'key': 'typeProperties.compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'}, - 'trace_level': {'key': 'typeProperties.traceLevel', 'type': 'object'}, - 'continue_on_error': {'key': 'typeProperties.continueOnError', 'type': 'object'}, - 'run_concurrently': {'key': 'typeProperties.runConcurrently', 'type': 'object'}, - 'source_staging_concurrency': {'key': 'typeProperties.sourceStagingConcurrency', 'type': 'object'}, - 'sinks': {'key': 'typeProperties.sinks', 'type': '{PowerQuerySink}'}, - 'queries': {'key': 'typeProperties.queries', 'type': '[PowerQuerySinkMapping]'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "data_flow": {"key": "typeProperties.dataFlow", "type": "DataFlowReference"}, + "staging": {"key": "typeProperties.staging", "type": "DataFlowStagingInfo"}, + "integration_runtime": {"key": "typeProperties.integrationRuntime", "type": "IntegrationRuntimeReference"}, + "compute": {"key": "typeProperties.compute", "type": "ExecuteDataFlowActivityTypePropertiesCompute"}, + "trace_level": {"key": "typeProperties.traceLevel", "type": "object"}, + "continue_on_error": {"key": "typeProperties.continueOnError", "type": "object"}, + "run_concurrently": {"key": "typeProperties.runConcurrently", "type": "object"}, + "source_staging_concurrency": {"key": "typeProperties.sourceStagingConcurrency", "type": "object"}, + "sinks": {"key": "typeProperties.sinks", "type": "{PowerQuerySink}"}, + "queries": {"key": 
"typeProperties.queries", "type": "[PowerQuerySinkMapping]"}, } def __init__( @@ -23166,7 +25039,7 @@ def __init__( *, name: str, data_flow: "_models.DataFlowReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, @@ -23174,10 +25047,10 @@ def __init__( staging: Optional["_models.DataFlowStagingInfo"] = None, integration_runtime: Optional["_models.IntegrationRuntimeReference"] = None, compute: Optional["_models.ExecuteDataFlowActivityTypePropertiesCompute"] = None, - trace_level: Optional[Any] = None, - continue_on_error: Optional[Any] = None, - run_concurrently: Optional[Any] = None, - source_staging_concurrency: Optional[Any] = None, + trace_level: Optional[JSON] = None, + continue_on_error: Optional[JSON] = None, + run_concurrently: Optional[JSON] = None, + source_staging_concurrency: Optional[JSON] = None, sinks: Optional[Dict[str, "_models.PowerQuerySink"]] = None, queries: Optional[List["_models.PowerQuerySinkMapping"]] = None, **kwargs @@ -23185,8 +25058,8 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -23196,7 +25069,7 @@ def __init__( :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :keyword policy: Activity policy. :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :keyword data_flow: Required. Data flow reference. + :keyword data_flow: Data flow reference. Required. :paramtype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference :keyword staging: Staging info for execute data flow activity. :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo @@ -23206,25 +25079,32 @@ def __init__( :paramtype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :keyword trace_level: Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). - :paramtype trace_level: any + :paramtype trace_level: JSON :keyword continue_on_error: Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). - :paramtype continue_on_error: any + :paramtype continue_on_error: JSON :keyword run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). - :paramtype run_concurrently: any + :paramtype run_concurrently: JSON :keyword source_staging_concurrency: Specify number of parallel staging for sources applicable to the sink. Type: integer (or Expression with resultType integer). - :paramtype source_staging_concurrency: any + :paramtype source_staging_concurrency: JSON :keyword sinks: (Deprecated. Please use Queries). List of Power Query activity sinks mapped to a queryName. 
:paramtype sinks: dict[str, ~azure.mgmt.datafactory.models.PowerQuerySink] :keyword queries: List of mapping for Power Query mashup query to sink dataset(s). :paramtype queries: list[~azure.mgmt.datafactory.models.PowerQuerySinkMapping] """ - super(ExecuteWranglingDataflowActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'ExecuteWranglingDataflow' # type: str + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + **kwargs + ) + self.type = "ExecuteWranglingDataflow" # type: str self.policy = policy self.data_flow = data_flow self.staging = staging @@ -23238,72 +25118,62 @@ def __init__( self.queries = queries -class ExposureControlBatchRequest(msrest.serialization.Model): +class ExposureControlBatchRequest(_serialization.Model): """A list of exposure control features. All required parameters must be populated in order to send to Azure. - :ivar exposure_control_requests: Required. List of exposure control features. + :ivar exposure_control_requests: List of exposure control features. Required. :vartype exposure_control_requests: list[~azure.mgmt.datafactory.models.ExposureControlRequest] """ _validation = { - 'exposure_control_requests': {'required': True}, + "exposure_control_requests": {"required": True}, } _attribute_map = { - 'exposure_control_requests': {'key': 'exposureControlRequests', 'type': '[ExposureControlRequest]'}, + "exposure_control_requests": {"key": "exposureControlRequests", "type": "[ExposureControlRequest]"}, } - def __init__( - self, - *, - exposure_control_requests: List["_models.ExposureControlRequest"], - **kwargs - ): + def __init__(self, *, exposure_control_requests: List["_models.ExposureControlRequest"], **kwargs): """ - :keyword exposure_control_requests: Required. List of exposure control features. + :keyword exposure_control_requests: List of exposure control features. Required. :paramtype exposure_control_requests: list[~azure.mgmt.datafactory.models.ExposureControlRequest] """ - super(ExposureControlBatchRequest, self).__init__(**kwargs) + super().__init__(**kwargs) self.exposure_control_requests = exposure_control_requests -class ExposureControlBatchResponse(msrest.serialization.Model): +class ExposureControlBatchResponse(_serialization.Model): """A list of exposure control feature values. All required parameters must be populated in order to send to Azure. - :ivar exposure_control_responses: Required. List of exposure control feature values. + :ivar exposure_control_responses: List of exposure control feature values. Required. :vartype exposure_control_responses: list[~azure.mgmt.datafactory.models.ExposureControlResponse] """ _validation = { - 'exposure_control_responses': {'required': True}, + "exposure_control_responses": {"required": True}, } _attribute_map = { - 'exposure_control_responses': {'key': 'exposureControlResponses', 'type': '[ExposureControlResponse]'}, + "exposure_control_responses": {"key": "exposureControlResponses", "type": "[ExposureControlResponse]"}, } - def __init__( - self, - *, - exposure_control_responses: List["_models.ExposureControlResponse"], - **kwargs - ): + def __init__(self, *, exposure_control_responses: List["_models.ExposureControlResponse"], **kwargs): """ - :keyword exposure_control_responses: Required. List of exposure control feature values. 
+ :keyword exposure_control_responses: List of exposure control feature values. Required. :paramtype exposure_control_responses: list[~azure.mgmt.datafactory.models.ExposureControlResponse] """ - super(ExposureControlBatchResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.exposure_control_responses = exposure_control_responses -class ExposureControlRequest(msrest.serialization.Model): +class ExposureControlRequest(_serialization.Model): """The exposure control request. :ivar feature_name: The feature name. @@ -23313,29 +25183,23 @@ class ExposureControlRequest(msrest.serialization.Model): """ _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'feature_type': {'key': 'featureType', 'type': 'str'}, + "feature_name": {"key": "featureName", "type": "str"}, + "feature_type": {"key": "featureType", "type": "str"}, } - def __init__( - self, - *, - feature_name: Optional[str] = None, - feature_type: Optional[str] = None, - **kwargs - ): + def __init__(self, *, feature_name: Optional[str] = None, feature_type: Optional[str] = None, **kwargs): """ :keyword feature_name: The feature name. :paramtype feature_name: str :keyword feature_type: The feature type. :paramtype feature_type: str """ - super(ExposureControlRequest, self).__init__(**kwargs) + super().__init__(**kwargs) self.feature_name = feature_name self.feature_type = feature_type -class ExposureControlResponse(msrest.serialization.Model): +class ExposureControlResponse(_serialization.Model): """The exposure control response. Variables are only populated by the server, and will be ignored when sending a request. @@ -23347,66 +25211,56 @@ class ExposureControlResponse(msrest.serialization.Model): """ _validation = { - 'feature_name': {'readonly': True}, - 'value': {'readonly': True}, + "feature_name": {"readonly": True}, + "value": {"readonly": True}, } _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + "feature_name": {"key": "featureName", "type": "str"}, + "value": {"key": "value", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(ExposureControlResponse, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.feature_name = None self.value = None -class Expression(msrest.serialization.Model): +class Expression(_serialization.Model): """Azure Data Factory expression definition. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar type: Expression type. Has constant value: "Expression". - :vartype type: str - :ivar value: Required. Expression value. + :ivar type: Expression type. Required. "Expression" + :vartype type: str or ~azure.mgmt.datafactory.models.ExpressionType + :ivar value: Expression value. Required. :vartype value: str """ _validation = { - 'type': {'required': True, 'constant': True}, - 'value': {'required': True}, + "type": {"required": True}, + "value": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + "type": {"key": "type", "type": "str"}, + "value": {"key": "value", "type": "str"}, } - type = "Expression" - - def __init__( - self, - *, - value: str, - **kwargs - ): + def __init__(self, *, type: Union[str, "_models.ExpressionType"], value: str, **kwargs): """ - :keyword value: Required. Expression value. + :keyword type: Expression type. 
Required. "Expression" + :paramtype type: str or ~azure.mgmt.datafactory.models.ExpressionType + :keyword value: Expression value. Required. :paramtype value: str """ - super(Expression, self).__init__(**kwargs) + super().__init__(**kwargs) + self.type = type self.value = value -class Resource(msrest.serialization.Model): +class Resource(_serialization.Model): """Azure Data Factory top-level resource. Variables are only populated by the server, and will be ignored when sending a request. @@ -23419,42 +25273,36 @@ class Resource(msrest.serialization.Model): :vartype type: str :ivar location: The resource location. :vartype location: str - :ivar tags: A set of tags. The resource tags. + :ivar tags: The resource tags. :vartype tags: dict[str, str] :ivar e_tag: Etag identifies change in the resource. :vartype e_tag: str """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "e_tag": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "location": {"key": "location", "type": "str"}, + "tags": {"key": "tags", "type": "{str}"}, + "e_tag": {"key": "eTag", "type": "str"}, } - def __init__( - self, - *, - location: Optional[str] = None, - tags: Optional[Dict[str, str]] = None, - **kwargs - ): + def __init__(self, *, location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, **kwargs): """ :keyword location: The resource location. :paramtype location: str - :keyword tags: A set of tags. The resource tags. + :keyword tags: The resource tags. :paramtype tags: dict[str, str] """ - super(Resource, self).__init__(**kwargs) + super().__init__(**kwargs) self.id = None self.name = None self.type = None @@ -23463,7 +25311,7 @@ def __init__( self.e_tag = None -class Factory(Resource): +class Factory(Resource): # pylint: disable=too-many-instance-attributes """Factory resource type. Variables are only populated by the server, and will be ignored when sending a request. @@ -23476,13 +25324,13 @@ class Factory(Resource): :vartype type: str :ivar location: The resource location. :vartype location: str - :ivar tags: A set of tags. The resource tags. + :ivar tags: The resource tags. :vartype tags: dict[str, str] :ivar e_tag: Etag identifies change in the resource. :vartype e_tag: str :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar identity: Managed service identity of the factory. :vartype identity: ~azure.mgmt.datafactory.models.FactoryIdentity :ivar provisioning_state: Factory provisioning state, example Succeeded. @@ -23501,37 +25349,37 @@ class Factory(Resource): :ivar encryption: Properties to enable Customer Managed Key for the factory. :vartype encryption: ~azure.mgmt.datafactory.models.EncryptionConfiguration :ivar public_network_access: Whether or not public network access is allowed for the data - factory. Known values are: "Enabled", "Disabled". + factory. Known values are: "Enabled" and "Disabled". 
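[Editor's aside, not part of the patch] The `Expression` change above is behavioral, not just cosmetic: `type` was previously a class constant filled in automatically and is now a required constructor argument (typed `Union[str, ExpressionType]`). A before/after sketch:

    from azure.mgmt.datafactory import models as _models

    # Before this regeneration (type was a constant, not a parameter):
    #     expr = _models.Expression(value="@dataset().path")
    # After this regeneration (type must be passed explicitly):
    expr = _models.Expression(type="Expression", value="@dataset().path")

Callers that relied on the old constant or constructed `Expression` with only `value` may need updating when they pick up this version.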
:vartype public_network_access: str or ~azure.mgmt.datafactory.models.PublicNetworkAccess """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'create_time': {'readonly': True}, - 'version': {'readonly': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "e_tag": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "create_time": {"readonly": True}, + "version": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, - 'additional_properties': {'key': '', 'type': '{object}'}, - 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, - 'version': {'key': 'properties.version', 'type': 'str'}, - 'purview_configuration': {'key': 'properties.purviewConfiguration', 'type': 'PurviewConfiguration'}, - 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, - 'global_parameters': {'key': 'properties.globalParameters', 'type': '{GlobalParameterSpecification}'}, - 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionConfiguration'}, - 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "location": {"key": "location", "type": "str"}, + "tags": {"key": "tags", "type": "{str}"}, + "e_tag": {"key": "eTag", "type": "str"}, + "additional_properties": {"key": "", "type": "{object}"}, + "identity": {"key": "identity", "type": "FactoryIdentity"}, + "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, + "create_time": {"key": "properties.createTime", "type": "iso-8601"}, + "version": {"key": "properties.version", "type": "str"}, + "purview_configuration": {"key": "properties.purviewConfiguration", "type": "PurviewConfiguration"}, + "repo_configuration": {"key": "properties.repoConfiguration", "type": "FactoryRepoConfiguration"}, + "global_parameters": {"key": "properties.globalParameters", "type": "{GlobalParameterSpecification}"}, + "encryption": {"key": "properties.encryption", "type": "EncryptionConfiguration"}, + "public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"}, } def __init__( @@ -23539,7 +25387,7 @@ def __init__( *, location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, identity: Optional["_models.FactoryIdentity"] = None, purview_configuration: Optional["_models.PurviewConfiguration"] = None, repo_configuration: Optional["_models.FactoryRepoConfiguration"] = None, @@ -23551,11 +25399,11 @@ def __init__( """ :keyword location: The resource location. :paramtype location: str - :keyword tags: A set of tags. The resource tags. + :keyword tags: The resource tags. :paramtype tags: dict[str, str] :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword identity: Managed service identity of the factory. :paramtype identity: ~azure.mgmt.datafactory.models.FactoryIdentity :keyword purview_configuration: Purview information of the factory. @@ -23568,10 +25416,10 @@ def __init__( :keyword encryption: Properties to enable Customer Managed Key for the factory. :paramtype encryption: ~azure.mgmt.datafactory.models.EncryptionConfiguration :keyword public_network_access: Whether or not public network access is allowed for the data - factory. Known values are: "Enabled", "Disabled". + factory. Known values are: "Enabled" and "Disabled". :paramtype public_network_access: str or ~azure.mgmt.datafactory.models.PublicNetworkAccess """ - super(Factory, self).__init__(location=location, tags=tags, **kwargs) + super().__init__(location=location, tags=tags, **kwargs) self.additional_properties = additional_properties self.identity = identity self.provisioning_state = None @@ -23584,47 +25432,50 @@ def __init__( self.public_network_access = public_network_access -class FactoryRepoConfiguration(msrest.serialization.Model): +class FactoryRepoConfiguration(_serialization.Model): """Factory's git repo information. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: FactoryGitHubConfiguration, FactoryVSTSConfiguration. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + FactoryGitHubConfiguration, FactoryVSTSConfiguration All required parameters must be populated in order to send to Azure. - :ivar type: Required. Type of repo configuration.Constant filled by server. + :ivar type: Type of repo configuration. Required. :vartype type: str - :ivar account_name: Required. Account name. + :ivar account_name: Account name. Required. :vartype account_name: str - :ivar repository_name: Required. Repository name. + :ivar repository_name: Repository name. Required. :vartype repository_name: str - :ivar collaboration_branch: Required. Collaboration branch. + :ivar collaboration_branch: Collaboration branch. Required. :vartype collaboration_branch: str - :ivar root_folder: Required. Root folder. + :ivar root_folder: Root folder. Required. :vartype root_folder: str :ivar last_commit_id: Last commit id. 
:vartype last_commit_id: str """ _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, + "type": {"required": True}, + "account_name": {"required": True}, + "repository_name": {"required": True}, + "collaboration_branch": {"required": True}, + "root_folder": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + "type": {"key": "type", "type": "str"}, + "account_name": {"key": "accountName", "type": "str"}, + "repository_name": {"key": "repositoryName", "type": "str"}, + "collaboration_branch": {"key": "collaborationBranch", "type": "str"}, + "root_folder": {"key": "rootFolder", "type": "str"}, + "last_commit_id": {"key": "lastCommitId", "type": "str"}, } _subtype_map = { - 'type': {'FactoryGitHubConfiguration': 'FactoryGitHubConfiguration', 'FactoryVSTSConfiguration': 'FactoryVSTSConfiguration'} + "type": { + "FactoryGitHubConfiguration": "FactoryGitHubConfiguration", + "FactoryVSTSConfiguration": "FactoryVSTSConfiguration", + } } def __init__( @@ -23638,18 +25489,18 @@ def __init__( **kwargs ): """ - :keyword account_name: Required. Account name. + :keyword account_name: Account name. Required. :paramtype account_name: str - :keyword repository_name: Required. Repository name. + :keyword repository_name: Repository name. Required. :paramtype repository_name: str - :keyword collaboration_branch: Required. Collaboration branch. + :keyword collaboration_branch: Collaboration branch. Required. :paramtype collaboration_branch: str - :keyword root_folder: Required. Root folder. + :keyword root_folder: Root folder. Required. :paramtype root_folder: str :keyword last_commit_id: Last commit id. :paramtype last_commit_id: str """ - super(FactoryRepoConfiguration, self).__init__(**kwargs) + super().__init__(**kwargs) self.type = None # type: Optional[str] self.account_name = account_name self.repository_name = repository_name @@ -23663,15 +25514,15 @@ class FactoryGitHubConfiguration(FactoryRepoConfiguration): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Type of repo configuration.Constant filled by server. + :ivar type: Type of repo configuration. Required. :vartype type: str - :ivar account_name: Required. Account name. + :ivar account_name: Account name. Required. :vartype account_name: str - :ivar repository_name: Required. Repository name. + :ivar repository_name: Repository name. Required. :vartype repository_name: str - :ivar collaboration_branch: Required. Collaboration branch. + :ivar collaboration_branch: Collaboration branch. Required. :vartype collaboration_branch: str - :ivar root_folder: Required. Root folder. + :ivar root_folder: Root folder. Required. :vartype root_folder: str :ivar last_commit_id: Last commit id. 
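[Editor's aside, not part of the patch] `FactoryRepoConfiguration` stays a polymorphic base class resolved through the `_subtype_map` above; a brief sketch of the intended usage, with the repository values invented for illustration:

    from azure.mgmt.datafactory import models as _models

    repo = _models.FactoryGitHubConfiguration(
        account_name="contoso",
        repository_name="adf-pipelines",
        collaboration_branch="main",
        root_folder="/",
    )
    # The subclass assigns repo.type = "FactoryGitHubConfiguration", which is the
    # discriminator the _subtype_map uses when deserializing a FactoryRepoConfiguration.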
:vartype last_commit_id: str @@ -23684,23 +25535,23 @@ class FactoryGitHubConfiguration(FactoryRepoConfiguration): """ _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, + "type": {"required": True}, + "account_name": {"required": True}, + "repository_name": {"required": True}, + "collaboration_branch": {"required": True}, + "root_folder": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'host_name': {'key': 'hostName', 'type': 'str'}, - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'client_secret': {'key': 'clientSecret', 'type': 'GitHubClientSecret'}, + "type": {"key": "type", "type": "str"}, + "account_name": {"key": "accountName", "type": "str"}, + "repository_name": {"key": "repositoryName", "type": "str"}, + "collaboration_branch": {"key": "collaborationBranch", "type": "str"}, + "root_folder": {"key": "rootFolder", "type": "str"}, + "last_commit_id": {"key": "lastCommitId", "type": "str"}, + "host_name": {"key": "hostName", "type": "str"}, + "client_id": {"key": "clientId", "type": "str"}, + "client_secret": {"key": "clientSecret", "type": "GitHubClientSecret"}, } def __init__( @@ -23717,13 +25568,13 @@ def __init__( **kwargs ): """ - :keyword account_name: Required. Account name. + :keyword account_name: Account name. Required. :paramtype account_name: str - :keyword repository_name: Required. Repository name. + :keyword repository_name: Repository name. Required. :paramtype repository_name: str - :keyword collaboration_branch: Required. Collaboration branch. + :keyword collaboration_branch: Collaboration branch. Required. :paramtype collaboration_branch: str - :keyword root_folder: Required. Root folder. + :keyword root_folder: Root folder. Required. :paramtype root_folder: str :keyword last_commit_id: Last commit id. :paramtype last_commit_id: str @@ -23734,104 +25585,105 @@ def __init__( :keyword client_secret: GitHub bring your own app client secret information. :paramtype client_secret: ~azure.mgmt.datafactory.models.GitHubClientSecret """ - super(FactoryGitHubConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs) - self.type = 'FactoryGitHubConfiguration' # type: str + super().__init__( + account_name=account_name, + repository_name=repository_name, + collaboration_branch=collaboration_branch, + root_folder=root_folder, + last_commit_id=last_commit_id, + **kwargs + ) + self.type = "FactoryGitHubConfiguration" # type: str self.host_name = host_name self.client_id = client_id self.client_secret = client_secret -class FactoryIdentity(msrest.serialization.Model): +class FactoryIdentity(_serialization.Model): """Identity properties of the factory resource. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar type: Required. The identity type. Known values are: "SystemAssigned", "UserAssigned", - "SystemAssigned,UserAssigned". 
+ :ivar type: The identity type. Required. Known values are: "SystemAssigned", "UserAssigned", + and "SystemAssigned,UserAssigned". :vartype type: str or ~azure.mgmt.datafactory.models.FactoryIdentityType :ivar principal_id: The principal id of the identity. :vartype principal_id: str :ivar tenant_id: The client tenant id of the identity. :vartype tenant_id: str :ivar user_assigned_identities: List of user assigned identities for the factory. - :vartype user_assigned_identities: dict[str, any] + :vartype user_assigned_identities: dict[str, JSON] """ _validation = { - 'type': {'required': True}, - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, + "type": {"required": True}, + "principal_id": {"readonly": True}, + "tenant_id": {"readonly": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{object}'}, + "type": {"key": "type", "type": "str"}, + "principal_id": {"key": "principalId", "type": "str"}, + "tenant_id": {"key": "tenantId", "type": "str"}, + "user_assigned_identities": {"key": "userAssignedIdentities", "type": "{object}"}, } def __init__( self, *, type: Union[str, "_models.FactoryIdentityType"], - user_assigned_identities: Optional[Dict[str, Any]] = None, + user_assigned_identities: Optional[Dict[str, JSON]] = None, **kwargs ): """ - :keyword type: Required. The identity type. Known values are: "SystemAssigned", "UserAssigned", - "SystemAssigned,UserAssigned". + :keyword type: The identity type. Required. Known values are: "SystemAssigned", "UserAssigned", + and "SystemAssigned,UserAssigned". :paramtype type: str or ~azure.mgmt.datafactory.models.FactoryIdentityType :keyword user_assigned_identities: List of user assigned identities for the factory. - :paramtype user_assigned_identities: dict[str, any] + :paramtype user_assigned_identities: dict[str, JSON] """ - super(FactoryIdentity, self).__init__(**kwargs) + super().__init__(**kwargs) self.type = type self.principal_id = None self.tenant_id = None self.user_assigned_identities = user_assigned_identities -class FactoryListResponse(msrest.serialization.Model): +class FactoryListResponse(_serialization.Model): """A list of factory resources. All required parameters must be populated in order to send to Azure. - :ivar value: Required. List of factories. + :ivar value: List of factories. Required. :vartype value: list[~azure.mgmt.datafactory.models.Factory] :ivar next_link: The link to the next page of results, if any remaining results exist. :vartype next_link: str """ _validation = { - 'value': {'required': True}, + "value": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[Factory]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[Factory]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - *, - value: List["_models.Factory"], - next_link: Optional[str] = None, - **kwargs - ): + def __init__(self, *, value: List["_models.Factory"], next_link: Optional[str] = None, **kwargs): """ - :keyword value: Required. List of factories. + :keyword value: List of factories. Required. :paramtype value: list[~azure.mgmt.datafactory.models.Factory] :keyword next_link: The link to the next page of results, if any remaining results exist. 
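[Editor's aside, not part of the patch] `FactoryIdentity` keeps `type` as its only required field, with `principal_id` and `tenant_id` read-only. A minimal sketch; the user-assigned identity resource ID and empty-dict value below are made-up examples:

    from azure.mgmt.datafactory import models as _models

    identity = _models.FactoryIdentity(type="SystemAssigned")
    # principal_id and tenant_id stay None until the service returns them.

    ua_identity = _models.FactoryIdentity(
        type="UserAssigned",
        user_assigned_identities={
            "/subscriptions/.../resourceGroups/rg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/my-id": {}
        },
    )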
:paramtype next_link: str """ - super(FactoryListResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.next_link = next_link -class FactoryRepoUpdate(msrest.serialization.Model): +class FactoryRepoUpdate(_serialization.Model): """Factory's git repo information. :ivar factory_resource_id: The factory resource id. @@ -23841,8 +25693,8 @@ class FactoryRepoUpdate(msrest.serialization.Model): """ _attribute_map = { - 'factory_resource_id': {'key': 'factoryResourceId', 'type': 'str'}, - 'repo_configuration': {'key': 'repoConfiguration', 'type': 'FactoryRepoConfiguration'}, + "factory_resource_id": {"key": "factoryResourceId", "type": "str"}, + "repo_configuration": {"key": "repoConfiguration", "type": "FactoryRepoConfiguration"}, } def __init__( @@ -23858,27 +25710,27 @@ def __init__( :keyword repo_configuration: Git repo information of the factory. :paramtype repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration """ - super(FactoryRepoUpdate, self).__init__(**kwargs) + super().__init__(**kwargs) self.factory_resource_id = factory_resource_id self.repo_configuration = repo_configuration -class FactoryUpdateParameters(msrest.serialization.Model): +class FactoryUpdateParameters(_serialization.Model): """Parameters for updating a factory resource. - :ivar tags: A set of tags. The resource tags. + :ivar tags: The resource tags. :vartype tags: dict[str, str] :ivar identity: Managed service identity of the factory. :vartype identity: ~azure.mgmt.datafactory.models.FactoryIdentity :ivar public_network_access: Whether or not public network access is allowed for the data - factory. Known values are: "Enabled", "Disabled". + factory. Known values are: "Enabled" and "Disabled". :vartype public_network_access: str or ~azure.mgmt.datafactory.models.PublicNetworkAccess """ _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, - 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, + "tags": {"key": "tags", "type": "{str}"}, + "identity": {"key": "identity", "type": "FactoryIdentity"}, + "public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"}, } def __init__( @@ -23890,15 +25742,15 @@ def __init__( **kwargs ): """ - :keyword tags: A set of tags. The resource tags. + :keyword tags: The resource tags. :paramtype tags: dict[str, str] :keyword identity: Managed service identity of the factory. :paramtype identity: ~azure.mgmt.datafactory.models.FactoryIdentity :keyword public_network_access: Whether or not public network access is allowed for the data - factory. Known values are: "Enabled", "Disabled". + factory. Known values are: "Enabled" and "Disabled". :paramtype public_network_access: str or ~azure.mgmt.datafactory.models.PublicNetworkAccess """ - super(FactoryUpdateParameters, self).__init__(**kwargs) + super().__init__(**kwargs) self.tags = tags self.identity = identity self.public_network_access = public_network_access @@ -23909,42 +25761,42 @@ class FactoryVSTSConfiguration(FactoryRepoConfiguration): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Type of repo configuration.Constant filled by server. + :ivar type: Type of repo configuration. Required. :vartype type: str - :ivar account_name: Required. Account name. + :ivar account_name: Account name. Required. :vartype account_name: str - :ivar repository_name: Required. Repository name. + :ivar repository_name: Repository name. 
Required. :vartype repository_name: str - :ivar collaboration_branch: Required. Collaboration branch. + :ivar collaboration_branch: Collaboration branch. Required. :vartype collaboration_branch: str - :ivar root_folder: Required. Root folder. + :ivar root_folder: Root folder. Required. :vartype root_folder: str :ivar last_commit_id: Last commit id. :vartype last_commit_id: str - :ivar project_name: Required. VSTS project name. + :ivar project_name: VSTS project name. Required. :vartype project_name: str :ivar tenant_id: VSTS tenant id. :vartype tenant_id: str """ _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, - 'project_name': {'required': True}, + "type": {"required": True}, + "account_name": {"required": True}, + "repository_name": {"required": True}, + "collaboration_branch": {"required": True}, + "root_folder": {"required": True}, + "project_name": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'project_name': {'key': 'projectName', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + "type": {"key": "type", "type": "str"}, + "account_name": {"key": "accountName", "type": "str"}, + "repository_name": {"key": "repositoryName", "type": "str"}, + "collaboration_branch": {"key": "collaborationBranch", "type": "str"}, + "root_folder": {"key": "rootFolder", "type": "str"}, + "last_commit_id": {"key": "lastCommitId", "type": "str"}, + "project_name": {"key": "projectName", "type": "str"}, + "tenant_id": {"key": "tenantId", "type": "str"}, } def __init__( @@ -23960,23 +25812,30 @@ def __init__( **kwargs ): """ - :keyword account_name: Required. Account name. + :keyword account_name: Account name. Required. :paramtype account_name: str - :keyword repository_name: Required. Repository name. + :keyword repository_name: Repository name. Required. :paramtype repository_name: str - :keyword collaboration_branch: Required. Collaboration branch. + :keyword collaboration_branch: Collaboration branch. Required. :paramtype collaboration_branch: str - :keyword root_folder: Required. Root folder. + :keyword root_folder: Root folder. Required. :paramtype root_folder: str :keyword last_commit_id: Last commit id. :paramtype last_commit_id: str - :keyword project_name: Required. VSTS project name. + :keyword project_name: VSTS project name. Required. :paramtype project_name: str :keyword tenant_id: VSTS tenant id. 
:paramtype tenant_id: str """ - super(FactoryVSTSConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs) - self.type = 'FactoryVSTSConfiguration' # type: str + super().__init__( + account_name=account_name, + repository_name=repository_name, + collaboration_branch=collaboration_branch, + root_folder=root_folder, + last_commit_id=last_commit_id, + **kwargs + ) + self.type = "FactoryVSTSConfiguration" # type: str self.project_name = project_name self.tenant_id = tenant_id @@ -23988,10 +25847,10 @@ class FailActivity(ControlActivity): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -23999,41 +25858,41 @@ class FailActivity(ControlActivity): :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :ivar message: Required. The error message that surfaced in the Fail activity. It can be - dynamic content that's evaluated to a non empty/blank string at runtime. Type: string (or - Expression with resultType string). - :vartype message: any - :ivar error_code: Required. The error code that categorizes the error type of the Fail - activity. It can be dynamic content that's evaluated to a non empty/blank string at runtime. - Type: string (or Expression with resultType string). - :vartype error_code: any + :ivar message: The error message that surfaced in the Fail activity. It can be dynamic content + that's evaluated to a non empty/blank string at runtime. Type: string (or Expression with + resultType string). Required. + :vartype message: JSON + :ivar error_code: The error code that categorizes the error type of the Fail activity. It can + be dynamic content that's evaluated to a non empty/blank string at runtime. Type: string (or + Expression with resultType string). Required. 
+ :vartype error_code: JSON """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'message': {'required': True}, - 'error_code': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "message": {"required": True}, + "error_code": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'message': {'key': 'typeProperties.message', 'type': 'object'}, - 'error_code': {'key': 'typeProperties.errorCode', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "message": {"key": "typeProperties.message", "type": "object"}, + "error_code": {"key": "typeProperties.errorCode", "type": "object"}, } def __init__( self, *, name: str, - message: Any, - error_code: Any, - additional_properties: Optional[Dict[str, Any]] = None, + message: JSON, + error_code: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, @@ -24042,8 +25901,8 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -24051,17 +25910,24 @@ def __init__( :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :keyword message: Required. The error message that surfaced in the Fail activity. It can be - dynamic content that's evaluated to a non empty/blank string at runtime. Type: string (or - Expression with resultType string). - :paramtype message: any - :keyword error_code: Required. The error code that categorizes the error type of the Fail - activity. It can be dynamic content that's evaluated to a non empty/blank string at runtime. - Type: string (or Expression with resultType string). - :paramtype error_code: any - """ - super(FailActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'Fail' # type: str + :keyword message: The error message that surfaced in the Fail activity. It can be dynamic + content that's evaluated to a non empty/blank string at runtime. Type: string (or Expression + with resultType string). Required. + :paramtype message: JSON + :keyword error_code: The error code that categorizes the error type of the Fail activity. It + can be dynamic content that's evaluated to a non empty/blank string at runtime. 
Type: string + (or Expression with resultType string). Required. + :paramtype error_code: JSON + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + **kwargs + ) + self.type = "Fail" # type: str self.message = message self.error_code = error_code @@ -24073,8 +25939,8 @@ class FileServerLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -24083,57 +25949,57 @@ class FileServerLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar host: Required. Host name of the server. Type: string (or Expression with resultType - string). - :vartype host: any + :vartype annotations: list[JSON] + :ivar host: Host name of the server. Type: string (or Expression with resultType string). + Required. + :vartype host: JSON :ivar user_id: User ID to logon the server. Type: string (or Expression with resultType string). - :vartype user_id: any + :vartype user_id: JSON :ivar password: Password to logon the server. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'host': {'required': True}, + "type": {"required": True}, + "host": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "host": {"key": "typeProperties.host", "type": "object"}, + "user_id": {"key": "typeProperties.userId", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - host: Any, - additional_properties: Optional[Dict[str, Any]] = None, + host: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - user_id: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + user_id: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -24141,22 +26007,29 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword host: Required. Host name of the server. Type: string (or Expression with resultType - string). - :paramtype host: any + :paramtype annotations: list[JSON] + :keyword host: Host name of the server. Type: string (or Expression with resultType string). + Required. + :paramtype host: JSON :keyword user_id: User ID to logon the server. Type: string (or Expression with resultType string). - :paramtype user_id: any + :paramtype user_id: JSON :keyword password: Password to logon the server. 
:paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(FileServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'FileServer' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "FileServer" # type: str self.host = host self.user_id = user_id self.password = password @@ -24170,181 +26043,188 @@ class FileServerLocation(DatasetLocation): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage location. Required. :vartype type: str :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :vartype folder_path: any + :vartype folder_path: JSON :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :vartype file_name: any + :vartype file_name: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "folder_path": {"key": "folderPath", "type": "object"}, + "file_name": {"key": "fileName", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - folder_path: Optional[Any] = None, - file_name: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + folder_path: Optional[JSON] = None, + file_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :paramtype folder_path: any + :paramtype folder_path: JSON :keyword file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). 
- :paramtype file_name: any + :paramtype file_name: JSON """ - super(FileServerLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type = 'FileServerLocation' # type: str + super().__init__( + additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs + ) + self.type = "FileServerLocation" # type: str -class FileServerReadSettings(StoreReadSettings): +class FileServerReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes """File server read settings. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The read setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. :vartype type: str :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :vartype recursive: any + :vartype recursive: JSON :ivar wildcard_folder_path: FileServer wildcardFolderPath. Type: string (or Expression with resultType string). - :vartype wildcard_folder_path: any + :vartype wildcard_folder_path: JSON :ivar wildcard_file_name: FileServer wildcardFileName. Type: string (or Expression with resultType string). - :vartype wildcard_file_name: any + :vartype wildcard_file_name: JSON :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :vartype file_list_path: any + :vartype file_list_path: JSON :ivar enable_partition_discovery: Indicates whether to enable partition discovery. :vartype enable_partition_discovery: bool :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :vartype partition_root_path: any + :vartype partition_root_path: JSON :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype delete_files_after_completion: any + :vartype delete_files_after_completion: JSON :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_start: any + :vartype modified_datetime_start: JSON :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_end: any + :vartype modified_datetime_end: JSON :ivar file_filter: Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). 
- :vartype file_filter: any + :vartype file_filter: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - 'file_filter': {'key': 'fileFilter', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "recursive": {"key": "recursive", "type": "object"}, + "wildcard_folder_path": {"key": "wildcardFolderPath", "type": "object"}, + "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, + "file_list_path": {"key": "fileListPath", "type": "object"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "partition_root_path": {"key": "partitionRootPath", "type": "object"}, + "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, + "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, + "modified_datetime_end": {"key": "modifiedDatetimeEnd", "type": "object"}, + "file_filter": {"key": "fileFilter", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - recursive: Optional[Any] = None, - wildcard_folder_path: Optional[Any] = None, - wildcard_file_name: Optional[Any] = None, - file_list_path: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + recursive: Optional[JSON] = None, + wildcard_folder_path: Optional[JSON] = None, + wildcard_file_name: Optional[JSON] = None, + file_list_path: Optional[JSON] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[Any] = None, - delete_files_after_completion: Optional[Any] = None, - modified_datetime_start: Optional[Any] = None, - modified_datetime_end: Optional[Any] = None, - file_filter: Optional[Any] = None, + partition_root_path: Optional[JSON] = None, + delete_files_after_completion: Optional[JSON] = None, + modified_datetime_start: Optional[JSON] = None, + modified_datetime_end: Optional[JSON] = None, + file_filter: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :paramtype recursive: any + :paramtype recursive: JSON :keyword wildcard_folder_path: FileServer wildcardFolderPath. Type: string (or Expression with resultType string). - :paramtype wildcard_folder_path: any + :paramtype wildcard_folder_path: JSON :keyword wildcard_file_name: FileServer wildcardFileName. Type: string (or Expression with resultType string). - :paramtype wildcard_file_name: any + :paramtype wildcard_file_name: JSON :keyword file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :paramtype file_list_path: any + :paramtype file_list_path: JSON :keyword enable_partition_discovery: Indicates whether to enable partition discovery. :paramtype enable_partition_discovery: bool :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :paramtype partition_root_path: any + :paramtype partition_root_path: JSON :keyword delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype delete_files_after_completion: any + :paramtype delete_files_after_completion: JSON :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_start: any + :paramtype modified_datetime_start: JSON :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_end: any + :paramtype modified_datetime_end: JSON :keyword file_filter: Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). 
- :paramtype file_filter: any - """ - super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'FileServerReadSettings' # type: str + :paramtype file_filter: JSON + """ + super().__init__( + additional_properties=additional_properties, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "FileServerReadSettings" # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -24364,192 +26244,208 @@ class FileServerWriteSettings(StoreWriteSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The write setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The write setting type. Required. :vartype type: str :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar copy_behavior: The type of copy behavior for copy sink. - :vartype copy_behavior: any + :vartype copy_behavior: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "copy_behavior": {"key": "copyBehavior", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - copy_behavior: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + copy_behavior: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. 
Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword copy_behavior: The type of copy behavior for copy sink. - :paramtype copy_behavior: any + :paramtype copy_behavior: JSON """ - super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) - self.type = 'FileServerWriteSettings' # type: str + super().__init__( + additional_properties=additional_properties, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + copy_behavior=copy_behavior, + **kwargs + ) + self.type = "FileServerWriteSettings" # type: str -class FileShareDataset(Dataset): +class FileShareDataset(Dataset): # pylint: disable=too-many-instance-attributes """An on-premises file system dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar folder_path: The path of the on-premises file system. Type: string (or Expression with resultType string). - :vartype folder_path: any + :vartype folder_path: JSON :ivar file_name: The name of the on-premises file system. Type: string (or Expression with resultType string). - :vartype file_name: any + :vartype file_name: JSON :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_start: any + :vartype modified_datetime_start: JSON :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_end: any + :vartype modified_datetime_end: JSON :ivar format: The format of the files. :vartype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :ivar file_filter: Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). 
- :vartype file_filter: any + :vartype file_filter: JSON :ivar compression: The data compression method used for the file system. :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "folder_path": {"key": "typeProperties.folderPath", "type": "object"}, + "file_name": {"key": "typeProperties.fileName", "type": "object"}, + "modified_datetime_start": {"key": "typeProperties.modifiedDatetimeStart", "type": "object"}, + "modified_datetime_end": {"key": "typeProperties.modifiedDatetimeEnd", "type": "object"}, + "format": {"key": "typeProperties.format", "type": "DatasetStorageFormat"}, + "file_filter": {"key": "typeProperties.fileFilter", "type": "object"}, + "compression": {"key": "typeProperties.compression", "type": "DatasetCompression"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - folder_path: Optional[Any] = None, - file_name: Optional[Any] = None, - modified_datetime_start: Optional[Any] = None, - modified_datetime_end: Optional[Any] = None, + folder_path: Optional[JSON] = None, + file_name: Optional[JSON] = None, + modified_datetime_start: Optional[JSON] = None, + modified_datetime_end: Optional[JSON] = None, format: 
Optional["_models.DatasetStorageFormat"] = None, - file_filter: Optional[Any] = None, + file_filter: Optional[JSON] = None, compression: Optional["_models.DatasetCompression"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword folder_path: The path of the on-premises file system. Type: string (or Expression with resultType string). - :paramtype folder_path: any + :paramtype folder_path: JSON :keyword file_name: The name of the on-premises file system. Type: string (or Expression with resultType string). - :paramtype file_name: any + :paramtype file_name: JSON :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_start: any + :paramtype modified_datetime_start: JSON :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_end: any + :paramtype modified_datetime_end: JSON :keyword format: The format of the files. :paramtype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :keyword file_filter: Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). - :paramtype file_filter: any + :paramtype file_filter: JSON :keyword compression: The data compression method used for the file system. 
:paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ - super(FileShareDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'FileShare' # type: str + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "FileShare" # type: str self.folder_path = folder_path self.file_name = file_name self.modified_datetime_start = modified_datetime_start @@ -24566,87 +26462,96 @@ class FileSystemSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar copy_behavior: The type of copy behavior for copy sink. 
- :vartype copy_behavior: any + :vartype copy_behavior: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "copy_behavior": {"key": "copyBehavior", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - copy_behavior: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + copy_behavior: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. 
Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword copy_behavior: The type of copy behavior for copy sink. - :paramtype copy_behavior: any - """ - super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'FileSystemSink' # type: str + :paramtype copy_behavior: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "FileSystemSink" # type: str self.copy_behavior = copy_behavior @@ -24657,81 +26562,88 @@ class FileSystemSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :vartype recursive: any + :vartype recursive: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "recursive": {"key": "recursive", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - recursive: Optional[Any] = None, - additional_columns: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + recursive: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :paramtype recursive: any + :paramtype recursive: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :paramtype additional_columns: any - """ - super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'FileSystemSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "FileSystemSource" # type: str self.recursive = recursive self.additional_columns = additional_columns @@ -24743,10 +26655,10 @@ class FilterActivity(ControlActivity): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -24754,28 +26666,28 @@ class FilterActivity(ControlActivity): :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :ivar items: Required. Input array on which filter should be applied. + :ivar items: Input array on which filter should be applied. Required. :vartype items: ~azure.mgmt.datafactory.models.Expression - :ivar condition: Required. Condition to be used for filtering the input. + :ivar condition: Condition to be used for filtering the input. Required. 
:vartype condition: ~azure.mgmt.datafactory.models.Expression """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'items': {'required': True}, - 'condition': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "items": {"required": True}, + "condition": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, - 'condition': {'key': 'typeProperties.condition', 'type': 'Expression'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "items": {"key": "typeProperties.items", "type": "Expression"}, + "condition": {"key": "typeProperties.condition", "type": "Expression"}, } def __init__( @@ -24784,7 +26696,7 @@ def __init__( name: str, items: "_models.Expression", condition: "_models.Expression", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, @@ -24793,8 +26705,8 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -24802,13 +26714,20 @@ def __init__( :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :keyword items: Required. Input array on which filter should be applied. + :keyword items: Input array on which filter should be applied. Required. :paramtype items: ~azure.mgmt.datafactory.models.Expression - :keyword condition: Required. Condition to be used for filtering the input. + :keyword condition: Condition to be used for filtering the input. Required. :paramtype condition: ~azure.mgmt.datafactory.models.Expression """ - super(FilterActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'Filter' # type: str + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + **kwargs + ) + self.type = "Filter" # type: str self.items = items self.condition = condition @@ -24818,12 +26737,12 @@ class Flowlet(DataFlow): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Type of data flow.Constant filled by server. + :ivar type: Type of data flow. 
Required. :vartype type: str :ivar description: The description of the data flow. :vartype description: str :ivar annotations: List of tags that can be used for describing the data flow. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DataFlowFolder @@ -24840,26 +26759,26 @@ class Flowlet(DataFlow): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, - 'sources': {'key': 'typeProperties.sources', 'type': '[DataFlowSource]'}, - 'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'}, - 'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'}, - 'script': {'key': 'typeProperties.script', 'type': 'str'}, - 'script_lines': {'key': 'typeProperties.scriptLines', 'type': '[str]'}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DataFlowFolder"}, + "sources": {"key": "typeProperties.sources", "type": "[DataFlowSource]"}, + "sinks": {"key": "typeProperties.sinks", "type": "[DataFlowSink]"}, + "transformations": {"key": "typeProperties.transformations", "type": "[Transformation]"}, + "script": {"key": "typeProperties.script", "type": "str"}, + "script_lines": {"key": "typeProperties.scriptLines", "type": "[str]"}, } def __init__( self, *, description: Optional[str] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DataFlowFolder"] = None, sources: Optional[List["_models.DataFlowSource"]] = None, sinks: Optional[List["_models.DataFlowSink"]] = None, @@ -24872,7 +26791,7 @@ def __init__( :keyword description: The description of the data flow. :paramtype description: str :keyword annotations: List of tags that can be used for describing the data flow. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DataFlowFolder @@ -24887,8 +26806,8 @@ def __init__( :keyword script_lines: Flowlet script lines. :paramtype script_lines: list[str] """ - super(Flowlet, self).__init__(description=description, annotations=annotations, folder=folder, **kwargs) - self.type = 'Flowlet' # type: str + super().__init__(description=description, annotations=annotations, folder=folder, **kwargs) + self.type = "Flowlet" # type: str self.sources = sources self.sinks = sinks self.transformations = transformations @@ -24903,10 +26822,10 @@ class ForEachActivity(ControlActivity): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. 
:vartype description: str @@ -24919,31 +26838,31 @@ class ForEachActivity(ControlActivity): :ivar batch_count: Batch count to be used for controlling the number of parallel execution (when isSequential is set to false). :vartype batch_count: int - :ivar items: Required. Collection to iterate. + :ivar items: Collection to iterate. Required. :vartype items: ~azure.mgmt.datafactory.models.Expression - :ivar activities: Required. List of activities to execute . + :ivar activities: List of activities to execute . Required. :vartype activities: list[~azure.mgmt.datafactory.models.Activity] """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'batch_count': {'maximum': 50}, - 'items': {'required': True}, - 'activities': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "batch_count": {"maximum": 50}, + "items": {"required": True}, + "activities": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'}, - 'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'}, - 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, - 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "is_sequential": {"key": "typeProperties.isSequential", "type": "bool"}, + "batch_count": {"key": "typeProperties.batchCount", "type": "int"}, + "items": {"key": "typeProperties.items", "type": "Expression"}, + "activities": {"key": "typeProperties.activities", "type": "[Activity]"}, } def __init__( @@ -24952,7 +26871,7 @@ def __init__( name: str, items: "_models.Expression", activities: List["_models.Activity"], - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, @@ -24963,8 +26882,8 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -24977,139 +26896,151 @@ def __init__( :keyword batch_count: Batch count to be used for controlling the number of parallel execution (when isSequential is set to false). :paramtype batch_count: int - :keyword items: Required. Collection to iterate. + :keyword items: Collection to iterate. Required. :paramtype items: ~azure.mgmt.datafactory.models.Expression - :keyword activities: Required. List of activities to execute . + :keyword activities: List of activities to execute . Required. 
:paramtype activities: list[~azure.mgmt.datafactory.models.Activity] """ - super(ForEachActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'ForEach' # type: str + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + **kwargs + ) + self.type = "ForEach" # type: str self.is_sequential = is_sequential self.batch_count = batch_count self.items = items self.activities = activities -class FtpReadSettings(StoreReadSettings): +class FtpReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes """Ftp read settings. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The read setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. :vartype type: str :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :vartype recursive: any + :vartype recursive: JSON :ivar wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or Expression with resultType string). - :vartype wildcard_folder_path: any + :vartype wildcard_folder_path: JSON :ivar wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType string). - :vartype wildcard_file_name: any + :vartype wildcard_file_name: JSON :ivar enable_partition_discovery: Indicates whether to enable partition discovery. :vartype enable_partition_discovery: bool :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :vartype partition_root_path: any + :vartype partition_root_path: JSON :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype delete_files_after_completion: any + :vartype delete_files_after_completion: JSON :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :vartype file_list_path: any + :vartype file_list_path: JSON :ivar use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. :vartype use_binary_transfer: bool :ivar disable_chunking: If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with resultType boolean). 
- :vartype disable_chunking: any + :vartype disable_chunking: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, - 'disable_chunking': {'key': 'disableChunking', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "recursive": {"key": "recursive", "type": "object"}, + "wildcard_folder_path": {"key": "wildcardFolderPath", "type": "object"}, + "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "partition_root_path": {"key": "partitionRootPath", "type": "object"}, + "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, + "file_list_path": {"key": "fileListPath", "type": "object"}, + "use_binary_transfer": {"key": "useBinaryTransfer", "type": "bool"}, + "disable_chunking": {"key": "disableChunking", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - recursive: Optional[Any] = None, - wildcard_folder_path: Optional[Any] = None, - wildcard_file_name: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + recursive: Optional[JSON] = None, + wildcard_folder_path: Optional[JSON] = None, + wildcard_file_name: Optional[JSON] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[Any] = None, - delete_files_after_completion: Optional[Any] = None, - file_list_path: Optional[Any] = None, + partition_root_path: Optional[JSON] = None, + delete_files_after_completion: Optional[JSON] = None, + file_list_path: Optional[JSON] = None, use_binary_transfer: Optional[bool] = None, - disable_chunking: Optional[Any] = None, + disable_chunking: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :paramtype recursive: any + :paramtype recursive: JSON :keyword wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or Expression with resultType string). - :paramtype wildcard_folder_path: any + :paramtype wildcard_folder_path: JSON :keyword wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType string). - :paramtype wildcard_file_name: any + :paramtype wildcard_file_name: JSON :keyword enable_partition_discovery: Indicates whether to enable partition discovery. :paramtype enable_partition_discovery: bool :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :paramtype partition_root_path: any + :paramtype partition_root_path: JSON :keyword delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype delete_files_after_completion: any + :paramtype delete_files_after_completion: JSON :keyword file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :paramtype file_list_path: any + :paramtype file_list_path: JSON :keyword use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. :paramtype use_binary_transfer: bool :keyword disable_chunking: If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_chunking: any - """ - super(FtpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'FtpReadSettings' # type: str + :paramtype disable_chunking: JSON + """ + super().__init__( + additional_properties=additional_properties, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "FtpReadSettings" # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -25121,15 +27052,15 @@ def __init__( self.disable_chunking = disable_chunking -class FtpServerLinkedService(LinkedService): +class FtpServerLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """A FTP server Linked Service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. 
:vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -25138,78 +27069,81 @@ class FtpServerLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar host: Required. Host name of the FTP server. Type: string (or Expression with resultType - string). - :vartype host: any + :vartype annotations: list[JSON] + :ivar host: Host name of the FTP server. Type: string (or Expression with resultType string). + Required. + :vartype host: JSON :ivar port: The TCP port number that the FTP server uses to listen for client connections. Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype port: any + :vartype port: JSON :ivar authentication_type: The authentication type to be used to connect to the FTP server. - Known values are: "Basic", "Anonymous". + Known values are: "Basic" and "Anonymous". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.FtpAuthenticationType :ivar user_name: Username to logon the FTP server. Type: string (or Expression with resultType string). - :vartype user_name: any + :vartype user_name: JSON :ivar password: Password to logon the FTP server. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON :ivar enable_ssl: If true, connect to the FTP server over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). - :vartype enable_ssl: any + :vartype enable_ssl: JSON :ivar enable_server_certificate_validation: If true, validate the FTP server SSL certificate when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). 
- :vartype enable_server_certificate_validation: any + :vartype enable_server_certificate_validation: JSON """ _validation = { - 'type': {'required': True}, - 'host': {'required': True}, + "type": {"required": True}, + "host": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "host": {"key": "typeProperties.host", "type": "object"}, + "port": {"key": "typeProperties.port", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "enable_ssl": {"key": "typeProperties.enableSsl", "type": "object"}, + "enable_server_certificate_validation": { + "key": "typeProperties.enableServerCertificateValidation", + "type": "object", + }, } def __init__( self, *, - host: Any, - additional_properties: Optional[Dict[str, Any]] = None, + host: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - port: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + port: Optional[JSON] = None, authentication_type: Optional[Union[str, "_models.FtpAuthenticationType"]] = None, - user_name: Optional[Any] = None, + user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, - enable_ssl: Optional[Any] = None, - enable_server_certificate_validation: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, + enable_ssl: Optional[JSON] = None, + enable_server_certificate_validation: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. 
:paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -25217,35 +27151,42 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword host: Required. Host name of the FTP server. Type: string (or Expression with - resultType string). - :paramtype host: any + :paramtype annotations: list[JSON] + :keyword host: Host name of the FTP server. Type: string (or Expression with resultType + string). Required. + :paramtype host: JSON :keyword port: The TCP port number that the FTP server uses to listen for client connections. Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype port: any + :paramtype port: JSON :keyword authentication_type: The authentication type to be used to connect to the FTP server. - Known values are: "Basic", "Anonymous". + Known values are: "Basic" and "Anonymous". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.FtpAuthenticationType :keyword user_name: Username to logon the FTP server. Type: string (or Expression with resultType string). - :paramtype user_name: any + :paramtype user_name: JSON :keyword password: Password to logon the FTP server. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any + :paramtype encrypted_credential: JSON :keyword enable_ssl: If true, connect to the FTP server over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). - :paramtype enable_ssl: any + :paramtype enable_ssl: JSON :keyword enable_server_certificate_validation: If true, validate the FTP server SSL certificate when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). - :paramtype enable_server_certificate_validation: any - """ - super(FtpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'FtpServer' # type: str + :paramtype enable_server_certificate_validation: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "FtpServer" # type: str self.host = host self.port = port self.authentication_type = authentication_type @@ -25263,96 +27204,94 @@ class FtpServerLocation(DatasetLocation): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage location. Required. :vartype type: str :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :vartype folder_path: any + :vartype folder_path: JSON :ivar file_name: Specify the file name of dataset. 
Type: string (or Expression with resultType string). - :vartype file_name: any + :vartype file_name: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "folder_path": {"key": "folderPath", "type": "object"}, + "file_name": {"key": "fileName", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - folder_path: Optional[Any] = None, - file_name: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + folder_path: Optional[JSON] = None, + file_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :paramtype folder_path: any + :paramtype folder_path: JSON :keyword file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :paramtype file_name: any + :paramtype file_name: JSON """ - super(FtpServerLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type = 'FtpServerLocation' # type: str + super().__init__( + additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs + ) + self.type = "FtpServerLocation" # type: str -class GetDataFactoryOperationStatusResponse(msrest.serialization.Model): +class GetDataFactoryOperationStatusResponse(_serialization.Model): """Response body structure for get data factory operation status. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar status: Status of the operation. :vartype status: str """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'status': {'key': 'status', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "status": {"key": "status", "type": "str"}, } def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - status: Optional[str] = None, - **kwargs + self, *, additional_properties: Optional[Dict[str, JSON]] = None, status: Optional[str] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword status: Status of the operation. :paramtype status: str """ - super(GetDataFactoryOperationStatusResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.status = status -class GetMetadataActivity(ExecutionActivity): +class GetMetadataActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """Activity to get metadata of dataset. All required parameters must be populated in order to send to Azure. 
:ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -25364,10 +27303,10 @@ class GetMetadataActivity(ExecutionActivity): :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar policy: Activity policy. :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :ivar dataset: Required. GetMetadata activity dataset reference. + :ivar dataset: GetMetadata activity dataset reference. Required. :vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference :ivar field_list: Fields of metadata to get from dataset. - :vartype field_list: list[any] + :vartype field_list: list[JSON] :ivar store_settings: GetMetadata activity store settings. :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :ivar format_settings: GetMetadata activity format settings. @@ -25375,24 +27314,24 @@ class GetMetadataActivity(ExecutionActivity): """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'dataset': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "dataset": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, - 'store_settings': {'key': 'typeProperties.storeSettings', 'type': 'StoreReadSettings'}, - 'format_settings': {'key': 'typeProperties.formatSettings', 'type': 'FormatReadSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "dataset": {"key": "typeProperties.dataset", "type": "DatasetReference"}, + "field_list": {"key": "typeProperties.fieldList", "type": "[object]"}, + "store_settings": {"key": "typeProperties.storeSettings", "type": "StoreReadSettings"}, + "format_settings": {"key": "typeProperties.formatSettings", "type": "FormatReadSettings"}, } def __init__( @@ -25400,13 +27339,13 @@ def __init__( *, name: str, dataset: "_models.DatasetReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: 
Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, - field_list: Optional[List[Any]] = None, + field_list: Optional[List[JSON]] = None, store_settings: Optional["_models.StoreReadSettings"] = None, format_settings: Optional["_models.FormatReadSettings"] = None, **kwargs @@ -25414,8 +27353,8 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -25427,24 +27366,33 @@ def __init__( :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword policy: Activity policy. :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :keyword dataset: Required. GetMetadata activity dataset reference. + :keyword dataset: GetMetadata activity dataset reference. Required. :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference :keyword field_list: Fields of metadata to get from dataset. - :paramtype field_list: list[any] + :paramtype field_list: list[JSON] :keyword store_settings: GetMetadata activity store settings. :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :keyword format_settings: GetMetadata activity format settings. :paramtype format_settings: ~azure.mgmt.datafactory.models.FormatReadSettings """ - super(GetMetadataActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'GetMetadata' # type: str + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "GetMetadata" # type: str self.dataset = dataset self.field_list = field_list self.store_settings = store_settings self.format_settings = format_settings -class GetSsisObjectMetadataRequest(msrest.serialization.Model): +class GetSsisObjectMetadataRequest(_serialization.Model): """The request payload of get SSIS object metadata. :ivar metadata_path: Metadata path. @@ -25452,48 +27400,43 @@ class GetSsisObjectMetadataRequest(msrest.serialization.Model): """ _attribute_map = { - 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, + "metadata_path": {"key": "metadataPath", "type": "str"}, } - def __init__( - self, - *, - metadata_path: Optional[str] = None, - **kwargs - ): + def __init__(self, *, metadata_path: Optional[str] = None, **kwargs): """ :keyword metadata_path: Metadata path. :paramtype metadata_path: str """ - super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) + super().__init__(**kwargs) self.metadata_path = metadata_path -class GitHubAccessTokenRequest(msrest.serialization.Model): +class GitHubAccessTokenRequest(_serialization.Model): """Get GitHub access token request definition. All required parameters must be populated in order to send to Azure. - :ivar git_hub_access_code: Required. GitHub access code. + :ivar git_hub_access_code: GitHub access code. Required. 
:vartype git_hub_access_code: str :ivar git_hub_client_id: GitHub application client ID. :vartype git_hub_client_id: str :ivar git_hub_client_secret: GitHub bring your own app client secret information. :vartype git_hub_client_secret: ~azure.mgmt.datafactory.models.GitHubClientSecret - :ivar git_hub_access_token_base_url: Required. GitHub access token base URL. + :ivar git_hub_access_token_base_url: GitHub access token base URL. Required. :vartype git_hub_access_token_base_url: str """ _validation = { - 'git_hub_access_code': {'required': True}, - 'git_hub_access_token_base_url': {'required': True}, + "git_hub_access_code": {"required": True}, + "git_hub_access_token_base_url": {"required": True}, } _attribute_map = { - 'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'}, - 'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'}, - 'git_hub_client_secret': {'key': 'gitHubClientSecret', 'type': 'GitHubClientSecret'}, - 'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'}, + "git_hub_access_code": {"key": "gitHubAccessCode", "type": "str"}, + "git_hub_client_id": {"key": "gitHubClientId", "type": "str"}, + "git_hub_client_secret": {"key": "gitHubClientSecret", "type": "GitHubClientSecret"}, + "git_hub_access_token_base_url": {"key": "gitHubAccessTokenBaseUrl", "type": "str"}, } def __init__( @@ -25506,23 +27449,23 @@ def __init__( **kwargs ): """ - :keyword git_hub_access_code: Required. GitHub access code. + :keyword git_hub_access_code: GitHub access code. Required. :paramtype git_hub_access_code: str :keyword git_hub_client_id: GitHub application client ID. :paramtype git_hub_client_id: str :keyword git_hub_client_secret: GitHub bring your own app client secret information. :paramtype git_hub_client_secret: ~azure.mgmt.datafactory.models.GitHubClientSecret - :keyword git_hub_access_token_base_url: Required. GitHub access token base URL. + :keyword git_hub_access_token_base_url: GitHub access token base URL. Required. :paramtype git_hub_access_token_base_url: str """ - super(GitHubAccessTokenRequest, self).__init__(**kwargs) + super().__init__(**kwargs) self.git_hub_access_code = git_hub_access_code self.git_hub_client_id = git_hub_client_id self.git_hub_client_secret = git_hub_client_secret self.git_hub_access_token_base_url = git_hub_access_token_base_url -class GitHubAccessTokenResponse(msrest.serialization.Model): +class GitHubAccessTokenResponse(_serialization.Model): """Get GitHub access token response definition. :ivar git_hub_access_token: GitHub access token. @@ -25530,24 +27473,19 @@ class GitHubAccessTokenResponse(msrest.serialization.Model): """ _attribute_map = { - 'git_hub_access_token': {'key': 'gitHubAccessToken', 'type': 'str'}, + "git_hub_access_token": {"key": "gitHubAccessToken", "type": "str"}, } - def __init__( - self, - *, - git_hub_access_token: Optional[str] = None, - **kwargs - ): + def __init__(self, *, git_hub_access_token: Optional[str] = None, **kwargs): """ :keyword git_hub_access_token: GitHub access token. :paramtype git_hub_access_token: str """ - super(GitHubAccessTokenResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.git_hub_access_token = git_hub_access_token -class GitHubClientSecret(msrest.serialization.Model): +class GitHubClientSecret(_serialization.Model): """Client secret information for factory's bring your own app repository configuration. :ivar byoa_secret_akv_url: Bring your own app client secret AKV URL. 
@@ -25557,62 +27495,50 @@ class GitHubClientSecret(msrest.serialization.Model): """ _attribute_map = { - 'byoa_secret_akv_url': {'key': 'byoaSecretAkvUrl', 'type': 'str'}, - 'byoa_secret_name': {'key': 'byoaSecretName', 'type': 'str'}, + "byoa_secret_akv_url": {"key": "byoaSecretAkvUrl", "type": "str"}, + "byoa_secret_name": {"key": "byoaSecretName", "type": "str"}, } - def __init__( - self, - *, - byoa_secret_akv_url: Optional[str] = None, - byoa_secret_name: Optional[str] = None, - **kwargs - ): + def __init__(self, *, byoa_secret_akv_url: Optional[str] = None, byoa_secret_name: Optional[str] = None, **kwargs): """ :keyword byoa_secret_akv_url: Bring your own app client secret AKV URL. :paramtype byoa_secret_akv_url: str :keyword byoa_secret_name: Bring your own app client secret name in AKV. :paramtype byoa_secret_name: str """ - super(GitHubClientSecret, self).__init__(**kwargs) + super().__init__(**kwargs) self.byoa_secret_akv_url = byoa_secret_akv_url self.byoa_secret_name = byoa_secret_name -class GlobalParameterListResponse(msrest.serialization.Model): +class GlobalParameterListResponse(_serialization.Model): """A list of Global parameters. All required parameters must be populated in order to send to Azure. - :ivar value: Required. List of global parameters. + :ivar value: List of global parameters. Required. :vartype value: list[~azure.mgmt.datafactory.models.GlobalParameterResource] :ivar next_link: The link to the next page of results, if any remaining results exist. :vartype next_link: str """ _validation = { - 'value': {'required': True}, + "value": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[GlobalParameterResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[GlobalParameterResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - *, - value: List["_models.GlobalParameterResource"], - next_link: Optional[str] = None, - **kwargs - ): + def __init__(self, *, value: List["_models.GlobalParameterResource"], next_link: Optional[str] = None, **kwargs): """ - :keyword value: Required. List of global parameters. + :keyword value: List of global parameters. Required. :paramtype value: list[~azure.mgmt.datafactory.models.GlobalParameterResource] :keyword next_link: The link to the next page of results, if any remaining results exist. :paramtype next_link: str """ - super(GlobalParameterListResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.next_link = next_link @@ -25632,90 +27558,79 @@ class GlobalParameterResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :ivar properties: Required. Properties of the global parameter. + :ivar properties: Properties of the global parameter. Required. 
:vartype properties: dict[str, ~azure.mgmt.datafactory.models.GlobalParameterSpecification] """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "etag": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{GlobalParameterSpecification}'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "etag": {"key": "etag", "type": "str"}, + "properties": {"key": "properties", "type": "{GlobalParameterSpecification}"}, } - def __init__( - self, - *, - properties: Dict[str, "_models.GlobalParameterSpecification"], - **kwargs - ): + def __init__(self, *, properties: Dict[str, "_models.GlobalParameterSpecification"], **kwargs): """ - :keyword properties: Required. Properties of the global parameter. + :keyword properties: Properties of the global parameter. Required. :paramtype properties: dict[str, ~azure.mgmt.datafactory.models.GlobalParameterSpecification] """ - super(GlobalParameterResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class GlobalParameterSpecification(msrest.serialization.Model): +class GlobalParameterSpecification(_serialization.Model): """Definition of a single parameter for an entity. All required parameters must be populated in order to send to Azure. - :ivar type: Required. Global Parameter type. Known values are: "Object", "String", "Int", - "Float", "Bool", "Array". + :ivar type: Global Parameter type. Required. Known values are: "Object", "String", "Int", + "Float", "Bool", and "Array". :vartype type: str or ~azure.mgmt.datafactory.models.GlobalParameterType - :ivar value: Required. Value of parameter. - :vartype value: any + :ivar value: Value of parameter. Required. + :vartype value: JSON """ _validation = { - 'type': {'required': True}, - 'value': {'required': True}, + "type": {"required": True}, + "value": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'object'}, + "type": {"key": "type", "type": "str"}, + "value": {"key": "value", "type": "object"}, } - def __init__( - self, - *, - type: Union[str, "_models.GlobalParameterType"], - value: Any, - **kwargs - ): + def __init__(self, *, type: Union[str, "_models.GlobalParameterType"], value: JSON, **kwargs): """ - :keyword type: Required. Global Parameter type. Known values are: "Object", "String", "Int", - "Float", "Bool", "Array". + :keyword type: Global Parameter type. Required. Known values are: "Object", "String", "Int", + "Float", "Bool", and "Array". :paramtype type: str or ~azure.mgmt.datafactory.models.GlobalParameterType - :keyword value: Required. Value of parameter. - :paramtype value: any + :keyword value: Value of parameter. Required. + :paramtype value: JSON """ - super(GlobalParameterSpecification, self).__init__(**kwargs) + super().__init__(**kwargs) self.type = type self.value = value -class GoogleAdWordsLinkedService(LinkedService): +class GoogleAdWordsLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Google AdWords service linked service. 
All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -25724,19 +27639,19 @@ class GoogleAdWordsLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_properties: Properties used to connect to GoogleAds. It is mutually exclusive with any other properties in the linked service. Type: object. - :vartype connection_properties: any + :vartype connection_properties: JSON :ivar client_customer_id: The Client customer ID of the AdWords account that you want to fetch report data for. - :vartype client_customer_id: any + :vartype client_customer_id: JSON :ivar developer_token: The developer token associated with the manager account that you use to grant access to the AdWords API. :vartype developer_token: ~azure.mgmt.datafactory.models.SecretBase :ivar authentication_type: The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. Known values are: - "ServiceAuthentication", "UserAuthentication". + "ServiceAuthentication" and "UserAuthentication". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType :ivar refresh_token: The refresh token obtained from Google for authorizing access to AdWords @@ -25744,80 +27659,80 @@ class GoogleAdWordsLinkedService(LinkedService): :vartype refresh_token: ~azure.mgmt.datafactory.models.SecretBase :ivar client_id: The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). - :vartype client_id: any + :vartype client_id: JSON :ivar client_secret: The client secret of the google application used to acquire the refresh token. :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase :ivar email: The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. - :vartype email: any + :vartype email: JSON :ivar key_file_path: The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. - :vartype key_file_path: any + :vartype key_file_path: JSON :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :vartype trusted_cert_path: any + :vartype trusted_cert_path: JSON :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :vartype use_system_trust_store: any + :vartype use_system_trust_store: JSON :ivar encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, - 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, - 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'email': {'key': 'typeProperties.email', 'type': 'object'}, - 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_properties": {"key": "typeProperties.connectionProperties", "type": "object"}, + "client_customer_id": {"key": "typeProperties.clientCustomerID", "type": "object"}, + "developer_token": {"key": "typeProperties.developerToken", "type": "SecretBase"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, + "refresh_token": {"key": "typeProperties.refreshToken", "type": "SecretBase"}, + "client_id": {"key": "typeProperties.clientId", "type": "object"}, + "client_secret": {"key": "typeProperties.clientSecret", "type": "SecretBase"}, + "email": {"key": "typeProperties.email", "type": "object"}, + "key_file_path": {"key": "typeProperties.keyFilePath", "type": "object"}, + "trusted_cert_path": {"key": "typeProperties.trustedCertPath", "type": "object"}, + "use_system_trust_store": {"key": "typeProperties.useSystemTrustStore", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_properties: Optional[Any] = None, - client_customer_id: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_properties: 
Optional[JSON] = None, + client_customer_id: Optional[JSON] = None, developer_token: Optional["_models.SecretBase"] = None, authentication_type: Optional[Union[str, "_models.GoogleAdWordsAuthenticationType"]] = None, refresh_token: Optional["_models.SecretBase"] = None, - client_id: Optional[Any] = None, + client_id: Optional[JSON] = None, client_secret: Optional["_models.SecretBase"] = None, - email: Optional[Any] = None, - key_file_path: Optional[Any] = None, - trusted_cert_path: Optional[Any] = None, - use_system_trust_store: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + email: Optional[JSON] = None, + key_file_path: Optional[JSON] = None, + trusted_cert_path: Optional[JSON] = None, + use_system_trust_store: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -25825,19 +27740,19 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_properties: Properties used to connect to GoogleAds. It is mutually exclusive with any other properties in the linked service. Type: object. - :paramtype connection_properties: any + :paramtype connection_properties: JSON :keyword client_customer_id: The Client customer ID of the AdWords account that you want to fetch report data for. - :paramtype client_customer_id: any + :paramtype client_customer_id: JSON :keyword developer_token: The developer token associated with the manager account that you use to grant access to the AdWords API. :paramtype developer_token: ~azure.mgmt.datafactory.models.SecretBase :keyword authentication_type: The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. Known values are: - "ServiceAuthentication", "UserAuthentication". + "ServiceAuthentication" and "UserAuthentication". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType :keyword refresh_token: The refresh token obtained from Google for authorizing access to @@ -25845,30 +27760,37 @@ def __init__( :paramtype refresh_token: ~azure.mgmt.datafactory.models.SecretBase :keyword client_id: The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). - :paramtype client_id: any + :paramtype client_id: JSON :keyword client_secret: The client secret of the google application used to acquire the refresh token. :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase :keyword email: The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. - :paramtype email: any + :paramtype email: JSON :keyword key_file_path: The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. 
- :paramtype key_file_path: any + :paramtype key_file_path: JSON :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :paramtype trusted_cert_path: any + :paramtype trusted_cert_path: JSON :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :paramtype use_system_trust_store: any + :paramtype use_system_trust_store: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'GoogleAdWords' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "GoogleAdWords" # type: str self.connection_properties = connection_properties self.client_customer_id = client_customer_id self.developer_token = developer_token @@ -25890,88 +27812,98 @@ class GoogleAdWordsObjectDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. Type: string (or Expression with resultType string). 
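
A minimal sketch of how the regenerated GoogleAdWordsLinkedService model shown above can be constructed with the UserAuthentication flow; every value is a placeholder, and SecureString is assumed to be the usual SecretBase implementation from the same models package:

from azure.mgmt.datafactory.models import GoogleAdWordsLinkedService, SecureString

# Placeholder OAuth credentials; UserAuthentication does not require a self-hosted IR.
ads_ls = GoogleAdWordsLinkedService(
    client_customer_id="123-456-7890",
    developer_token=SecureString(value="<developer-token>"),
    authentication_type="UserAuthentication",
    refresh_token=SecureString(value="<refresh-token>"),
    client_id="<oauth-client-id>",
    client_secret=SecureString(value="<oauth-client-secret>"),
)

With ServiceAuthentication, email and key_file_path would be set instead, and per the docstring above the linked service must then run on a self-hosted IR.
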
- :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'GoogleAdWordsObject' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "GoogleAdWordsObject" # type: str self.table_name = table_name @@ -25982,101 +27914,110 @@ class GoogleAdWordsSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'GoogleAdWordsSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "GoogleAdWordsSource" # type: str self.query = query -class GoogleBigQueryLinkedService(LinkedService): +class GoogleBigQueryLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Google BigQuery service linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -26085,18 +28026,18 @@ class GoogleBigQueryLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar project: Required. The default BigQuery project to query against. - :vartype project: any + :vartype annotations: list[JSON] + :ivar project: The default BigQuery project to query against. Required. + :vartype project: JSON :ivar additional_projects: A comma-separated list of public BigQuery projects to access. - :vartype additional_projects: any + :vartype additional_projects: JSON :ivar request_google_drive_scope: Whether to request access to Google Drive. Allowing Google Drive access enables support for federated tables that combine BigQuery data with data from Google Drive. The default value is false. - :vartype request_google_drive_scope: any - :ivar authentication_type: Required. The OAuth 2.0 authentication mechanism used for - authentication. ServiceAuthentication can only be used on self-hosted IR. Known values are: - "ServiceAuthentication", "UserAuthentication". + :vartype request_google_drive_scope: JSON + :ivar authentication_type: The OAuth 2.0 authentication mechanism used for authentication. + ServiceAuthentication can only be used on self-hosted IR. Required. Known values are: + "ServiceAuthentication" and "UserAuthentication". 
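
A similarly minimal sketch of the GoogleAdWordsSource copy source defined above; all of its properties are optional, and the query and timeout values below are placeholders:

from azure.mgmt.datafactory.models import GoogleAdWordsSource

# query_timeout follows the hh:mm:ss pattern described in the docstring above.
ads_source = GoogleAdWordsSource(
    query="SELECT campaign.id, metrics.clicks FROM campaign",
    query_timeout="02:00:00",
)
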
:vartype authentication_type: str or ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType :ivar refresh_token: The refresh token obtained from Google for authorizing access to BigQuery @@ -26104,82 +28045,82 @@ class GoogleBigQueryLinkedService(LinkedService): :vartype refresh_token: ~azure.mgmt.datafactory.models.SecretBase :ivar client_id: The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). - :vartype client_id: any + :vartype client_id: JSON :ivar client_secret: The client secret of the google application used to acquire the refresh token. :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase :ivar email: The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. - :vartype email: any + :vartype email: JSON :ivar key_file_path: The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. - :vartype key_file_path: any + :vartype key_file_path: JSON :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :vartype trusted_cert_path: any + :vartype trusted_cert_path: JSON :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :vartype use_system_trust_store: any + :vartype use_system_trust_store: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'project': {'required': True}, - 'authentication_type': {'required': True}, + "type": {"required": True}, + "project": {"required": True}, + "authentication_type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'project': {'key': 'typeProperties.project', 'type': 'object'}, - 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'}, - 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'email': {'key': 'typeProperties.email', 'type': 'object'}, - 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "project": {"key": "typeProperties.project", "type": "object"}, + "additional_projects": {"key": "typeProperties.additionalProjects", "type": "object"}, + "request_google_drive_scope": {"key": "typeProperties.requestGoogleDriveScope", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, + "refresh_token": {"key": "typeProperties.refreshToken", "type": "SecretBase"}, + "client_id": {"key": "typeProperties.clientId", "type": "object"}, + "client_secret": {"key": "typeProperties.clientSecret", "type": "SecretBase"}, + "email": {"key": "typeProperties.email", "type": "object"}, + "key_file_path": {"key": "typeProperties.keyFilePath", "type": "object"}, + "trusted_cert_path": {"key": "typeProperties.trustedCertPath", "type": "object"}, + "use_system_trust_store": {"key": "typeProperties.useSystemTrustStore", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - project: Any, + project: JSON, authentication_type: Union[str, "_models.GoogleBigQueryAuthenticationType"], - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - additional_projects: Optional[Any] = None, 
- request_google_drive_scope: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + additional_projects: Optional[JSON] = None, + request_google_drive_scope: Optional[JSON] = None, refresh_token: Optional["_models.SecretBase"] = None, - client_id: Optional[Any] = None, + client_id: Optional[JSON] = None, client_secret: Optional["_models.SecretBase"] = None, - email: Optional[Any] = None, - key_file_path: Optional[Any] = None, - trusted_cert_path: Optional[Any] = None, - use_system_trust_store: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + email: Optional[JSON] = None, + key_file_path: Optional[JSON] = None, + trusted_cert_path: Optional[JSON] = None, + use_system_trust_store: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -26187,18 +28128,18 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword project: Required. The default BigQuery project to query against. - :paramtype project: any + :paramtype annotations: list[JSON] + :keyword project: The default BigQuery project to query against. Required. + :paramtype project: JSON :keyword additional_projects: A comma-separated list of public BigQuery projects to access. - :paramtype additional_projects: any + :paramtype additional_projects: JSON :keyword request_google_drive_scope: Whether to request access to Google Drive. Allowing Google Drive access enables support for federated tables that combine BigQuery data with data from Google Drive. The default value is false. - :paramtype request_google_drive_scope: any - :keyword authentication_type: Required. The OAuth 2.0 authentication mechanism used for - authentication. ServiceAuthentication can only be used on self-hosted IR. Known values are: - "ServiceAuthentication", "UserAuthentication". + :paramtype request_google_drive_scope: JSON + :keyword authentication_type: The OAuth 2.0 authentication mechanism used for authentication. + ServiceAuthentication can only be used on self-hosted IR. Required. Known values are: + "ServiceAuthentication" and "UserAuthentication". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType :keyword refresh_token: The refresh token obtained from Google for authorizing access to @@ -26206,30 +28147,37 @@ def __init__( :paramtype refresh_token: ~azure.mgmt.datafactory.models.SecretBase :keyword client_id: The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). - :paramtype client_id: any + :paramtype client_id: JSON :keyword client_secret: The client secret of the google application used to acquire the refresh token. :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase :keyword email: The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. 
- :paramtype email: any + :paramtype email: JSON :keyword key_file_path: The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. - :paramtype key_file_path: any + :paramtype key_file_path: JSON :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :paramtype trusted_cert_path: any + :paramtype trusted_cert_path: JSON :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :paramtype use_system_trust_store: any + :paramtype use_system_trust_store: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(GoogleBigQueryLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'GoogleBigQuery' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "GoogleBigQuery" # type: str self.project = project self.additional_projects = additional_projects self.request_google_drive_scope = request_google_drive_scope @@ -26244,113 +28192,123 @@ def __init__( self.encrypted_credential = encrypted_credential -class GoogleBigQueryObjectDataset(Dataset): +class GoogleBigQueryObjectDataset(Dataset): # pylint: disable=too-many-instance-attributes """Google BigQuery service dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
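
For the GoogleBigQueryLinkedService above, project and authentication_type are the only required type properties. A minimal sketch with placeholder values; wrapping the model in LinkedServiceResource before a create_or_update call is assumed to follow the usual pattern for this generated client:

from azure.mgmt.datafactory.models import (
    GoogleBigQueryLinkedService,
    LinkedServiceResource,
    SecureString,
)

bq_ls = GoogleBigQueryLinkedService(
    project="my-gcp-project",                  # required
    authentication_type="UserAuthentication",  # required; ServiceAuthentication needs a self-hosted IR
    client_id="<oauth-client-id>",
    client_secret=SecureString(value="<oauth-client-secret>"),
    refresh_token=SecureString(value="<refresh-token>"),
)
resource = LinkedServiceResource(properties=bq_ls)
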
:vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: This property will be retired. Please consider using database + table properties instead. - :vartype table_name: any + :vartype table_name: JSON :ivar table: The table name of the Google BigQuery. Type: string (or Expression with resultType string). - :vartype table: any + :vartype table: JSON :ivar dataset: The database name of the Google BigQuery. Type: string (or Expression with resultType string). - :vartype dataset: any + :vartype dataset: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, + "dataset": {"key": "typeProperties.dataset", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, - table: Optional[Any] = None, - dataset: Optional[Any] = None, + table_name: Optional[JSON] = None, + table: Optional[JSON] = None, + dataset: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: This property will be retired. Please consider using database + table properties instead. - :paramtype table_name: any + :paramtype table_name: JSON :keyword table: The table name of the Google BigQuery. Type: string (or Expression with resultType string). - :paramtype table: any + :paramtype table: JSON :keyword dataset: The database name of the Google BigQuery. Type: string (or Expression with resultType string). - :paramtype dataset: any - """ - super(GoogleBigQueryObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'GoogleBigQueryObject' # type: str + :paramtype dataset: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "GoogleBigQueryObject" # type: str self.table_name = table_name self.table = table self.dataset = dataset @@ -26363,89 +28321,98 @@ class GoogleBigQuerySource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'GoogleBigQuerySource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "GoogleBigQuerySource" # type: str self.query = query @@ -26456,8 +28423,8 @@ class GoogleCloudStorageLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -26466,10 +28433,10 @@ class GoogleCloudStorageLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar access_key_id: The access key identifier of the Google Cloud Storage Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :vartype access_key_id: any + :vartype access_key_id: JSON :ivar secret_access_key: The secret access key of the Google Cloud Storage Identity and Access Management (IAM) user. :vartype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase @@ -26477,48 +28444,48 @@ class GoogleCloudStorageLinkedService(LinkedService): Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). 
- :vartype service_url: any + :vartype service_url: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, - 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "access_key_id": {"key": "typeProperties.accessKeyId", "type": "object"}, + "secret_access_key": {"key": "typeProperties.secretAccessKey", "type": "SecretBase"}, + "service_url": {"key": "typeProperties.serviceUrl", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - access_key_id: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + access_key_id: Optional[JSON] = None, secret_access_key: Optional["_models.SecretBase"] = None, - service_url: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + service_url: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -26526,10 +28493,10 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword access_key_id: The access key identifier of the Google Cloud Storage Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). 
- :paramtype access_key_id: any + :paramtype access_key_id: JSON :keyword secret_access_key: The secret access key of the Google Cloud Storage Identity and Access Management (IAM) user. :paramtype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase @@ -26537,14 +28504,21 @@ def __init__( Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). - :paramtype service_url: any + :paramtype service_url: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(GoogleCloudStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'GoogleCloudStorage' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "GoogleCloudStorage" # type: str self.access_key_id = access_key_id self.secret_access_key = secret_access_key self.service_url = service_url @@ -26558,199 +28532,206 @@ class GoogleCloudStorageLocation(DatasetLocation): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage location. Required. :vartype type: str :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :vartype folder_path: any + :vartype folder_path: JSON :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :vartype file_name: any + :vartype file_name: JSON :ivar bucket_name: Specify the bucketName of Google Cloud Storage. Type: string (or Expression with resultType string). - :vartype bucket_name: any + :vartype bucket_name: JSON :ivar version: Specify the version of Google Cloud Storage. Type: string (or Expression with resultType string). 
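
The GoogleCloudStorageLinkedService above authenticates with HMAC-style access keys. A minimal sketch with placeholder credentials; service_url only needs to be set when overriding the default endpoint:

from azure.mgmt.datafactory.models import GoogleCloudStorageLinkedService, SecureString

gcs_ls = GoogleCloudStorageLinkedService(
    access_key_id="<hmac-access-key-id>",
    secret_access_key=SecureString(value="<hmac-secret>"),
    service_url="https://storage.googleapis.com",
)
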
- :vartype version: any + :vartype version: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'bucket_name': {'key': 'bucketName', 'type': 'object'}, - 'version': {'key': 'version', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "folder_path": {"key": "folderPath", "type": "object"}, + "file_name": {"key": "fileName", "type": "object"}, + "bucket_name": {"key": "bucketName", "type": "object"}, + "version": {"key": "version", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - folder_path: Optional[Any] = None, - file_name: Optional[Any] = None, - bucket_name: Optional[Any] = None, - version: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + folder_path: Optional[JSON] = None, + file_name: Optional[JSON] = None, + bucket_name: Optional[JSON] = None, + version: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :paramtype folder_path: any + :paramtype folder_path: JSON :keyword file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :paramtype file_name: any + :paramtype file_name: JSON :keyword bucket_name: Specify the bucketName of Google Cloud Storage. Type: string (or Expression with resultType string). - :paramtype bucket_name: any + :paramtype bucket_name: JSON :keyword version: Specify the version of Google Cloud Storage. Type: string (or Expression with resultType string). - :paramtype version: any + :paramtype version: JSON """ - super(GoogleCloudStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type = 'GoogleCloudStorageLocation' # type: str + super().__init__( + additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs + ) + self.type = "GoogleCloudStorageLocation" # type: str self.bucket_name = bucket_name self.version = version -class GoogleCloudStorageReadSettings(StoreReadSettings): +class GoogleCloudStorageReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes """Google Cloud Storage read settings. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The read setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. :vartype type: str :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. 
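
And a minimal sketch of the GoogleCloudStorageLocation dataset location defined above, with a placeholder bucket and folder path:

from azure.mgmt.datafactory.models import GoogleCloudStorageLocation

# All properties are optional; bucket_name and folder_path are typically set together.
gcs_location = GoogleCloudStorageLocation(
    bucket_name="my-bucket",
    folder_path="exports/2022",
)
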
Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :vartype recursive: any + :vartype recursive: JSON :ivar wildcard_folder_path: Google Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). - :vartype wildcard_folder_path: any + :vartype wildcard_folder_path: JSON :ivar wildcard_file_name: Google Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). - :vartype wildcard_file_name: any + :vartype wildcard_file_name: JSON :ivar prefix: The prefix filter for the Google Cloud Storage object name. Type: string (or Expression with resultType string). - :vartype prefix: any + :vartype prefix: JSON :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :vartype file_list_path: any + :vartype file_list_path: JSON :ivar enable_partition_discovery: Indicates whether to enable partition discovery. :vartype enable_partition_discovery: bool :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :vartype partition_root_path: any + :vartype partition_root_path: JSON :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype delete_files_after_completion: any + :vartype delete_files_after_completion: JSON :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_start: any + :vartype modified_datetime_start: JSON :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
- :vartype modified_datetime_end: any + :vartype modified_datetime_end: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'prefix': {'key': 'prefix', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "recursive": {"key": "recursive", "type": "object"}, + "wildcard_folder_path": {"key": "wildcardFolderPath", "type": "object"}, + "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, + "prefix": {"key": "prefix", "type": "object"}, + "file_list_path": {"key": "fileListPath", "type": "object"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "partition_root_path": {"key": "partitionRootPath", "type": "object"}, + "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, + "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, + "modified_datetime_end": {"key": "modifiedDatetimeEnd", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - recursive: Optional[Any] = None, - wildcard_folder_path: Optional[Any] = None, - wildcard_file_name: Optional[Any] = None, - prefix: Optional[Any] = None, - file_list_path: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + recursive: Optional[JSON] = None, + wildcard_folder_path: Optional[JSON] = None, + wildcard_file_name: Optional[JSON] = None, + prefix: Optional[JSON] = None, + file_list_path: Optional[JSON] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[Any] = None, - delete_files_after_completion: Optional[Any] = None, - modified_datetime_start: Optional[Any] = None, - modified_datetime_end: Optional[Any] = None, + partition_root_path: Optional[JSON] = None, + delete_files_after_completion: Optional[JSON] = None, + modified_datetime_start: Optional[JSON] = None, + modified_datetime_end: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :paramtype recursive: any + :paramtype recursive: JSON :keyword wildcard_folder_path: Google Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). - :paramtype wildcard_folder_path: any + :paramtype wildcard_folder_path: JSON :keyword wildcard_file_name: Google Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). - :paramtype wildcard_file_name: any + :paramtype wildcard_file_name: JSON :keyword prefix: The prefix filter for the Google Cloud Storage object name. Type: string (or Expression with resultType string). - :paramtype prefix: any + :paramtype prefix: JSON :keyword file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :paramtype file_list_path: any + :paramtype file_list_path: JSON :keyword enable_partition_discovery: Indicates whether to enable partition discovery. :paramtype enable_partition_discovery: bool :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :paramtype partition_root_path: any + :paramtype partition_root_path: JSON :keyword delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype delete_files_after_completion: any + :paramtype delete_files_after_completion: JSON :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_start: any + :paramtype modified_datetime_start: JSON :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_end: any - """ - super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'GoogleCloudStorageReadSettings' # type: str + :paramtype modified_datetime_end: JSON + """ + super().__init__( + additional_properties=additional_properties, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "GoogleCloudStorageReadSettings" # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -26763,6 +28744,92 @@ def __init__( self.modified_datetime_end = modified_datetime_end +class GoogleSheetsLinkedService(LinkedService): + """Linked service for GoogleSheets. 
+ + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[JSON] + :ivar api_token: The api token for the GoogleSheets source. Required. + :vartype api_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :vartype encrypted_credential: JSON + """ + + _validation = { + "type": {"required": True}, + "api_token": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "api_token": {"key": "typeProperties.apiToken", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + } + + def __init__( + self, + *, + api_token: "_models.SecretBase", + additional_properties: Optional[Dict[str, JSON]] = None, + connect_via: Optional["_models.IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, + annotations: Optional[List[JSON]] = None, + encrypted_credential: Optional[JSON] = None, + **kwargs + ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[JSON] + :keyword api_token: The api token for the GoogleSheets source. Required. + :paramtype api_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "GoogleSheets" # type: str + self.api_token = api_token + self.encrypted_credential = encrypted_credential + + class GreenplumLinkedService(LinkedService): """Greenplum Database linked service. @@ -26770,8 +28837,8 @@ class GreenplumLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -26780,51 +28847,51 @@ class GreenplumLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype connection_string: JSON :ivar pwd: The Azure key vault secret reference of password in connection string. :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
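A hedged sketch of instantiating the new GoogleSheetsLinkedService defined above; SecureString is assumed to be the SecretBase implementation used for a literal API token, and the token value is a placeholder.

    from azure.mgmt.datafactory.models import GoogleSheetsLinkedService, SecureString

    # api_token is the only required type property and must be a SecretBase instance.
    sheets_ls = GoogleSheetsLinkedService(
        api_token=SecureString(value="<api-token>"),  # placeholder secret value
        description="Illustrative GoogleSheets linked service",
    )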
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "pwd": {"key": "typeProperties.pwd", "type": "AzureKeyVaultSecretReference"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_string: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_string: Optional[JSON] = None, pwd: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -26832,19 +28899,26 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype connection_string: JSON :keyword pwd: The Azure key vault secret reference of password in connection string. :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :paramtype encrypted_credential: any - """ - super(GreenplumLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Greenplum' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Greenplum" # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential @@ -26857,212 +28931,231 @@ class GreenplumSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
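A sketch of the GreenplumLinkedService completed above, with the password resolved from Key Vault; the LinkedServiceReference and AzureKeyVaultSecretReference shapes (reference_name, store, secret_name) are assumptions based on the referenced model types, and the connection string is a placeholder.

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference,
        GreenplumLinkedService,
        LinkedServiceReference,
    )

    greenplum_ls = GreenplumLinkedService(
        # Placeholder ODBC connection string; could also be a SecureString or Key Vault reference.
        connection_string="HOST=gp.example.com;PORT=5432;DB=analytics;UID=loader",
        pwd=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(
                type="LinkedServiceReference",     # assumed discriminator; may be optional on older SDKs
                reference_name="AzureKeyVaultLS",  # placeholder Key Vault linked service name
            ),
            secret_name="greenplum-password",
        ),
    )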
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'GreenplumSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "GreenplumSource" # type: str self.query = query -class GreenplumTableDataset(Dataset): +class GreenplumTableDataset(Dataset): # pylint: disable=too-many-instance-attributes """Greenplum Database dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :vartype table_name: any + :vartype table_name: JSON :ivar table: The table name of Greenplum. Type: string (or Expression with resultType string). - :vartype table: any + :vartype table: JSON :ivar schema_type_properties_schema: The schema name of Greenplum. Type: string (or Expression with resultType string). 
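A minimal sketch of the GreenplumSource copy source defined above; the query is a placeholder, and query_timeout follows the timespan pattern quoted in the docstring.

    from azure.mgmt.datafactory.models import GreenplumSource

    source = GreenplumSource(
        query="SELECT id, amount FROM public.sales",  # placeholder query
        query_timeout="02:00:00",                     # matches the documented timespan pattern
    )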
- :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, - table: Optional[Any] = None, - schema_type_properties_schema: Optional[Any] = None, + table_name: Optional[JSON] = None, + table: Optional[JSON] = None, + schema_type_properties_schema: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. 
:paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: This property will be retired. Please consider using schema + table properties instead. - :paramtype table_name: any + :paramtype table_name: JSON :keyword table: The table name of Greenplum. Type: string (or Expression with resultType string). - :paramtype table: any + :paramtype table: JSON :keyword schema_type_properties_schema: The schema name of Greenplum. Type: string (or Expression with resultType string). - :paramtype schema_type_properties_schema: any - """ - super(GreenplumTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'GreenplumTable' # type: str + :paramtype schema_type_properties_schema: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "GreenplumTable" # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema -class HBaseLinkedService(LinkedService): +class HBaseLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """HBase server linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -27071,92 +29164,92 @@ class HBaseLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar host: Required. The IP address or host name of the HBase server. (i.e. 192.168.222.160). - :vartype host: any + :vartype annotations: list[JSON] + :ivar host: The IP address or host name of the HBase server. (i.e. 192.168.222.160). Required. + :vartype host: JSON :ivar port: The TCP port that the HBase instance uses to listen for client connections. The default value is 9090. - :vartype port: any + :vartype port: JSON :ivar http_path: The partial URL corresponding to the HBase server. (i.e. /gateway/sandbox/hbase/version). - :vartype http_path: any - :ivar authentication_type: Required. The authentication mechanism to use to connect to the - HBase server. 
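A sketch of the GreenplumTableDataset completed above; the linked service reference name is a placeholder, and passing a type discriminator to LinkedServiceReference is an assumption that may not be needed on every SDK version. Note that table and schema_type_properties_schema serialize to typeProperties.table and typeProperties.schema respectively, per the attribute map.

    from azure.mgmt.datafactory.models import GreenplumTableDataset, LinkedServiceReference

    dataset = GreenplumTableDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference",   # assumed discriminator
            reference_name="GreenplumLS",    # placeholder linked service name
        ),
        schema_type_properties_schema="public",  # serialized as typeProperties.schema
        table="sales",                           # serialized as typeProperties.table
    )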
Known values are: "Anonymous", "Basic". + :vartype http_path: JSON + :ivar authentication_type: The authentication mechanism to use to connect to the HBase server. + Required. Known values are: "Anonymous" and "Basic". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.HBaseAuthenticationType :ivar username: The user name used to connect to the HBase instance. - :vartype username: any + :vartype username: JSON :ivar password: The password corresponding to the user name. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :vartype enable_ssl: any + :vartype enable_ssl: JSON :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :vartype trusted_cert_path: any + :vartype trusted_cert_path: JSON :ivar allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :vartype allow_host_name_cn_mismatch: any + :vartype allow_host_name_cn_mismatch: JSON :ivar allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :vartype allow_self_signed_server_cert: any + :vartype allow_self_signed_server_cert: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, + "type": {"required": True}, + "host": {"required": True}, + "authentication_type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": 
"description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "host": {"key": "typeProperties.host", "type": "object"}, + "port": {"key": "typeProperties.port", "type": "object"}, + "http_path": {"key": "typeProperties.httpPath", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "enable_ssl": {"key": "typeProperties.enableSsl", "type": "object"}, + "trusted_cert_path": {"key": "typeProperties.trustedCertPath", "type": "object"}, + "allow_host_name_cn_mismatch": {"key": "typeProperties.allowHostNameCNMismatch", "type": "object"}, + "allow_self_signed_server_cert": {"key": "typeProperties.allowSelfSignedServerCert", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - host: Any, + host: JSON, authentication_type: Union[str, "_models.HBaseAuthenticationType"], - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - port: Optional[Any] = None, - http_path: Optional[Any] = None, - username: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + port: Optional[JSON] = None, + http_path: Optional[JSON] = None, + username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - enable_ssl: Optional[Any] = None, - trusted_cert_path: Optional[Any] = None, - allow_host_name_cn_mismatch: Optional[Any] = None, - allow_self_signed_server_cert: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + enable_ssl: Optional[JSON] = None, + trusted_cert_path: Optional[JSON] = None, + allow_host_name_cn_mismatch: Optional[JSON] = None, + allow_self_signed_server_cert: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -27164,43 +29257,50 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword host: Required. The IP address or host name of the HBase server. (i.e. - 192.168.222.160). - :paramtype host: any + :paramtype annotations: list[JSON] + :keyword host: The IP address or host name of the HBase server. (i.e. 192.168.222.160). + Required. + :paramtype host: JSON :keyword port: The TCP port that the HBase instance uses to listen for client connections. The default value is 9090. - :paramtype port: any + :paramtype port: JSON :keyword http_path: The partial URL corresponding to the HBase server. (i.e. /gateway/sandbox/hbase/version). 
- :paramtype http_path: any - :keyword authentication_type: Required. The authentication mechanism to use to connect to the - HBase server. Known values are: "Anonymous", "Basic". + :paramtype http_path: JSON + :keyword authentication_type: The authentication mechanism to use to connect to the HBase + server. Required. Known values are: "Anonymous" and "Basic". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.HBaseAuthenticationType :keyword username: The user name used to connect to the HBase instance. - :paramtype username: any + :paramtype username: JSON :keyword password: The password corresponding to the user name. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :paramtype enable_ssl: any + :paramtype enable_ssl: JSON :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :paramtype trusted_cert_path: any + :paramtype trusted_cert_path: JSON :keyword allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :paramtype allow_host_name_cn_mismatch: any + :paramtype allow_host_name_cn_mismatch: JSON :keyword allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :paramtype allow_self_signed_server_cert: any + :paramtype allow_self_signed_server_cert: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(HBaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'HBase' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "HBase" # type: str self.host = host self.port = port self.http_path = http_path @@ -27221,88 +29321,98 @@ class HBaseObjectDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. 
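A hedged sketch of the HBaseLinkedService completed above; host and authentication_type are the two required type properties, the sample host and HTTP path reuse the docstring's own examples, and SecureString is an assumed SecretBase implementation for the placeholder password.

    from azure.mgmt.datafactory.models import HBaseLinkedService, SecureString

    hbase_ls = HBaseLinkedService(
        host="192.168.222.160",                      # example host from the docstring
        port=9090,                                   # documented default port
        http_path="/gateway/sandbox/hbase/version",  # example path from the docstring
        authentication_type="Basic",                 # known values: "Anonymous", "Basic"
        username="hbase-reader",                     # placeholder user
        password=SecureString(value="<password>"),   # placeholder secret
        enable_ssl=True,
    )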
+ :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. Type: string (or Expression with resultType string). - :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
- :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(HBaseObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'HBaseObject' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "HBaseObject" # type: str self.table_name = table_name @@ -27313,101 +29423,110 @@ class HBaseSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'HBaseSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "HBaseSource" # type: str self.query = query -class HdfsLinkedService(LinkedService): +class HdfsLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Hadoop Distributed File System (HDFS) linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -27416,62 +29535,62 @@ class HdfsLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar url: Required. The URL of the HDFS service endpoint, e.g. - http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). - :vartype url: any + :vartype annotations: list[JSON] + :ivar url: The URL of the HDFS service endpoint, e.g. http://myhostname:50070/webhdfs/v1 . + Type: string (or Expression with resultType string). Required. + :vartype url: JSON :ivar authentication_type: Type of authentication used to connect to the HDFS. Possible values are: Anonymous and Windows. Type: string (or Expression with resultType string). - :vartype authentication_type: any + :vartype authentication_type: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON :ivar user_name: User name for Windows authentication. Type: string (or Expression with resultType string). - :vartype user_name: any + :vartype user_name: JSON :ivar password: Password for Windows authentication. 
:vartype password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { - 'type': {'required': True}, - 'url': {'required': True}, + "type": {"required": True}, + "url": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "url": {"key": "typeProperties.url", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, } def __init__( self, *, - url: Any, - additional_properties: Optional[Dict[str, Any]] = None, + url: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - authentication_type: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, - user_name: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + authentication_type: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, + user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -27479,25 +29598,32 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword url: Required. The URL of the HDFS service endpoint, e.g. - http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). - :paramtype url: any + :paramtype annotations: list[JSON] + :keyword url: The URL of the HDFS service endpoint, e.g. http://myhostname:50070/webhdfs/v1 . + Type: string (or Expression with resultType string). 
Required. + :paramtype url: JSON :keyword authentication_type: Type of authentication used to connect to the HDFS. Possible values are: Anonymous and Windows. Type: string (or Expression with resultType string). - :paramtype authentication_type: any + :paramtype authentication_type: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any + :paramtype encrypted_credential: JSON :keyword user_name: User name for Windows authentication. Type: string (or Expression with resultType string). - :paramtype user_name: any + :paramtype user_name: JSON :keyword password: Password for Windows authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase """ - super(HdfsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Hdfs' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Hdfs" # type: str self.url = url self.authentication_type = authentication_type self.encrypted_credential = encrypted_credential @@ -27512,179 +29638,186 @@ class HdfsLocation(DatasetLocation): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage location. Required. :vartype type: str :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :vartype folder_path: any + :vartype folder_path: JSON :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :vartype file_name: any + :vartype file_name: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "folder_path": {"key": "folderPath", "type": "object"}, + "file_name": {"key": "fileName", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - folder_path: Optional[Any] = None, - file_name: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + folder_path: Optional[JSON] = None, + file_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :paramtype folder_path: any + :paramtype folder_path: JSON :keyword file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). 
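As a quick orientation for the regenerated keyword-only constructor above, a minimal sketch of creating an HdfsLinkedService (the endpoint and credential values are illustrative; SecureString is assumed here as the concrete SecretBase from the same azure.mgmt.datafactory.models package):

    from azure.mgmt.datafactory.models import HdfsLinkedService, SecureString

    # url is the only required type property; a plain string is valid input,
    # and an Expression with resultType string is equally acceptable.
    hdfs_ls = HdfsLinkedService(
        url="http://myhostname:50070/webhdfs/v1",
        authentication_type="Windows",
        user_name="exampleuser",                        # illustrative value
        password=SecureString(value="example-secret"),  # illustrative value
    )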
- :paramtype file_name: any + :paramtype file_name: JSON """ - super(HdfsLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type = 'HdfsLocation' # type: str + super().__init__( + additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs + ) + self.type = "HdfsLocation" # type: str -class HdfsReadSettings(StoreReadSettings): +class HdfsReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes """HDFS read settings. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The read setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. :vartype type: str :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :vartype recursive: any + :vartype recursive: JSON :ivar wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or Expression with resultType string). - :vartype wildcard_folder_path: any + :vartype wildcard_folder_path: JSON :ivar wildcard_file_name: HDFS wildcardFileName. Type: string (or Expression with resultType string). - :vartype wildcard_file_name: any + :vartype wildcard_file_name: JSON :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :vartype file_list_path: any + :vartype file_list_path: JSON :ivar enable_partition_discovery: Indicates whether to enable partition discovery. :vartype enable_partition_discovery: bool :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :vartype partition_root_path: any + :vartype partition_root_path: JSON :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_start: any + :vartype modified_datetime_start: JSON :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_end: any + :vartype modified_datetime_end: JSON :ivar distcp_settings: Specifies Distcp-related settings. :vartype distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). 
- :vartype delete_files_after_completion: any + :vartype delete_files_after_completion: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "recursive": {"key": "recursive", "type": "object"}, + "wildcard_folder_path": {"key": "wildcardFolderPath", "type": "object"}, + "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, + "file_list_path": {"key": "fileListPath", "type": "object"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "partition_root_path": {"key": "partitionRootPath", "type": "object"}, + "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, + "modified_datetime_end": {"key": "modifiedDatetimeEnd", "type": "object"}, + "distcp_settings": {"key": "distcpSettings", "type": "DistcpSettings"}, + "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - recursive: Optional[Any] = None, - wildcard_folder_path: Optional[Any] = None, - wildcard_file_name: Optional[Any] = None, - file_list_path: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + recursive: Optional[JSON] = None, + wildcard_folder_path: Optional[JSON] = None, + wildcard_file_name: Optional[JSON] = None, + file_list_path: Optional[JSON] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[Any] = None, - modified_datetime_start: Optional[Any] = None, - modified_datetime_end: Optional[Any] = None, + partition_root_path: Optional[JSON] = None, + modified_datetime_start: Optional[JSON] = None, + modified_datetime_end: Optional[JSON] = None, distcp_settings: Optional["_models.DistcpSettings"] = None, - delete_files_after_completion: Optional[Any] = None, + delete_files_after_completion: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :paramtype recursive: any + :paramtype recursive: JSON :keyword wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or Expression with resultType string). - :paramtype wildcard_folder_path: any + :paramtype wildcard_folder_path: JSON :keyword wildcard_file_name: HDFS wildcardFileName. Type: string (or Expression with resultType string). - :paramtype wildcard_file_name: any + :paramtype wildcard_file_name: JSON :keyword file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :paramtype file_list_path: any + :paramtype file_list_path: JSON :keyword enable_partition_discovery: Indicates whether to enable partition discovery. :paramtype enable_partition_discovery: bool :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :paramtype partition_root_path: any + :paramtype partition_root_path: JSON :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_start: any + :paramtype modified_datetime_start: JSON :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_end: any + :paramtype modified_datetime_end: JSON :keyword distcp_settings: Specifies Distcp-related settings. :paramtype distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings :keyword delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype delete_files_after_completion: any - """ - super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'HdfsReadSettings' # type: str + :paramtype delete_files_after_completion: JSON + """ + super().__init__( + additional_properties=additional_properties, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "HdfsReadSettings" # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -27704,94 +29837,101 @@ class HdfsSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. 
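A minimal sketch of the HdfsReadSettings model above, using only properties documented in this hunk (the paths and the modified-datetime window are illustrative):

    from azure.mgmt.datafactory.models import HdfsReadSettings

    read_settings = HdfsReadSettings(
        recursive=True,
        wildcard_folder_path="landing/2022/*",      # illustrative folder path
        wildcard_file_name="*.csv",
        enable_partition_discovery=True,
        partition_root_path="landing",
        modified_datetime_start="2022-10-01T00:00:00Z",
        modified_datetime_end="2022-10-18T00:00:00Z",
    )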
+ :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :vartype recursive: any + :vartype recursive: JSON :ivar distcp_settings: Specifies Distcp-related settings. :vartype distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "recursive": {"key": "recursive", "type": "object"}, + "distcp_settings": {"key": "distcpSettings", "type": "DistcpSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - recursive: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + recursive: Optional[JSON] = None, distcp_settings: Optional["_models.DistcpSettings"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). 
- :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :paramtype recursive: any + :paramtype recursive: JSON :keyword distcp_settings: Specifies Distcp-related settings. :paramtype distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings """ - super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'HdfsSource' # type: str + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "HdfsSource" # type: str self.recursive = recursive self.distcp_settings = distcp_settings -class HDInsightHiveActivity(ExecutionActivity): +class HDInsightHiveActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """HDInsight Hive activity type. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -27806,71 +29946,71 @@ class HDInsightHiveActivity(ExecutionActivity): :ivar storage_linked_services: Storage linked service references. :vartype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :ivar arguments: User specified arguments to HDInsightActivity. - :vartype arguments: list[any] - :ivar get_debug_info: Debug info option. Known values are: "None", "Always", "Failure". + :vartype arguments: list[JSON] + :ivar get_debug_info: Debug info option. Known values are: "None", "Always", and "Failure". :vartype get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption :ivar script_path: Script path. Type: string (or Expression with resultType string). - :vartype script_path: any + :vartype script_path: JSON :ivar script_linked_service: Script linked service reference. 
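Likewise, a sketch of HdfsSource restricted to the fields shown above (the retry values are illustrative; distcp_settings is left out so the example stays within what this hunk documents):

    from azure.mgmt.datafactory.models import HdfsSource

    hdfs_source = HdfsSource(
        recursive=True,
        source_retry_count=2,
        source_retry_wait="00:00:30",   # matches the timespan pattern documented above
        disable_metrics_collection=False,
    )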
:vartype script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar defines: Allows user to specify defines for Hive job request. - :vartype defines: dict[str, any] + :vartype defines: dict[str, JSON] :ivar variables: User specified arguments under hivevar namespace. - :vartype variables: list[any] + :vartype variables: list[JSON] :ivar query_timeout: Query timeout value (in minutes). Effective when the HDInsight cluster is with ESP (Enterprise Security Package). :vartype query_timeout: int """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - 'variables': {'key': 'typeProperties.variables', 'type': '[object]'}, - 'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "storage_linked_services": {"key": "typeProperties.storageLinkedServices", "type": "[LinkedServiceReference]"}, + "arguments": {"key": "typeProperties.arguments", "type": "[object]"}, + "get_debug_info": {"key": "typeProperties.getDebugInfo", "type": "str"}, + "script_path": {"key": "typeProperties.scriptPath", "type": "object"}, + "script_linked_service": {"key": "typeProperties.scriptLinkedService", "type": "LinkedServiceReference"}, + "defines": {"key": "typeProperties.defines", "type": "{object}"}, + "variables": {"key": "typeProperties.variables", "type": "[object]"}, + "query_timeout": {"key": "typeProperties.queryTimeout", "type": "int"}, } def __init__( self, *, name: str, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, storage_linked_services: Optional[List["_models.LinkedServiceReference"]] = None, - arguments: Optional[List[Any]] = None, + arguments: 
Optional[List[JSON]] = None, get_debug_info: Optional[Union[str, "_models.HDInsightActivityDebugInfoOption"]] = None, - script_path: Optional[Any] = None, + script_path: Optional[JSON] = None, script_linked_service: Optional["_models.LinkedServiceReference"] = None, - defines: Optional[Dict[str, Any]] = None, - variables: Optional[List[Any]] = None, + defines: Optional[Dict[str, JSON]] = None, + variables: Optional[List[JSON]] = None, query_timeout: Optional[int] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -27885,24 +30025,33 @@ def __init__( :keyword storage_linked_services: Storage linked service references. :paramtype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :keyword arguments: User specified arguments to HDInsightActivity. - :paramtype arguments: list[any] - :keyword get_debug_info: Debug info option. Known values are: "None", "Always", "Failure". + :paramtype arguments: list[JSON] + :keyword get_debug_info: Debug info option. Known values are: "None", "Always", and "Failure". :paramtype get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption :keyword script_path: Script path. Type: string (or Expression with resultType string). - :paramtype script_path: any + :paramtype script_path: JSON :keyword script_linked_service: Script linked service reference. :paramtype script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword defines: Allows user to specify defines for Hive job request. - :paramtype defines: dict[str, any] + :paramtype defines: dict[str, JSON] :keyword variables: User specified arguments under hivevar namespace. - :paramtype variables: list[any] + :paramtype variables: list[JSON] :keyword query_timeout: Query timeout value (in minutes). Effective when the HDInsight cluster is with ESP (Enterprise Security Package). :paramtype query_timeout: int """ - super(HDInsightHiveActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'HDInsightHive' # type: str + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "HDInsightHive" # type: str self.storage_linked_services = storage_linked_services self.arguments = arguments self.get_debug_info = get_debug_info @@ -27913,15 +30062,15 @@ def __init__( self.query_timeout = query_timeout -class HDInsightLinkedService(LinkedService): +class HDInsightLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """HDInsight linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. 
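For the HDInsightHiveActivity above, a short sketch using only keywords shown in this hunk (the script path and defines are illustrative; linked-service references are omitted to keep the example self-contained):

    from azure.mgmt.datafactory.models import HDInsightHiveActivity

    hive_activity = HDInsightHiveActivity(
        name="RunHiveScript",
        description="Runs a parameterized Hive script on the attached HDInsight cluster",
        script_path="scripts/daily_rollup.hql",   # illustrative path
        defines={"run_date": "2022-10-18"},       # illustrative Hive defines
        get_debug_info="Failure",                 # one of the known values listed above
        query_timeout=60,                         # minutes; only effective on ESP clusters
    )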
+ :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -27930,13 +30079,13 @@ class HDInsightLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar cluster_uri: Required. HDInsight cluster URI. Type: string (or Expression with resultType - string). - :vartype cluster_uri: any + :vartype annotations: list[JSON] + :ivar cluster_uri: HDInsight cluster URI. Type: string (or Expression with resultType string). + Required. + :vartype cluster_uri: JSON :ivar user_name: HDInsight cluster user name. Type: string (or Expression with resultType string). - :vartype user_name: any + :vartype user_name: JSON :ivar password: HDInsight cluster password. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar linked_service_name: The Azure Storage linked service reference. @@ -27947,59 +30096,62 @@ class HDInsightLinkedService(LinkedService): :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON :ivar is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security Package). Type: Boolean. - :vartype is_esp_enabled: any + :vartype is_esp_enabled: JSON :ivar file_system: Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. Type: string (or Expression with resultType string). 
- :vartype file_system: any + :vartype file_system: JSON """ _validation = { - 'type': {'required': True}, - 'cluster_uri': {'required': True}, + "type": {"required": True}, + "cluster_uri": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, - 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "cluster_uri": {"key": "typeProperties.clusterUri", "type": "object"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "linked_service_name": {"key": "typeProperties.linkedServiceName", "type": "LinkedServiceReference"}, + "hcatalog_linked_service_name": { + "key": "typeProperties.hcatalogLinkedServiceName", + "type": "LinkedServiceReference", + }, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "is_esp_enabled": {"key": "typeProperties.isEspEnabled", "type": "object"}, + "file_system": {"key": "typeProperties.fileSystem", "type": "object"}, } def __init__( self, *, - cluster_uri: Any, - additional_properties: Optional[Dict[str, Any]] = None, + cluster_uri: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - user_name: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, hcatalog_linked_service_name: Optional["_models.LinkedServiceReference"] = None, - encrypted_credential: Optional[Any] = None, - is_esp_enabled: Optional[Any] = None, - file_system: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, + is_esp_enabled: Optional[JSON] = None, + file_system: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -28007,13 +30159,13 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword cluster_uri: Required. HDInsight cluster URI. Type: string (or Expression with - resultType string). - :paramtype cluster_uri: any + :paramtype annotations: list[JSON] + :keyword cluster_uri: HDInsight cluster URI. Type: string (or Expression with resultType + string). Required. + :paramtype cluster_uri: JSON :keyword user_name: HDInsight cluster user name. Type: string (or Expression with resultType string). - :paramtype user_name: any + :paramtype user_name: JSON :keyword password: HDInsight cluster password. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword linked_service_name: The Azure Storage linked service reference. @@ -28024,16 +30176,23 @@ def __init__( :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any + :paramtype encrypted_credential: JSON :keyword is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security Package). Type: Boolean. - :paramtype is_esp_enabled: any + :paramtype is_esp_enabled: JSON :keyword file_system: Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. Type: string (or Expression with resultType string). - :paramtype file_system: any - """ - super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'HDInsight' # type: str + :paramtype file_system: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "HDInsight" # type: str self.cluster_uri = cluster_uri self.user_name = user_name self.password = password @@ -28044,17 +30203,17 @@ def __init__( self.file_system = file_system -class HDInsightMapReduceActivity(ExecutionActivity): +class HDInsightMapReduceActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """HDInsight MapReduce activity type. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -28069,72 +30228,72 @@ class HDInsightMapReduceActivity(ExecutionActivity): :ivar storage_linked_services: Storage linked service references. 
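A comparable sketch for HDInsightLinkedService (the cluster URI and credentials are illustrative; as above, SecureString is assumed to stand in for the SecretBase password):

    from azure.mgmt.datafactory.models import HDInsightLinkedService, SecureString

    hdinsight_ls = HDInsightLinkedService(
        cluster_uri="https://examplecluster.azurehdinsight.net",  # illustrative cluster
        user_name="admin",
        password=SecureString(value="example-secret"),
        is_esp_enabled=False,
    )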
:vartype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :ivar arguments: User specified arguments to HDInsightActivity. - :vartype arguments: list[any] - :ivar get_debug_info: Debug info option. Known values are: "None", "Always", "Failure". + :vartype arguments: list[JSON] + :ivar get_debug_info: Debug info option. Known values are: "None", "Always", and "Failure". :vartype get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :ivar class_name: Required. Class name. Type: string (or Expression with resultType string). - :vartype class_name: any - :ivar jar_file_path: Required. Jar path. Type: string (or Expression with resultType string). - :vartype jar_file_path: any + :ivar class_name: Class name. Type: string (or Expression with resultType string). Required. + :vartype class_name: JSON + :ivar jar_file_path: Jar path. Type: string (or Expression with resultType string). Required. + :vartype jar_file_path: JSON :ivar jar_linked_service: Jar linked service reference. :vartype jar_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar jar_libs: Jar libs. - :vartype jar_libs: list[any] + :vartype jar_libs: list[JSON] :ivar defines: Allows user to specify defines for the MapReduce job request. - :vartype defines: dict[str, any] + :vartype defines: dict[str, JSON] """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'class_name': {'required': True}, - 'jar_file_path': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "class_name": {"required": True}, + "jar_file_path": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'class_name': {'key': 'typeProperties.className', 'type': 'object'}, - 'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'}, - 'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'}, - 'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "storage_linked_services": {"key": "typeProperties.storageLinkedServices", "type": "[LinkedServiceReference]"}, + "arguments": {"key": "typeProperties.arguments", "type": "[object]"}, + "get_debug_info": {"key": 
"typeProperties.getDebugInfo", "type": "str"}, + "class_name": {"key": "typeProperties.className", "type": "object"}, + "jar_file_path": {"key": "typeProperties.jarFilePath", "type": "object"}, + "jar_linked_service": {"key": "typeProperties.jarLinkedService", "type": "LinkedServiceReference"}, + "jar_libs": {"key": "typeProperties.jarLibs", "type": "[object]"}, + "defines": {"key": "typeProperties.defines", "type": "{object}"}, } def __init__( self, *, name: str, - class_name: Any, - jar_file_path: Any, - additional_properties: Optional[Dict[str, Any]] = None, + class_name: JSON, + jar_file_path: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, storage_linked_services: Optional[List["_models.LinkedServiceReference"]] = None, - arguments: Optional[List[Any]] = None, + arguments: Optional[List[JSON]] = None, get_debug_info: Optional[Union[str, "_models.HDInsightActivityDebugInfoOption"]] = None, jar_linked_service: Optional["_models.LinkedServiceReference"] = None, - jar_libs: Optional[List[Any]] = None, - defines: Optional[Dict[str, Any]] = None, + jar_libs: Optional[List[JSON]] = None, + defines: Optional[Dict[str, JSON]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -28149,24 +30308,33 @@ def __init__( :keyword storage_linked_services: Storage linked service references. :paramtype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :keyword arguments: User specified arguments to HDInsightActivity. - :paramtype arguments: list[any] - :keyword get_debug_info: Debug info option. Known values are: "None", "Always", "Failure". + :paramtype arguments: list[JSON] + :keyword get_debug_info: Debug info option. Known values are: "None", "Always", and "Failure". :paramtype get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :keyword class_name: Required. Class name. Type: string (or Expression with resultType string). - :paramtype class_name: any - :keyword jar_file_path: Required. Jar path. Type: string (or Expression with resultType - string). - :paramtype jar_file_path: any + :keyword class_name: Class name. Type: string (or Expression with resultType string). Required. + :paramtype class_name: JSON + :keyword jar_file_path: Jar path. Type: string (or Expression with resultType string). + Required. + :paramtype jar_file_path: JSON :keyword jar_linked_service: Jar linked service reference. :paramtype jar_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword jar_libs: Jar libs. - :paramtype jar_libs: list[any] + :paramtype jar_libs: list[JSON] :keyword defines: Allows user to specify defines for the MapReduce job request. 
- :paramtype defines: dict[str, any] - """ - super(HDInsightMapReduceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'HDInsightMapReduce' # type: str + :paramtype defines: dict[str, JSON] + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "HDInsightMapReduce" # type: str self.storage_linked_services = storage_linked_services self.arguments = arguments self.get_debug_info = get_debug_info @@ -28177,15 +30345,15 @@ def __init__( self.defines = defines -class HDInsightOnDemandLinkedService(LinkedService): +class HDInsightOnDemandLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """HDInsight ondemand linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -28194,46 +30362,46 @@ class HDInsightOnDemandLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: 4. - Type: string (or Expression with resultType string). - :vartype cluster_size: any - :ivar time_to_live: Required. The allowed idle time for the on-demand HDInsight cluster. - Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity - run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string - (or Expression with resultType string). - :vartype time_to_live: any - :ivar version: Required. Version of the HDInsight cluster.  Type: string (or Expression with - resultType string). - :vartype version: any - :ivar linked_service_name: Required. Azure Storage linked service to be used by the on-demand - cluster for storing and processing data. + :vartype annotations: list[JSON] + :ivar cluster_size: Number of worker/data nodes in the cluster. Suggestion value: 4. Type: + string (or Expression with resultType string). Required. + :vartype cluster_size: JSON + :ivar time_to_live: The allowed idle time for the on-demand HDInsight cluster. Specifies how + long the on-demand HDInsight cluster stays alive after completion of an activity run if there + are no other active jobs in the cluster. The minimum value is 5 mins. Type: string (or + Expression with resultType string). Required. + :vartype time_to_live: JSON + :ivar version: Version of the HDInsight cluster.  Type: string (or Expression with resultType + string). Required. 
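And a sketch of HDInsightMapReduceActivity, again limited to keywords documented in this hunk (the class name, jar path, and arguments are illustrative):

    from azure.mgmt.datafactory.models import HDInsightMapReduceActivity

    mapreduce_activity = HDInsightMapReduceActivity(
        name="RunWordCount",
        class_name="org.example.WordCount",              # illustrative class
        jar_file_path="example-container/wordcount.jar",  # illustrative jar path
        arguments=["wasb:///input/", "wasb:///output/"],
        get_debug_info="Always",
    )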
+ :vartype version: JSON + :ivar linked_service_name: Azure Storage linked service to be used by the on-demand cluster for + storing and processing data. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :ivar host_subscription_id: Required. The customer’s subscription to host the cluster. Type: - string (or Expression with resultType string). - :vartype host_subscription_id: any + :ivar host_subscription_id: The customer’s subscription to host the cluster. Type: string (or + Expression with resultType string). Required. + :vartype host_subscription_id: JSON :ivar service_principal_id: The service principal id for the hostSubscriptionId. Type: string (or Expression with resultType string). - :vartype service_principal_id: any + :vartype service_principal_id: JSON :ivar service_principal_key: The key for the service principal id. :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :ivar tenant: Required. The Tenant id/name to which the service principal belongs. Type: string - (or Expression with resultType string). - :vartype tenant: any - :ivar cluster_resource_group: Required. The resource group where the cluster belongs. Type: - string (or Expression with resultType string). - :vartype cluster_resource_group: any + :ivar tenant: The Tenant id/name to which the service principal belongs. Type: string (or + Expression with resultType string). Required. + :vartype tenant: JSON + :ivar cluster_resource_group: The resource group where the cluster belongs. Type: string (or + Expression with resultType string). Required. + :vartype cluster_resource_group: JSON :ivar cluster_name_prefix: The prefix of cluster name, postfix will be distinct with timestamp. Type: string (or Expression with resultType string). - :vartype cluster_name_prefix: any + :vartype cluster_name_prefix: JSON :ivar cluster_user_name: The username to access the cluster. Type: string (or Expression with resultType string). - :vartype cluster_user_name: any + :vartype cluster_user_name: JSON :ivar cluster_password: The password to access the cluster. :vartype cluster_password: ~azure.mgmt.datafactory.models.SecretBase :ivar cluster_ssh_user_name: The username to SSH remotely connect to cluster’s node (for Linux). Type: string (or Expression with resultType string). - :vartype cluster_ssh_user_name: any + :vartype cluster_ssh_user_name: JSON :ivar cluster_ssh_password: The password to SSH remotely connect cluster’s node (for Linux). :vartype cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase :ivar additional_linked_service_names: Specifies additional storage accounts for the HDInsight @@ -28245,160 +30413,166 @@ class HDInsightOnDemandLinkedService(LinkedService): as the metastore. :vartype hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar cluster_type: The cluster type. Type: string (or Expression with resultType string). - :vartype cluster_type: any + :vartype cluster_type: JSON :ivar spark_version: The version of spark if the cluster type is 'spark'. Type: string (or Expression with resultType string). - :vartype spark_version: any + :vartype spark_version: JSON :ivar core_configuration: Specifies the core configuration parameters (as in core-site.xml) for the HDInsight cluster to be created. - :vartype core_configuration: any + :vartype core_configuration: JSON :ivar h_base_configuration: Specifies the HBase configuration parameters (hbase-site.xml) for the HDInsight cluster. 
- :vartype h_base_configuration: any + :vartype h_base_configuration: JSON :ivar hdfs_configuration: Specifies the HDFS configuration parameters (hdfs-site.xml) for the HDInsight cluster. - :vartype hdfs_configuration: any + :vartype hdfs_configuration: JSON :ivar hive_configuration: Specifies the hive configuration parameters (hive-site.xml) for the HDInsight cluster. - :vartype hive_configuration: any + :vartype hive_configuration: JSON :ivar map_reduce_configuration: Specifies the MapReduce configuration parameters (mapred-site.xml) for the HDInsight cluster. - :vartype map_reduce_configuration: any + :vartype map_reduce_configuration: JSON :ivar oozie_configuration: Specifies the Oozie configuration parameters (oozie-site.xml) for the HDInsight cluster. - :vartype oozie_configuration: any + :vartype oozie_configuration: JSON :ivar storm_configuration: Specifies the Storm configuration parameters (storm-site.xml) for the HDInsight cluster. - :vartype storm_configuration: any + :vartype storm_configuration: JSON :ivar yarn_configuration: Specifies the Yarn configuration parameters (yarn-site.xml) for the HDInsight cluster. - :vartype yarn_configuration: any + :vartype yarn_configuration: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON :ivar head_node_size: Specifies the size of the head node for the HDInsight cluster. - :vartype head_node_size: any + :vartype head_node_size: JSON :ivar data_node_size: Specifies the size of the data node for the HDInsight cluster. - :vartype data_node_size: any + :vartype data_node_size: JSON :ivar zookeeper_node_size: Specifies the size of the Zoo Keeper node for the HDInsight cluster. - :vartype zookeeper_node_size: any + :vartype zookeeper_node_size: JSON :ivar script_actions: Custom script actions to run on HDI ondemand cluster once it's up. Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. :vartype script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] :ivar virtual_network_id: The ARM resource ID for the vNet to which the cluster should be joined after creation. Type: string (or Expression with resultType string). - :vartype virtual_network_id: any + :vartype virtual_network_id: JSON :ivar subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was specified, then this property is required. Type: string (or Expression with resultType string). - :vartype subnet_name: any + :vartype subnet_name: JSON :ivar credential: The credential reference containing authentication information. 
:vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { - 'type': {'required': True}, - 'cluster_size': {'required': True}, - 'time_to_live': {'required': True}, - 'version': {'required': True}, - 'linked_service_name': {'required': True}, - 'host_subscription_id': {'required': True}, - 'tenant': {'required': True}, - 'cluster_resource_group': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'}, - 'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'}, - 'version': {'key': 'typeProperties.version', 'type': 'object'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'}, - 'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'}, - 'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'}, - 'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'}, - 'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'}, - 'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'}, - 'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'}, - 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'}, - 'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'}, - 'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'}, - 'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'}, - 'hdfs_configuration': {'key': 'typeProperties.hdfsConfiguration', 'type': 'object'}, - 'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'}, - 'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'}, - 'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'}, - 'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'}, - 'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'}, - 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, - 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, - 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, - 
'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, - 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, - } - - def __init__( - self, - *, - cluster_size: Any, - time_to_live: Any, - version: Any, + "type": {"required": True}, + "cluster_size": {"required": True}, + "time_to_live": {"required": True}, + "version": {"required": True}, + "linked_service_name": {"required": True}, + "host_subscription_id": {"required": True}, + "tenant": {"required": True}, + "cluster_resource_group": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "cluster_size": {"key": "typeProperties.clusterSize", "type": "object"}, + "time_to_live": {"key": "typeProperties.timeToLive", "type": "object"}, + "version": {"key": "typeProperties.version", "type": "object"}, + "linked_service_name": {"key": "typeProperties.linkedServiceName", "type": "LinkedServiceReference"}, + "host_subscription_id": {"key": "typeProperties.hostSubscriptionId", "type": "object"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, + "tenant": {"key": "typeProperties.tenant", "type": "object"}, + "cluster_resource_group": {"key": "typeProperties.clusterResourceGroup", "type": "object"}, + "cluster_name_prefix": {"key": "typeProperties.clusterNamePrefix", "type": "object"}, + "cluster_user_name": {"key": "typeProperties.clusterUserName", "type": "object"}, + "cluster_password": {"key": "typeProperties.clusterPassword", "type": "SecretBase"}, + "cluster_ssh_user_name": {"key": "typeProperties.clusterSshUserName", "type": "object"}, + "cluster_ssh_password": {"key": "typeProperties.clusterSshPassword", "type": "SecretBase"}, + "additional_linked_service_names": { + "key": "typeProperties.additionalLinkedServiceNames", + "type": "[LinkedServiceReference]", + }, + "hcatalog_linked_service_name": { + "key": "typeProperties.hcatalogLinkedServiceName", + "type": "LinkedServiceReference", + }, + "cluster_type": {"key": "typeProperties.clusterType", "type": "object"}, + "spark_version": {"key": "typeProperties.sparkVersion", "type": "object"}, + "core_configuration": {"key": "typeProperties.coreConfiguration", "type": "object"}, + "h_base_configuration": {"key": "typeProperties.hBaseConfiguration", "type": "object"}, + "hdfs_configuration": {"key": "typeProperties.hdfsConfiguration", "type": "object"}, + "hive_configuration": {"key": "typeProperties.hiveConfiguration", "type": "object"}, + "map_reduce_configuration": {"key": "typeProperties.mapReduceConfiguration", "type": "object"}, + "oozie_configuration": {"key": "typeProperties.oozieConfiguration", "type": "object"}, + "storm_configuration": {"key": "typeProperties.stormConfiguration", "type": "object"}, + "yarn_configuration": {"key": "typeProperties.yarnConfiguration", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "head_node_size": {"key": "typeProperties.headNodeSize", "type": "object"}, + 
"data_node_size": {"key": "typeProperties.dataNodeSize", "type": "object"}, + "zookeeper_node_size": {"key": "typeProperties.zookeeperNodeSize", "type": "object"}, + "script_actions": {"key": "typeProperties.scriptActions", "type": "[ScriptAction]"}, + "virtual_network_id": {"key": "typeProperties.virtualNetworkId", "type": "object"}, + "subnet_name": {"key": "typeProperties.subnetName", "type": "object"}, + "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, + } + + def __init__( # pylint: disable=too-many-locals + self, + *, + cluster_size: JSON, + time_to_live: JSON, + version: JSON, linked_service_name: "_models.LinkedServiceReference", - host_subscription_id: Any, - tenant: Any, - cluster_resource_group: Any, - additional_properties: Optional[Dict[str, Any]] = None, + host_subscription_id: JSON, + tenant: JSON, + cluster_resource_group: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - service_principal_id: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + service_principal_id: Optional[JSON] = None, service_principal_key: Optional["_models.SecretBase"] = None, - cluster_name_prefix: Optional[Any] = None, - cluster_user_name: Optional[Any] = None, + cluster_name_prefix: Optional[JSON] = None, + cluster_user_name: Optional[JSON] = None, cluster_password: Optional["_models.SecretBase"] = None, - cluster_ssh_user_name: Optional[Any] = None, + cluster_ssh_user_name: Optional[JSON] = None, cluster_ssh_password: Optional["_models.SecretBase"] = None, additional_linked_service_names: Optional[List["_models.LinkedServiceReference"]] = None, hcatalog_linked_service_name: Optional["_models.LinkedServiceReference"] = None, - cluster_type: Optional[Any] = None, - spark_version: Optional[Any] = None, - core_configuration: Optional[Any] = None, - h_base_configuration: Optional[Any] = None, - hdfs_configuration: Optional[Any] = None, - hive_configuration: Optional[Any] = None, - map_reduce_configuration: Optional[Any] = None, - oozie_configuration: Optional[Any] = None, - storm_configuration: Optional[Any] = None, - yarn_configuration: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, - head_node_size: Optional[Any] = None, - data_node_size: Optional[Any] = None, - zookeeper_node_size: Optional[Any] = None, + cluster_type: Optional[JSON] = None, + spark_version: Optional[JSON] = None, + core_configuration: Optional[JSON] = None, + h_base_configuration: Optional[JSON] = None, + hdfs_configuration: Optional[JSON] = None, + hive_configuration: Optional[JSON] = None, + map_reduce_configuration: Optional[JSON] = None, + oozie_configuration: Optional[JSON] = None, + storm_configuration: Optional[JSON] = None, + yarn_configuration: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, + head_node_size: Optional[JSON] = None, + data_node_size: Optional[JSON] = None, + zookeeper_node_size: Optional[JSON] = None, script_actions: Optional[List["_models.ScriptAction"]] = None, - virtual_network_id: Optional[Any] = None, - subnet_name: Optional[Any] = None, + virtual_network_id: Optional[JSON] = None, + subnet_name: Optional[JSON] = None, credential: Optional["_models.CredentialReference"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are 
deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -28406,46 +30580,46 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: - 4. Type: string (or Expression with resultType string). - :paramtype cluster_size: any - :keyword time_to_live: Required. The allowed idle time for the on-demand HDInsight cluster. - Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity - run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string - (or Expression with resultType string). - :paramtype time_to_live: any - :keyword version: Required. Version of the HDInsight cluster.  Type: string (or Expression with - resultType string). - :paramtype version: any - :keyword linked_service_name: Required. Azure Storage linked service to be used by the - on-demand cluster for storing and processing data. + :paramtype annotations: list[JSON] + :keyword cluster_size: Number of worker/data nodes in the cluster. Suggestion value: 4. Type: + string (or Expression with resultType string). Required. + :paramtype cluster_size: JSON + :keyword time_to_live: The allowed idle time for the on-demand HDInsight cluster. Specifies how + long the on-demand HDInsight cluster stays alive after completion of an activity run if there + are no other active jobs in the cluster. The minimum value is 5 mins. Type: string (or + Expression with resultType string). Required. + :paramtype time_to_live: JSON + :keyword version: Version of the HDInsight cluster.  Type: string (or Expression with + resultType string). Required. + :paramtype version: JSON + :keyword linked_service_name: Azure Storage linked service to be used by the on-demand cluster + for storing and processing data. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :keyword host_subscription_id: Required. The customer’s subscription to host the cluster. Type: - string (or Expression with resultType string). - :paramtype host_subscription_id: any + :keyword host_subscription_id: The customer’s subscription to host the cluster. Type: string + (or Expression with resultType string). Required. + :paramtype host_subscription_id: JSON :keyword service_principal_id: The service principal id for the hostSubscriptionId. Type: string (or Expression with resultType string). - :paramtype service_principal_id: any + :paramtype service_principal_id: JSON :keyword service_principal_key: The key for the service principal id. :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :keyword tenant: Required. The Tenant id/name to which the service principal belongs. Type: - string (or Expression with resultType string). - :paramtype tenant: any - :keyword cluster_resource_group: Required. The resource group where the cluster belongs. Type: - string (or Expression with resultType string). 
- :paramtype cluster_resource_group: any + :keyword tenant: The Tenant id/name to which the service principal belongs. Type: string (or + Expression with resultType string). Required. + :paramtype tenant: JSON + :keyword cluster_resource_group: The resource group where the cluster belongs. Type: string (or + Expression with resultType string). Required. + :paramtype cluster_resource_group: JSON :keyword cluster_name_prefix: The prefix of cluster name, postfix will be distinct with timestamp. Type: string (or Expression with resultType string). - :paramtype cluster_name_prefix: any + :paramtype cluster_name_prefix: JSON :keyword cluster_user_name: The username to access the cluster. Type: string (or Expression with resultType string). - :paramtype cluster_user_name: any + :paramtype cluster_user_name: JSON :keyword cluster_password: The password to access the cluster. :paramtype cluster_password: ~azure.mgmt.datafactory.models.SecretBase :keyword cluster_ssh_user_name: The username to SSH remotely connect to cluster’s node (for Linux). Type: string (or Expression with resultType string). - :paramtype cluster_ssh_user_name: any + :paramtype cluster_ssh_user_name: JSON :keyword cluster_ssh_password: The password to SSH remotely connect cluster’s node (for Linux). :paramtype cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase :keyword additional_linked_service_names: Specifies additional storage accounts for the @@ -28457,60 +30631,67 @@ def __init__( as the metastore. :paramtype hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword cluster_type: The cluster type. Type: string (or Expression with resultType string). - :paramtype cluster_type: any + :paramtype cluster_type: JSON :keyword spark_version: The version of spark if the cluster type is 'spark'. Type: string (or Expression with resultType string). - :paramtype spark_version: any + :paramtype spark_version: JSON :keyword core_configuration: Specifies the core configuration parameters (as in core-site.xml) for the HDInsight cluster to be created. - :paramtype core_configuration: any + :paramtype core_configuration: JSON :keyword h_base_configuration: Specifies the HBase configuration parameters (hbase-site.xml) for the HDInsight cluster. - :paramtype h_base_configuration: any + :paramtype h_base_configuration: JSON :keyword hdfs_configuration: Specifies the HDFS configuration parameters (hdfs-site.xml) for the HDInsight cluster. - :paramtype hdfs_configuration: any + :paramtype hdfs_configuration: JSON :keyword hive_configuration: Specifies the hive configuration parameters (hive-site.xml) for the HDInsight cluster. - :paramtype hive_configuration: any + :paramtype hive_configuration: JSON :keyword map_reduce_configuration: Specifies the MapReduce configuration parameters (mapred-site.xml) for the HDInsight cluster. - :paramtype map_reduce_configuration: any + :paramtype map_reduce_configuration: JSON :keyword oozie_configuration: Specifies the Oozie configuration parameters (oozie-site.xml) for the HDInsight cluster. - :paramtype oozie_configuration: any + :paramtype oozie_configuration: JSON :keyword storm_configuration: Specifies the Storm configuration parameters (storm-site.xml) for the HDInsight cluster. - :paramtype storm_configuration: any + :paramtype storm_configuration: JSON :keyword yarn_configuration: Specifies the Yarn configuration parameters (yarn-site.xml) for the HDInsight cluster. 
- :paramtype yarn_configuration: any + :paramtype yarn_configuration: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any + :paramtype encrypted_credential: JSON :keyword head_node_size: Specifies the size of the head node for the HDInsight cluster. - :paramtype head_node_size: any + :paramtype head_node_size: JSON :keyword data_node_size: Specifies the size of the data node for the HDInsight cluster. - :paramtype data_node_size: any + :paramtype data_node_size: JSON :keyword zookeeper_node_size: Specifies the size of the Zoo Keeper node for the HDInsight cluster. - :paramtype zookeeper_node_size: any + :paramtype zookeeper_node_size: JSON :keyword script_actions: Custom script actions to run on HDI ondemand cluster once it's up. Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. :paramtype script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] :keyword virtual_network_id: The ARM resource ID for the vNet to which the cluster should be joined after creation. Type: string (or Expression with resultType string). - :paramtype virtual_network_id: any + :paramtype virtual_network_id: JSON :keyword subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was specified, then this property is required. Type: string (or Expression with resultType string). - :paramtype subnet_name: any + :paramtype subnet_name: JSON :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ - super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'HDInsightOnDemand' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "HDInsightOnDemand" # type: str self.cluster_size = cluster_size self.time_to_live = time_to_live self.version = version @@ -28547,17 +30728,17 @@ def __init__( self.credential = credential -class HDInsightPigActivity(ExecutionActivity): +class HDInsightPigActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """HDInsight Pig activity type. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -28573,62 +30754,62 @@ class HDInsightPigActivity(ExecutionActivity): :vartype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :ivar arguments: User specified arguments to HDInsightActivity. 
Type: array (or Expression with resultType array). - :vartype arguments: any - :ivar get_debug_info: Debug info option. Known values are: "None", "Always", "Failure". + :vartype arguments: JSON + :ivar get_debug_info: Debug info option. Known values are: "None", "Always", and "Failure". :vartype get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption :ivar script_path: Script path. Type: string (or Expression with resultType string). - :vartype script_path: any + :vartype script_path: JSON :ivar script_linked_service: Script linked service reference. :vartype script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar defines: Allows user to specify defines for Pig job request. - :vartype defines: dict[str, any] + :vartype defines: dict[str, JSON] """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': 'object'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "storage_linked_services": {"key": "typeProperties.storageLinkedServices", "type": "[LinkedServiceReference]"}, + "arguments": {"key": "typeProperties.arguments", "type": "object"}, + "get_debug_info": {"key": "typeProperties.getDebugInfo", "type": "str"}, + "script_path": {"key": "typeProperties.scriptPath", "type": "object"}, + "script_linked_service": {"key": "typeProperties.scriptLinkedService", "type": "LinkedServiceReference"}, + "defines": {"key": "typeProperties.defines", "type": "{object}"}, } def __init__( self, *, name: str, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, storage_linked_services: Optional[List["_models.LinkedServiceReference"]] = None, - arguments: Optional[Any] = None, + arguments: Optional[JSON] 
= None, get_debug_info: Optional[Union[str, "_models.HDInsightActivityDebugInfoOption"]] = None, - script_path: Optional[Any] = None, + script_path: Optional[JSON] = None, script_linked_service: Optional["_models.LinkedServiceReference"] = None, - defines: Optional[Dict[str, Any]] = None, + defines: Optional[Dict[str, JSON]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -28644,19 +30825,28 @@ def __init__( :paramtype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :keyword arguments: User specified arguments to HDInsightActivity. Type: array (or Expression with resultType array). - :paramtype arguments: any - :keyword get_debug_info: Debug info option. Known values are: "None", "Always", "Failure". + :paramtype arguments: JSON + :keyword get_debug_info: Debug info option. Known values are: "None", "Always", and "Failure". :paramtype get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption :keyword script_path: Script path. Type: string (or Expression with resultType string). - :paramtype script_path: any + :paramtype script_path: JSON :keyword script_linked_service: Script linked service reference. :paramtype script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword defines: Allows user to specify defines for Pig job request. - :paramtype defines: dict[str, any] - """ - super(HDInsightPigActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'HDInsightPig' # type: str + :paramtype defines: dict[str, JSON] + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "HDInsightPig" # type: str self.storage_linked_services = storage_linked_services self.arguments = arguments self.get_debug_info = get_debug_info @@ -28665,17 +30855,17 @@ def __init__( self.defines = defines -class HDInsightSparkActivity(ExecutionActivity): +class HDInsightSparkActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """HDInsight Spark activity. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -28687,15 +30877,15 @@ class HDInsightSparkActivity(ExecutionActivity): :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar policy: Activity policy. 
:vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :ivar root_path: Required. The root path in 'sparkJobLinkedService' for all the job’s files. - Type: string (or Expression with resultType string). - :vartype root_path: any - :ivar entry_file_path: Required. The relative path to the root folder of the code/package to be - executed. Type: string (or Expression with resultType string). - :vartype entry_file_path: any + :ivar root_path: The root path in 'sparkJobLinkedService' for all the job’s files. Type: string + (or Expression with resultType string). Required. + :vartype root_path: JSON + :ivar entry_file_path: The relative path to the root folder of the code/package to be executed. + Type: string (or Expression with resultType string). Required. + :vartype entry_file_path: JSON :ivar arguments: The user-specified arguments to HDInsightSparkActivity. - :vartype arguments: list[any] - :ivar get_debug_info: Debug info option. Known values are: "None", "Always", "Failure". + :vartype arguments: list[JSON] + :ivar get_debug_info: Debug info option. Known values are: "None", "Always", and "Failure". :vartype get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption :ivar spark_job_linked_service: The storage linked service for uploading the entry file and dependencies, and for receiving logs. @@ -28704,62 +30894,62 @@ class HDInsightSparkActivity(ExecutionActivity): :vartype class_name: str :ivar proxy_user: The user to impersonate that will execute the job. Type: string (or Expression with resultType string). - :vartype proxy_user: any + :vartype proxy_user: JSON :ivar spark_config: Spark configuration property. - :vartype spark_config: dict[str, any] + :vartype spark_config: dict[str, JSON] """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'root_path': {'required': True}, - 'entry_file_path': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "root_path": {"required": True}, + "entry_file_path": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'}, - 'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'}, - 'class_name': {'key': 'typeProperties.className', 'type': 'str'}, - 'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'}, - 'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + 
"linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "root_path": {"key": "typeProperties.rootPath", "type": "object"}, + "entry_file_path": {"key": "typeProperties.entryFilePath", "type": "object"}, + "arguments": {"key": "typeProperties.arguments", "type": "[object]"}, + "get_debug_info": {"key": "typeProperties.getDebugInfo", "type": "str"}, + "spark_job_linked_service": {"key": "typeProperties.sparkJobLinkedService", "type": "LinkedServiceReference"}, + "class_name": {"key": "typeProperties.className", "type": "str"}, + "proxy_user": {"key": "typeProperties.proxyUser", "type": "object"}, + "spark_config": {"key": "typeProperties.sparkConfig", "type": "{object}"}, } def __init__( self, *, name: str, - root_path: Any, - entry_file_path: Any, - additional_properties: Optional[Dict[str, Any]] = None, + root_path: JSON, + entry_file_path: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, - arguments: Optional[List[Any]] = None, + arguments: Optional[List[JSON]] = None, get_debug_info: Optional[Union[str, "_models.HDInsightActivityDebugInfoOption"]] = None, spark_job_linked_service: Optional["_models.LinkedServiceReference"] = None, class_name: Optional[str] = None, - proxy_user: Optional[Any] = None, - spark_config: Optional[Dict[str, Any]] = None, + proxy_user: Optional[JSON] = None, + spark_config: Optional[Dict[str, JSON]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -28771,15 +30961,15 @@ def __init__( :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword policy: Activity policy. :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :keyword root_path: Required. The root path in 'sparkJobLinkedService' for all the job’s files. - Type: string (or Expression with resultType string). - :paramtype root_path: any - :keyword entry_file_path: Required. The relative path to the root folder of the code/package to - be executed. Type: string (or Expression with resultType string). - :paramtype entry_file_path: any + :keyword root_path: The root path in 'sparkJobLinkedService' for all the job’s files. Type: + string (or Expression with resultType string). Required. + :paramtype root_path: JSON + :keyword entry_file_path: The relative path to the root folder of the code/package to be + executed. Type: string (or Expression with resultType string). Required. + :paramtype entry_file_path: JSON :keyword arguments: The user-specified arguments to HDInsightSparkActivity. - :paramtype arguments: list[any] - :keyword get_debug_info: Debug info option. Known values are: "None", "Always", "Failure". + :paramtype arguments: list[JSON] + :keyword get_debug_info: Debug info option. Known values are: "None", "Always", and "Failure". 
:paramtype get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption :keyword spark_job_linked_service: The storage linked service for uploading the entry file and @@ -28789,12 +30979,21 @@ def __init__( :paramtype class_name: str :keyword proxy_user: The user to impersonate that will execute the job. Type: string (or Expression with resultType string). - :paramtype proxy_user: any + :paramtype proxy_user: JSON :keyword spark_config: Spark configuration property. - :paramtype spark_config: dict[str, any] - """ - super(HDInsightSparkActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'HDInsightSpark' # type: str + :paramtype spark_config: dict[str, JSON] + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "HDInsightSpark" # type: str self.root_path = root_path self.entry_file_path = entry_file_path self.arguments = arguments @@ -28805,17 +31004,17 @@ def __init__( self.spark_config = spark_config -class HDInsightStreamingActivity(ExecutionActivity): +class HDInsightStreamingActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """HDInsight streaming activity type. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -28830,93 +31029,93 @@ class HDInsightStreamingActivity(ExecutionActivity): :ivar storage_linked_services: Storage linked service references. :vartype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :ivar arguments: User specified arguments to HDInsightActivity. - :vartype arguments: list[any] - :ivar get_debug_info: Debug info option. Known values are: "None", "Always", "Failure". + :vartype arguments: list[JSON] + :ivar get_debug_info: Debug info option. Known values are: "None", "Always", and "Failure". :vartype get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :ivar mapper: Required. Mapper executable name. Type: string (or Expression with resultType - string). - :vartype mapper: any - :ivar reducer: Required. Reducer executable name. Type: string (or Expression with resultType - string). - :vartype reducer: any - :ivar input: Required. Input blob path. Type: string (or Expression with resultType string). - :vartype input: any - :ivar output: Required. Output blob path. Type: string (or Expression with resultType string). - :vartype output: any - :ivar file_paths: Required. Paths to streaming job files. Can be directories. - :vartype file_paths: list[any] + :ivar mapper: Mapper executable name. Type: string (or Expression with resultType string). + Required. + :vartype mapper: JSON + :ivar reducer: Reducer executable name. 
Type: string (or Expression with resultType string). + Required. + :vartype reducer: JSON + :ivar input: Input blob path. Type: string (or Expression with resultType string). Required. + :vartype input: JSON + :ivar output: Output blob path. Type: string (or Expression with resultType string). Required. + :vartype output: JSON + :ivar file_paths: Paths to streaming job files. Can be directories. Required. + :vartype file_paths: list[JSON] :ivar file_linked_service: Linked service reference where the files are located. :vartype file_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar combiner: Combiner executable name. Type: string (or Expression with resultType string). - :vartype combiner: any + :vartype combiner: JSON :ivar command_environment: Command line environment values. - :vartype command_environment: list[any] + :vartype command_environment: list[JSON] :ivar defines: Allows user to specify defines for streaming job request. - :vartype defines: dict[str, any] + :vartype defines: dict[str, JSON] """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'mapper': {'required': True}, - 'reducer': {'required': True}, - 'input': {'required': True}, - 'output': {'required': True}, - 'file_paths': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "mapper": {"required": True}, + "reducer": {"required": True}, + "input": {"required": True}, + "output": {"required": True}, + "file_paths": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, - 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, - 'input': {'key': 'typeProperties.input', 'type': 'object'}, - 'output': {'key': 'typeProperties.output', 'type': 'object'}, - 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, - 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, - 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, - 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "storage_linked_services": {"key": "typeProperties.storageLinkedServices", "type": "[LinkedServiceReference]"}, + 
"arguments": {"key": "typeProperties.arguments", "type": "[object]"}, + "get_debug_info": {"key": "typeProperties.getDebugInfo", "type": "str"}, + "mapper": {"key": "typeProperties.mapper", "type": "object"}, + "reducer": {"key": "typeProperties.reducer", "type": "object"}, + "input": {"key": "typeProperties.input", "type": "object"}, + "output": {"key": "typeProperties.output", "type": "object"}, + "file_paths": {"key": "typeProperties.filePaths", "type": "[object]"}, + "file_linked_service": {"key": "typeProperties.fileLinkedService", "type": "LinkedServiceReference"}, + "combiner": {"key": "typeProperties.combiner", "type": "object"}, + "command_environment": {"key": "typeProperties.commandEnvironment", "type": "[object]"}, + "defines": {"key": "typeProperties.defines", "type": "{object}"}, } def __init__( self, *, name: str, - mapper: Any, - reducer: Any, - input: Any, - output: Any, - file_paths: List[Any], - additional_properties: Optional[Dict[str, Any]] = None, + mapper: JSON, + reducer: JSON, + input: JSON, + output: JSON, + file_paths: List[JSON], + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, storage_linked_services: Optional[List["_models.LinkedServiceReference"]] = None, - arguments: Optional[List[Any]] = None, + arguments: Optional[List[JSON]] = None, get_debug_info: Optional[Union[str, "_models.HDInsightActivityDebugInfoOption"]] = None, file_linked_service: Optional["_models.LinkedServiceReference"] = None, - combiner: Optional[Any] = None, - command_environment: Optional[List[Any]] = None, - defines: Optional[Dict[str, Any]] = None, + combiner: Optional[JSON] = None, + command_environment: Optional[List[JSON]] = None, + defines: Optional[Dict[str, JSON]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -28931,35 +31130,44 @@ def __init__( :keyword storage_linked_services: Storage linked service references. :paramtype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :keyword arguments: User specified arguments to HDInsightActivity. - :paramtype arguments: list[any] - :keyword get_debug_info: Debug info option. Known values are: "None", "Always", "Failure". + :paramtype arguments: list[JSON] + :keyword get_debug_info: Debug info option. Known values are: "None", "Always", and "Failure". :paramtype get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :keyword mapper: Required. Mapper executable name. Type: string (or Expression with resultType - string). - :paramtype mapper: any - :keyword reducer: Required. Reducer executable name. Type: string (or Expression with - resultType string). - :paramtype reducer: any - :keyword input: Required. Input blob path. Type: string (or Expression with resultType string). - :paramtype input: any - :keyword output: Required. Output blob path. Type: string (or Expression with resultType - string). 
- :paramtype output: any - :keyword file_paths: Required. Paths to streaming job files. Can be directories. - :paramtype file_paths: list[any] + :keyword mapper: Mapper executable name. Type: string (or Expression with resultType string). + Required. + :paramtype mapper: JSON + :keyword reducer: Reducer executable name. Type: string (or Expression with resultType string). + Required. + :paramtype reducer: JSON + :keyword input: Input blob path. Type: string (or Expression with resultType string). Required. + :paramtype input: JSON + :keyword output: Output blob path. Type: string (or Expression with resultType string). + Required. + :paramtype output: JSON + :keyword file_paths: Paths to streaming job files. Can be directories. Required. + :paramtype file_paths: list[JSON] :keyword file_linked_service: Linked service reference where the files are located. :paramtype file_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword combiner: Combiner executable name. Type: string (or Expression with resultType string). - :paramtype combiner: any + :paramtype combiner: JSON :keyword command_environment: Command line environment values. - :paramtype command_environment: list[any] + :paramtype command_environment: list[JSON] :keyword defines: Allows user to specify defines for streaming job request. - :paramtype defines: dict[str, any] - """ - super(HDInsightStreamingActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'HDInsightStreaming' # type: str + :paramtype defines: dict[str, JSON] + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "HDInsightStreaming" # type: str self.storage_linked_services = storage_linked_services self.arguments = arguments self.get_debug_info = get_debug_info @@ -28974,15 +31182,15 @@ def __init__( self.defines = defines -class HiveLinkedService(LinkedService): +class HiveLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Hive Server linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -28991,123 +31199,123 @@ class HiveLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar host: Required. IP address or host name of the Hive server, separated by ';' for multiple - hosts (only when serviceDiscoveryMode is enable). 
- :vartype host: any + :vartype annotations: list[JSON] + :ivar host: IP address or host name of the Hive server, separated by ';' for multiple hosts + (only when serviceDiscoveryMode is enable). Required. + :vartype host: JSON :ivar port: The TCP port that the Hive server uses to listen for client connections. - :vartype port: any - :ivar server_type: The type of Hive server. Known values are: "HiveServer1", "HiveServer2", + :vartype port: JSON + :ivar server_type: The type of Hive server. Known values are: "HiveServer1", "HiveServer2", and "HiveThriftServer". :vartype server_type: str or ~azure.mgmt.datafactory.models.HiveServerType :ivar thrift_transport_protocol: The transport protocol to use in the Thrift layer. Known - values are: "Binary", "SASL", "HTTP ". + values are: "Binary", "SASL", and "HTTP ". :vartype thrift_transport_protocol: str or ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol - :ivar authentication_type: Required. The authentication method used to access the Hive server. - Known values are: "Anonymous", "Username", "UsernameAndPassword", + :ivar authentication_type: The authentication method used to access the Hive server. Required. + Known values are: "Anonymous", "Username", "UsernameAndPassword", and "WindowsAzureHDInsightService". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.HiveAuthenticationType :ivar service_discovery_mode: true to indicate using the ZooKeeper service, false not. - :vartype service_discovery_mode: any + :vartype service_discovery_mode: JSON :ivar zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are added. - :vartype zoo_keeper_name_space: any + :vartype zoo_keeper_name_space: JSON :ivar use_native_query: Specifies whether the driver uses native HiveQL queries,or converts them into an equivalent form in HiveQL. - :vartype use_native_query: any + :vartype use_native_query: JSON :ivar username: The user name that you use to access Hive Server. - :vartype username: any + :vartype username: JSON :ivar password: The password corresponding to the user name that you provided in the Username field. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar http_path: The partial URL corresponding to the Hive server. - :vartype http_path: any + :vartype http_path: JSON :ivar enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :vartype enable_ssl: any + :vartype enable_ssl: JSON :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :vartype trusted_cert_path: any + :vartype trusted_cert_path: JSON :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :vartype use_system_trust_store: any + :vartype use_system_trust_store: JSON :ivar allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :vartype allow_host_name_cn_mismatch: any + :vartype allow_host_name_cn_mismatch: JSON :ivar allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. 
- :vartype allow_self_signed_server_cert: any + :vartype allow_self_signed_server_cert: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, + "type": {"required": True}, + "host": {"required": True}, + "authentication_type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, - 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, - 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, - 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "host": {"key": "typeProperties.host", "type": "object"}, + "port": {"key": "typeProperties.port", "type": "object"}, + "server_type": {"key": "typeProperties.serverType", "type": "str"}, + "thrift_transport_protocol": {"key": "typeProperties.thriftTransportProtocol", "type": "str"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, + "service_discovery_mode": {"key": "typeProperties.serviceDiscoveryMode", "type": "object"}, + "zoo_keeper_name_space": {"key": "typeProperties.zooKeeperNameSpace", "type": "object"}, + "use_native_query": {"key": "typeProperties.useNativeQuery", "type": "object"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + 
"http_path": {"key": "typeProperties.httpPath", "type": "object"}, + "enable_ssl": {"key": "typeProperties.enableSsl", "type": "object"}, + "trusted_cert_path": {"key": "typeProperties.trustedCertPath", "type": "object"}, + "use_system_trust_store": {"key": "typeProperties.useSystemTrustStore", "type": "object"}, + "allow_host_name_cn_mismatch": {"key": "typeProperties.allowHostNameCNMismatch", "type": "object"}, + "allow_self_signed_server_cert": {"key": "typeProperties.allowSelfSignedServerCert", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - host: Any, + host: JSON, authentication_type: Union[str, "_models.HiveAuthenticationType"], - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - port: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + port: Optional[JSON] = None, server_type: Optional[Union[str, "_models.HiveServerType"]] = None, thrift_transport_protocol: Optional[Union[str, "_models.HiveThriftTransportProtocol"]] = None, - service_discovery_mode: Optional[Any] = None, - zoo_keeper_name_space: Optional[Any] = None, - use_native_query: Optional[Any] = None, - username: Optional[Any] = None, + service_discovery_mode: Optional[JSON] = None, + zoo_keeper_name_space: Optional[JSON] = None, + use_native_query: Optional[JSON] = None, + username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - http_path: Optional[Any] = None, - enable_ssl: Optional[Any] = None, - trusted_cert_path: Optional[Any] = None, - use_system_trust_store: Optional[Any] = None, - allow_host_name_cn_mismatch: Optional[Any] = None, - allow_self_signed_server_cert: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + http_path: Optional[JSON] = None, + enable_ssl: Optional[JSON] = None, + trusted_cert_path: Optional[JSON] = None, + use_system_trust_store: Optional[JSON] = None, + allow_host_name_cn_mismatch: Optional[JSON] = None, + allow_self_signed_server_cert: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -29115,61 +31323,68 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword host: Required. IP address or host name of the Hive server, separated by ';' for - multiple hosts (only when serviceDiscoveryMode is enable). - :paramtype host: any + :paramtype annotations: list[JSON] + :keyword host: IP address or host name of the Hive server, separated by ';' for multiple hosts + (only when serviceDiscoveryMode is enable). Required. 
+ :paramtype host: JSON :keyword port: The TCP port that the Hive server uses to listen for client connections. - :paramtype port: any + :paramtype port: JSON :keyword server_type: The type of Hive server. Known values are: "HiveServer1", "HiveServer2", - "HiveThriftServer". + and "HiveThriftServer". :paramtype server_type: str or ~azure.mgmt.datafactory.models.HiveServerType :keyword thrift_transport_protocol: The transport protocol to use in the Thrift layer. Known - values are: "Binary", "SASL", "HTTP ". + values are: "Binary", "SASL", and "HTTP ". :paramtype thrift_transport_protocol: str or ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol - :keyword authentication_type: Required. The authentication method used to access the Hive - server. Known values are: "Anonymous", "Username", "UsernameAndPassword", + :keyword authentication_type: The authentication method used to access the Hive server. + Required. Known values are: "Anonymous", "Username", "UsernameAndPassword", and "WindowsAzureHDInsightService". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.HiveAuthenticationType :keyword service_discovery_mode: true to indicate using the ZooKeeper service, false not. - :paramtype service_discovery_mode: any + :paramtype service_discovery_mode: JSON :keyword zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are added. - :paramtype zoo_keeper_name_space: any + :paramtype zoo_keeper_name_space: JSON :keyword use_native_query: Specifies whether the driver uses native HiveQL queries,or converts them into an equivalent form in HiveQL. - :paramtype use_native_query: any + :paramtype use_native_query: JSON :keyword username: The user name that you use to access Hive Server. - :paramtype username: any + :paramtype username: JSON :keyword password: The password corresponding to the user name that you provided in the Username field. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword http_path: The partial URL corresponding to the Hive server. - :paramtype http_path: any + :paramtype http_path: JSON :keyword enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :paramtype enable_ssl: any + :paramtype enable_ssl: JSON :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :paramtype trusted_cert_path: any + :paramtype trusted_cert_path: JSON :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :paramtype use_system_trust_store: any + :paramtype use_system_trust_store: JSON :keyword allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :paramtype allow_host_name_cn_mismatch: any + :paramtype allow_host_name_cn_mismatch: JSON :keyword allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :paramtype allow_self_signed_server_cert: any + :paramtype allow_self_signed_server_cert: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(HiveLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Hive' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Hive" # type: str self.host = host self.port = port self.server_type = server_type @@ -29189,112 +31404,122 @@ def __init__( self.encrypted_credential = encrypted_credential -class HiveObjectDataset(Dataset): +class HiveObjectDataset(Dataset): # pylint: disable=too-many-instance-attributes """Hive Server dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :vartype table_name: any + :vartype table_name: JSON :ivar table: The table name of the Hive. Type: string (or Expression with resultType string). - :vartype table: any + :vartype table: JSON :ivar schema_type_properties_schema: The schema name of the Hive. Type: string (or Expression with resultType string). 
- :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, - table: Optional[Any] = None, - schema_type_properties_schema: Optional[Any] = None, + table_name: Optional[JSON] = None, + table: Optional[JSON] = None, + schema_type_properties_schema: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. 
:paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: This property will be retired. Please consider using schema + table properties instead. - :paramtype table_name: any + :paramtype table_name: JSON :keyword table: The table name of the Hive. Type: string (or Expression with resultType string). - :paramtype table: any + :paramtype table: JSON :keyword schema_type_properties_schema: The schema name of the Hive. Type: string (or Expression with resultType string). - :paramtype schema_type_properties_schema: any - """ - super(HiveObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'HiveObject' # type: str + :paramtype schema_type_properties_schema: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "HiveObject" # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -29307,134 +31532,143 @@ class HiveSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'HiveSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "HiveSource" # type: str self.query = query -class HttpDataset(Dataset): +class HttpDataset(Dataset): # pylint: disable=too-many-instance-attributes """A file in an HTTP web server. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar relative_url: The relative URL based on the URL in the HttpLinkedService refers to an HTTP file Type: string (or Expression with resultType string). - :vartype relative_url: any + :vartype relative_url: JSON :ivar request_method: The HTTP method for the HTTP request. Type: string (or Expression with resultType string). - :vartype request_method: any + :vartype request_method: JSON :ivar request_body: The body for the HTTP request. 
Type: string (or Expression with resultType string). - :vartype request_body: any + :vartype request_body: JSON :ivar additional_headers: The headers for the HTTP Request. e.g. request-header-name-1:request-header-value-1 ... request-header-name-n:request-header-value-n Type: string (or Expression with resultType string). - :vartype additional_headers: any + :vartype additional_headers: JSON :ivar format: The format of files. :vartype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :ivar compression: The data compression method used on files. @@ -29442,43 +31676,43 @@ class HttpDataset(Dataset): """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, - 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, - 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "relative_url": {"key": "typeProperties.relativeUrl", "type": "object"}, + "request_method": {"key": "typeProperties.requestMethod", "type": "object"}, + "request_body": {"key": "typeProperties.requestBody", "type": "object"}, + "additional_headers": {"key": "typeProperties.additionalHeaders", "type": "object"}, + "format": {"key": "typeProperties.format", "type": "DatasetStorageFormat"}, + "compression": {"key": "typeProperties.compression", "type": "DatasetCompression"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - relative_url: Optional[Any] = None, - request_method: Optional[Any] = None, - request_body: Optional[Any] = None, - 
additional_headers: Optional[Any] = None, + relative_url: Optional[JSON] = None, + request_method: Optional[JSON] = None, + request_body: Optional[JSON] = None, + additional_headers: Optional[JSON] = None, format: Optional["_models.DatasetStorageFormat"] = None, compression: Optional["_models.DatasetCompression"] = None, **kwargs @@ -29486,46 +31720,56 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword relative_url: The relative URL based on the URL in the HttpLinkedService refers to an HTTP file Type: string (or Expression with resultType string). - :paramtype relative_url: any + :paramtype relative_url: JSON :keyword request_method: The HTTP method for the HTTP request. Type: string (or Expression with resultType string). - :paramtype request_method: any + :paramtype request_method: JSON :keyword request_body: The body for the HTTP request. Type: string (or Expression with resultType string). - :paramtype request_body: any + :paramtype request_body: JSON :keyword additional_headers: The headers for the HTTP Request. e.g. request-header-name-1:request-header-value-1 ... request-header-name-n:request-header-value-n Type: string (or Expression with resultType string). - :paramtype additional_headers: any + :paramtype additional_headers: JSON :keyword format: The format of files. :paramtype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :keyword compression: The data compression method used on files. 
:paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ - super(HttpDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'HttpFile' # type: str + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "HttpFile" # type: str self.relative_url = relative_url self.request_method = request_method self.request_body = request_body @@ -29534,15 +31778,15 @@ def __init__( self.compression = compression -class HttpLinkedService(LinkedService): +class HttpLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Linked service for an HTTP source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -29551,87 +31795,90 @@ class HttpLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: - string (or Expression with resultType string). - :vartype url: any + :vartype annotations: list[JSON] + :ivar url: The base URL of the HTTP endpoint, e.g. https://www.microsoft.com. Type: string (or + Expression with resultType string). Required. + :vartype url: JSON :ivar authentication_type: The authentication type to be used to connect to the HTTP server. - Known values are: "Basic", "Anonymous", "Digest", "Windows", "ClientCertificate". + Known values are: "Basic", "Anonymous", "Digest", "Windows", and "ClientCertificate". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.HttpAuthenticationType :ivar user_name: User name for Basic, Digest, or Windows authentication. Type: string (or Expression with resultType string). - :vartype user_name: any + :vartype user_name: JSON :ivar password: Password for Basic, Digest, Windows, or ClientCertificate with EmbeddedCertData authentication. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). - :vartype auth_headers: any + :vartype auth_headers: JSON :ivar embedded_cert_data: Base64 encoded certificate data for ClientCertificate authentication. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). 
- :vartype embedded_cert_data: any + :vartype embedded_cert_data: JSON :ivar cert_thumbprint: Thumbprint of certificate for ClientCertificate authentication. Only valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). - :vartype cert_thumbprint: any + :vartype cert_thumbprint: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON :ivar enable_server_certificate_validation: If true, validate the HTTPS server SSL certificate. Default value is true. Type: boolean (or Expression with resultType boolean). - :vartype enable_server_certificate_validation: any + :vartype enable_server_certificate_validation: JSON """ _validation = { - 'type': {'required': True}, - 'url': {'required': True}, + "type": {"required": True}, + "url": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'auth_headers': {'key': 'typeProperties.authHeaders', 'type': 'object'}, - 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'}, - 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "url": {"key": "typeProperties.url", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "auth_headers": {"key": "typeProperties.authHeaders", "type": "object"}, + "embedded_cert_data": {"key": "typeProperties.embeddedCertData", "type": "object"}, + "cert_thumbprint": {"key": "typeProperties.certThumbprint", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "enable_server_certificate_validation": { + "key": "typeProperties.enableServerCertificateValidation", + "type": "object", + }, } def __init__( self, *, - url: Any, - additional_properties: Optional[Dict[str, Any]] = None, + url: JSON, + additional_properties: Optional[Dict[str, JSON]] 
= None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, authentication_type: Optional[Union[str, "_models.HttpAuthenticationType"]] = None, - user_name: Optional[Any] = None, + user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - auth_headers: Optional[Any] = None, - embedded_cert_data: Optional[Any] = None, - cert_thumbprint: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, - enable_server_certificate_validation: Optional[Any] = None, + auth_headers: Optional[JSON] = None, + embedded_cert_data: Optional[JSON] = None, + cert_thumbprint: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, + enable_server_certificate_validation: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -29639,42 +31886,49 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: - string (or Expression with resultType string). - :paramtype url: any + :paramtype annotations: list[JSON] + :keyword url: The base URL of the HTTP endpoint, e.g. https://www.microsoft.com. Type: string + (or Expression with resultType string). Required. + :paramtype url: JSON :keyword authentication_type: The authentication type to be used to connect to the HTTP server. - Known values are: "Basic", "Anonymous", "Digest", "Windows", "ClientCertificate". + Known values are: "Basic", "Anonymous", "Digest", "Windows", and "ClientCertificate". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.HttpAuthenticationType :keyword user_name: User name for Basic, Digest, or Windows authentication. Type: string (or Expression with resultType string). - :paramtype user_name: any + :paramtype user_name: JSON :keyword password: Password for Basic, Digest, Windows, or ClientCertificate with EmbeddedCertData authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). - :paramtype auth_headers: any + :paramtype auth_headers: JSON :keyword embedded_cert_data: Base64 encoded certificate data for ClientCertificate authentication. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). - :paramtype embedded_cert_data: any + :paramtype embedded_cert_data: JSON :keyword cert_thumbprint: Thumbprint of certificate for ClientCertificate authentication. Only valid for on-premises copy. 
For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). - :paramtype cert_thumbprint: any + :paramtype cert_thumbprint: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any + :paramtype encrypted_credential: JSON :keyword enable_server_certificate_validation: If true, validate the HTTPS server SSL certificate. Default value is true. Type: boolean (or Expression with resultType boolean). - :paramtype enable_server_certificate_validation: any - """ - super(HttpLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'HttpServer' # type: str + :paramtype enable_server_certificate_validation: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "HttpServer" # type: str self.url = url self.authentication_type = authentication_type self.user_name = user_name @@ -29693,95 +31947,100 @@ class HttpReadSettings(StoreReadSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The read setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. :vartype type: str :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). - :vartype request_method: any + :vartype request_method: JSON :ivar request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). - :vartype request_body: any + :vartype request_body: JSON :ivar additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - :vartype additional_headers: any + :vartype additional_headers: JSON :ivar request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. - :vartype request_timeout: any + :vartype request_timeout: JSON :ivar enable_partition_discovery: Indicates whether to enable partition discovery. :vartype enable_partition_discovery: bool :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). 
- :vartype partition_root_path: any + :vartype partition_root_path: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'request_method': {'key': 'requestMethod', 'type': 'object'}, - 'request_body': {'key': 'requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, - 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "request_method": {"key": "requestMethod", "type": "object"}, + "request_body": {"key": "requestBody", "type": "object"}, + "additional_headers": {"key": "additionalHeaders", "type": "object"}, + "request_timeout": {"key": "requestTimeout", "type": "object"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "partition_root_path": {"key": "partitionRootPath", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - request_method: Optional[Any] = None, - request_body: Optional[Any] = None, - additional_headers: Optional[Any] = None, - request_timeout: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + request_method: Optional[JSON] = None, + request_body: Optional[JSON] = None, + additional_headers: Optional[JSON] = None, + request_timeout: Optional[JSON] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[Any] = None, + partition_root_path: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). - :paramtype request_method: any + :paramtype request_method: JSON :keyword request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). 
- :paramtype request_body: any + :paramtype request_body: JSON :keyword additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - :paramtype additional_headers: any + :paramtype additional_headers: JSON :keyword request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. - :paramtype request_timeout: any + :paramtype request_timeout: JSON :keyword enable_partition_discovery: Indicates whether to enable partition discovery. :paramtype enable_partition_discovery: bool :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :paramtype partition_root_path: any - """ - super(HttpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'HttpReadSettings' # type: str + :paramtype partition_root_path: JSON + """ + super().__init__( + additional_properties=additional_properties, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "HttpReadSettings" # type: str self.request_method = request_method self.request_body = request_body self.additional_headers = additional_headers @@ -29797,57 +32056,59 @@ class HttpServerLocation(DatasetLocation): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage location. Required. :vartype type: str :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :vartype folder_path: any + :vartype folder_path: JSON :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :vartype file_name: any + :vartype file_name: JSON :ivar relative_url: Specify the relativeUrl of http server. Type: string (or Expression with resultType string). - :vartype relative_url: any + :vartype relative_url: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "folder_path": {"key": "folderPath", "type": "object"}, + "file_name": {"key": "fileName", "type": "object"}, + "relative_url": {"key": "relativeUrl", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - folder_path: Optional[Any] = None, - file_name: Optional[Any] = None, - relative_url: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + folder_path: Optional[JSON] = None, + file_name: Optional[JSON] = None, + relative_url: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :paramtype folder_path: any + :paramtype folder_path: JSON :keyword file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :paramtype file_name: any + :paramtype file_name: JSON :keyword relative_url: Specify the relativeUrl of http server. Type: string (or Expression with resultType string). - :paramtype relative_url: any + :paramtype relative_url: JSON """ - super(HttpServerLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type = 'HttpServerLocation' # type: str + super().__init__( + additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs + ) + self.type = "HttpServerLocation" # type: str self.relative_url = relative_url @@ -29858,89 +32119,96 @@ class HttpSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :vartype http_request_timeout: any + :vartype http_request_timeout: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "http_request_timeout": {"key": "httpRequestTimeout", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - http_request_timeout: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + http_request_timeout: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :paramtype http_request_timeout: any - """ - super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'HttpSource' # type: str + :paramtype http_request_timeout: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "HttpSource" # type: str self.http_request_timeout = http_request_timeout -class HubspotLinkedService(LinkedService): +class HubspotLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Hubspot Service linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -29949,9 +32217,9 @@ class HubspotLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar client_id: Required. The client ID associated with your Hubspot application. - :vartype client_id: any + :vartype annotations: list[JSON] + :ivar client_id: The client ID associated with your Hubspot application. Required. + :vartype client_id: JSON :ivar client_secret: The client secret associated with your Hubspot application. :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase :ivar access_token: The access token obtained when initially authenticating your OAuth @@ -29962,64 +32230,64 @@ class HubspotLinkedService(LinkedService): :vartype refresh_token: ~azure.mgmt.datafactory.models.SecretBase :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :vartype use_encrypted_endpoints: any + :vartype use_encrypted_endpoints: JSON :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :vartype use_host_verification: any + :vartype use_host_verification: JSON :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :vartype use_peer_verification: any + :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'client_id': {'required': True}, + "type": {"required": True}, + "client_id": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "client_id": {"key": "typeProperties.clientId", "type": "object"}, + "client_secret": {"key": "typeProperties.clientSecret", "type": "SecretBase"}, + "access_token": {"key": "typeProperties.accessToken", "type": "SecretBase"}, + "refresh_token": {"key": "typeProperties.refreshToken", "type": "SecretBase"}, + "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, + "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, + "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - client_id: Any, - additional_properties: Optional[Dict[str, Any]] = None, + client_id: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, client_secret: Optional["_models.SecretBase"] = None, access_token: Optional["_models.SecretBase"] = None, refresh_token: Optional["_models.SecretBase"] = None, - use_encrypted_endpoints: Optional[Any] = None, - use_host_verification: Optional[Any] = None, - use_peer_verification: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + use_encrypted_endpoints: Optional[JSON] = None, + use_host_verification: Optional[JSON] = None, + use_peer_verification: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -30027,9 +32295,9 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword client_id: Required. The client ID associated with your Hubspot application. - :paramtype client_id: any + :paramtype annotations: list[JSON] + :keyword client_id: The client ID associated with your Hubspot application. Required. + :paramtype client_id: JSON :keyword client_secret: The client secret associated with your Hubspot application. :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase :keyword access_token: The access token obtained when initially authenticating your OAuth @@ -30040,21 +32308,28 @@ def __init__( :paramtype refresh_token: ~azure.mgmt.datafactory.models.SecretBase :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :paramtype use_encrypted_endpoints: any + :paramtype use_encrypted_endpoints: JSON :keyword use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :paramtype use_host_verification: any + :paramtype use_host_verification: JSON :keyword use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :paramtype use_peer_verification: any + :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(HubspotLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Hubspot' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Hubspot" # type: str self.client_id = client_id self.client_secret = client_secret self.access_token = access_token @@ -30072,88 +32347,98 @@ class HubspotObjectDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. Type: string (or Expression with resultType string). - :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(HubspotObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'HubspotObject' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "HubspotObject" # type: str self.table_name = table_name @@ -30164,89 +32449,98 @@ class HubspotSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. 
Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'HubspotSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "HubspotSource" # type: str self.query = query @@ -30257,10 +32551,10 @@ class IfConditionActivity(ControlActivity): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -30268,9 +32562,8 @@ class IfConditionActivity(ControlActivity): :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :ivar expression: Required. An expression that would evaluate to Boolean. This is used to - determine the block of activities (ifTrueActivities or ifFalseActivities) that will be - executed. + :ivar expression: An expression that would evaluate to Boolean. This is used to determine the + block of activities (ifTrueActivities or ifFalseActivities) that will be executed. Required. :vartype expression: ~azure.mgmt.datafactory.models.Expression :ivar if_true_activities: List of activities to execute if expression is evaluated to true. This is an optional property and if not provided, the activity will exit without any action. 
@@ -30281,21 +32574,21 @@ class IfConditionActivity(ControlActivity): """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'expression': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "expression": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, - 'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'}, - 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "expression": {"key": "typeProperties.expression", "type": "Expression"}, + "if_true_activities": {"key": "typeProperties.ifTrueActivities", "type": "[Activity]"}, + "if_false_activities": {"key": "typeProperties.ifFalseActivities", "type": "[Activity]"}, } def __init__( @@ -30303,7 +32596,7 @@ def __init__( *, name: str, expression: "_models.Expression", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, @@ -30314,8 +32607,8 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -30323,9 +32616,9 @@ def __init__( :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :keyword expression: Required. An expression that would evaluate to Boolean. This is used to - determine the block of activities (ifTrueActivities or ifFalseActivities) that will be - executed. + :keyword expression: An expression that would evaluate to Boolean. This is used to determine + the block of activities (ifTrueActivities or ifFalseActivities) that will be executed. + Required. :paramtype expression: ~azure.mgmt.datafactory.models.Expression :keyword if_true_activities: List of activities to execute if expression is evaluated to true. This is an optional property and if not provided, the activity will exit without any action. @@ -30335,22 +32628,29 @@ def __init__( action. 
:paramtype if_false_activities: list[~azure.mgmt.datafactory.models.Activity] """ - super(IfConditionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'IfCondition' # type: str + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + **kwargs + ) + self.type = "IfCondition" # type: str self.expression = expression self.if_true_activities = if_true_activities self.if_false_activities = if_false_activities -class ImpalaLinkedService(LinkedService): +class ImpalaLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Impala server linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -30359,93 +32659,93 @@ class ImpalaLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar host: Required. The IP address or host name of the Impala server. (i.e. 192.168.222.160). - :vartype host: any + :vartype annotations: list[JSON] + :ivar host: The IP address or host name of the Impala server. (i.e. 192.168.222.160). Required. + :vartype host: JSON :ivar port: The TCP port that the Impala server uses to listen for client connections. The default value is 21050. - :vartype port: any - :ivar authentication_type: Required. The authentication type to use. Known values are: - "Anonymous", "SASLUsername", "UsernameAndPassword". + :vartype port: JSON + :ivar authentication_type: The authentication type to use. Required. Known values are: + "Anonymous", "SASLUsername", and "UsernameAndPassword". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.ImpalaAuthenticationType :ivar username: The user name used to access the Impala server. The default value is anonymous when using SASLUsername. - :vartype username: any + :vartype username: JSON :ivar password: The password corresponding to the user name when using UsernameAndPassword. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :vartype enable_ssl: any + :vartype enable_ssl: JSON :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :vartype trusted_cert_path: any + :vartype trusted_cert_path: JSON :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. 
- :vartype use_system_trust_store: any + :vartype use_system_trust_store: JSON :ivar allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :vartype allow_host_name_cn_mismatch: any + :vartype allow_host_name_cn_mismatch: JSON :ivar allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :vartype allow_self_signed_server_cert: any + :vartype allow_self_signed_server_cert: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, + "type": {"required": True}, + "host": {"required": True}, + "authentication_type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "host": {"key": "typeProperties.host", "type": "object"}, + "port": {"key": "typeProperties.port", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "enable_ssl": {"key": "typeProperties.enableSsl", "type": "object"}, + "trusted_cert_path": {"key": "typeProperties.trustedCertPath", "type": "object"}, + "use_system_trust_store": {"key": "typeProperties.useSystemTrustStore", "type": "object"}, + "allow_host_name_cn_mismatch": {"key": "typeProperties.allowHostNameCNMismatch", "type": "object"}, + "allow_self_signed_server_cert": {"key": "typeProperties.allowSelfSignedServerCert", "type": 
"object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - host: Any, + host: JSON, authentication_type: Union[str, "_models.ImpalaAuthenticationType"], - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - port: Optional[Any] = None, - username: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + port: Optional[JSON] = None, + username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - enable_ssl: Optional[Any] = None, - trusted_cert_path: Optional[Any] = None, - use_system_trust_store: Optional[Any] = None, - allow_host_name_cn_mismatch: Optional[Any] = None, - allow_self_signed_server_cert: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + enable_ssl: Optional[JSON] = None, + trusted_cert_path: Optional[JSON] = None, + use_system_trust_store: Optional[JSON] = None, + allow_host_name_cn_mismatch: Optional[JSON] = None, + allow_self_signed_server_cert: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -30453,44 +32753,51 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword host: Required. The IP address or host name of the Impala server. (i.e. - 192.168.222.160). - :paramtype host: any + :paramtype annotations: list[JSON] + :keyword host: The IP address or host name of the Impala server. (i.e. 192.168.222.160). + Required. + :paramtype host: JSON :keyword port: The TCP port that the Impala server uses to listen for client connections. The default value is 21050. - :paramtype port: any - :keyword authentication_type: Required. The authentication type to use. Known values are: - "Anonymous", "SASLUsername", "UsernameAndPassword". + :paramtype port: JSON + :keyword authentication_type: The authentication type to use. Required. Known values are: + "Anonymous", "SASLUsername", and "UsernameAndPassword". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.ImpalaAuthenticationType :keyword username: The user name used to access the Impala server. The default value is anonymous when using SASLUsername. - :paramtype username: any + :paramtype username: JSON :keyword password: The password corresponding to the user name when using UsernameAndPassword. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. 
- :paramtype enable_ssl: any + :paramtype enable_ssl: JSON :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :paramtype trusted_cert_path: any + :paramtype trusted_cert_path: JSON :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :paramtype use_system_trust_store: any + :paramtype use_system_trust_store: JSON :keyword allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :paramtype allow_host_name_cn_mismatch: any + :paramtype allow_host_name_cn_mismatch: JSON :keyword allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :paramtype allow_self_signed_server_cert: any + :paramtype allow_self_signed_server_cert: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(ImpalaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Impala' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Impala" # type: str self.host = host self.port = port self.authentication_type = authentication_type @@ -30504,112 +32811,122 @@ def __init__( self.encrypted_credential = encrypted_credential -class ImpalaObjectDataset(Dataset): +class ImpalaObjectDataset(Dataset): # pylint: disable=too-many-instance-attributes """Impala server dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. 
- :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :vartype table_name: any + :vartype table_name: JSON :ivar table: The table name of the Impala. Type: string (or Expression with resultType string). - :vartype table: any + :vartype table: JSON :ivar schema_type_properties_schema: The schema name of the Impala. Type: string (or Expression with resultType string). - :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, - table: Optional[Any] = None, - schema_type_properties_schema: Optional[Any] = None, + table_name: Optional[JSON] = None, + table: Optional[JSON] = None, + schema_type_properties_schema: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: This property will be retired. Please consider using schema + table properties instead. - :paramtype table_name: any + :paramtype table_name: JSON :keyword table: The table name of the Impala. Type: string (or Expression with resultType string). - :paramtype table: any + :paramtype table: JSON :keyword schema_type_properties_schema: The schema name of the Impala. Type: string (or Expression with resultType string). - :paramtype schema_type_properties_schema: any - """ - super(ImpalaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'ImpalaObject' # type: str + :paramtype schema_type_properties_schema: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "ImpalaObject" # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -30622,101 +32939,110 @@ class ImpalaSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'ImpalaSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "ImpalaSource" # type: str self.query = query -class InformixLinkedService(LinkedService): +class InformixLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Informix linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -30725,69 +33051,69 @@ class InformixLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar connection_string: Required. The non-access credential portion of the connection string - as well as an optional encrypted credential. Type: string, SecureString or - AzureKeyVaultSecretReference. 
- :vartype connection_string: any + :vartype annotations: list[JSON] + :ivar connection_string: The non-access credential portion of the connection string as well as + an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. + Required. + :vartype connection_string: JSON :ivar authentication_type: Type of authentication used to connect to the Informix as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). - :vartype authentication_type: any + :vartype authentication_type: JSON :ivar credential: The access credential portion of the connection string specified in driver-specific property-value format. :vartype credential: ~azure.mgmt.datafactory.models.SecretBase :ivar user_name: User name for Basic authentication. Type: string (or Expression with resultType string). - :vartype user_name: any + :vartype user_name: JSON :ivar password: Password for Basic authentication. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, + "type": {"required": True}, + "connection_string": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "object"}, + "credential": {"key": "typeProperties.credential", "type": "SecretBase"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - connection_string: Any, - additional_properties: Optional[Dict[str, Any]] = None, + connection_string: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, 
"_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - authentication_type: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + authentication_type: Optional[JSON] = None, credential: Optional["_models.SecretBase"] = None, - user_name: Optional[Any] = None, + user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -30795,30 +33121,37 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword connection_string: Required. The non-access credential portion of the connection - string as well as an optional encrypted credential. Type: string, SecureString or - AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype annotations: list[JSON] + :keyword connection_string: The non-access credential portion of the connection string as well + as an optional encrypted credential. Type: string, SecureString or + AzureKeyVaultSecretReference. Required. + :paramtype connection_string: JSON :keyword authentication_type: Type of authentication used to connect to the Informix as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). - :paramtype authentication_type: any + :paramtype authentication_type: JSON :keyword credential: The access credential portion of the connection string specified in driver-specific property-value format. :paramtype credential: ~azure.mgmt.datafactory.models.SecretBase :keyword user_name: User name for Basic authentication. Type: string (or Expression with resultType string). - :paramtype user_name: any + :paramtype user_name: JSON :keyword password: Password for Basic authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(InformixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Informix' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Informix" # type: str self.connection_string = connection_string self.authentication_type = authentication_type self.credential = credential @@ -30834,89 +33167,98 @@ class InformixSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). - :vartype pre_copy_script: any + :vartype pre_copy_script: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "pre_copy_script": {"key": "preCopyScript", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - pre_copy_script: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] 
= None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + pre_copy_script: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). - :paramtype pre_copy_script: any - """ - super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'InformixSink' # type: str + :paramtype pre_copy_script: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "InformixSink" # type: str self.pre_copy_script = pre_copy_script @@ -30927,87 +33269,96 @@ class InformixSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: Database query. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). 
- :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: Database query. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'InformixSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "InformixSource" # type: str self.query = query @@ -31018,145 +33369,149 @@ class InformixTableDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. 
- :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The Informix table name. Type: string (or Expression with resultType string). - :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. 
:paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The Informix table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(InformixTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'InformixTable' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "InformixTable" # type: str self.table_name = table_name -class IntegrationRuntime(msrest.serialization.Model): +class IntegrationRuntime(_serialization.Model): """Azure Data Factory nested object which serves as a compute resource for activities. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ManagedIntegrationRuntime, SelfHostedIntegrationRuntime. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ManagedIntegrationRuntime, SelfHostedIntegrationRuntime All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of integration runtime.Constant filled by server. Known values are: - "Managed", "SelfHosted". + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of integration runtime. Required. Known values are: "Managed" and + "SelfHosted". :vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :ivar description: Integration runtime description. :vartype description: str """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, } - _subtype_map = { - 'type': {'Managed': 'ManagedIntegrationRuntime', 'SelfHosted': 'SelfHostedIntegrationRuntime'} - } + _subtype_map = {"type": {"Managed": "ManagedIntegrationRuntime", "SelfHosted": "SelfHostedIntegrationRuntime"}} def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - description: Optional[str] = None, - **kwargs + self, *, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Integration runtime description. 
:paramtype description: str """ - super(IntegrationRuntime, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties - self.type = 'IntegrationRuntime' # type: str + self.type = None # type: Optional[str] self.description = description -class IntegrationRuntimeAuthKeys(msrest.serialization.Model): +class IntegrationRuntimeAuthKeys(_serialization.Model): """The integration runtime authentication keys. :ivar auth_key1: The primary integration runtime authentication key. @@ -31166,34 +33521,28 @@ class IntegrationRuntimeAuthKeys(msrest.serialization.Model): """ _attribute_map = { - 'auth_key1': {'key': 'authKey1', 'type': 'str'}, - 'auth_key2': {'key': 'authKey2', 'type': 'str'}, + "auth_key1": {"key": "authKey1", "type": "str"}, + "auth_key2": {"key": "authKey2", "type": "str"}, } - def __init__( - self, - *, - auth_key1: Optional[str] = None, - auth_key2: Optional[str] = None, - **kwargs - ): + def __init__(self, *, auth_key1: Optional[str] = None, auth_key2: Optional[str] = None, **kwargs): """ :keyword auth_key1: The primary integration runtime authentication key. :paramtype auth_key1: str :keyword auth_key2: The secondary integration runtime authentication key. :paramtype auth_key2: str """ - super(IntegrationRuntimeAuthKeys, self).__init__(**kwargs) + super().__init__(**kwargs) self.auth_key1 = auth_key1 self.auth_key2 = auth_key2 -class IntegrationRuntimeComputeProperties(msrest.serialization.Model): +class IntegrationRuntimeComputeProperties(_serialization.Model): """The compute resource properties for managed integration runtime. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar location: The location for managed integration runtime. The supported regions could be found on https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities. 
@@ -31213,24 +33562,24 @@ class IntegrationRuntimeComputeProperties(msrest.serialization.Model): """ _validation = { - 'number_of_nodes': {'minimum': 1}, - 'max_parallel_executions_per_node': {'minimum': 1}, + "number_of_nodes": {"minimum": 1}, + "max_parallel_executions_per_node": {"minimum": 1}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'node_size': {'key': 'nodeSize', 'type': 'str'}, - 'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'}, - 'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'}, - 'data_flow_properties': {'key': 'dataFlowProperties', 'type': 'IntegrationRuntimeDataFlowProperties'}, - 'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'}, + "additional_properties": {"key": "", "type": "{object}"}, + "location": {"key": "location", "type": "str"}, + "node_size": {"key": "nodeSize", "type": "str"}, + "number_of_nodes": {"key": "numberOfNodes", "type": "int"}, + "max_parallel_executions_per_node": {"key": "maxParallelExecutionsPerNode", "type": "int"}, + "data_flow_properties": {"key": "dataFlowProperties", "type": "IntegrationRuntimeDataFlowProperties"}, + "v_net_properties": {"key": "vNetProperties", "type": "IntegrationRuntimeVNetProperties"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, location: Optional[str] = None, node_size: Optional[str] = None, number_of_nodes: Optional[int] = None, @@ -31242,7 +33591,7 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword location: The location for managed integration runtime. The supported regions could be found on https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities. @@ -31260,7 +33609,7 @@ def __init__( :keyword v_net_properties: VNet properties for managed integration runtime. :paramtype v_net_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties """ - super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.location = location self.node_size = node_size @@ -31270,14 +33619,14 @@ def __init__( self.v_net_properties = v_net_properties -class IntegrationRuntimeConnectionInfo(msrest.serialization.Model): +class IntegrationRuntimeConnectionInfo(_serialization.Model): """Connection information for encrypting the on-premises data source credentials. Variables are only populated by the server, and will be ignored when sending a request. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar service_token: The token generated in service. Callers use this token to authenticate to integration runtime. 
:vartype service_token: str @@ -31296,36 +33645,31 @@ class IntegrationRuntimeConnectionInfo(msrest.serialization.Model): """ _validation = { - 'service_token': {'readonly': True}, - 'identity_cert_thumbprint': {'readonly': True}, - 'host_service_uri': {'readonly': True}, - 'version': {'readonly': True}, - 'public_key': {'readonly': True}, - 'is_identity_cert_exprired': {'readonly': True}, + "service_token": {"readonly": True}, + "identity_cert_thumbprint": {"readonly": True}, + "host_service_uri": {"readonly": True}, + "version": {"readonly": True}, + "public_key": {"readonly": True}, + "is_identity_cert_exprired": {"readonly": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'service_token': {'key': 'serviceToken', 'type': 'str'}, - 'identity_cert_thumbprint': {'key': 'identityCertThumbprint', 'type': 'str'}, - 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, - 'public_key': {'key': 'publicKey', 'type': 'str'}, - 'is_identity_cert_exprired': {'key': 'isIdentityCertExprired', 'type': 'bool'}, + "additional_properties": {"key": "", "type": "{object}"}, + "service_token": {"key": "serviceToken", "type": "str"}, + "identity_cert_thumbprint": {"key": "identityCertThumbprint", "type": "str"}, + "host_service_uri": {"key": "hostServiceUri", "type": "str"}, + "version": {"key": "version", "type": "str"}, + "public_key": {"key": "publicKey", "type": "str"}, + "is_identity_cert_exprired": {"key": "isIdentityCertExprired", "type": "bool"}, } - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - **kwargs - ): + def __init__(self, *, additional_properties: Optional[Dict[str, JSON]] = None, **kwargs): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] """ - super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.service_token = None self.identity_cert_thumbprint = None @@ -31335,7 +33679,7 @@ def __init__( self.is_identity_cert_exprired = None -class IntegrationRuntimeCustomerVirtualNetwork(msrest.serialization.Model): +class IntegrationRuntimeCustomerVirtualNetwork(_serialization.Model): """The definition and properties of virtual network to which Azure-SSIS integration runtime will join. :ivar subnet_id: The ID of subnet to which Azure-SSIS integration runtime will join. @@ -31343,24 +33687,19 @@ class IntegrationRuntimeCustomerVirtualNetwork(msrest.serialization.Model): """ _attribute_map = { - 'subnet_id': {'key': 'subnetId', 'type': 'str'}, + "subnet_id": {"key": "subnetId", "type": "str"}, } - def __init__( - self, - *, - subnet_id: Optional[str] = None, - **kwargs - ): + def __init__(self, *, subnet_id: Optional[str] = None, **kwargs): """ :keyword subnet_id: The ID of subnet to which Azure-SSIS integration runtime will join. :paramtype subnet_id: str """ - super(IntegrationRuntimeCustomerVirtualNetwork, self).__init__(**kwargs) + super().__init__(**kwargs) self.subnet_id = subnet_id -class IntegrationRuntimeCustomSetupScriptProperties(msrest.serialization.Model): +class IntegrationRuntimeCustomSetupScriptProperties(_serialization.Model): """Custom setup script properties for a managed dedicated integration runtime. 
:ivar blob_container_uri: The URI of the Azure blob container that contains the custom setup @@ -31371,16 +33710,12 @@ class IntegrationRuntimeCustomSetupScriptProperties(msrest.serialization.Model): """ _attribute_map = { - 'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'}, - 'sas_token': {'key': 'sasToken', 'type': 'SecureString'}, + "blob_container_uri": {"key": "blobContainerUri", "type": "str"}, + "sas_token": {"key": "sasToken", "type": "SecureString"}, } def __init__( - self, - *, - blob_container_uri: Optional[str] = None, - sas_token: Optional["_models.SecureString"] = None, - **kwargs + self, *, blob_container_uri: Optional[str] = None, sas_token: Optional["_models.SecureString"] = None, **kwargs ): """ :keyword blob_container_uri: The URI of the Azure blob container that contains the custom setup @@ -31389,19 +33724,19 @@ def __init__( :keyword sas_token: The SAS token of the Azure blob container. :paramtype sas_token: ~azure.mgmt.datafactory.models.SecureString """ - super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.blob_container_uri = blob_container_uri self.sas_token = sas_token -class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model): +class IntegrationRuntimeDataFlowProperties(_serialization.Model): """Data flow properties for managed integration runtime. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar compute_type: Compute type of the cluster which will execute data flow job. Known values - are: "General", "MemoryOptimized", "ComputeOptimized". + are: "General", "MemoryOptimized", and "ComputeOptimized". :vartype compute_type: str or ~azure.mgmt.datafactory.models.DataFlowComputeType :ivar core_count: Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. @@ -31415,21 +33750,21 @@ class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model): """ _validation = { - 'time_to_live': {'minimum': 0}, + "time_to_live": {"minimum": 0}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'core_count': {'key': 'coreCount', 'type': 'int'}, - 'time_to_live': {'key': 'timeToLive', 'type': 'int'}, - 'cleanup': {'key': 'cleanup', 'type': 'bool'}, + "additional_properties": {"key": "", "type": "{object}"}, + "compute_type": {"key": "computeType", "type": "str"}, + "core_count": {"key": "coreCount", "type": "int"}, + "time_to_live": {"key": "timeToLive", "type": "int"}, + "cleanup": {"key": "cleanup", "type": "bool"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, compute_type: Optional[Union[str, "_models.DataFlowComputeType"]] = None, core_count: Optional[int] = None, time_to_live: Optional[int] = None, @@ -31439,9 +33774,9 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword compute_type: Compute type of the cluster which will execute data flow job. Known - values are: "General", "MemoryOptimized", "ComputeOptimized". + values are: "General", "MemoryOptimized", and "ComputeOptimized". 
:paramtype compute_type: str or ~azure.mgmt.datafactory.models.DataFlowComputeType :keyword core_count: Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. @@ -31453,7 +33788,7 @@ def __init__( run until TTL (time to live) is reached if this is set as false. Default is true. :paramtype cleanup: bool """ - super(IntegrationRuntimeDataFlowProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.compute_type = compute_type self.core_count = core_count @@ -31461,7 +33796,7 @@ def __init__( self.cleanup = cleanup -class IntegrationRuntimeDataProxyProperties(msrest.serialization.Model): +class IntegrationRuntimeDataProxyProperties(_serialization.Model): """Data proxy properties for a managed dedicated integration runtime. :ivar connect_via: The self-hosted integration runtime reference. @@ -31473,9 +33808,9 @@ class IntegrationRuntimeDataProxyProperties(msrest.serialization.Model): """ _attribute_map = { - 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, - 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, - 'path': {'key': 'path', 'type': 'str'}, + "connect_via": {"key": "connectVia", "type": "EntityReference"}, + "staging_linked_service": {"key": "stagingLinkedService", "type": "EntityReference"}, + "path": {"key": "path", "type": "str"}, } def __init__( @@ -31494,7 +33829,7 @@ def __init__( :keyword path: The path to contain the staged data in the Blob storage. :paramtype path: str """ - super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.connect_via = connect_via self.staging_linked_service = staging_linked_service self.path = path @@ -31507,75 +33842,63 @@ class IntegrationRuntimeDebugResource(SubResourceDebugResource): :ivar name: The resource name. :vartype name: str - :ivar properties: Required. Integration runtime properties. + :ivar properties: Integration runtime properties. Required. :vartype properties: ~azure.mgmt.datafactory.models.IntegrationRuntime """ _validation = { - 'properties': {'required': True}, + "properties": {"required": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'}, + "name": {"key": "name", "type": "str"}, + "properties": {"key": "properties", "type": "IntegrationRuntime"}, } - def __init__( - self, - *, - properties: "_models.IntegrationRuntime", - name: Optional[str] = None, - **kwargs - ): + def __init__(self, *, properties: "_models.IntegrationRuntime", name: Optional[str] = None, **kwargs): """ :keyword name: The resource name. :paramtype name: str - :keyword properties: Required. Integration runtime properties. + :keyword properties: Integration runtime properties. Required. :paramtype properties: ~azure.mgmt.datafactory.models.IntegrationRuntime """ - super(IntegrationRuntimeDebugResource, self).__init__(name=name, **kwargs) + super().__init__(name=name, **kwargs) self.properties = properties -class IntegrationRuntimeListResponse(msrest.serialization.Model): +class IntegrationRuntimeListResponse(_serialization.Model): """A list of integration runtime resources. All required parameters must be populated in order to send to Azure. - :ivar value: Required. List of integration runtimes. + :ivar value: List of integration runtimes. Required. 
:vartype value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeResource] :ivar next_link: The link to the next page of results, if any remaining results exist. :vartype next_link: str """ _validation = { - 'value': {'required': True}, + "value": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[IntegrationRuntimeResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[IntegrationRuntimeResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - *, - value: List["_models.IntegrationRuntimeResource"], - next_link: Optional[str] = None, - **kwargs - ): + def __init__(self, *, value: List["_models.IntegrationRuntimeResource"], next_link: Optional[str] = None, **kwargs): """ - :keyword value: Required. List of integration runtimes. + :keyword value: List of integration runtimes. Required. :paramtype value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeResource] :keyword next_link: The link to the next page of results, if any remaining results exist. :paramtype next_link: str """ - super(IntegrationRuntimeListResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.next_link = next_link -class IntegrationRuntimeMonitoringData(msrest.serialization.Model): +class IntegrationRuntimeMonitoringData(_serialization.Model): """Get monitoring data response. :ivar name: Integration runtime name. @@ -31585,8 +33908,8 @@ class IntegrationRuntimeMonitoringData(msrest.serialization.Model): """ _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'nodes': {'key': 'nodes', 'type': '[IntegrationRuntimeNodeMonitoringData]'}, + "name": {"key": "name", "type": "str"}, + "nodes": {"key": "nodes", "type": "[IntegrationRuntimeNodeMonitoringData]"}, } def __init__( @@ -31602,12 +33925,12 @@ def __init__( :keyword nodes: Integration runtime node monitoring data. :paramtype nodes: list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] """ - super(IntegrationRuntimeMonitoringData, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name self.nodes = nodes -class IntegrationRuntimeNodeIpAddress(msrest.serialization.Model): +class IntegrationRuntimeNodeIpAddress(_serialization.Model): """The IP address of self-hosted integration runtime node. Variables are only populated by the server, and will be ignored when sending a request. @@ -31617,31 +33940,27 @@ class IntegrationRuntimeNodeIpAddress(msrest.serialization.Model): """ _validation = { - 'ip_address': {'readonly': True}, + "ip_address": {"readonly": True}, } _attribute_map = { - 'ip_address': {'key': 'ipAddress', 'type': 'str'}, + "ip_address": {"key": "ipAddress", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(IntegrationRuntimeNodeIpAddress, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.ip_address = None -class IntegrationRuntimeNodeMonitoringData(msrest.serialization.Model): +class IntegrationRuntimeNodeMonitoringData(_serialization.Model): """Monitoring data for integration runtime node. Variables are only populated by the server, and will be ignored when sending a request. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar node_name: Name of the integration runtime node. 
:vartype node_name: str :ivar available_memory_in_mb: Available memory (MB) on the integration runtime node. @@ -31662,40 +33981,35 @@ class IntegrationRuntimeNodeMonitoringData(msrest.serialization.Model): """ _validation = { - 'node_name': {'readonly': True}, - 'available_memory_in_mb': {'readonly': True}, - 'cpu_utilization': {'readonly': True}, - 'concurrent_jobs_limit': {'readonly': True}, - 'concurrent_jobs_running': {'readonly': True}, - 'max_concurrent_jobs': {'readonly': True}, - 'sent_bytes': {'readonly': True}, - 'received_bytes': {'readonly': True}, + "node_name": {"readonly": True}, + "available_memory_in_mb": {"readonly": True}, + "cpu_utilization": {"readonly": True}, + "concurrent_jobs_limit": {"readonly": True}, + "concurrent_jobs_running": {"readonly": True}, + "max_concurrent_jobs": {"readonly": True}, + "sent_bytes": {"readonly": True}, + "received_bytes": {"readonly": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'node_name': {'key': 'nodeName', 'type': 'str'}, - 'available_memory_in_mb': {'key': 'availableMemoryInMB', 'type': 'int'}, - 'cpu_utilization': {'key': 'cpuUtilization', 'type': 'int'}, - 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, - 'concurrent_jobs_running': {'key': 'concurrentJobsRunning', 'type': 'int'}, - 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, - 'sent_bytes': {'key': 'sentBytes', 'type': 'float'}, - 'received_bytes': {'key': 'receivedBytes', 'type': 'float'}, + "additional_properties": {"key": "", "type": "{object}"}, + "node_name": {"key": "nodeName", "type": "str"}, + "available_memory_in_mb": {"key": "availableMemoryInMB", "type": "int"}, + "cpu_utilization": {"key": "cpuUtilization", "type": "int"}, + "concurrent_jobs_limit": {"key": "concurrentJobsLimit", "type": "int"}, + "concurrent_jobs_running": {"key": "concurrentJobsRunning", "type": "int"}, + "max_concurrent_jobs": {"key": "maxConcurrentJobs", "type": "int"}, + "sent_bytes": {"key": "sentBytes", "type": "float"}, + "received_bytes": {"key": "receivedBytes", "type": "float"}, } - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - **kwargs - ): + def __init__(self, *, additional_properties: Optional[Dict[str, JSON]] = None, **kwargs): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] """ - super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.node_name = None self.available_memory_in_mb = None @@ -31707,7 +34021,7 @@ def __init__( self.received_bytes = None -class IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint(msrest.serialization.Model): +class IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint(_serialization.Model): """Azure-SSIS integration runtime outbound network dependency endpoints for one category. :ivar category: The category of outbound network dependency. 
@@ -31718,8 +34032,8 @@ class IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint(msrest.seria """ _attribute_map = { - 'category': {'key': 'category', 'type': 'str'}, - 'endpoints': {'key': 'endpoints', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesEndpoint]'}, + "category": {"key": "category", "type": "str"}, + "endpoints": {"key": "endpoints", "type": "[IntegrationRuntimeOutboundNetworkDependenciesEndpoint]"}, } def __init__( @@ -31736,12 +34050,12 @@ def __init__( :paramtype endpoints: list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpoint] """ - super(IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint, self).__init__(**kwargs) + super().__init__(**kwargs) self.category = category self.endpoints = endpoints -class IntegrationRuntimeOutboundNetworkDependenciesEndpoint(msrest.serialization.Model): +class IntegrationRuntimeOutboundNetworkDependenciesEndpoint(_serialization.Model): """The endpoint for Azure-SSIS integration runtime outbound network dependency. :ivar domain_name: The domain name of endpoint. @@ -31752,8 +34066,11 @@ class IntegrationRuntimeOutboundNetworkDependenciesEndpoint(msrest.serialization """ _attribute_map = { - 'domain_name': {'key': 'domainName', 'type': 'str'}, - 'endpoint_details': {'key': 'endpointDetails', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails]'}, + "domain_name": {"key": "domainName", "type": "str"}, + "endpoint_details": { + "key": "endpointDetails", + "type": "[IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails]", + }, } def __init__( @@ -31770,12 +34087,12 @@ def __init__( :paramtype endpoint_details: list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails] """ - super(IntegrationRuntimeOutboundNetworkDependenciesEndpoint, self).__init__(**kwargs) + super().__init__(**kwargs) self.domain_name = domain_name self.endpoint_details = endpoint_details -class IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(msrest.serialization.Model): +class IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(_serialization.Model): """The details of Azure-SSIS integration runtime outbound network dependency endpoint. :ivar port: The port of endpoint. @@ -31783,24 +34100,19 @@ class IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(msrest.serial """ _attribute_map = { - 'port': {'key': 'port', 'type': 'int'}, + "port": {"key": "port", "type": "int"}, } - def __init__( - self, - *, - port: Optional[int] = None, - **kwargs - ): + def __init__(self, *, port: Optional[int] = None, **kwargs): """ :keyword port: The port of endpoint. :paramtype port: int """ - super(IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails, self).__init__(**kwargs) + super().__init__(**kwargs) self.port = port -class IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse(msrest.serialization.Model): +class IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse(_serialization.Model): """Azure-SSIS integration runtime outbound network dependency endpoints. :ivar value: The list of outbound network dependency endpoints. 
@@ -31809,7 +34121,7 @@ class IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse(msrest.seri """ _attribute_map = { - 'value': {'key': 'value', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint]'}, + "value": {"key": "value", "type": "[IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint]"}, } def __init__( @@ -31823,80 +34135,75 @@ def __init__( :paramtype value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint] """ - super(IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value -class IntegrationRuntimeReference(msrest.serialization.Model): +class IntegrationRuntimeReference(_serialization.Model): """Integration runtime reference type. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar type: Type of integration runtime. Has constant value: "IntegrationRuntimeReference". - :vartype type: str - :ivar reference_name: Required. Reference integration runtime name. + :ivar type: Type of integration runtime. Required. "IntegrationRuntimeReference" + :vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeReferenceType + :ivar reference_name: Reference integration runtime name. Required. :vartype reference_name: str :ivar parameters: Arguments for integration runtime. - :vartype parameters: dict[str, any] + :vartype parameters: dict[str, JSON] """ _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, + "type": {"required": True}, + "reference_name": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, + "type": {"key": "type", "type": "str"}, + "reference_name": {"key": "referenceName", "type": "str"}, + "parameters": {"key": "parameters", "type": "{object}"}, } - type = "IntegrationRuntimeReference" - def __init__( self, *, + type: Union[str, "_models.IntegrationRuntimeReferenceType"], reference_name: str, - parameters: Optional[Dict[str, Any]] = None, + parameters: Optional[Dict[str, JSON]] = None, **kwargs ): """ - :keyword reference_name: Required. Reference integration runtime name. + :keyword type: Type of integration runtime. Required. "IntegrationRuntimeReference" + :paramtype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeReferenceType + :keyword reference_name: Reference integration runtime name. Required. :paramtype reference_name: str :keyword parameters: Arguments for integration runtime. - :paramtype parameters: dict[str, any] + :paramtype parameters: dict[str, JSON] """ - super(IntegrationRuntimeReference, self).__init__(**kwargs) + super().__init__(**kwargs) + self.type = type self.reference_name = reference_name self.parameters = parameters -class IntegrationRuntimeRegenerateKeyParameters(msrest.serialization.Model): +class IntegrationRuntimeRegenerateKeyParameters(_serialization.Model): """Parameters to regenerate the authentication key. - :ivar key_name: The name of the authentication key to regenerate. Known values are: "authKey1", - "authKey2". + :ivar key_name: The name of the authentication key to regenerate. Known values are: "authKey1" + and "authKey2". 
:vartype key_name: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName """ _attribute_map = { - 'key_name': {'key': 'keyName', 'type': 'str'}, + "key_name": {"key": "keyName", "type": "str"}, } - def __init__( - self, - *, - key_name: Optional[Union[str, "_models.IntegrationRuntimeAuthKeyName"]] = None, - **kwargs - ): + def __init__(self, *, key_name: Optional[Union[str, "_models.IntegrationRuntimeAuthKeyName"]] = None, **kwargs): """ :keyword key_name: The name of the authentication key to regenerate. Known values are: - "authKey1", "authKey2". + "authKey1" and "authKey2". :paramtype key_name: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName """ - super(IntegrationRuntimeRegenerateKeyParameters, self).__init__(**kwargs) + super().__init__(**kwargs) self.key_name = key_name @@ -31915,46 +34222,41 @@ class IntegrationRuntimeResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :ivar properties: Required. Integration runtime properties. + :ivar properties: Integration runtime properties. Required. :vartype properties: ~azure.mgmt.datafactory.models.IntegrationRuntime """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "etag": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "etag": {"key": "etag", "type": "str"}, + "properties": {"key": "properties", "type": "IntegrationRuntime"}, } - def __init__( - self, - *, - properties: "_models.IntegrationRuntime", - **kwargs - ): + def __init__(self, *, properties: "_models.IntegrationRuntime", **kwargs): """ - :keyword properties: Required. Integration runtime properties. + :keyword properties: Integration runtime properties. Required. :paramtype properties: ~azure.mgmt.datafactory.models.IntegrationRuntime """ - super(IntegrationRuntimeResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): +class IntegrationRuntimeSsisCatalogInfo(_serialization.Model): """Catalog information for managed dedicated integration runtime. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar catalog_server_endpoint: The catalog database server URL. :vartype catalog_server_endpoint: str :ivar catalog_admin_user_name: The administrator user name of catalog database. @@ -31964,7 +34266,7 @@ class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): :vartype catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString :ivar catalog_pricing_tier: The pricing tier for the catalog database. The valid values could be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/. Known values are: - "Basic", "Standard", "Premium", "PremiumRS". + "Basic", "Standard", "Premium", and "PremiumRS". 
:vartype catalog_pricing_tier: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier :ivar dual_standby_pair_name: The dual standby pair name of Azure-SSIS Integration Runtimes to @@ -31973,22 +34275,22 @@ class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): """ _validation = { - 'catalog_admin_user_name': {'max_length': 128, 'min_length': 1}, + "catalog_admin_user_name": {"max_length": 128, "min_length": 1}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'}, - 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, - 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, - 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, - 'dual_standby_pair_name': {'key': 'dualStandbyPairName', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "catalog_server_endpoint": {"key": "catalogServerEndpoint", "type": "str"}, + "catalog_admin_user_name": {"key": "catalogAdminUserName", "type": "str"}, + "catalog_admin_password": {"key": "catalogAdminPassword", "type": "SecureString"}, + "catalog_pricing_tier": {"key": "catalogPricingTier", "type": "str"}, + "dual_standby_pair_name": {"key": "dualStandbyPairName", "type": "str"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, catalog_server_endpoint: Optional[str] = None, catalog_admin_user_name: Optional[str] = None, catalog_admin_password: Optional["_models.SecureString"] = None, @@ -31999,7 +34301,7 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword catalog_server_endpoint: The catalog database server URL. :paramtype catalog_server_endpoint: str :keyword catalog_admin_user_name: The administrator user name of catalog database. @@ -32009,14 +34311,14 @@ def __init__( :paramtype catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString :keyword catalog_pricing_tier: The pricing tier for the catalog database. The valid values could be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/. Known values - are: "Basic", "Standard", "Premium", "PremiumRS". + are: "Basic", "Standard", "Premium", and "PremiumRS". :paramtype catalog_pricing_tier: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier :keyword dual_standby_pair_name: The dual standby pair name of Azure-SSIS Integration Runtimes to support SSISDB failover. :paramtype dual_standby_pair_name: str """ - super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.catalog_server_endpoint = catalog_server_endpoint self.catalog_admin_user_name = catalog_admin_user_name @@ -32025,16 +34327,16 @@ def __init__( self.dual_standby_pair_name = dual_standby_pair_name -class IntegrationRuntimeSsisProperties(msrest.serialization.Model): +class IntegrationRuntimeSsisProperties(_serialization.Model): """SSIS properties for managed integration runtime. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar catalog_info: Catalog information for managed dedicated integration runtime. :vartype catalog_info: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo :ivar license_type: License type for bringing your own license scenario. Known values are: - "BasePrice", "LicenseIncluded". + "BasePrice" and "LicenseIncluded". :vartype license_type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType :ivar custom_setup_script_properties: Custom setup script properties for a managed dedicated integration runtime. @@ -32043,7 +34345,7 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model): :ivar data_proxy_properties: Data proxy properties for a managed dedicated integration runtime. :vartype data_proxy_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties - :ivar edition: The edition for the SSIS Integration Runtime. Known values are: "Standard", + :ivar edition: The edition for the SSIS Integration Runtime. Known values are: "Standard" and "Enterprise". :vartype edition: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition :ivar express_custom_setup_properties: Custom setup without script properties for a SSIS @@ -32056,21 +34358,24 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model): """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'}, - 'license_type': {'key': 'licenseType', 'type': 'str'}, - 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, - 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, - 'edition': {'key': 'edition', 'type': 'str'}, - 'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'}, - 'package_stores': {'key': 'packageStores', 'type': '[PackageStore]'}, - 'credential': {'key': 'credential', 'type': 'CredentialReference'}, + "additional_properties": {"key": "", "type": "{object}"}, + "catalog_info": {"key": "catalogInfo", "type": "IntegrationRuntimeSsisCatalogInfo"}, + "license_type": {"key": "licenseType", "type": "str"}, + "custom_setup_script_properties": { + "key": "customSetupScriptProperties", + "type": "IntegrationRuntimeCustomSetupScriptProperties", + }, + "data_proxy_properties": {"key": "dataProxyProperties", "type": "IntegrationRuntimeDataProxyProperties"}, + "edition": {"key": "edition", "type": "str"}, + "express_custom_setup_properties": {"key": "expressCustomSetupProperties", "type": "[CustomSetupBase]"}, + "package_stores": {"key": "packageStores", "type": "[PackageStore]"}, + "credential": {"key": "credential", "type": "CredentialReference"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, catalog_info: Optional["_models.IntegrationRuntimeSsisCatalogInfo"] = None, license_type: Optional[Union[str, "_models.IntegrationRuntimeLicenseType"]] = None, custom_setup_script_properties: Optional["_models.IntegrationRuntimeCustomSetupScriptProperties"] = None, @@ -32084,11 +34389,11 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword catalog_info: Catalog information for managed dedicated integration runtime. :paramtype catalog_info: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo :keyword license_type: License type for bringing your own license scenario. Known values are: - "BasePrice", "LicenseIncluded". + "BasePrice" and "LicenseIncluded". :paramtype license_type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType :keyword custom_setup_script_properties: Custom setup script properties for a managed dedicated integration runtime. @@ -32098,8 +34403,8 @@ def __init__( runtime. :paramtype data_proxy_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties - :keyword edition: The edition for the SSIS Integration Runtime. Known values are: "Standard", - "Enterprise". + :keyword edition: The edition for the SSIS Integration Runtime. Known values are: "Standard" + and "Enterprise". :paramtype edition: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition :keyword express_custom_setup_properties: Custom setup without script properties for a SSIS integration runtime. @@ -32110,7 +34415,7 @@ def __init__( :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ - super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.catalog_info = catalog_info self.license_type = license_type @@ -32122,11 +34427,11 @@ def __init__( self.credential = credential -class IntegrationRuntimeStatus(msrest.serialization.Model): +class IntegrationRuntimeStatus(_serialization.Model): """Integration runtime status. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ManagedIntegrationRuntimeStatus, SelfHostedIntegrationRuntimeStatus. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ManagedIntegrationRuntimeStatus, SelfHostedIntegrationRuntimeStatus Variables are only populated by the server, and will be ignored when sending a request. @@ -32134,92 +34439,83 @@ class IntegrationRuntimeStatus(msrest.serialization.Model): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of integration runtime.Constant filled by server. Known values are: - "Managed", "SelfHosted". + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of integration runtime. Required. Known values are: "Managed" and + "SelfHosted". :vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :ivar data_factory_name: The data factory name which the integration runtime belong to. :vartype data_factory_name: str :ivar state: The state of integration runtime. Known values are: "Initial", "Stopped", - "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", + "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", and "AccessDenied". 
:vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState """ _validation = { - 'type': {'required': True}, - 'data_factory_name': {'readonly': True}, - 'state': {'readonly': True}, + "type": {"required": True}, + "data_factory_name": {"readonly": True}, + "state": {"readonly": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "data_factory_name": {"key": "dataFactoryName", "type": "str"}, + "state": {"key": "state", "type": "str"}, } _subtype_map = { - 'type': {'Managed': 'ManagedIntegrationRuntimeStatus', 'SelfHosted': 'SelfHostedIntegrationRuntimeStatus'} + "type": {"Managed": "ManagedIntegrationRuntimeStatus", "SelfHosted": "SelfHostedIntegrationRuntimeStatus"} } - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - **kwargs - ): + def __init__(self, *, additional_properties: Optional[Dict[str, JSON]] = None, **kwargs): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] """ - super(IntegrationRuntimeStatus, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties - self.type = 'IntegrationRuntimeStatus' # type: str + self.type = None # type: Optional[str] self.data_factory_name = None self.state = None -class IntegrationRuntimeStatusListResponse(msrest.serialization.Model): +class IntegrationRuntimeStatusListResponse(_serialization.Model): """A list of integration runtime status. All required parameters must be populated in order to send to Azure. - :ivar value: Required. List of integration runtime status. + :ivar value: List of integration runtime status. Required. :vartype value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] :ivar next_link: The link to the next page of results, if any remaining results exist. :vartype next_link: str """ _validation = { - 'value': {'required': True}, + "value": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[IntegrationRuntimeStatusResponse]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[IntegrationRuntimeStatusResponse]"}, + "next_link": {"key": "nextLink", "type": "str"}, } def __init__( - self, - *, - value: List["_models.IntegrationRuntimeStatusResponse"], - next_link: Optional[str] = None, - **kwargs + self, *, value: List["_models.IntegrationRuntimeStatusResponse"], next_link: Optional[str] = None, **kwargs ): """ - :keyword value: Required. List of integration runtime status. + :keyword value: List of integration runtime status. Required. :paramtype value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] :keyword next_link: The link to the next page of results, if any remaining results exist. :paramtype next_link: str """ - super(IntegrationRuntimeStatusListResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.next_link = next_link -class IntegrationRuntimeStatusResponse(msrest.serialization.Model): +class IntegrationRuntimeStatusResponse(_serialization.Model): """Integration runtime status response. 
Variables are only populated by the server, and will be ignored when sending a request. @@ -32228,41 +34524,36 @@ class IntegrationRuntimeStatusResponse(msrest.serialization.Model): :ivar name: The integration runtime name. :vartype name: str - :ivar properties: Required. Integration runtime properties. + :ivar properties: Integration runtime properties. Required. :vartype properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus """ _validation = { - 'name': {'readonly': True}, - 'properties': {'required': True}, + "name": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'IntegrationRuntimeStatus'}, + "name": {"key": "name", "type": "str"}, + "properties": {"key": "properties", "type": "IntegrationRuntimeStatus"}, } - def __init__( - self, - *, - properties: "_models.IntegrationRuntimeStatus", - **kwargs - ): + def __init__(self, *, properties: "_models.IntegrationRuntimeStatus", **kwargs): """ - :keyword properties: Required. Integration runtime properties. + :keyword properties: Integration runtime properties. Required. :paramtype properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus """ - super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = None self.properties = properties -class IntegrationRuntimeVNetProperties(msrest.serialization.Model): +class IntegrationRuntimeVNetProperties(_serialization.Model): """VNet properties for managed integration runtime. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar v_net_id: The ID of the VNet that this integration runtime will join. :vartype v_net_id: str :ivar subnet: The name of the subnet this integration runtime will join. @@ -32275,17 +34566,17 @@ class IntegrationRuntimeVNetProperties(msrest.serialization.Model): """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'v_net_id': {'key': 'vNetId', 'type': 'str'}, - 'subnet': {'key': 'subnet', 'type': 'str'}, - 'public_i_ps': {'key': 'publicIPs', 'type': '[str]'}, - 'subnet_id': {'key': 'subnetId', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "v_net_id": {"key": "vNetId", "type": "str"}, + "subnet": {"key": "subnet", "type": "str"}, + "public_i_ps": {"key": "publicIPs", "type": "[str]"}, + "subnet_id": {"key": "subnetId", "type": "str"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, v_net_id: Optional[str] = None, subnet: Optional[str] = None, public_i_ps: Optional[List[str]] = None, @@ -32295,7 +34586,7 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword v_net_id: The ID of the VNet that this integration runtime will join. :paramtype v_net_id: str :keyword subnet: The name of the subnet this integration runtime will join. @@ -32307,7 +34598,7 @@ def __init__( joined. 
:paramtype subnet_id: str """ - super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.v_net_id = v_net_id self.subnet = subnet @@ -32315,15 +34606,15 @@ def __init__( self.subnet_id = subnet_id -class JiraLinkedService(LinkedService): +class JiraLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Jira Service linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -32332,78 +34623,78 @@ class JiraLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar host: Required. The IP address or host name of the Jira service. (e.g. jira.example.com). - :vartype host: any + :vartype annotations: list[JSON] + :ivar host: The IP address or host name of the Jira service. (e.g. jira.example.com). Required. + :vartype host: JSON :ivar port: The TCP port that the Jira server uses to listen for client connections. The default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. - :vartype port: any - :ivar username: Required. The user name that you use to access Jira Service. - :vartype username: any + :vartype port: JSON + :ivar username: The user name that you use to access Jira Service. Required. + :vartype username: JSON :ivar password: The password corresponding to the user name that you provided in the username field. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :vartype use_encrypted_endpoints: any + :vartype use_encrypted_endpoints: JSON :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :vartype use_host_verification: any + :vartype use_host_verification: JSON :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :vartype use_peer_verification: any + :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'username': {'required': True}, + "type": {"required": True}, + "host": {"required": True}, + "username": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "host": {"key": "typeProperties.host", "type": "object"}, + "port": {"key": "typeProperties.port", "type": "object"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, + "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, + "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - host: Any, - username: Any, - additional_properties: Optional[Dict[str, Any]] = None, + host: JSON, + username: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - port: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + port: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - use_encrypted_endpoints: Optional[Any] = None, - use_host_verification: Optional[Any] = None, - use_peer_verification: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + use_encrypted_endpoints: Optional[JSON] = None, + use_host_verification: Optional[JSON] = None, + use_peer_verification: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -32411,35 +34702,42 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword host: Required. The IP address or host name of the Jira service. (e.g. - jira.example.com). - :paramtype host: any + :paramtype annotations: list[JSON] + :keyword host: The IP address or host name of the Jira service. (e.g. jira.example.com). + Required. + :paramtype host: JSON :keyword port: The TCP port that the Jira server uses to listen for client connections. The default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. - :paramtype port: any - :keyword username: Required. The user name that you use to access Jira Service. - :paramtype username: any + :paramtype port: JSON + :keyword username: The user name that you use to access Jira Service. Required. + :paramtype username: JSON :keyword password: The password corresponding to the user name that you provided in the username field. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :paramtype use_encrypted_endpoints: any + :paramtype use_encrypted_endpoints: JSON :keyword use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :paramtype use_host_verification: any + :paramtype use_host_verification: JSON :keyword use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :paramtype use_peer_verification: any + :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(JiraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Jira' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Jira" # type: str self.host = host self.port = port self.username = username @@ -32457,88 +34755,98 @@ class JiraObjectDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. Type: string (or Expression with resultType string). - :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. 
:paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(JiraObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'JiraObject' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "JiraObject" # type: str self.table_name = table_name @@ -32549,116 +34857,125 @@ class JiraSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'JiraSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "JiraSource" # type: str self.query = query -class JsonDataset(Dataset): +class JsonDataset(Dataset): # pylint: disable=too-many-instance-attributes """Json dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -32669,65 +34986,65 @@ class JsonDataset(Dataset): of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :vartype encoding_name: any + :vartype encoding_name: JSON :ivar compression: The data compression method used for the json dataset. 
:vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "location": {"key": "typeProperties.location", "type": "DatasetLocation"}, + "encoding_name": {"key": "typeProperties.encodingName", "type": "object"}, + "compression": {"key": "typeProperties.compression", "type": "DatasetCompression"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, location: Optional["_models.DatasetLocation"] = None, - encoding_name: Optional[Any] = None, + encoding_name: Optional[JSON] = None, compression: Optional["_models.DatasetCompression"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. 
:paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -32738,12 +35055,22 @@ def __init__( of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :paramtype encoding_name: any + :paramtype encoding_name: JSON :keyword compression: The data compression method used for the json dataset. :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ - super(JsonDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'Json' # type: str + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "Json" # type: str self.location = location self.encoding_name = encoding_name self.compression = compression @@ -32756,97 +35083,99 @@ class JsonFormat(DatasetStorageFormat): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset storage format.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage format. Required. :vartype type: str :ivar serializer: Serializer. Type: string (or Expression with resultType string). - :vartype serializer: any + :vartype serializer: JSON :ivar deserializer: Deserializer. Type: string (or Expression with resultType string). - :vartype deserializer: any + :vartype deserializer: JSON :ivar file_pattern: File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. - :vartype file_pattern: any + :vartype file_pattern: JSON :ivar nesting_separator: The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). - :vartype nesting_separator: any + :vartype nesting_separator: JSON :ivar encoding_name: The code page name of the preferred encoding. If not provided, the default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. The full list of supported values can be found in the 'Name' column of the table of encodings in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or Expression with resultType string). - :vartype encoding_name: any + :vartype encoding_name: JSON :ivar json_node_reference: The JSONPath of the JSON array element to be flattened. Example: "$.ArrayPath". Type: string (or Expression with resultType string). 
- :vartype json_node_reference: any + :vartype json_node_reference: JSON :ivar json_path_definition: The JSONPath definition for each column mapping with a customized column name to extract data from JSON file. For fields under root object, start with "$"; for fields inside the array chosen by jsonNodeReference property, start from the array element. Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object (or Expression with resultType object). - :vartype json_path_definition: any + :vartype json_path_definition: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'file_pattern': {'key': 'filePattern', 'type': 'object'}, - 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, - 'encoding_name': {'key': 'encodingName', 'type': 'object'}, - 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, - 'json_path_definition': {'key': 'jsonPathDefinition', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "serializer": {"key": "serializer", "type": "object"}, + "deserializer": {"key": "deserializer", "type": "object"}, + "file_pattern": {"key": "filePattern", "type": "object"}, + "nesting_separator": {"key": "nestingSeparator", "type": "object"}, + "encoding_name": {"key": "encodingName", "type": "object"}, + "json_node_reference": {"key": "jsonNodeReference", "type": "object"}, + "json_path_definition": {"key": "jsonPathDefinition", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - serializer: Optional[Any] = None, - deserializer: Optional[Any] = None, - file_pattern: Optional[Any] = None, - nesting_separator: Optional[Any] = None, - encoding_name: Optional[Any] = None, - json_node_reference: Optional[Any] = None, - json_path_definition: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + serializer: Optional[JSON] = None, + deserializer: Optional[JSON] = None, + file_pattern: Optional[JSON] = None, + nesting_separator: Optional[JSON] = None, + encoding_name: Optional[JSON] = None, + json_node_reference: Optional[JSON] = None, + json_path_definition: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword serializer: Serializer. Type: string (or Expression with resultType string). - :paramtype serializer: any + :paramtype serializer: JSON :keyword deserializer: Deserializer. Type: string (or Expression with resultType string). - :paramtype deserializer: any + :paramtype deserializer: JSON :keyword file_pattern: File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. - :paramtype file_pattern: any + :paramtype file_pattern: JSON :keyword nesting_separator: The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). 
- :paramtype nesting_separator: any + :paramtype nesting_separator: JSON :keyword encoding_name: The code page name of the preferred encoding. If not provided, the default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. The full list of supported values can be found in the 'Name' column of the table of encodings in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or Expression with resultType string). - :paramtype encoding_name: any + :paramtype encoding_name: JSON :keyword json_node_reference: The JSONPath of the JSON array element to be flattened. Example: "$.ArrayPath". Type: string (or Expression with resultType string). - :paramtype json_node_reference: any + :paramtype json_node_reference: JSON :keyword json_path_definition: The JSONPath definition for each column mapping with a customized column name to extract data from JSON file. For fields under root object, start with "$"; for fields inside the array chosen by jsonNodeReference property, start from the array element. Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object (or Expression with resultType object). - :paramtype json_path_definition: any + :paramtype json_path_definition: JSON """ - super(JsonFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.type = 'JsonFormat' # type: str + super().__init__( + additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs + ) + self.type = "JsonFormat" # type: str self.file_pattern = file_pattern self.nesting_separator = nesting_separator self.encoding_name = encoding_name @@ -32861,39 +35190,39 @@ class JsonReadSettings(FormatReadSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The read setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. :vartype type: str :ivar compression_properties: Compression settings. :vartype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "compression_properties": {"key": "compressionProperties", "type": "CompressionReadSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, compression_properties: Optional["_models.CompressionReadSettings"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword compression_properties: Compression settings. 
:paramtype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings """ - super(JsonReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'JsonReadSettings' # type: str + super().__init__(additional_properties=additional_properties, **kwargs) + self.type = "JsonReadSettings" # type: str self.compression_properties = compression_properties @@ -32904,27 +35233,27 @@ class JsonSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar store_settings: Json store settings. :vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :ivar format_settings: Json format settings. 
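# --- Editor's note (illustrative sketch, not part of the patch): the JsonFormat and
# JsonReadSettings hunks above only describe filePattern, jsonNodeReference and
# jsonPathDefinition in docstrings. A minimal usage sketch, assuming the regenerated
# azure-mgmt-datafactory package is installed; the sample values are the ones quoted
# in the docstrings, not library defaults.
from azure.mgmt.datafactory.models import JsonFormat

json_format = JsonFormat(
    file_pattern="setOfObjects",          # the documented default; case-sensitive
    encoding_name="utf-8",                # code page name of the preferred encoding
    json_node_reference="$.ArrayPath",    # JSONPath of the array element to flatten
    json_path_definition={                # per-column JSONPath mapping
        "Column1": "$.Column1Path",       # field under the root object
        "Column2": "Column2PathInArray",  # field inside the flattened array element
    },
)
print(json_format.type)  # "JsonFormat" -- the discriminator set in __init__
# --- end editor's note ---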
@@ -32932,32 +35261,32 @@ class JsonSink(CopySink): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "store_settings": {"key": "storeSettings", "type": "StoreWriteSettings"}, + "format_settings": {"key": "formatSettings", "type": "JsonWriteSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, store_settings: Optional["_models.StoreWriteSettings"] = None, format_settings: Optional["_models.JsonWriteSettings"] = None, **kwargs @@ -32965,32 +35294,41 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword store_settings: Json store settings. :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :keyword format_settings: Json format settings. :paramtype format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings """ - super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'JsonSink' # type: str + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "JsonSink" # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -33002,85 +35340,92 @@ class JsonSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar store_settings: Json store settings. :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :ivar format_settings: Json format settings. :vartype format_settings: ~azure.mgmt.datafactory.models.JsonReadSettings :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'JsonReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "store_settings": {"key": "storeSettings", "type": "StoreReadSettings"}, + "format_settings": {"key": "formatSettings", "type": "JsonReadSettings"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, store_settings: Optional["_models.StoreReadSettings"] = None, format_settings: Optional["_models.JsonReadSettings"] = None, - additional_columns: Optional[Any] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword store_settings: Json store settings. :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :keyword format_settings: Json format settings. 
:paramtype format_settings: ~azure.mgmt.datafactory.models.JsonReadSettings :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any - """ - super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'JsonSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "JsonSource" # type: str self.store_settings = store_settings self.format_settings = format_settings self.additional_columns = additional_columns @@ -33093,45 +35438,41 @@ class JsonWriteSettings(FormatWriteSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The write setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The write setting type. Required. :vartype type: str :ivar file_pattern: File pattern of JSON. This setting controls the way a collection of JSON objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. - :vartype file_pattern: any + :vartype file_pattern: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "file_pattern": {"key": "filePattern", "type": "object"}, } def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - file_pattern: Optional[Any] = None, - **kwargs + self, *, additional_properties: Optional[Dict[str, JSON]] = None, file_pattern: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword file_pattern: File pattern of JSON. This setting controls the way a collection of JSON objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. - :paramtype file_pattern: any + :paramtype file_pattern: JSON """ - super(JsonWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'JsonWriteSettings' # type: str + super().__init__(additional_properties=additional_properties, **kwargs) + self.type = "JsonWriteSettings" # type: str self.file_pattern = file_pattern -class LinkedIntegrationRuntime(msrest.serialization.Model): +class LinkedIntegrationRuntime(_serialization.Model): """The linked integration runtime information. Variables are only populated by the server, and will be ignored when sending a request. 
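# --- Editor's note (illustrative sketch, not part of the patch): the polymorphic bases
# in this file (IntegrationRuntimeStatus above, LinkedIntegrationRuntimeType below) each
# carry a _subtype_map keyed on a wire-level discriminator. The generated models resolve
# string class names lazily; the self-contained sketch below mirrors the same dispatch
# with hypothetical class names and a registration decorator of my own invention.
from typing import Any, Dict, Type


class RuntimeAuth:
    """Hypothetical polymorphic base mirroring the _subtype_map pattern."""

    _subtype_map: Dict[str, Type["RuntimeAuth"]] = {}

    @classmethod
    def register(cls, discriminator: str):
        def wrap(subclass: Type["RuntimeAuth"]) -> Type["RuntimeAuth"]:
            cls._subtype_map[discriminator] = subclass
            return subclass
        return wrap

    @classmethod
    def from_wire(cls, payload: Dict[str, Any]) -> "RuntimeAuth":
        # Pick the concrete class registered for payload["authorizationType"],
        # falling back to the base class when the value is unknown.
        subclass = cls._subtype_map.get(payload.get("authorizationType", ""), cls)
        return subclass()


@RuntimeAuth.register("Key")
class KeyAuth(RuntimeAuth):
    pass


@RuntimeAuth.register("RBAC")
class RbacAuth(RuntimeAuth):
    pass


print(type(RuntimeAuth.from_wire({"authorizationType": "RBAC"})).__name__)  # RbacAuth
# --- end editor's note ---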
@@ -33151,28 +35492,24 @@ class LinkedIntegrationRuntime(msrest.serialization.Model): """ _validation = { - 'name': {'readonly': True}, - 'subscription_id': {'readonly': True}, - 'data_factory_name': {'readonly': True}, - 'data_factory_location': {'readonly': True}, - 'create_time': {'readonly': True}, + "name": {"readonly": True}, + "subscription_id": {"readonly": True}, + "data_factory_name": {"readonly": True}, + "data_factory_location": {"readonly": True}, + "create_time": {"readonly": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'}, - 'create_time': {'key': 'createTime', 'type': 'iso-8601'}, + "name": {"key": "name", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, + "data_factory_name": {"key": "dataFactoryName", "type": "str"}, + "data_factory_location": {"key": "dataFactoryLocation", "type": "str"}, + "create_time": {"key": "createTime", "type": "iso-8601"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(LinkedIntegrationRuntime, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.name = None self.subscription_id = None self.data_factory_name = None @@ -33180,38 +35517,36 @@ def __init__( self.create_time = None -class LinkedIntegrationRuntimeType(msrest.serialization.Model): +class LinkedIntegrationRuntimeType(_serialization.Model): """The base definition of a linked integration runtime. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: LinkedIntegrationRuntimeKeyAuthorization, LinkedIntegrationRuntimeRbacAuthorization. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + LinkedIntegrationRuntimeKeyAuthorization, LinkedIntegrationRuntimeRbacAuthorization All required parameters must be populated in order to send to Azure. - :ivar authorization_type: Required. The authorization type for integration runtime - sharing.Constant filled by server. + :ivar authorization_type: The authorization type for integration runtime sharing. Required. :vartype authorization_type: str """ _validation = { - 'authorization_type': {'required': True}, + "authorization_type": {"required": True}, } _attribute_map = { - 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + "authorization_type": {"key": "authorizationType", "type": "str"}, } _subtype_map = { - 'authorization_type': {'Key': 'LinkedIntegrationRuntimeKeyAuthorization', 'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization'} + "authorization_type": { + "Key": "LinkedIntegrationRuntimeKeyAuthorization", + "RBAC": "LinkedIntegrationRuntimeRbacAuthorization", + } } - def __init__( - self, - **kwargs - ): - """ - """ - super(LinkedIntegrationRuntimeType, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.authorization_type = None # type: Optional[str] @@ -33220,35 +35555,29 @@ class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): All required parameters must be populated in order to send to Azure. - :ivar authorization_type: Required. The authorization type for integration runtime - sharing.Constant filled by server. + :ivar authorization_type: The authorization type for integration runtime sharing. Required. 
:vartype authorization_type: str - :ivar key: Required. The key used for authorization. + :ivar key: The key used for authorization. Required. :vartype key: ~azure.mgmt.datafactory.models.SecureString """ _validation = { - 'authorization_type': {'required': True}, - 'key': {'required': True}, + "authorization_type": {"required": True}, + "key": {"required": True}, } _attribute_map = { - 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, - 'key': {'key': 'key', 'type': 'SecureString'}, + "authorization_type": {"key": "authorizationType", "type": "str"}, + "key": {"key": "key", "type": "SecureString"}, } - def __init__( - self, - *, - key: "_models.SecureString", - **kwargs - ): + def __init__(self, *, key: "_models.SecureString", **kwargs): """ - :keyword key: Required. The key used for authorization. + :keyword key: The key used for authorization. Required. :paramtype key: ~azure.mgmt.datafactory.models.SecureString """ - super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) - self.authorization_type = 'Key' # type: str + super().__init__(**kwargs) + self.authorization_type = "Key" # type: str self.key = key @@ -33257,74 +35586,62 @@ class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType): All required parameters must be populated in order to send to Azure. - :ivar authorization_type: Required. The authorization type for integration runtime - sharing.Constant filled by server. + :ivar authorization_type: The authorization type for integration runtime sharing. Required. :vartype authorization_type: str - :ivar resource_id: Required. The resource identifier of the integration runtime to be shared. + :ivar resource_id: The resource identifier of the integration runtime to be shared. Required. :vartype resource_id: str :ivar credential: The credential reference containing authentication information. :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { - 'authorization_type': {'required': True}, - 'resource_id': {'required': True}, + "authorization_type": {"required": True}, + "resource_id": {"required": True}, } _attribute_map = { - 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'credential': {'key': 'credential', 'type': 'CredentialReference'}, + "authorization_type": {"key": "authorizationType", "type": "str"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "credential": {"key": "credential", "type": "CredentialReference"}, } - def __init__( - self, - *, - resource_id: str, - credential: Optional["_models.CredentialReference"] = None, - **kwargs - ): + def __init__(self, *, resource_id: str, credential: Optional["_models.CredentialReference"] = None, **kwargs): """ - :keyword resource_id: Required. The resource identifier of the integration runtime to be - shared. + :keyword resource_id: The resource identifier of the integration runtime to be shared. + Required. :paramtype resource_id: str :keyword credential: The credential reference containing authentication information. 
:paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ - super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) - self.authorization_type = 'RBAC' # type: str + super().__init__(**kwargs) + self.authorization_type = "RBAC" # type: str self.resource_id = resource_id self.credential = credential -class LinkedIntegrationRuntimeRequest(msrest.serialization.Model): +class LinkedIntegrationRuntimeRequest(_serialization.Model): """Data factory name for linked integration runtime request. All required parameters must be populated in order to send to Azure. - :ivar linked_factory_name: Required. The data factory name for linked integration runtime. + :ivar linked_factory_name: The data factory name for linked integration runtime. Required. :vartype linked_factory_name: str """ _validation = { - 'linked_factory_name': {'required': True}, + "linked_factory_name": {"required": True}, } _attribute_map = { - 'linked_factory_name': {'key': 'factoryName', 'type': 'str'}, + "linked_factory_name": {"key": "factoryName", "type": "str"}, } - def __init__( - self, - *, - linked_factory_name: str, - **kwargs - ): + def __init__(self, *, linked_factory_name: str, **kwargs): """ - :keyword linked_factory_name: Required. The data factory name for linked integration runtime. + :keyword linked_factory_name: The data factory name for linked integration runtime. Required. :paramtype linked_factory_name: str """ - super(LinkedIntegrationRuntimeRequest, self).__init__(**kwargs) + super().__init__(**kwargs) self.linked_factory_name = linked_factory_name @@ -33335,116 +35652,104 @@ class LinkedServiceDebugResource(SubResourceDebugResource): :ivar name: The resource name. :vartype name: str - :ivar properties: Required. Properties of linked service. + :ivar properties: Properties of linked service. Required. :vartype properties: ~azure.mgmt.datafactory.models.LinkedService """ _validation = { - 'properties': {'required': True}, + "properties": {"required": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'LinkedService'}, + "name": {"key": "name", "type": "str"}, + "properties": {"key": "properties", "type": "LinkedService"}, } - def __init__( - self, - *, - properties: "_models.LinkedService", - name: Optional[str] = None, - **kwargs - ): + def __init__(self, *, properties: "_models.LinkedService", name: Optional[str] = None, **kwargs): """ :keyword name: The resource name. :paramtype name: str - :keyword properties: Required. Properties of linked service. + :keyword properties: Properties of linked service. Required. :paramtype properties: ~azure.mgmt.datafactory.models.LinkedService """ - super(LinkedServiceDebugResource, self).__init__(name=name, **kwargs) + super().__init__(name=name, **kwargs) self.properties = properties -class LinkedServiceListResponse(msrest.serialization.Model): +class LinkedServiceListResponse(_serialization.Model): """A list of linked service resources. All required parameters must be populated in order to send to Azure. - :ivar value: Required. List of linked services. + :ivar value: List of linked services. Required. :vartype value: list[~azure.mgmt.datafactory.models.LinkedServiceResource] :ivar next_link: The link to the next page of results, if any remaining results exist. 
:vartype next_link: str """ _validation = { - 'value': {'required': True}, + "value": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[LinkedServiceResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[LinkedServiceResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - *, - value: List["_models.LinkedServiceResource"], - next_link: Optional[str] = None, - **kwargs - ): + def __init__(self, *, value: List["_models.LinkedServiceResource"], next_link: Optional[str] = None, **kwargs): """ - :keyword value: Required. List of linked services. + :keyword value: List of linked services. Required. :paramtype value: list[~azure.mgmt.datafactory.models.LinkedServiceResource] :keyword next_link: The link to the next page of results, if any remaining results exist. :paramtype next_link: str """ - super(LinkedServiceListResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.next_link = next_link -class LinkedServiceReference(msrest.serialization.Model): +class LinkedServiceReference(_serialization.Model): """Linked service reference type. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar type: Linked service reference type. Has constant value: "LinkedServiceReference". - :vartype type: str - :ivar reference_name: Required. Reference LinkedService name. + :ivar type: Linked service reference type. Required. "LinkedServiceReference" + :vartype type: str or ~azure.mgmt.datafactory.models.Type + :ivar reference_name: Reference LinkedService name. Required. :vartype reference_name: str :ivar parameters: Arguments for LinkedService. - :vartype parameters: dict[str, any] + :vartype parameters: dict[str, JSON] """ _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, + "type": {"required": True}, + "reference_name": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, + "type": {"key": "type", "type": "str"}, + "reference_name": {"key": "referenceName", "type": "str"}, + "parameters": {"key": "parameters", "type": "{object}"}, } - type = "LinkedServiceReference" - def __init__( self, *, + type: Union[str, "_models.Type"], reference_name: str, - parameters: Optional[Dict[str, Any]] = None, + parameters: Optional[Dict[str, JSON]] = None, **kwargs ): """ - :keyword reference_name: Required. Reference LinkedService name. + :keyword type: Linked service reference type. Required. "LinkedServiceReference" + :paramtype type: str or ~azure.mgmt.datafactory.models.Type + :keyword reference_name: Reference LinkedService name. Required. :paramtype reference_name: str :keyword parameters: Arguments for LinkedService. - :paramtype parameters: dict[str, any] + :paramtype parameters: dict[str, JSON] """ - super(LinkedServiceReference, self).__init__(**kwargs) + super().__init__(**kwargs) + self.type = type self.reference_name = reference_name self.parameters = parameters @@ -33464,189 +35769,178 @@ class LinkedServiceResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :ivar properties: Required. Properties of linked service. + :ivar properties: Properties of linked service. Required. 
:vartype properties: ~azure.mgmt.datafactory.models.LinkedService """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "etag": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'LinkedService'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "etag": {"key": "etag", "type": "str"}, + "properties": {"key": "properties", "type": "LinkedService"}, } - def __init__( - self, - *, - properties: "_models.LinkedService", - **kwargs - ): + def __init__(self, *, properties: "_models.LinkedService", **kwargs): """ - :keyword properties: Required. Properties of linked service. + :keyword properties: Properties of linked service. Required. :paramtype properties: ~azure.mgmt.datafactory.models.LinkedService """ - super(LinkedServiceResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class LogLocationSettings(msrest.serialization.Model): +class LogLocationSettings(_serialization.Model): """Log location settings. All required parameters must be populated in order to send to Azure. - :ivar linked_service_name: Required. Log storage linked service reference. + :ivar linked_service_name: Log storage linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). - :vartype path: any + :vartype path: JSON """ _validation = { - 'linked_service_name': {'required': True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'path': {'key': 'path', 'type': 'object'}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "path": {"key": "path", "type": "object"}, } - def __init__( - self, - *, - linked_service_name: "_models.LinkedServiceReference", - path: Optional[Any] = None, - **kwargs - ): + def __init__(self, *, linked_service_name: "_models.LinkedServiceReference", path: Optional[JSON] = None, **kwargs): """ - :keyword linked_service_name: Required. Log storage linked service reference. + :keyword linked_service_name: Log storage linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). - :paramtype path: any + :paramtype path: JSON """ - super(LogLocationSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.linked_service_name = linked_service_name self.path = path -class LogSettings(msrest.serialization.Model): +class LogSettings(_serialization.Model): """Log settings. All required parameters must be populated in order to send to Azure. :ivar enable_copy_activity_log: Specifies whether to enable copy activity log. Type: boolean (or Expression with resultType boolean). 
- :vartype enable_copy_activity_log: any + :vartype enable_copy_activity_log: JSON :ivar copy_activity_log_settings: Specifies settings for copy activity log. :vartype copy_activity_log_settings: ~azure.mgmt.datafactory.models.CopyActivityLogSettings - :ivar log_location_settings: Required. Log location settings customer needs to provide when - enabling log. + :ivar log_location_settings: Log location settings customer needs to provide when enabling log. + Required. :vartype log_location_settings: ~azure.mgmt.datafactory.models.LogLocationSettings """ _validation = { - 'log_location_settings': {'required': True}, + "log_location_settings": {"required": True}, } _attribute_map = { - 'enable_copy_activity_log': {'key': 'enableCopyActivityLog', 'type': 'object'}, - 'copy_activity_log_settings': {'key': 'copyActivityLogSettings', 'type': 'CopyActivityLogSettings'}, - 'log_location_settings': {'key': 'logLocationSettings', 'type': 'LogLocationSettings'}, + "enable_copy_activity_log": {"key": "enableCopyActivityLog", "type": "object"}, + "copy_activity_log_settings": {"key": "copyActivityLogSettings", "type": "CopyActivityLogSettings"}, + "log_location_settings": {"key": "logLocationSettings", "type": "LogLocationSettings"}, } def __init__( self, *, log_location_settings: "_models.LogLocationSettings", - enable_copy_activity_log: Optional[Any] = None, + enable_copy_activity_log: Optional[JSON] = None, copy_activity_log_settings: Optional["_models.CopyActivityLogSettings"] = None, **kwargs ): """ :keyword enable_copy_activity_log: Specifies whether to enable copy activity log. Type: boolean (or Expression with resultType boolean). - :paramtype enable_copy_activity_log: any + :paramtype enable_copy_activity_log: JSON :keyword copy_activity_log_settings: Specifies settings for copy activity log. :paramtype copy_activity_log_settings: ~azure.mgmt.datafactory.models.CopyActivityLogSettings - :keyword log_location_settings: Required. Log location settings customer needs to provide when - enabling log. + :keyword log_location_settings: Log location settings customer needs to provide when enabling + log. Required. :paramtype log_location_settings: ~azure.mgmt.datafactory.models.LogLocationSettings """ - super(LogSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.enable_copy_activity_log = enable_copy_activity_log self.copy_activity_log_settings = copy_activity_log_settings self.log_location_settings = log_location_settings -class LogStorageSettings(msrest.serialization.Model): +class LogStorageSettings(_serialization.Model): """(Deprecated. Please use LogSettings) Log storage settings. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar linked_service_name: Required. Log storage linked service reference. + :vartype additional_properties: dict[str, JSON] + :ivar linked_service_name: Log storage linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). - :vartype path: any + :vartype path: JSON :ivar log_level: Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). 
- :vartype log_level: any + :vartype log_level: JSON :ivar enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). - :vartype enable_reliable_logging: any + :vartype enable_reliable_logging: JSON """ _validation = { - 'linked_service_name': {'required': True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'path': {'key': 'path', 'type': 'object'}, - 'log_level': {'key': 'logLevel', 'type': 'object'}, - 'enable_reliable_logging': {'key': 'enableReliableLogging', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "path": {"key": "path", "type": "object"}, + "log_level": {"key": "logLevel", "type": "object"}, + "enable_reliable_logging": {"key": "enableReliableLogging", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, - path: Optional[Any] = None, - log_level: Optional[Any] = None, - enable_reliable_logging: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + path: Optional[JSON] = None, + log_level: Optional[JSON] = None, + enable_reliable_logging: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword linked_service_name: Required. Log storage linked service reference. + :paramtype additional_properties: dict[str, JSON] + :keyword linked_service_name: Log storage linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). - :paramtype path: any + :paramtype path: JSON :keyword log_level: Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). - :paramtype log_level: any + :paramtype log_level: JSON :keyword enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). - :paramtype enable_reliable_logging: any + :paramtype enable_reliable_logging: JSON """ - super(LogStorageSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.linked_service_name = linked_service_name self.path = path @@ -33654,17 +35948,17 @@ def __init__( self.enable_reliable_logging = enable_reliable_logging -class LookupActivity(ExecutionActivity): +class LookupActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """Lookup activity. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. 
:vartype description: str @@ -33676,34 +35970,34 @@ class LookupActivity(ExecutionActivity): :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar policy: Activity policy. :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :ivar source: Required. Dataset-specific source properties, same as copy activity source. + :ivar source: Dataset-specific source properties, same as copy activity source. Required. :vartype source: ~azure.mgmt.datafactory.models.CopySource - :ivar dataset: Required. Lookup activity dataset reference. + :ivar dataset: Lookup activity dataset reference. Required. :vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference :ivar first_row_only: Whether to return first row or all rows. Default value is true. Type: boolean (or Expression with resultType boolean). - :vartype first_row_only: any + :vartype first_row_only: JSON """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'source': {'required': True}, - 'dataset': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "source": {"required": True}, + "dataset": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - 'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "source": {"key": "typeProperties.source", "type": "CopySource"}, + "dataset": {"key": "typeProperties.dataset", "type": "DatasetReference"}, + "first_row_only": {"key": "typeProperties.firstRowOnly", "type": "object"}, } def __init__( @@ -33712,20 +36006,20 @@ def __init__( name: str, source: "_models.CopySource", dataset: "_models.DatasetReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, - first_row_only: Optional[Any] = None, + first_row_only: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. 
:paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -33737,30 +36031,39 @@ def __init__( :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword policy: Activity policy. :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :keyword source: Required. Dataset-specific source properties, same as copy activity source. + :keyword source: Dataset-specific source properties, same as copy activity source. Required. :paramtype source: ~azure.mgmt.datafactory.models.CopySource - :keyword dataset: Required. Lookup activity dataset reference. + :keyword dataset: Lookup activity dataset reference. Required. :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference :keyword first_row_only: Whether to return first row or all rows. Default value is true. Type: boolean (or Expression with resultType boolean). - :paramtype first_row_only: any - """ - super(LookupActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'Lookup' # type: str + :paramtype first_row_only: JSON + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "Lookup" # type: str self.source = source self.dataset = dataset self.first_row_only = first_row_only -class MagentoLinkedService(LinkedService): +class MagentoLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Magento server linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -33769,67 +36072,67 @@ class MagentoLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar host: Required. The URL of the Magento instance. (i.e. 192.168.222.110/magento3). - :vartype host: any + :vartype annotations: list[JSON] + :ivar host: The URL of the Magento instance. (i.e. 192.168.222.110/magento3). Required. + :vartype host: JSON :ivar access_token: The access token from Magento. :vartype access_token: ~azure.mgmt.datafactory.models.SecretBase :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :vartype use_encrypted_endpoints: any + :vartype use_encrypted_endpoints: JSON :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. 
- :vartype use_host_verification: any + :vartype use_host_verification: JSON :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :vartype use_peer_verification: any + :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'host': {'required': True}, + "type": {"required": True}, + "host": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "host": {"key": "typeProperties.host", "type": "object"}, + "access_token": {"key": "typeProperties.accessToken", "type": "SecretBase"}, + "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, + "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, + "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - host: Any, - additional_properties: Optional[Dict[str, Any]] = None, + host: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, access_token: Optional["_models.SecretBase"] = None, - use_encrypted_endpoints: Optional[Any] = None, - use_host_verification: Optional[Any] = None, - use_peer_verification: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + use_encrypted_endpoints: Optional[JSON] = None, + use_host_verification: Optional[JSON] = None, + use_peer_verification: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -33837,28 +36140,35 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword host: Required. The URL of the Magento instance. (i.e. 192.168.222.110/magento3). - :paramtype host: any + :paramtype annotations: list[JSON] + :keyword host: The URL of the Magento instance. (i.e. 192.168.222.110/magento3). Required. + :paramtype host: JSON :keyword access_token: The access token from Magento. :paramtype access_token: ~azure.mgmt.datafactory.models.SecretBase :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :paramtype use_encrypted_endpoints: any + :paramtype use_encrypted_endpoints: JSON :keyword use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :paramtype use_host_verification: any + :paramtype use_host_verification: JSON :keyword use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :paramtype use_peer_verification: any + :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(MagentoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Magento' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Magento" # type: str self.host = host self.access_token = access_token self.use_encrypted_endpoints = use_encrypted_endpoints @@ -33874,88 +36184,98 @@ class MagentoObjectDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. 
Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. Type: string (or Expression with resultType string). - :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(MagentoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'MagentoObject' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "MagentoObject" # type: str self.table_name = table_name @@ -33966,89 +36286,98 @@ class MagentoSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'MagentoSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "MagentoSource" # type: str self.query = query @@ -34059,51 +36388,53 @@ class ManagedIdentityCredential(Credential): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of credential.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of credential. Required. :vartype type: str :ivar description: Credential description. :vartype description: str :ivar annotations: List of tags that can be used for describing the Credential. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar resource_id: The resource id of user assigned managed identity. :vartype resource_id: str """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'resource_id': {'key': 'typeProperties.resourceId', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "resource_id": {"key": "typeProperties.resourceId", "type": "str"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, resource_id: Optional[str] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Credential description. :paramtype description: str :keyword annotations: List of tags that can be used for describing the Credential. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword resource_id: The resource id of user assigned managed identity. 
:paramtype resource_id: str """ - super(ManagedIdentityCredential, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.type = 'ManagedIdentity' # type: str + super().__init__( + additional_properties=additional_properties, description=description, annotations=annotations, **kwargs + ) + self.type = "ManagedIdentity" # type: str self.resource_id = resource_id @@ -34116,15 +36447,15 @@ class ManagedIntegrationRuntime(IntegrationRuntime): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of integration runtime.Constant filled by server. Known values are: - "Managed", "SelfHosted". + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of integration runtime. Required. Known values are: "Managed" and + "SelfHosted". :vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :ivar description: Integration runtime description. :vartype description: str :ivar state: Integration runtime state, only valid for managed dedicated integration runtime. Known values are: "Initial", "Stopped", "Started", "Starting", "Stopping", "NeedRegistration", - "Online", "Limited", "Offline", "AccessDenied". + "Online", "Limited", "Offline", and "AccessDenied". :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState :ivar managed_virtual_network: Managed Virtual Network reference. :vartype managed_virtual_network: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkReference @@ -34139,25 +36470,31 @@ class ManagedIntegrationRuntime(IntegrationRuntime): """ _validation = { - 'type': {'required': True}, - 'state': {'readonly': True}, + "type": {"required": True}, + "state": {"readonly": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'managed_virtual_network': {'key': 'managedVirtualNetwork', 'type': 'ManagedVirtualNetworkReference'}, - 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, - 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, - 'customer_virtual_network': {'key': 'typeProperties.customerVirtualNetwork', 'type': 'IntegrationRuntimeCustomerVirtualNetwork'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "managed_virtual_network": {"key": "managedVirtualNetwork", "type": "ManagedVirtualNetworkReference"}, + "compute_properties": { + "key": "typeProperties.computeProperties", + "type": "IntegrationRuntimeComputeProperties", + }, + "ssis_properties": {"key": "typeProperties.ssisProperties", "type": "IntegrationRuntimeSsisProperties"}, + "customer_virtual_network": { + "key": "typeProperties.customerVirtualNetwork", + "type": "IntegrationRuntimeCustomerVirtualNetwork", + }, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, managed_virtual_network: Optional["_models.ManagedVirtualNetworkReference"] = None, compute_properties: Optional["_models.IntegrationRuntimeComputeProperties"] = 
None, @@ -34168,7 +36505,7 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Integration runtime description. :paramtype description: str :keyword managed_virtual_network: Managed Virtual Network reference. @@ -34184,8 +36521,8 @@ def __init__( :paramtype customer_virtual_network: ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomerVirtualNetwork """ - super(ManagedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) - self.type = 'Managed' # type: str + super().__init__(additional_properties=additional_properties, description=description, **kwargs) + self.type = "Managed" # type: str self.state = None self.managed_virtual_network = managed_virtual_network self.compute_properties = compute_properties @@ -34193,14 +36530,14 @@ def __init__( self.customer_virtual_network = customer_virtual_network -class ManagedIntegrationRuntimeError(msrest.serialization.Model): +class ManagedIntegrationRuntimeError(_serialization.Model): """Error definition for managed integration runtime. Variables are only populated by the server, and will be ignored when sending a request. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar time: The time when the error occurred. :vartype time: ~datetime.datetime :ivar code: Error code. @@ -34212,32 +36549,27 @@ class ManagedIntegrationRuntimeError(msrest.serialization.Model): """ _validation = { - 'time': {'readonly': True}, - 'code': {'readonly': True}, - 'parameters': {'readonly': True}, - 'message': {'readonly': True}, + "time": {"readonly": True}, + "code": {"readonly": True}, + "parameters": {"readonly": True}, + "message": {"readonly": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'time': {'key': 'time', 'type': 'iso-8601'}, - 'code': {'key': 'code', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '[str]'}, - 'message': {'key': 'message', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "time": {"key": "time", "type": "iso-8601"}, + "code": {"key": "code", "type": "str"}, + "parameters": {"key": "parameters", "type": "[str]"}, + "message": {"key": "message", "type": "str"}, } - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - **kwargs - ): + def __init__(self, *, additional_properties: Optional[Dict[str, JSON]] = None, **kwargs): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] """ - super(ManagedIntegrationRuntimeError, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.time = None self.code = None @@ -34245,64 +36577,64 @@ def __init__( self.message = None -class ManagedIntegrationRuntimeNode(msrest.serialization.Model): +class ManagedIntegrationRuntimeNode(_serialization.Model): """Properties of integration runtime node. Variables are only populated by the server, and will be ignored when sending a request. 
:ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar node_id: The managed integration runtime node id. :vartype node_id: str :ivar status: The managed integration runtime node status. Known values are: "Starting", - "Available", "Recycling", "Unavailable". + "Available", "Recycling", and "Unavailable". :vartype status: str or ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNodeStatus :ivar errors: The errors that occurred on this integration runtime node. :vartype errors: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] """ _validation = { - 'node_id': {'readonly': True}, - 'status': {'readonly': True}, + "node_id": {"readonly": True}, + "status": {"readonly": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'node_id': {'key': 'nodeId', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'errors': {'key': 'errors', 'type': '[ManagedIntegrationRuntimeError]'}, + "additional_properties": {"key": "", "type": "{object}"}, + "node_id": {"key": "nodeId", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "errors": {"key": "errors", "type": "[ManagedIntegrationRuntimeError]"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, errors: Optional[List["_models.ManagedIntegrationRuntimeError"]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword errors: The errors that occurred on this integration runtime node. :paramtype errors: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] """ - super(ManagedIntegrationRuntimeNode, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.node_id = None self.status = None self.errors = errors -class ManagedIntegrationRuntimeOperationResult(msrest.serialization.Model): +class ManagedIntegrationRuntimeOperationResult(_serialization.Model): """Properties of managed integration runtime operation result. Variables are only populated by the server, and will be ignored when sending a request. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar type: The operation type. Could be start or stop. :vartype type: str :ivar start_time: The start time of the operation. 
@@ -34318,36 +36650,31 @@ class ManagedIntegrationRuntimeOperationResult(msrest.serialization.Model): """ _validation = { - 'type': {'readonly': True}, - 'start_time': {'readonly': True}, - 'result': {'readonly': True}, - 'error_code': {'readonly': True}, - 'parameters': {'readonly': True}, - 'activity_id': {'readonly': True}, + "type": {"readonly": True}, + "start_time": {"readonly": True}, + "result": {"readonly": True}, + "error_code": {"readonly": True}, + "parameters": {"readonly": True}, + "activity_id": {"readonly": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'result': {'key': 'result', 'type': 'str'}, - 'error_code': {'key': 'errorCode', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '[str]'}, - 'activity_id': {'key': 'activityId', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "start_time": {"key": "startTime", "type": "iso-8601"}, + "result": {"key": "result", "type": "str"}, + "error_code": {"key": "errorCode", "type": "str"}, + "parameters": {"key": "parameters", "type": "[str]"}, + "activity_id": {"key": "activityId", "type": "str"}, } - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - **kwargs - ): + def __init__(self, *, additional_properties: Optional[Dict[str, JSON]] = None, **kwargs): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] """ - super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.type = None self.start_time = None @@ -34366,14 +36693,14 @@ class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of integration runtime.Constant filled by server. Known values are: - "Managed", "SelfHosted". + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of integration runtime. Required. Known values are: "Managed" and + "SelfHosted". :vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :ivar data_factory_name: The data factory name which the integration runtime belong to. :vartype data_factory_name: str :ivar state: The state of integration runtime. Known values are: "Initial", "Stopped", - "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", + "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", and "AccessDenied". :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState :ivar create_time: The time at which the integration runtime was created, in ISO8601 format. 
@@ -34388,53 +36715,48 @@ class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): """ _validation = { - 'type': {'required': True}, - 'data_factory_name': {'readonly': True}, - 'state': {'readonly': True}, - 'create_time': {'readonly': True}, - 'nodes': {'readonly': True}, - 'other_errors': {'readonly': True}, - 'last_operation': {'readonly': True}, + "type": {"required": True}, + "data_factory_name": {"readonly": True}, + "state": {"readonly": True}, + "create_time": {"readonly": True}, + "nodes": {"readonly": True}, + "other_errors": {"readonly": True}, + "last_operation": {"readonly": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, - 'nodes': {'key': 'typeProperties.nodes', 'type': '[ManagedIntegrationRuntimeNode]'}, - 'other_errors': {'key': 'typeProperties.otherErrors', 'type': '[ManagedIntegrationRuntimeError]'}, - 'last_operation': {'key': 'typeProperties.lastOperation', 'type': 'ManagedIntegrationRuntimeOperationResult'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "data_factory_name": {"key": "dataFactoryName", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "create_time": {"key": "typeProperties.createTime", "type": "iso-8601"}, + "nodes": {"key": "typeProperties.nodes", "type": "[ManagedIntegrationRuntimeNode]"}, + "other_errors": {"key": "typeProperties.otherErrors", "type": "[ManagedIntegrationRuntimeError]"}, + "last_operation": {"key": "typeProperties.lastOperation", "type": "ManagedIntegrationRuntimeOperationResult"}, } - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - **kwargs - ): + def __init__(self, *, additional_properties: Optional[Dict[str, JSON]] = None, **kwargs): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] """ - super(ManagedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'Managed' # type: str + super().__init__(additional_properties=additional_properties, **kwargs) + self.type = "Managed" # type: str self.create_time = None self.nodes = None self.other_errors = None self.last_operation = None -class ManagedPrivateEndpoint(msrest.serialization.Model): +class ManagedPrivateEndpoint(_serialization.Model): """Properties of a managed private endpoint. Variables are only populated by the server, and will be ignored when sending a request. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar connection_state: The managed private endpoint connection state. :vartype connection_state: ~azure.mgmt.datafactory.models.ConnectionStateProperties :ivar fqdns: Fully qualified domain names. 
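ManagedIntegrationRuntimeStatus and the related error/operation-result models above are populated entirely by the service (their fields are readonly), so in practice they come back from the management client rather than being constructed directly. A hedged usage sketch, assuming the package's DataFactoryManagementClient and its integration_runtimes.get_status operation; the subscription, resource group, factory, and runtime names are placeholders.

from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")

# For a managed runtime, the response's .properties is the
# ManagedIntegrationRuntimeStatus model shown in the hunks above.
status = client.integration_runtimes.get_status("<resource-group>", "<factory-name>", "<runtime-name>")
print(status.properties.type, status.properties.state)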
@@ -34451,24 +36773,24 @@ class ManagedPrivateEndpoint(msrest.serialization.Model): """ _validation = { - 'is_reserved': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + "is_reserved": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connection_state': {'key': 'connectionState', 'type': 'ConnectionStateProperties'}, - 'fqdns': {'key': 'fqdns', 'type': '[str]'}, - 'group_id': {'key': 'groupId', 'type': 'str'}, - 'is_reserved': {'key': 'isReserved', 'type': 'bool'}, - 'private_link_resource_id': {'key': 'privateLinkResourceId', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "connection_state": {"key": "connectionState", "type": "ConnectionStateProperties"}, + "fqdns": {"key": "fqdns", "type": "[str]"}, + "group_id": {"key": "groupId", "type": "str"}, + "is_reserved": {"key": "isReserved", "type": "bool"}, + "private_link_resource_id": {"key": "privateLinkResourceId", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connection_state: Optional["_models.ConnectionStateProperties"] = None, fqdns: Optional[List[str]] = None, group_id: Optional[str] = None, @@ -34478,7 +36800,7 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connection_state: The managed private endpoint connection state. :paramtype connection_state: ~azure.mgmt.datafactory.models.ConnectionStateProperties :keyword fqdns: Fully qualified domain names. @@ -34489,7 +36811,7 @@ def __init__( private endpoint is created. :paramtype private_link_resource_id: str """ - super(ManagedPrivateEndpoint, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.connection_state = connection_state self.fqdns = fqdns @@ -34499,40 +36821,36 @@ def __init__( self.provisioning_state = None -class ManagedPrivateEndpointListResponse(msrest.serialization.Model): +class ManagedPrivateEndpointListResponse(_serialization.Model): """A list of managed private endpoint resources. All required parameters must be populated in order to send to Azure. - :ivar value: Required. List of managed private endpoints. + :ivar value: List of managed private endpoints. Required. :vartype value: list[~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource] :ivar next_link: The link to the next page of results, if any remaining results exist. :vartype next_link: str """ _validation = { - 'value': {'required': True}, + "value": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[ManagedPrivateEndpointResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[ManagedPrivateEndpointResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, } def __init__( - self, - *, - value: List["_models.ManagedPrivateEndpointResource"], - next_link: Optional[str] = None, - **kwargs + self, *, value: List["_models.ManagedPrivateEndpointResource"], next_link: Optional[str] = None, **kwargs ): """ - :keyword value: Required. List of managed private endpoints. 
+ :keyword value: List of managed private endpoints. Required. :paramtype value: list[~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource] :keyword next_link: The link to the next page of results, if any remaining results exist. :paramtype next_link: str """ - super(ManagedPrivateEndpointListResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.next_link = next_link @@ -34552,48 +36870,43 @@ class ManagedPrivateEndpointResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :ivar properties: Required. Managed private endpoint properties. + :ivar properties: Managed private endpoint properties. Required. :vartype properties: ~azure.mgmt.datafactory.models.ManagedPrivateEndpoint """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "etag": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'ManagedPrivateEndpoint'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "etag": {"key": "etag", "type": "str"}, + "properties": {"key": "properties", "type": "ManagedPrivateEndpoint"}, } - def __init__( - self, - *, - properties: "_models.ManagedPrivateEndpoint", - **kwargs - ): + def __init__(self, *, properties: "_models.ManagedPrivateEndpoint", **kwargs): """ - :keyword properties: Required. Managed private endpoint properties. + :keyword properties: Managed private endpoint properties. Required. :paramtype properties: ~azure.mgmt.datafactory.models.ManagedPrivateEndpoint """ - super(ManagedPrivateEndpointResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class ManagedVirtualNetwork(msrest.serialization.Model): +class ManagedVirtualNetwork(_serialization.Model): """A managed Virtual Network associated with the Azure Data Factory. Variables are only populated by the server, and will be ignored when sending a request. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar v_net_id: Managed Virtual Network ID. :vartype v_net_id: str :ivar alias: Managed Virtual Network alias. 
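For the managed private endpoint models above, only additional_properties, connection_state, fqdns, group_id, and private_link_resource_id are settable (is_reserved and provisioning_state are server-populated), and the resource wrapper requires a properties value. A minimal construction sketch; the group id, resource id, and FQDN values are placeholders.

from azure.mgmt.datafactory import models as _models

endpoint = _models.ManagedPrivateEndpoint(
    group_id="blob",  # placeholder sub-resource group id
    private_link_resource_id=(
        "/subscriptions/<sub>/resourceGroups/<rg>/providers/"
        "Microsoft.Storage/storageAccounts/<account>"
    ),
    fqdns=["<account>.blob.core.windows.net"],
)
resource = _models.ManagedPrivateEndpointResource(properties=endpoint)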
@@ -34601,108 +36914,94 @@ class ManagedVirtualNetwork(msrest.serialization.Model): """ _validation = { - 'v_net_id': {'readonly': True}, - 'alias': {'readonly': True}, + "v_net_id": {"readonly": True}, + "alias": {"readonly": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'v_net_id': {'key': 'vNetId', 'type': 'str'}, - 'alias': {'key': 'alias', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "v_net_id": {"key": "vNetId", "type": "str"}, + "alias": {"key": "alias", "type": "str"}, } - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - **kwargs - ): + def __init__(self, *, additional_properties: Optional[Dict[str, JSON]] = None, **kwargs): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] """ - super(ManagedVirtualNetwork, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.v_net_id = None self.alias = None -class ManagedVirtualNetworkListResponse(msrest.serialization.Model): +class ManagedVirtualNetworkListResponse(_serialization.Model): """A list of managed Virtual Network resources. All required parameters must be populated in order to send to Azure. - :ivar value: Required. List of managed Virtual Networks. + :ivar value: List of managed Virtual Networks. Required. :vartype value: list[~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource] :ivar next_link: The link to the next page of results, if any remaining results exist. :vartype next_link: str """ _validation = { - 'value': {'required': True}, + "value": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[ManagedVirtualNetworkResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[ManagedVirtualNetworkResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, } def __init__( - self, - *, - value: List["_models.ManagedVirtualNetworkResource"], - next_link: Optional[str] = None, - **kwargs + self, *, value: List["_models.ManagedVirtualNetworkResource"], next_link: Optional[str] = None, **kwargs ): """ - :keyword value: Required. List of managed Virtual Networks. + :keyword value: List of managed Virtual Networks. Required. :paramtype value: list[~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource] :keyword next_link: The link to the next page of results, if any remaining results exist. :paramtype next_link: str """ - super(ManagedVirtualNetworkListResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.next_link = next_link -class ManagedVirtualNetworkReference(msrest.serialization.Model): +class ManagedVirtualNetworkReference(_serialization.Model): """Managed Virtual Network reference type. All required parameters must be populated in order to send to Azure. - :ivar type: Required. Managed Virtual Network reference type. Known values are: - "ManagedVirtualNetworkReference". + :ivar type: Managed Virtual Network reference type. Required. "ManagedVirtualNetworkReference" :vartype type: str or ~azure.mgmt.datafactory.models.ManagedVirtualNetworkReferenceType - :ivar reference_name: Required. Reference ManagedVirtualNetwork name. + :ivar reference_name: Reference ManagedVirtualNetwork name. Required. 
:vartype reference_name: str """ _validation = { - 'type': {'required': True}, - 'reference_name': {'required': True}, + "type": {"required": True}, + "reference_name": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, + "type": {"key": "type", "type": "str"}, + "reference_name": {"key": "referenceName", "type": "str"}, } def __init__( - self, - *, - type: Union[str, "_models.ManagedVirtualNetworkReferenceType"], - reference_name: str, - **kwargs + self, *, type: Union[str, "_models.ManagedVirtualNetworkReferenceType"], reference_name: str, **kwargs ): """ - :keyword type: Required. Managed Virtual Network reference type. Known values are: - "ManagedVirtualNetworkReference". + :keyword type: Managed Virtual Network reference type. Required. + "ManagedVirtualNetworkReference" :paramtype type: str or ~azure.mgmt.datafactory.models.ManagedVirtualNetworkReferenceType - :keyword reference_name: Required. Reference ManagedVirtualNetwork name. + :keyword reference_name: Reference ManagedVirtualNetwork name. Required. :paramtype reference_name: str """ - super(ManagedVirtualNetworkReference, self).__init__(**kwargs) + super().__init__(**kwargs) self.type = type self.reference_name = reference_name @@ -34722,37 +37021,32 @@ class ManagedVirtualNetworkResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :ivar properties: Required. Managed Virtual Network properties. + :ivar properties: Managed Virtual Network properties. Required. :vartype properties: ~azure.mgmt.datafactory.models.ManagedVirtualNetwork """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "etag": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'ManagedVirtualNetwork'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "etag": {"key": "etag", "type": "str"}, + "properties": {"key": "properties", "type": "ManagedVirtualNetwork"}, } - def __init__( - self, - *, - properties: "_models.ManagedVirtualNetwork", - **kwargs - ): + def __init__(self, *, properties: "_models.ManagedVirtualNetwork", **kwargs): """ - :keyword properties: Required. Managed Virtual Network properties. + :keyword properties: Managed Virtual Network properties. Required. :paramtype properties: ~azure.mgmt.datafactory.models.ManagedVirtualNetwork """ - super(ManagedVirtualNetworkResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties @@ -34761,12 +37055,12 @@ class MappingDataFlow(DataFlow): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Type of data flow.Constant filled by server. + :ivar type: Type of data flow. Required. :vartype type: str :ivar description: The description of the data flow. :vartype description: str :ivar annotations: List of tags that can be used for describing the data flow. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this data flow is in. 
If not specified, Data flow will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DataFlowFolder @@ -34783,26 +37077,26 @@ class MappingDataFlow(DataFlow): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, - 'sources': {'key': 'typeProperties.sources', 'type': '[DataFlowSource]'}, - 'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'}, - 'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'}, - 'script': {'key': 'typeProperties.script', 'type': 'str'}, - 'script_lines': {'key': 'typeProperties.scriptLines', 'type': '[str]'}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DataFlowFolder"}, + "sources": {"key": "typeProperties.sources", "type": "[DataFlowSource]"}, + "sinks": {"key": "typeProperties.sinks", "type": "[DataFlowSink]"}, + "transformations": {"key": "typeProperties.transformations", "type": "[Transformation]"}, + "script": {"key": "typeProperties.script", "type": "str"}, + "script_lines": {"key": "typeProperties.scriptLines", "type": "[str]"}, } def __init__( self, *, description: Optional[str] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DataFlowFolder"] = None, sources: Optional[List["_models.DataFlowSource"]] = None, sinks: Optional[List["_models.DataFlowSink"]] = None, @@ -34815,7 +37109,7 @@ def __init__( :keyword description: The description of the data flow. :paramtype description: str :keyword annotations: List of tags that can be used for describing the data flow. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DataFlowFolder @@ -34830,8 +37124,8 @@ def __init__( :keyword script_lines: Data flow script lines. :paramtype script_lines: list[str] """ - super(MappingDataFlow, self).__init__(description=description, annotations=annotations, folder=folder, **kwargs) - self.type = 'MappingDataFlow' # type: str + super().__init__(description=description, annotations=annotations, folder=folder, **kwargs) + self.type = "MappingDataFlow" # type: str self.sources = sources self.sinks = sinks self.transformations = transformations @@ -34846,8 +37140,8 @@ class MariaDBLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -34856,51 +37150,51 @@ class MariaDBLinkedService(LinkedService): :ivar parameters: Parameters for linked service. 
:vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype connection_string: JSON :ivar pwd: The Azure key vault secret reference of password in connection string. :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "pwd": {"key": "typeProperties.pwd", "type": "AzureKeyVaultSecretReference"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_string: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_string: Optional[JSON] = None, pwd: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -34908,19 +37202,26 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. 
- :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype connection_string: JSON :keyword pwd: The Azure key vault secret reference of password in connection string. :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(MariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'MariaDB' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "MariaDB" # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential @@ -34933,89 +37234,98 @@ class MariaDBSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'MariaDBSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "MariaDBSource" # type: str self.query = query @@ -35026,100 +37336,110 @@ class MariaDBTableDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. Type: string (or Expression with resultType string). 
- :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(MariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'MariaDBTable' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "MariaDBTable" # type: str self.table_name = table_name -class MarketoLinkedService(LinkedService): +class MarketoLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Marketo server linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -35128,72 +37448,72 @@ class MarketoLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar endpoint: Required. The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com). - :vartype endpoint: any - :ivar client_id: Required. The client Id of your Marketo service. - :vartype client_id: any + :vartype annotations: list[JSON] + :ivar endpoint: The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com). Required. + :vartype endpoint: JSON + :ivar client_id: The client Id of your Marketo service. Required. + :vartype client_id: JSON :ivar client_secret: The client secret of your Marketo service. :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :vartype use_encrypted_endpoints: any + :vartype use_encrypted_endpoints: JSON :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :vartype use_host_verification: any + :vartype use_host_verification: JSON :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :vartype use_peer_verification: any + :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'client_id': {'required': True}, + "type": {"required": True}, + "endpoint": {"required": True}, + "client_id": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "endpoint": {"key": "typeProperties.endpoint", "type": "object"}, + "client_id": {"key": "typeProperties.clientId", "type": "object"}, + "client_secret": {"key": "typeProperties.clientSecret", "type": "SecretBase"}, + "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, + "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, + "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - endpoint: Any, - client_id: Any, - additional_properties: Optional[Dict[str, Any]] = None, + endpoint: JSON, + client_id: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, client_secret: Optional["_models.SecretBase"] = None, - use_encrypted_endpoints: Optional[Any] = None, - use_host_verification: Optional[Any] = None, - use_peer_verification: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + use_encrypted_endpoints: Optional[JSON] = None, + use_host_verification: Optional[JSON] = None, + use_peer_verification: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. 
:paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -35201,31 +37521,38 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword endpoint: Required. The endpoint of the Marketo server. (i.e. - 123-ABC-321.mktorest.com). - :paramtype endpoint: any - :keyword client_id: Required. The client Id of your Marketo service. - :paramtype client_id: any + :paramtype annotations: list[JSON] + :keyword endpoint: The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com). + Required. + :paramtype endpoint: JSON + :keyword client_id: The client Id of your Marketo service. Required. + :paramtype client_id: JSON :keyword client_secret: The client secret of your Marketo service. :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :paramtype use_encrypted_endpoints: any + :paramtype use_encrypted_endpoints: JSON :keyword use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :paramtype use_host_verification: any + :paramtype use_host_verification: JSON :keyword use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :paramtype use_peer_verification: any + :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(MarketoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Marketo' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Marketo" # type: str self.endpoint = endpoint self.client_id = client_id self.client_secret = client_secret @@ -35242,88 +37569,98 @@ class MarketoObjectDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. 
+ :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. Type: string (or Expression with resultType string). - :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
- :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(MarketoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'MarketoObject' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "MarketoObject" # type: str self.table_name = table_name @@ -35334,133 +37671,136 @@ class MarketoSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'MarketoSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "MarketoSource" # type: str self.query = query -class MetadataItem(msrest.serialization.Model): +class MetadataItem(_serialization.Model): """Specify the name and value of custom metadata item. :ivar name: Metadata item key name. Type: string (or Expression with resultType string). - :vartype name: any + :vartype name: JSON :ivar value: Metadata item value. Type: string (or Expression with resultType string). - :vartype value: any + :vartype value: JSON """ _attribute_map = { - 'name': {'key': 'name', 'type': 'object'}, - 'value': {'key': 'value', 'type': 'object'}, + "name": {"key": "name", "type": "object"}, + "value": {"key": "value", "type": "object"}, } - def __init__( - self, - *, - name: Optional[Any] = None, - value: Optional[Any] = None, - **kwargs - ): + def __init__(self, *, name: Optional[JSON] = None, value: Optional[JSON] = None, **kwargs): """ :keyword name: Metadata item key name. Type: string (or Expression with resultType string). - :paramtype name: any + :paramtype name: JSON :keyword value: Metadata item value. Type: string (or Expression with resultType string). - :paramtype value: any + :paramtype value: JSON """ - super(MetadataItem, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name self.value = value -class MicrosoftAccessLinkedService(LinkedService): +class MicrosoftAccessLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Microsoft Access linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -35469,69 +37809,69 @@ class MicrosoftAccessLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar connection_string: Required. 
The non-access credential portion of the connection string - as well as an optional encrypted credential. Type: string, SecureString or - AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype annotations: list[JSON] + :ivar connection_string: The non-access credential portion of the connection string as well as + an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. + Required. + :vartype connection_string: JSON :ivar authentication_type: Type of authentication used to connect to the Microsoft Access as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). - :vartype authentication_type: any + :vartype authentication_type: JSON :ivar credential: The access credential portion of the connection string specified in driver-specific property-value format. :vartype credential: ~azure.mgmt.datafactory.models.SecretBase :ivar user_name: User name for Basic authentication. Type: string (or Expression with resultType string). - :vartype user_name: any + :vartype user_name: JSON :ivar password: Password for Basic authentication. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, + "type": {"required": True}, + "connection_string": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "object"}, + "credential": {"key": "typeProperties.credential", "type": "SecretBase"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - connection_string: Any, - additional_properties: Optional[Dict[str, Any]] = None, + connection_string: JSON, + additional_properties: 
Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - authentication_type: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + authentication_type: Optional[JSON] = None, credential: Optional["_models.SecretBase"] = None, - user_name: Optional[Any] = None, + user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -35539,30 +37879,37 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword connection_string: Required. The non-access credential portion of the connection - string as well as an optional encrypted credential. Type: string, SecureString or - AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype annotations: list[JSON] + :keyword connection_string: The non-access credential portion of the connection string as well + as an optional encrypted credential. Type: string, SecureString or + AzureKeyVaultSecretReference. Required. + :paramtype connection_string: JSON :keyword authentication_type: Type of authentication used to connect to the Microsoft Access as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). - :paramtype authentication_type: any + :paramtype authentication_type: JSON :keyword credential: The access credential portion of the connection string specified in driver-specific property-value format. :paramtype credential: ~azure.mgmt.datafactory.models.SecretBase :keyword user_name: User name for Basic authentication. Type: string (or Expression with resultType string). - :paramtype user_name: any + :paramtype user_name: JSON :keyword password: Password for Basic authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :paramtype encrypted_credential: any - """ - super(MicrosoftAccessLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'MicrosoftAccess' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "MicrosoftAccess" # type: str self.connection_string = connection_string self.authentication_type = authentication_type self.credential = credential @@ -35578,89 +37925,98 @@ class MicrosoftAccessSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
- :vartype pre_copy_script: any + :vartype pre_copy_script: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "pre_copy_script": {"key": "preCopyScript", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - pre_copy_script: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + pre_copy_script: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. 
Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). - :paramtype pre_copy_script: any - """ - super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'MicrosoftAccessSink' # type: str + :paramtype pre_copy_script: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "MicrosoftAccessSink" # type: str self.pre_copy_script = pre_copy_script @@ -35671,79 +38027,86 @@ class MicrosoftAccessSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query: Database query. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query: Optional[Any] = None, - additional_columns: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query: Database query. Type: string (or Expression with resultType string). - :paramtype query: any + :paramtype query: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :paramtype additional_columns: any - """ - super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'MicrosoftAccessSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "MicrosoftAccessSource" # type: str self.query = query self.additional_columns = additional_columns @@ -35755,90 +38118,100 @@ class MicrosoftAccessTableDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The Microsoft Access table name. Type: string (or Expression with resultType string). 
- :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The Microsoft Access table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'MicrosoftAccessTable' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "MicrosoftAccessTable" # type: str self.table_name = table_name @@ -35849,91 +38222,101 @@ class MongoDbAtlasCollectionDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :ivar collection: Required. The collection name of the MongoDB Atlas database. Type: string (or - Expression with resultType string). - :vartype collection: any + :ivar collection: The collection name of the MongoDB Atlas database. Type: string (or + Expression with resultType string). Required. 
+ :vartype collection: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, + "collection": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "collection": {"key": "typeProperties.collection", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - collection: Any, - additional_properties: Optional[Dict[str, Any]] = None, + collection: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :keyword collection: Required. The collection name of the MongoDB Atlas database. Type: string - (or Expression with resultType string). - :paramtype collection: any - """ - super(MongoDbAtlasCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'MongoDbAtlasCollection' # type: str + :keyword collection: The collection name of the MongoDB Atlas database. Type: string (or + Expression with resultType string). Required. + :paramtype collection: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "MongoDbAtlasCollection" # type: str self.collection = collection @@ -35944,8 +38327,8 @@ class MongoDbAtlasLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -35954,49 +38337,49 @@ class MongoDbAtlasLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar connection_string: Required. The MongoDB Atlas connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or - AzureKeyVaultSecretReference. - :vartype connection_string: any - :ivar database: Required. The name of the MongoDB Atlas database that you want to access. Type: - string (or Expression with resultType string). - :vartype database: any + :vartype annotations: list[JSON] + :ivar connection_string: The MongoDB Atlas connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. + Required. + :vartype connection_string: JSON + :ivar database: The name of the MongoDB Atlas database that you want to access. Type: string + (or Expression with resultType string). Required. 
+ :vartype database: JSON """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - 'database': {'required': True}, + "type": {"required": True}, + "connection_string": {"required": True}, + "database": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "database": {"key": "typeProperties.database", "type": "object"}, } def __init__( self, *, - connection_string: Any, - database: Any, - additional_properties: Optional[Dict[str, Any]] = None, + connection_string: JSON, + database: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -36004,17 +38387,24 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword connection_string: Required. The MongoDB Atlas connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or - AzureKeyVaultSecretReference. - :paramtype connection_string: any - :keyword database: Required. The name of the MongoDB Atlas database that you want to access. - Type: string (or Expression with resultType string). - :paramtype database: any - """ - super(MongoDbAtlasLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'MongoDbAtlas' # type: str + :paramtype annotations: list[JSON] + :keyword connection_string: The MongoDB Atlas connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. + Required. 
+ :paramtype connection_string: JSON + :keyword database: The name of the MongoDB Atlas database that you want to access. Type: string + (or Expression with resultType string). Required. + :paramtype database: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "MongoDbAtlas" # type: str self.connection_string = connection_string self.database = database @@ -36026,204 +38416,220 @@ class MongoDbAtlasSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar write_behavior: Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). 
- :vartype write_behavior: any + :vartype write_behavior: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "write_behavior": {"key": "writeBehavior", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - write_behavior: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + write_behavior: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. 
Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword write_behavior: Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). - :paramtype write_behavior: any - """ - super(MongoDbAtlasSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'MongoDbAtlasSink' # type: str + :paramtype write_behavior: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "MongoDbAtlasSink" # type: str self.write_behavior = write_behavior -class MongoDbAtlasSource(CopySource): +class MongoDbAtlasSource(CopySource): # pylint: disable=too-many-instance-attributes """A copy activity source for a MongoDB Atlas database. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). - :vartype filter: any + :vartype filter: JSON :ivar cursor_methods: Cursor methods for Mongodb query. :vartype cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties :ivar batch_size: Specifies the number of documents to return in each batch of the response from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). 
- :vartype batch_size: any + :vartype batch_size: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'filter': {'key': 'filter', 'type': 'object'}, - 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, - 'batch_size': {'key': 'batchSize', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "filter": {"key": "filter", "type": "object"}, + "cursor_methods": {"key": "cursorMethods", "type": "MongoDbCursorMethodsProperties"}, + "batch_size": {"key": "batchSize", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - filter: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + filter: Optional[JSON] = None, # pylint: disable=redefined-builtin cursor_methods: Optional["_models.MongoDbCursorMethodsProperties"] = None, - batch_size: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, + batch_size: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). - :paramtype filter: any + :paramtype filter: JSON :keyword cursor_methods: Cursor methods for Mongodb query. :paramtype cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties :keyword batch_size: Specifies the number of documents to return in each batch of the response from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). - :paramtype batch_size: any + :paramtype batch_size: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any - """ - super(MongoDbAtlasSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'MongoDbAtlasSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "MongoDbAtlasSource" # type: str self.filter = filter self.cursor_methods = cursor_methods self.batch_size = batch_size @@ -36238,156 +38644,166 @@ class MongoDbCollectionDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :ivar collection_name: Required. The table name of the MongoDB database. Type: string (or - Expression with resultType string). - :vartype collection_name: any + :ivar collection_name: The table name of the MongoDB database. Type: string (or Expression with + resultType string). Required. + :vartype collection_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, + "collection_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "collection_name": {"key": "typeProperties.collectionName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - collection_name: Any, - additional_properties: Optional[Dict[str, Any]] = None, + collection_name: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. 
:paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :keyword collection_name: Required. The table name of the MongoDB database. Type: string (or - Expression with resultType string). - :paramtype collection_name: any - """ - super(MongoDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'MongoDbCollection' # type: str + :keyword collection_name: The table name of the MongoDB database. Type: string (or Expression + with resultType string). Required. + :paramtype collection_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "MongoDbCollection" # type: str self.collection_name = collection_name -class MongoDbCursorMethodsProperties(msrest.serialization.Model): +class MongoDbCursorMethodsProperties(_serialization.Model): """Cursor methods for Mongodb query. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar project: Specifies the fields to return in the documents that match the query filter. To return all fields in the matching documents, omit this parameter. Type: string (or Expression with resultType string). - :vartype project: any + :vartype project: JSON :ivar sort: Specifies the order in which the query returns matching documents. Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). - :vartype sort: any + :vartype sort: JSON :ivar skip: Specifies the how many documents skipped and where MongoDB begins returning results. This approach may be useful in implementing paginated results. Type: integer (or Expression with resultType integer). - :vartype skip: any + :vartype skip: JSON :ivar limit: Specifies the maximum number of documents the server returns. limit() is analogous to the LIMIT statement in a SQL database. Type: integer (or Expression with resultType integer). 
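[Editor's note, illustrative only -- not part of this patch.] A minimal sketch of how the MongoDbCollectionDataset model completed above might be constructed; the dataset values are placeholders, and the LinkedServiceReference keywords are an assumption about that companion model, which this hunk does not show.

# Illustrative sketch; hypothetical names, not part of the recorded diff.
from azure.mgmt.datafactory.models import LinkedServiceReference, MongoDbCollectionDataset

# Assumed LinkedServiceReference signature (reference_name/type keywords).
ls_ref = LinkedServiceReference(type="LinkedServiceReference", reference_name="MyMongoDbLinkedService")

dataset = MongoDbCollectionDataset(
    linked_service_name=ls_ref,   # required, per the _validation map above
    collection_name="orders",     # required; JSON-typed, so a plain string is accepted
    description="Orders collection exposed as an ADF dataset",
)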
- :vartype limit: any + :vartype limit: JSON """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'project': {'key': 'project', 'type': 'object'}, - 'sort': {'key': 'sort', 'type': 'object'}, - 'skip': {'key': 'skip', 'type': 'object'}, - 'limit': {'key': 'limit', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "project": {"key": "project", "type": "object"}, + "sort": {"key": "sort", "type": "object"}, + "skip": {"key": "skip", "type": "object"}, + "limit": {"key": "limit", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - project: Optional[Any] = None, - sort: Optional[Any] = None, - skip: Optional[Any] = None, - limit: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + project: Optional[JSON] = None, + sort: Optional[JSON] = None, + skip: Optional[JSON] = None, + limit: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword project: Specifies the fields to return in the documents that match the query filter. To return all fields in the matching documents, omit this parameter. Type: string (or Expression with resultType string). - :paramtype project: any + :paramtype project: JSON :keyword sort: Specifies the order in which the query returns matching documents. Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). - :paramtype sort: any + :paramtype sort: JSON :keyword skip: Specifies the how many documents skipped and where MongoDB begins returning results. This approach may be useful in implementing paginated results. Type: integer (or Expression with resultType integer). - :paramtype skip: any + :paramtype skip: JSON :keyword limit: Specifies the maximum number of documents the server returns. limit() is analogous to the LIMIT statement in a SQL database. Type: integer (or Expression with resultType integer). - :paramtype limit: any + :paramtype limit: JSON """ - super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.project = project self.sort = sort @@ -36395,15 +38811,15 @@ def __init__( self.limit = limit -class MongoDbLinkedService(LinkedService): +class MongoDbLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Linked service for MongoDb data source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -36412,88 +38828,88 @@ class MongoDbLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar server: Required. 
The IP address or server name of the MongoDB server. Type: string (or - Expression with resultType string). - :vartype server: any + :vartype annotations: list[JSON] + :ivar server: The IP address or server name of the MongoDB server. Type: string (or Expression + with resultType string). Required. + :vartype server: JSON :ivar authentication_type: The authentication type to be used to connect to the MongoDB - database. Known values are: "Basic", "Anonymous". + database. Known values are: "Basic" and "Anonymous". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.MongoDbAuthenticationType - :ivar database_name: Required. The name of the MongoDB database that you want to access. Type: - string (or Expression with resultType string). - :vartype database_name: any + :ivar database_name: The name of the MongoDB database that you want to access. Type: string (or + Expression with resultType string). Required. + :vartype database_name: JSON :ivar username: Username for authentication. Type: string (or Expression with resultType string). - :vartype username: any + :vartype username: JSON :ivar password: Password for authentication. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar auth_source: Database to verify the username and password. Type: string (or Expression with resultType string). - :vartype auth_source: any + :vartype auth_source: JSON :ivar port: The TCP port number that the MongoDB server uses to listen for client connections. The default value is 27017. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype port: any + :vartype port: JSON :ivar enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. Type: boolean (or Expression with resultType boolean). - :vartype enable_ssl: any + :vartype enable_ssl: JSON :ivar allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. Type: boolean (or Expression with resultType boolean). - :vartype allow_self_signed_server_cert: any + :vartype allow_self_signed_server_cert: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database_name': {'required': True}, + "type": {"required": True}, + "server": {"required": True}, + "database_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'database_name': {'key': 'typeProperties.databaseName', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'auth_source': {'key': 'typeProperties.authSource', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "server": {"key": "typeProperties.server", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, + "database_name": {"key": "typeProperties.databaseName", "type": "object"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "auth_source": {"key": "typeProperties.authSource", "type": "object"}, + "port": {"key": "typeProperties.port", "type": "object"}, + "enable_ssl": {"key": "typeProperties.enableSsl", "type": "object"}, + "allow_self_signed_server_cert": {"key": "typeProperties.allowSelfSignedServerCert", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - server: Any, - database_name: Any, - additional_properties: Optional[Dict[str, Any]] = None, + server: JSON, + database_name: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, authentication_type: Optional[Union[str, "_models.MongoDbAuthenticationType"]] = None, - username: Optional[Any] = None, + username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - auth_source: Optional[Any] = None, - port: Optional[Any] = None, - enable_ssl: Optional[Any] = None, - allow_self_signed_server_cert: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + auth_source: Optional[JSON] = None, + port: Optional[JSON] = 
None, + enable_ssl: Optional[JSON] = None, + allow_self_signed_server_cert: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -36501,42 +38917,49 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword server: Required. The IP address or server name of the MongoDB server. Type: string - (or Expression with resultType string). - :paramtype server: any + :paramtype annotations: list[JSON] + :keyword server: The IP address or server name of the MongoDB server. Type: string (or + Expression with resultType string). Required. + :paramtype server: JSON :keyword authentication_type: The authentication type to be used to connect to the MongoDB - database. Known values are: "Basic", "Anonymous". + database. Known values are: "Basic" and "Anonymous". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.MongoDbAuthenticationType - :keyword database_name: Required. The name of the MongoDB database that you want to access. - Type: string (or Expression with resultType string). - :paramtype database_name: any + :keyword database_name: The name of the MongoDB database that you want to access. Type: string + (or Expression with resultType string). Required. + :paramtype database_name: JSON :keyword username: Username for authentication. Type: string (or Expression with resultType string). - :paramtype username: any + :paramtype username: JSON :keyword password: Password for authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword auth_source: Database to verify the username and password. Type: string (or Expression with resultType string). - :paramtype auth_source: any + :paramtype auth_source: JSON :keyword port: The TCP port number that the MongoDB server uses to listen for client connections. The default value is 27017. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype port: any + :paramtype port: JSON :keyword enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. Type: boolean (or Expression with resultType boolean). - :paramtype enable_ssl: any + :paramtype enable_ssl: JSON :keyword allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. Type: boolean (or Expression with resultType boolean). - :paramtype allow_self_signed_server_cert: any + :paramtype allow_self_signed_server_cert: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :paramtype encrypted_credential: any - """ - super(MongoDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'MongoDb' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "MongoDb" # type: str self.server = server self.authentication_type = authentication_type self.database_name = database_name @@ -36556,81 +38979,88 @@ class MongoDbSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query: Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
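[Editor's note, illustrative only -- not part of this patch.] The MongoDbLinkedService keywords enumerated above map onto a constructor call roughly like the following; host, database, and credential values are placeholders, and using SecureString for the password is an assumption (any SecretBase works per the docstring).

# Illustrative sketch; placeholder values, not part of the recorded diff.
from azure.mgmt.datafactory.models import MongoDbLinkedService, SecureString

mongo_ls = MongoDbLinkedService(
    server="mongo.example.internal",           # required
    database_name="inventory",                 # required
    authentication_type="Basic",               # known values per the docstring: "Basic" and "Anonymous"
    username="adf_reader",
    password=SecureString(value="<secret>"),   # assumed SecretBase subclass
    port=27017,
    enable_ssl=True,
)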
- :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query: Optional[Any] = None, - additional_columns: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query: Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). - :paramtype query: any + :paramtype query: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :paramtype additional_columns: any - """ - super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'MongoDbSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "MongoDbSource" # type: str self.query = query self.additional_columns = additional_columns @@ -36642,91 +39072,101 @@ class MongoDbV2CollectionDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :ivar collection: Required. The collection name of the MongoDB database. Type: string (or - Expression with resultType string). - :vartype collection: any + :ivar collection: The collection name of the MongoDB database. Type: string (or Expression with + resultType string). Required. 
+ :vartype collection: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, + "collection": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "collection": {"key": "typeProperties.collection", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - collection: Any, - additional_properties: Optional[Dict[str, Any]] = None, + collection: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :keyword collection: Required. The collection name of the MongoDB database. Type: string (or - Expression with resultType string). - :paramtype collection: any - """ - super(MongoDbV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'MongoDbV2Collection' # type: str + :keyword collection: The collection name of the MongoDB database. Type: string (or Expression + with resultType string). Required. + :paramtype collection: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "MongoDbV2Collection" # type: str self.collection = collection @@ -36737,8 +39177,8 @@ class MongoDbV2LinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -36747,48 +39187,49 @@ class MongoDbV2LinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar connection_string: Required. The MongoDB connection string. Type: string, SecureString or + :vartype annotations: list[JSON] + :ivar connection_string: The MongoDB connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :vartype connection_string: any - :ivar database: Required. The name of the MongoDB database that you want to access. Type: - string (or Expression with resultType string). - :vartype database: any + Required. + :vartype connection_string: JSON + :ivar database: The name of the MongoDB database that you want to access. Type: string (or + Expression with resultType string). Required. 
+ :vartype database: JSON """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - 'database': {'required': True}, + "type": {"required": True}, + "connection_string": {"required": True}, + "database": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "database": {"key": "typeProperties.database", "type": "object"}, } def __init__( self, *, - connection_string: Any, - database: Any, - additional_properties: Optional[Dict[str, Any]] = None, + connection_string: JSON, + database: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -36796,16 +39237,24 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword connection_string: Required. The MongoDB connection string. Type: string, SecureString - or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype connection_string: any - :keyword database: Required. The name of the MongoDB database that you want to access. Type: - string (or Expression with resultType string). - :paramtype database: any - """ - super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'MongoDbV2' # type: str + :paramtype annotations: list[JSON] + :keyword connection_string: The MongoDB connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. + Required. + :paramtype connection_string: JSON + :keyword database: The name of the MongoDB database that you want to access. 
Type: string (or + Expression with resultType string). Required. + :paramtype database: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "MongoDbV2" # type: str self.connection_string = connection_string self.database = database @@ -36817,204 +39266,220 @@ class MongoDbV2Sink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar write_behavior: Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). 
- :vartype write_behavior: any + :vartype write_behavior: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "write_behavior": {"key": "writeBehavior", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - write_behavior: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + write_behavior: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. 
Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword write_behavior: Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). - :paramtype write_behavior: any - """ - super(MongoDbV2Sink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'MongoDbV2Sink' # type: str + :paramtype write_behavior: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "MongoDbV2Sink" # type: str self.write_behavior = write_behavior -class MongoDbV2Source(CopySource): +class MongoDbV2Source(CopySource): # pylint: disable=too-many-instance-attributes """A copy activity source for a MongoDB database. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). - :vartype filter: any + :vartype filter: JSON :ivar cursor_methods: Cursor methods for Mongodb query. :vartype cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties :ivar batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). 
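[Editor's note, illustrative only -- not part of this patch.] For the MongoDbV2Sink completed above, a hedged sketch of its one type-specific knob, write_behavior; the tuning values are placeholders.

# Illustrative sketch; placeholder values, not part of the recorded diff.
from azure.mgmt.datafactory.models import MongoDbV2Sink

sink = MongoDbV2Sink(
    write_behavior="upsert",        # default is "insert"; "upsert" overwrites documents with the same key
    write_batch_size=1000,          # optional JSON-typed tuning knobs
    max_concurrent_connections=4,
)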
- :vartype batch_size: any + :vartype batch_size: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'filter': {'key': 'filter', 'type': 'object'}, - 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, - 'batch_size': {'key': 'batchSize', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "filter": {"key": "filter", "type": "object"}, + "cursor_methods": {"key": "cursorMethods", "type": "MongoDbCursorMethodsProperties"}, + "batch_size": {"key": "batchSize", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - filter: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + filter: Optional[JSON] = None, # pylint: disable=redefined-builtin cursor_methods: Optional["_models.MongoDbCursorMethodsProperties"] = None, - batch_size: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, + batch_size: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). - :paramtype filter: any + :paramtype filter: JSON :keyword cursor_methods: Cursor methods for Mongodb query. :paramtype cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties :keyword batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). - :paramtype batch_size: any + :paramtype batch_size: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any - """ - super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'MongoDbV2Source' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "MongoDbV2Source" # type: str self.filter = filter self.cursor_methods = cursor_methods self.batch_size = batch_size @@ -37029,8 +39494,8 @@ class MySqlLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -37039,51 +39504,51 @@ class MySqlLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. 
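[Editor's note, illustrative only -- not part of this patch.] A sketch combining the MongoDbV2Source completed above with the MongoDbCursorMethodsProperties model defined earlier in this file; the filter, projection, and sort strings are placeholders.

# Illustrative sketch; placeholder values, not part of the recorded diff.
from azure.mgmt.datafactory.models import MongoDbCursorMethodsProperties, MongoDbV2Source

source = MongoDbV2Source(
    filter='{"status": "active"}',          # query-operator filter, JSON-typed
    cursor_methods=MongoDbCursorMethodsProperties(
        project='{"_id": 0, "name": 1}',
        sort='{"name": 1}',
        limit=500,
    ),
    batch_size=100,
    query_timeout="00:10:00",               # matches the documented timespan pattern
)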
- :vartype annotations: list[any] - :ivar connection_string: Required. The connection string. - :vartype connection_string: any + :vartype annotations: list[JSON] + :ivar connection_string: The connection string. Required. + :vartype connection_string: JSON :ivar password: The Azure key vault secret reference of password in connection string. :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, + "type": {"required": True}, + "connection_string": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "AzureKeyVaultSecretReference"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - connection_string: Any, - additional_properties: Optional[Dict[str, Any]] = None, + connection_string: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, password: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -37091,18 +39556,25 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword connection_string: Required. 
The connection string. - :paramtype connection_string: any + :paramtype annotations: list[JSON] + :keyword connection_string: The connection string. Required. + :paramtype connection_string: JSON :keyword password: The Azure key vault secret reference of password in connection string. :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(MySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'MySql' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "MySql" # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential @@ -37115,87 +39587,96 @@ class MySqlSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: Database query. Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: Database query. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'MySqlSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "MySqlSource" # type: str self.query = query @@ -37206,88 +39687,98 @@ class MySqlTableDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The MySQL table name. Type: string (or Expression with resultType string). 
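A minimal sketch combining the MySQL models above; the connection string and query text are placeholders, and the keyword names follow the signatures in this hunk.

    from azure.mgmt.datafactory.models import MySqlLinkedService, MySqlSource

    # Linked service carrying the (placeholder) connection string.
    mysql_ls = MySqlLinkedService(
        connection_string="Server=myserver;Port=3306;Database=mydb;UID=myuser"
    )

    # Copy-activity source running a simple query against that store.
    mysql_source = MySqlSource(query="SELECT * FROM orders", query_timeout="00:10:00")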
- :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The MySQL table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(MySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'MySqlTable' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "MySqlTable" # type: str self.table_name = table_name @@ -37298,8 +39789,8 @@ class NetezzaLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -37308,51 +39799,51 @@ class NetezzaLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype connection_string: JSON :ivar pwd: The Azure key vault secret reference of password in connection string. :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "pwd": {"key": "typeProperties.pwd", "type": "AzureKeyVaultSecretReference"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_string: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_string: Optional[JSON] = None, pwd: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -37360,304 +39851,362 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype connection_string: JSON :keyword pwd: The Azure key vault secret reference of password in connection string. :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :paramtype encrypted_credential: any - """ - super(NetezzaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Netezza' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Netezza" # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential -class NetezzaPartitionSettings(msrest.serialization.Model): +class NetezzaPartitionSettings(_serialization.Model): """The settings that will be leveraged for Netezza source partitioning. :ivar partition_column_name: The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :vartype partition_column_name: any + :vartype partition_column_name: JSON :ivar partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :vartype partition_upper_bound: any + :vartype partition_upper_bound: JSON :ivar partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :vartype partition_lower_bound: any + :vartype partition_lower_bound: JSON """ _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + "partition_column_name": {"key": "partitionColumnName", "type": "object"}, + "partition_upper_bound": {"key": "partitionUpperBound", "type": "object"}, + "partition_lower_bound": {"key": "partitionLowerBound", "type": "object"}, } def __init__( self, *, - partition_column_name: Optional[Any] = None, - partition_upper_bound: Optional[Any] = None, - partition_lower_bound: Optional[Any] = None, + partition_column_name: Optional[JSON] = None, + partition_upper_bound: Optional[JSON] = None, + partition_lower_bound: Optional[JSON] = None, **kwargs ): """ :keyword partition_column_name: The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :paramtype partition_column_name: any + :paramtype partition_column_name: JSON :keyword partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :paramtype partition_upper_bound: any + :paramtype partition_upper_bound: JSON :keyword partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). 
- :paramtype partition_lower_bound: any + :paramtype partition_lower_bound: JSON """ - super(NetezzaPartitionSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.partition_column_name = partition_column_name self.partition_upper_bound = partition_upper_bound self.partition_lower_bound = partition_lower_bound -class NetezzaSource(TabularSource): +class NetezzaSource(TabularSource): # pylint: disable=too-many-instance-attributes """A copy activity Netezza source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON :ivar partition_option: The partition mechanism that will be used for Netezza read in parallel. Possible values include: "None", "DataSlice", "DynamicRange". - :vartype partition_option: any + :vartype partition_option: JSON :ivar partition_settings: The settings that will be leveraged for Netezza source partitioning. 
:vartype partition_settings: ~azure.mgmt.datafactory.models.NetezzaPartitionSettings """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "partition_option": {"key": "partitionOption", "type": "object"}, + "partition_settings": {"key": "partitionSettings", "type": "NetezzaPartitionSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, - partition_option: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, + partition_option: Optional[JSON] = None, partition_settings: Optional["_models.NetezzaPartitionSettings"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any + :paramtype query: JSON :keyword partition_option: The partition mechanism that will be used for Netezza read in parallel. Possible values include: "None", "DataSlice", "DynamicRange". - :paramtype partition_option: any + :paramtype partition_option: JSON :keyword partition_settings: The settings that will be leveraged for Netezza source partitioning. :paramtype partition_settings: ~azure.mgmt.datafactory.models.NetezzaPartitionSettings """ - super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'NetezzaSource' # type: str + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "NetezzaSource" # type: str self.query = query self.partition_option = partition_option self.partition_settings = partition_settings -class NetezzaTableDataset(Dataset): +class NetezzaTableDataset(Dataset): # pylint: disable=too-many-instance-attributes """Netezza dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. 
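A minimal sketch of the range-partitioned Netezza read described above; the column name, bounds, and query are placeholder values, while the keyword names mirror the signatures in this hunk.

    from azure.mgmt.datafactory.models import NetezzaPartitionSettings, NetezzaSource

    # Partition on an integer column between two placeholder bounds.
    netezza_partition = NetezzaPartitionSettings(
        partition_column_name="order_id",
        partition_lower_bound="1",
        partition_upper_bound="1000000",
    )

    netezza_source = NetezzaSource(
        query="SELECT * FROM sales",
        partition_option="DynamicRange",
        partition_settings=netezza_partition,
    )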
:vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :vartype table_name: any + :vartype table_name: JSON :ivar table: The table name of the Netezza. Type: string (or Expression with resultType string). - :vartype table: any + :vartype table: JSON :ivar schema_type_properties_schema: The schema name of the Netezza. Type: string (or Expression with resultType string). - :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, - table: Optional[Any] = None, - schema_type_properties_schema: Optional[Any] = None, + table_name: Optional[JSON] = None, + table: Optional[JSON] = None, + schema_type_properties_schema: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are 
deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: This property will be retired. Please consider using schema + table properties instead. - :paramtype table_name: any + :paramtype table_name: JSON :keyword table: The table name of the Netezza. Type: string (or Expression with resultType string). - :paramtype table: any + :paramtype table: JSON :keyword schema_type_properties_schema: The schema name of the Netezza. Type: string (or Expression with resultType string). - :paramtype schema_type_properties_schema: any - """ - super(NetezzaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'NetezzaTable' # type: str + :paramtype schema_type_properties_schema: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "NetezzaTable" # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema -class ODataLinkedService(LinkedService): +class NotebookParameter(_serialization.Model): + """Notebook parameter. + + :ivar value: Notebook parameter value. Type: string (or Expression with resultType string). + :vartype value: JSON + :ivar type: Notebook parameter type. Known values are: "string", "int", "float", and "bool". + :vartype type: str or ~azure.mgmt.datafactory.models.NotebookParameterType + """ + + _attribute_map = { + "value": {"key": "value", "type": "object"}, + "type": {"key": "type", "type": "str"}, + } + + def __init__( + self, + *, + value: Optional[JSON] = None, + type: Optional[Union[str, "_models.NotebookParameterType"]] = None, + **kwargs + ): + """ + :keyword value: Notebook parameter value. Type: string (or Expression with resultType string). + :paramtype value: JSON + :keyword type: Notebook parameter type. Known values are: "string", "int", "float", and "bool". 
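NotebookParameter is newly added in this regeneration; a minimal construction sketch, where the value is a placeholder and "int" is one of the known type values listed above.

    from azure.mgmt.datafactory.models import NotebookParameter

    # A single integer-typed parameter; the enclosing notebook activity is not shown in this hunk.
    batch_size_param = NotebookParameter(value=500, type="int")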
+ :paramtype type: str or ~azure.mgmt.datafactory.models.NotebookParameterType + """ + super().__init__(**kwargs) + self.value = value + self.type = type + + +class ODataLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Open Data Protocol (OData) linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -37666,36 +40215,37 @@ class ODataLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar url: Required. The URL of the OData service endpoint. Type: string (or Expression with - resultType string). - :vartype url: any + :vartype annotations: list[JSON] + :ivar url: The URL of the OData service endpoint. Type: string (or Expression with resultType + string). Required. + :vartype url: JSON :ivar authentication_type: Type of authentication used to connect to the OData service. Known - values are: "Basic", "Anonymous", "Windows", "AadServicePrincipal", "ManagedServiceIdentity". + values are: "Basic", "Anonymous", "Windows", "AadServicePrincipal", and + "ManagedServiceIdentity". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.ODataAuthenticationType :ivar user_name: User name of the OData service. Type: string (or Expression with resultType string). - :vartype user_name: any + :vartype user_name: JSON :ivar password: Password of the OData service. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). - :vartype auth_headers: any + :vartype auth_headers: JSON :ivar tenant: Specify the tenant information (domain name or tenant ID) under which your application resides. Type: string (or Expression with resultType string). - :vartype tenant: any + :vartype tenant: JSON :ivar service_principal_id: Specify the application id of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). - :vartype service_principal_id: any + :vartype service_principal_id: JSON :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :vartype azure_cloud_type: any + :vartype azure_cloud_type: JSON :ivar aad_resource_id: Specify the resource you are requesting authorization to use Directory. Type: string (or Expression with resultType string). - :vartype aad_resource_id: any + :vartype aad_resource_id: JSON :ivar aad_service_principal_credential_type: Specify the credential type (key or cert) is used - for service principal. Known values are: "ServicePrincipalKey", "ServicePrincipalCert". + for service principal. 
Known values are: "ServicePrincipalKey" and "ServicePrincipalCert". :vartype aad_service_principal_credential_type: str or ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType :ivar service_principal_key: Specify the secret of your application registered in Azure Active @@ -37712,65 +40262,73 @@ class ODataLinkedService(LinkedService): :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'url': {'required': True}, + "type": {"required": True}, + "url": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'auth_headers': {'key': 'typeProperties.authHeaders', 'type': 'object'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, - 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "url": {"key": "typeProperties.url", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "auth_headers": {"key": "typeProperties.authHeaders", "type": "object"}, + "tenant": {"key": "typeProperties.tenant", "type": "object"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "azure_cloud_type": {"key": "typeProperties.azureCloudType", "type": "object"}, + "aad_resource_id": {"key": "typeProperties.aadResourceId", "type": "object"}, + "aad_service_principal_credential_type": { + "key": "typeProperties.aadServicePrincipalCredentialType", + 
"type": "str", + }, + "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, + "service_principal_embedded_cert": {"key": "typeProperties.servicePrincipalEmbeddedCert", "type": "SecretBase"}, + "service_principal_embedded_cert_password": { + "key": "typeProperties.servicePrincipalEmbeddedCertPassword", + "type": "SecretBase", + }, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - url: Any, - additional_properties: Optional[Dict[str, Any]] = None, + url: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, authentication_type: Optional[Union[str, "_models.ODataAuthenticationType"]] = None, - user_name: Optional[Any] = None, + user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - auth_headers: Optional[Any] = None, - tenant: Optional[Any] = None, - service_principal_id: Optional[Any] = None, - azure_cloud_type: Optional[Any] = None, - aad_resource_id: Optional[Any] = None, - aad_service_principal_credential_type: Optional[Union[str, "_models.ODataAadServicePrincipalCredentialType"]] = None, + auth_headers: Optional[JSON] = None, + tenant: Optional[JSON] = None, + service_principal_id: Optional[JSON] = None, + azure_cloud_type: Optional[JSON] = None, + aad_resource_id: Optional[JSON] = None, + aad_service_principal_credential_type: Optional[ + Union[str, "_models.ODataAadServicePrincipalCredentialType"] + ] = None, service_principal_key: Optional["_models.SecretBase"] = None, service_principal_embedded_cert: Optional["_models.SecretBase"] = None, service_principal_embedded_cert_password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -37778,37 +40336,37 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword url: Required. The URL of the OData service endpoint. Type: string (or Expression with - resultType string). - :paramtype url: any + :paramtype annotations: list[JSON] + :keyword url: The URL of the OData service endpoint. Type: string (or Expression with + resultType string). Required. + :paramtype url: JSON :keyword authentication_type: Type of authentication used to connect to the OData service. - Known values are: "Basic", "Anonymous", "Windows", "AadServicePrincipal", + Known values are: "Basic", "Anonymous", "Windows", "AadServicePrincipal", and "ManagedServiceIdentity". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.ODataAuthenticationType :keyword user_name: User name of the OData service. 
Type: string (or Expression with resultType string). - :paramtype user_name: any + :paramtype user_name: JSON :keyword password: Password of the OData service. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). - :paramtype auth_headers: any + :paramtype auth_headers: JSON :keyword tenant: Specify the tenant information (domain name or tenant ID) under which your application resides. Type: string (or Expression with resultType string). - :paramtype tenant: any + :paramtype tenant: JSON :keyword service_principal_id: Specify the application id of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). - :paramtype service_principal_id: any + :paramtype service_principal_id: JSON :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :paramtype azure_cloud_type: any + :paramtype azure_cloud_type: JSON :keyword aad_resource_id: Specify the resource you are requesting authorization to use Directory. Type: string (or Expression with resultType string). - :paramtype aad_resource_id: any + :paramtype aad_resource_id: JSON :keyword aad_service_principal_credential_type: Specify the credential type (key or cert) is - used for service principal. Known values are: "ServicePrincipalKey", "ServicePrincipalCert". + used for service principal. Known values are: "ServicePrincipalKey" and "ServicePrincipalCert". :paramtype aad_service_principal_credential_type: str or ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType :keyword service_principal_key: Specify the secret of your application registered in Azure @@ -37825,10 +40383,17 @@ def __init__( :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(ODataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'OData' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "OData" # type: str self.url = url self.authentication_type = authentication_type self.user_name = user_name @@ -37852,88 +40417,98 @@ class ODataResourceDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar path: The OData resource path. Type: string (or Expression with resultType string). - :vartype path: any + :vartype path: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "path": {"key": "typeProperties.path", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - path: Optional[Any] = None, + path: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword path: The OData resource path. Type: string (or Expression with resultType string). - :paramtype path: any - """ - super(ODataResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'ODataResource' # type: str + :paramtype path: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "ODataResource" # type: str self.path = path @@ -37944,107 +40519,114 @@ class ODataSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query: OData query. For example, "$top=1". Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :vartype http_request_timeout: any + :vartype http_request_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "http_request_timeout": {"key": "httpRequestTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query: Optional[Any] = None, - http_request_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query: Optional[JSON] = None, + http_request_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query: OData query. For example, "$top=1". Type: string (or Expression with resultType string). - :paramtype query: any + :paramtype query: JSON :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype http_request_timeout: any + :paramtype http_request_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any - """ - super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'ODataSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "ODataSource" # type: str self.query = query self.http_request_timeout = http_request_timeout self.additional_columns = additional_columns -class OdbcLinkedService(LinkedService): +class OdbcLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Open Database Connectivity (ODBC) linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -38053,68 +40635,68 @@ class OdbcLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar connection_string: Required. The non-access credential portion of the connection string - as well as an optional encrypted credential. Type: string, SecureString or - AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype annotations: list[JSON] + :ivar connection_string: The non-access credential portion of the connection string as well as + an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. + Required. + :vartype connection_string: JSON :ivar authentication_type: Type of authentication used to connect to the ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). 
- :vartype authentication_type: any + :vartype authentication_type: JSON :ivar credential: The access credential portion of the connection string specified in driver-specific property-value format. :vartype credential: ~azure.mgmt.datafactory.models.SecretBase :ivar user_name: User name for Basic authentication. Type: string (or Expression with resultType string). - :vartype user_name: any + :vartype user_name: JSON :ivar password: Password for Basic authentication. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, + "type": {"required": True}, + "connection_string": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "object"}, + "credential": {"key": "typeProperties.credential", "type": "SecretBase"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - connection_string: Any, - additional_properties: Optional[Dict[str, Any]] = None, + connection_string: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - authentication_type: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + authentication_type: Optional[JSON] = None, credential: Optional["_models.SecretBase"] = None, - user_name: Optional[Any] = None, + user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword 
additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -38122,29 +40704,36 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword connection_string: Required. The non-access credential portion of the connection - string as well as an optional encrypted credential. Type: string, SecureString or - AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype annotations: list[JSON] + :keyword connection_string: The non-access credential portion of the connection string as well + as an optional encrypted credential. Type: string, SecureString or + AzureKeyVaultSecretReference. Required. + :paramtype connection_string: JSON :keyword authentication_type: Type of authentication used to connect to the ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). - :paramtype authentication_type: any + :paramtype authentication_type: JSON :keyword credential: The access credential portion of the connection string specified in driver-specific property-value format. :paramtype credential: ~azure.mgmt.datafactory.models.SecretBase :keyword user_name: User name for Basic authentication. Type: string (or Expression with resultType string). - :paramtype user_name: any + :paramtype user_name: JSON :keyword password: Password for Basic authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(OdbcLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Odbc' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Odbc" # type: str self.connection_string = connection_string self.authentication_type = authentication_type self.credential = credential @@ -38160,89 +40749,98 @@ class OdbcSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). - :vartype pre_copy_script: any + :vartype pre_copy_script: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "pre_copy_script": {"key": "preCopyScript", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - pre_copy_script: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + pre_copy_script: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. 
Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). - :paramtype pre_copy_script: any - """ - super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'OdbcSink' # type: str + :paramtype pre_copy_script: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "OdbcSink" # type: str self.pre_copy_script = pre_copy_script @@ -38253,87 +40851,96 @@ class OdbcSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: Database query. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: Database query. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'OdbcSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "OdbcSource" # type: str self.query = query @@ -38344,204 +40951,224 @@ class OdbcTableDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The ODBC table name. Type: string (or Expression with resultType string). 
- :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The ODBC table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(OdbcTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'OdbcTable' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "OdbcTable" # type: str self.table_name = table_name -class Office365Dataset(Dataset): +class Office365Dataset(Dataset): # pylint: disable=too-many-instance-attributes """The Office365 account. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :ivar table_name: Required. Name of the dataset to extract from Office 365. Type: string (or - Expression with resultType string). - :vartype table_name: any + :ivar table_name: Name of the dataset to extract from Office 365. Type: string (or Expression + with resultType string). Required. + :vartype table_name: JSON :ivar predicate: A predicate expression that can be used to filter the specific rows to extract from Office 365. Type: string (or Expression with resultType string). 
- :vartype predicate: any + :vartype predicate: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'table_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, + "table_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "predicate": {"key": "typeProperties.predicate", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - table_name: Any, - additional_properties: Optional[Dict[str, Any]] = None, + table_name: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - predicate: Optional[Any] = None, + predicate: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. 
- :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :keyword table_name: Required. Name of the dataset to extract from Office 365. Type: string (or - Expression with resultType string). - :paramtype table_name: any + :keyword table_name: Name of the dataset to extract from Office 365. Type: string (or + Expression with resultType string). Required. + :paramtype table_name: JSON :keyword predicate: A predicate expression that can be used to filter the specific rows to extract from Office 365. Type: string (or Expression with resultType string). - :paramtype predicate: any - """ - super(Office365Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'Office365Table' # type: str + :paramtype predicate: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "Office365Table" # type: str self.table_name = table_name self.predicate = predicate -class Office365LinkedService(LinkedService): +class Office365LinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Office365 linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -38550,65 +41177,65 @@ class Office365LinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar office365_tenant_id: Required. Azure tenant ID to which the Office 365 account belongs. - Type: string (or Expression with resultType string). - :vartype office365_tenant_id: any - :ivar service_principal_tenant_id: Required. Specify the tenant information under which your - Azure AD web application resides. Type: string (or Expression with resultType string). - :vartype service_principal_tenant_id: any - :ivar service_principal_id: Required. Specify the application's client ID. Type: string (or - Expression with resultType string). - :vartype service_principal_id: any - :ivar service_principal_key: Required. Specify the application's key. + :vartype annotations: list[JSON] + :ivar office365_tenant_id: Azure tenant ID to which the Office 365 account belongs. Type: + string (or Expression with resultType string). Required. + :vartype office365_tenant_id: JSON + :ivar service_principal_tenant_id: Specify the tenant information under which your Azure AD web + application resides. 
Type: string (or Expression with resultType string). Required. + :vartype service_principal_tenant_id: JSON + :ivar service_principal_id: Specify the application's client ID. Type: string (or Expression + with resultType string). Required. + :vartype service_principal_id: JSON + :ivar service_principal_key: Specify the application's key. Required. :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'office365_tenant_id': {'required': True}, - 'service_principal_tenant_id': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, + "type": {"required": True}, + "office365_tenant_id": {"required": True}, + "service_principal_tenant_id": {"required": True}, + "service_principal_id": {"required": True}, + "service_principal_key": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, - 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "office365_tenant_id": {"key": "typeProperties.office365TenantId", "type": "object"}, + "service_principal_tenant_id": {"key": "typeProperties.servicePrincipalTenantId", "type": "object"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - office365_tenant_id: Any, - service_principal_tenant_id: Any, - service_principal_id: Any, + office365_tenant_id: JSON, + service_principal_tenant_id: JSON, + service_principal_id: JSON, service_principal_key: "_models.SecretBase", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - encrypted_credential: Optional[Any] = None, + annotations: Optional[List[JSON]] = 
None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -38616,25 +41243,32 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword office365_tenant_id: Required. Azure tenant ID to which the Office 365 account - belongs. Type: string (or Expression with resultType string). - :paramtype office365_tenant_id: any - :keyword service_principal_tenant_id: Required. Specify the tenant information under which your - Azure AD web application resides. Type: string (or Expression with resultType string). - :paramtype service_principal_tenant_id: any - :keyword service_principal_id: Required. Specify the application's client ID. Type: string (or - Expression with resultType string). - :paramtype service_principal_id: any - :keyword service_principal_key: Required. Specify the application's key. + :paramtype annotations: list[JSON] + :keyword office365_tenant_id: Azure tenant ID to which the Office 365 account belongs. Type: + string (or Expression with resultType string). Required. + :paramtype office365_tenant_id: JSON + :keyword service_principal_tenant_id: Specify the tenant information under which your Azure AD + web application resides. Type: string (or Expression with resultType string). Required. + :paramtype service_principal_tenant_id: JSON + :keyword service_principal_id: Specify the application's client ID. Type: string (or Expression + with resultType string). Required. + :paramtype service_principal_id: JSON + :keyword service_principal_key: Specify the application's key. Required. :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(Office365LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Office365' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Office365" # type: str self.office365_tenant_id = office365_tenant_id self.service_principal_tenant_id = service_principal_tenant_id self.service_principal_id = service_principal_id @@ -38642,122 +41276,129 @@ def __init__( self.encrypted_credential = encrypted_credential -class Office365Source(CopySource): +class Office365Source(CopySource): # pylint: disable=too-many-instance-attributes """A copy activity source for an Office 365 service. All required parameters must be populated in order to send to Azure. 
:ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar allowed_groups: The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). - :vartype allowed_groups: any + :vartype allowed_groups: JSON :ivar user_scope_filter_uri: The user scope uri. Type: string (or Expression with resultType string). - :vartype user_scope_filter_uri: any + :vartype user_scope_filter_uri: JSON :ivar date_filter_column: The Column to apply the :code:`` and :code:``. Type: string (or Expression with resultType string). - :vartype date_filter_column: any + :vartype date_filter_column: JSON :ivar start_time: Start time of the requested range for this dataset. Type: string (or Expression with resultType string). - :vartype start_time: any + :vartype start_time: JSON :ivar end_time: End time of the requested range for this dataset. Type: string (or Expression with resultType string). - :vartype end_time: any + :vartype end_time: JSON :ivar output_columns: The columns to be read out from the Office 365 table. Type: array of objects (or Expression with resultType array of objects). Example: [ { "name": "Id" }, { "name": "CreatedDateTime" } ]. 
- :vartype output_columns: any + :vartype output_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, - 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, - 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, - 'start_time': {'key': 'startTime', 'type': 'object'}, - 'end_time': {'key': 'endTime', 'type': 'object'}, - 'output_columns': {'key': 'outputColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "allowed_groups": {"key": "allowedGroups", "type": "object"}, + "user_scope_filter_uri": {"key": "userScopeFilterUri", "type": "object"}, + "date_filter_column": {"key": "dateFilterColumn", "type": "object"}, + "start_time": {"key": "startTime", "type": "object"}, + "end_time": {"key": "endTime", "type": "object"}, + "output_columns": {"key": "outputColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - allowed_groups: Optional[Any] = None, - user_scope_filter_uri: Optional[Any] = None, - date_filter_column: Optional[Any] = None, - start_time: Optional[Any] = None, - end_time: Optional[Any] = None, - output_columns: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + allowed_groups: Optional[JSON] = None, + user_scope_filter_uri: Optional[JSON] = None, + date_filter_column: Optional[JSON] = None, + start_time: Optional[JSON] = None, + end_time: Optional[JSON] = None, + output_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword allowed_groups: The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). - :paramtype allowed_groups: any + :paramtype allowed_groups: JSON :keyword user_scope_filter_uri: The user scope uri. Type: string (or Expression with resultType string). - :paramtype user_scope_filter_uri: any + :paramtype user_scope_filter_uri: JSON :keyword date_filter_column: The Column to apply the :code:`` and :code:``. Type: string (or Expression with resultType string). - :paramtype date_filter_column: any + :paramtype date_filter_column: JSON :keyword start_time: Start time of the requested range for this dataset. Type: string (or Expression with resultType string). - :paramtype start_time: any + :paramtype start_time: JSON :keyword end_time: End time of the requested range for this dataset. Type: string (or Expression with resultType string). - :paramtype end_time: any + :paramtype end_time: JSON :keyword output_columns: The columns to be read out from the Office 365 table. Type: array of objects (or Expression with resultType array of objects). Example: [ { "name": "Id" }, { "name": "CreatedDateTime" } ]. - :paramtype output_columns: any - """ - super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'Office365Source' # type: str + :paramtype output_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "Office365Source" # type: str self.allowed_groups = allowed_groups self.user_scope_filter_uri = user_scope_filter_uri self.date_filter_column = date_filter_column @@ -38766,7 +41407,7 @@ def __init__( self.output_columns = output_columns -class Operation(msrest.serialization.Model): +class Operation(_serialization.Model): """Azure Data Factory API operation definition. :ivar name: Operation name: {provider}/{resource}/{operation}. @@ -38780,10 +41421,10 @@ class Operation(msrest.serialization.Model): """ _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'origin': {'key': 'origin', 'type': 'str'}, - 'display': {'key': 'display', 'type': 'OperationDisplay'}, - 'service_specification': {'key': 'properties.serviceSpecification', 'type': 'OperationServiceSpecification'}, + "name": {"key": "name", "type": "str"}, + "origin": {"key": "origin", "type": "str"}, + "display": {"key": "display", "type": "OperationDisplay"}, + "service_specification": {"key": "properties.serviceSpecification", "type": "OperationServiceSpecification"}, } def __init__( @@ -38805,14 +41446,14 @@ def __init__( :keyword service_specification: Details about a service operation. 
:paramtype service_specification: ~azure.mgmt.datafactory.models.OperationServiceSpecification """ - super(Operation, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name self.origin = origin self.display = display self.service_specification = service_specification -class OperationDisplay(msrest.serialization.Model): +class OperationDisplay(_serialization.Model): """Metadata associated with the operation. :ivar description: The description of the operation. @@ -38826,10 +41467,10 @@ class OperationDisplay(msrest.serialization.Model): """ _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "provider": {"key": "provider", "type": "str"}, + "resource": {"key": "resource", "type": "str"}, + "operation": {"key": "operation", "type": "str"}, } def __init__( @@ -38851,14 +41492,14 @@ def __init__( :keyword operation: The type of operation: get, read, delete, etc. :paramtype operation: str """ - super(OperationDisplay, self).__init__(**kwargs) + super().__init__(**kwargs) self.description = description self.provider = provider self.resource = resource self.operation = operation -class OperationListResponse(msrest.serialization.Model): +class OperationListResponse(_serialization.Model): """A list of operations that can be performed by the Data Factory service. :ivar value: List of Data Factory operations supported by the Data Factory resource provider. @@ -38868,17 +41509,11 @@ class OperationListResponse(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': '[Operation]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[Operation]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - *, - value: Optional[List["_models.Operation"]] = None, - next_link: Optional[str] = None, - **kwargs - ): + def __init__(self, *, value: Optional[List["_models.Operation"]] = None, next_link: Optional[str] = None, **kwargs): """ :keyword value: List of Data Factory operations supported by the Data Factory resource provider. @@ -38886,12 +41521,12 @@ def __init__( :keyword next_link: The link to the next page of results, if any remaining results exist. :paramtype next_link: str """ - super(OperationListResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.next_link = next_link -class OperationLogSpecification(msrest.serialization.Model): +class OperationLogSpecification(_serialization.Model): """Details about an operation related to logs. :ivar name: The name of the log category. @@ -38903,9 +41538,9 @@ class OperationLogSpecification(msrest.serialization.Model): """ _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "blob_duration": {"key": "blobDuration", "type": "str"}, } def __init__( @@ -38924,13 +41559,13 @@ def __init__( :keyword blob_duration: Blobs created in the customer storage account, per hour. 
:paramtype blob_duration: str """ - super(OperationLogSpecification, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name self.display_name = display_name self.blob_duration = blob_duration -class OperationMetricAvailability(msrest.serialization.Model): +class OperationMetricAvailability(_serialization.Model): """Defines how often data for a metric becomes available. :ivar time_grain: The granularity for the metric. @@ -38940,29 +41575,23 @@ class OperationMetricAvailability(msrest.serialization.Model): """ _attribute_map = { - 'time_grain': {'key': 'timeGrain', 'type': 'str'}, - 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, + "time_grain": {"key": "timeGrain", "type": "str"}, + "blob_duration": {"key": "blobDuration", "type": "str"}, } - def __init__( - self, - *, - time_grain: Optional[str] = None, - blob_duration: Optional[str] = None, - **kwargs - ): + def __init__(self, *, time_grain: Optional[str] = None, blob_duration: Optional[str] = None, **kwargs): """ :keyword time_grain: The granularity for the metric. :paramtype time_grain: str :keyword blob_duration: Blob created in the customer storage account, per hour. :paramtype blob_duration: str """ - super(OperationMetricAvailability, self).__init__(**kwargs) + super().__init__(**kwargs) self.time_grain = time_grain self.blob_duration = blob_duration -class OperationMetricDimension(msrest.serialization.Model): +class OperationMetricDimension(_serialization.Model): """Defines the metric dimension. :ivar name: The name of the dimension for the metric. @@ -38974,9 +41603,9 @@ class OperationMetricDimension(msrest.serialization.Model): """ _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'to_be_exported_for_shoebox': {'key': 'toBeExportedForShoebox', 'type': 'bool'}, + "name": {"key": "name", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "to_be_exported_for_shoebox": {"key": "toBeExportedForShoebox", "type": "bool"}, } def __init__( @@ -38995,13 +41624,13 @@ def __init__( :keyword to_be_exported_for_shoebox: Whether the dimension should be exported to Azure Monitor. :paramtype to_be_exported_for_shoebox: bool """ - super(OperationMetricDimension, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name self.display_name = display_name self.to_be_exported_for_shoebox = to_be_exported_for_shoebox -class OperationMetricSpecification(msrest.serialization.Model): +class OperationMetricSpecification(_serialization.Model): """Details about an operation related to metrics. :ivar name: The name of the metric. 
@@ -39027,16 +41656,16 @@ class OperationMetricSpecification(msrest.serialization.Model): """ _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'display_description': {'key': 'displayDescription', 'type': 'str'}, - 'unit': {'key': 'unit', 'type': 'str'}, - 'aggregation_type': {'key': 'aggregationType', 'type': 'str'}, - 'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'str'}, - 'source_mdm_account': {'key': 'sourceMdmAccount', 'type': 'str'}, - 'source_mdm_namespace': {'key': 'sourceMdmNamespace', 'type': 'str'}, - 'availabilities': {'key': 'availabilities', 'type': '[OperationMetricAvailability]'}, - 'dimensions': {'key': 'dimensions', 'type': '[OperationMetricDimension]'}, + "name": {"key": "name", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "display_description": {"key": "displayDescription", "type": "str"}, + "unit": {"key": "unit", "type": "str"}, + "aggregation_type": {"key": "aggregationType", "type": "str"}, + "enable_regional_mdm_account": {"key": "enableRegionalMdmAccount", "type": "str"}, + "source_mdm_account": {"key": "sourceMdmAccount", "type": "str"}, + "source_mdm_namespace": {"key": "sourceMdmNamespace", "type": "str"}, + "availabilities": {"key": "availabilities", "type": "[OperationMetricAvailability]"}, + "dimensions": {"key": "dimensions", "type": "[OperationMetricDimension]"}, } def __init__( @@ -39077,7 +41706,7 @@ def __init__( :keyword dimensions: Defines the metric dimension. :paramtype dimensions: list[~azure.mgmt.datafactory.models.OperationMetricDimension] """ - super(OperationMetricSpecification, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name self.display_name = display_name self.display_description = display_description @@ -39090,7 +41719,7 @@ def __init__( self.dimensions = dimensions -class OperationServiceSpecification(msrest.serialization.Model): +class OperationServiceSpecification(_serialization.Model): """Details about a service operation. :ivar log_specifications: Details about operations related to logs. @@ -39101,8 +41730,8 @@ class OperationServiceSpecification(msrest.serialization.Model): """ _attribute_map = { - 'log_specifications': {'key': 'logSpecifications', 'type': '[OperationLogSpecification]'}, - 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecification]'}, + "log_specifications": {"key": "logSpecifications", "type": "[OperationLogSpecification]"}, + "metric_specifications": {"key": "metricSpecifications", "type": "[OperationMetricSpecification]"}, } def __init__( @@ -39119,7 +41748,7 @@ def __init__( :paramtype metric_specifications: list[~azure.mgmt.datafactory.models.OperationMetricSpecification] """ - super(OperationServiceSpecification, self).__init__(**kwargs) + super().__init__(**kwargs) self.log_specifications = log_specifications self.metric_specifications = metric_specifications @@ -39131,8 +41760,8 @@ class OracleCloudStorageLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. 
:vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -39141,10 +41770,10 @@ class OracleCloudStorageLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar access_key_id: The access key identifier of the Oracle Cloud Storage Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :vartype access_key_id: any + :vartype access_key_id: JSON :ivar secret_access_key: The secret access key of the Oracle Cloud Storage Identity and Access Management (IAM) user. :vartype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase @@ -39152,48 +41781,48 @@ class OracleCloudStorageLinkedService(LinkedService): Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). - :vartype service_url: any + :vartype service_url: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, - 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "access_key_id": {"key": "typeProperties.accessKeyId", "type": "object"}, + "secret_access_key": {"key": "typeProperties.secretAccessKey", "type": "SecretBase"}, + "service_url": {"key": "typeProperties.serviceUrl", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - access_key_id: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + access_key_id: Optional[JSON] = None, secret_access_key: Optional["_models.SecretBase"] = None, - service_url: 
Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + service_url: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -39201,10 +41830,10 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword access_key_id: The access key identifier of the Oracle Cloud Storage Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :paramtype access_key_id: any + :paramtype access_key_id: JSON :keyword secret_access_key: The secret access key of the Oracle Cloud Storage Identity and Access Management (IAM) user. :paramtype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase @@ -39212,14 +41841,21 @@ def __init__( Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). - :paramtype service_url: any + :paramtype service_url: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(OracleCloudStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'OracleCloudStorage' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "OracleCloudStorage" # type: str self.access_key_id = access_key_id self.secret_access_key = secret_access_key self.service_url = service_url @@ -39233,199 +41869,206 @@ class OracleCloudStorageLocation(DatasetLocation): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage location. Required. :vartype type: str :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :vartype folder_path: any + :vartype folder_path: JSON :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :vartype file_name: any + :vartype file_name: JSON :ivar bucket_name: Specify the bucketName of Oracle Cloud Storage. Type: string (or Expression with resultType string). - :vartype bucket_name: any + :vartype bucket_name: JSON :ivar version: Specify the version of Oracle Cloud Storage. 
Type: string (or Expression with resultType string). - :vartype version: any + :vartype version: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'bucket_name': {'key': 'bucketName', 'type': 'object'}, - 'version': {'key': 'version', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "folder_path": {"key": "folderPath", "type": "object"}, + "file_name": {"key": "fileName", "type": "object"}, + "bucket_name": {"key": "bucketName", "type": "object"}, + "version": {"key": "version", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - folder_path: Optional[Any] = None, - file_name: Optional[Any] = None, - bucket_name: Optional[Any] = None, - version: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + folder_path: Optional[JSON] = None, + file_name: Optional[JSON] = None, + bucket_name: Optional[JSON] = None, + version: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :paramtype folder_path: any + :paramtype folder_path: JSON :keyword file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :paramtype file_name: any + :paramtype file_name: JSON :keyword bucket_name: Specify the bucketName of Oracle Cloud Storage. Type: string (or Expression with resultType string). - :paramtype bucket_name: any + :paramtype bucket_name: JSON :keyword version: Specify the version of Oracle Cloud Storage. Type: string (or Expression with resultType string). - :paramtype version: any + :paramtype version: JSON """ - super(OracleCloudStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type = 'OracleCloudStorageLocation' # type: str + super().__init__( + additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs + ) + self.type = "OracleCloudStorageLocation" # type: str self.bucket_name = bucket_name self.version = version -class OracleCloudStorageReadSettings(StoreReadSettings): +class OracleCloudStorageReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes """Oracle Cloud Storage read settings. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The read setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. :vartype type: str :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :vartype recursive: any + :vartype recursive: JSON :ivar wildcard_folder_path: Oracle Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). - :vartype wildcard_folder_path: any + :vartype wildcard_folder_path: JSON :ivar wildcard_file_name: Oracle Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). - :vartype wildcard_file_name: any + :vartype wildcard_file_name: JSON :ivar prefix: The prefix filter for the Oracle Cloud Storage object name. Type: string (or Expression with resultType string). - :vartype prefix: any + :vartype prefix: JSON :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :vartype file_list_path: any + :vartype file_list_path: JSON :ivar enable_partition_discovery: Indicates whether to enable partition discovery. :vartype enable_partition_discovery: bool :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :vartype partition_root_path: any + :vartype partition_root_path: JSON :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype delete_files_after_completion: any + :vartype delete_files_after_completion: JSON :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_start: any + :vartype modified_datetime_start: JSON :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
- :vartype modified_datetime_end: any + :vartype modified_datetime_end: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'prefix': {'key': 'prefix', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "recursive": {"key": "recursive", "type": "object"}, + "wildcard_folder_path": {"key": "wildcardFolderPath", "type": "object"}, + "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, + "prefix": {"key": "prefix", "type": "object"}, + "file_list_path": {"key": "fileListPath", "type": "object"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "partition_root_path": {"key": "partitionRootPath", "type": "object"}, + "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, + "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, + "modified_datetime_end": {"key": "modifiedDatetimeEnd", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - recursive: Optional[Any] = None, - wildcard_folder_path: Optional[Any] = None, - wildcard_file_name: Optional[Any] = None, - prefix: Optional[Any] = None, - file_list_path: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + recursive: Optional[JSON] = None, + wildcard_folder_path: Optional[JSON] = None, + wildcard_file_name: Optional[JSON] = None, + prefix: Optional[JSON] = None, + file_list_path: Optional[JSON] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[Any] = None, - delete_files_after_completion: Optional[Any] = None, - modified_datetime_start: Optional[Any] = None, - modified_datetime_end: Optional[Any] = None, + partition_root_path: Optional[JSON] = None, + delete_files_after_completion: Optional[JSON] = None, + modified_datetime_start: Optional[JSON] = None, + modified_datetime_end: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :paramtype recursive: any + :paramtype recursive: JSON :keyword wildcard_folder_path: Oracle Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). - :paramtype wildcard_folder_path: any + :paramtype wildcard_folder_path: JSON :keyword wildcard_file_name: Oracle Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). - :paramtype wildcard_file_name: any + :paramtype wildcard_file_name: JSON :keyword prefix: The prefix filter for the Oracle Cloud Storage object name. Type: string (or Expression with resultType string). - :paramtype prefix: any + :paramtype prefix: JSON :keyword file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :paramtype file_list_path: any + :paramtype file_list_path: JSON :keyword enable_partition_discovery: Indicates whether to enable partition discovery. :paramtype enable_partition_discovery: bool :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :paramtype partition_root_path: any + :paramtype partition_root_path: JSON :keyword delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype delete_files_after_completion: any + :paramtype delete_files_after_completion: JSON :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_start: any + :paramtype modified_datetime_start: JSON :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_end: any - """ - super(OracleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'OracleCloudStorageReadSettings' # type: str + :paramtype modified_datetime_end: JSON + """ + super().__init__( + additional_properties=additional_properties, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "OracleCloudStorageReadSettings" # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -39445,8 +42088,8 @@ class OracleLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -39455,52 +42098,52 @@ class OracleLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype annotations: list[JSON] + :ivar connection_string: The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Required. + :vartype connection_string: JSON :ivar password: The Azure key vault secret reference of password in connection string. :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, + "type": {"required": True}, + "connection_string": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "AzureKeyVaultSecretReference"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - connection_string: Any, - additional_properties: Optional[Dict[str, Any]] = None, + connection_string: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, password: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: 
Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -39508,89 +42151,96 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype annotations: list[JSON] + :keyword connection_string: The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Required. + :paramtype connection_string: JSON :keyword password: The Azure key vault secret reference of password in connection string. :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(OracleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Oracle' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Oracle" # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential -class OraclePartitionSettings(msrest.serialization.Model): +class OraclePartitionSettings(_serialization.Model): """The settings that will be leveraged for Oracle source partitioning. :ivar partition_names: Names of the physical partitions of Oracle table. - :vartype partition_names: any + :vartype partition_names: JSON :ivar partition_column_name: The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :vartype partition_column_name: any + :vartype partition_column_name: JSON :ivar partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :vartype partition_upper_bound: any + :vartype partition_upper_bound: JSON :ivar partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). 
- :vartype partition_lower_bound: any + :vartype partition_lower_bound: JSON """ _attribute_map = { - 'partition_names': {'key': 'partitionNames', 'type': 'object'}, - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + "partition_names": {"key": "partitionNames", "type": "object"}, + "partition_column_name": {"key": "partitionColumnName", "type": "object"}, + "partition_upper_bound": {"key": "partitionUpperBound", "type": "object"}, + "partition_lower_bound": {"key": "partitionLowerBound", "type": "object"}, } def __init__( self, *, - partition_names: Optional[Any] = None, - partition_column_name: Optional[Any] = None, - partition_upper_bound: Optional[Any] = None, - partition_lower_bound: Optional[Any] = None, + partition_names: Optional[JSON] = None, + partition_column_name: Optional[JSON] = None, + partition_upper_bound: Optional[JSON] = None, + partition_lower_bound: Optional[JSON] = None, **kwargs ): """ :keyword partition_names: Names of the physical partitions of Oracle table. - :paramtype partition_names: any + :paramtype partition_names: JSON :keyword partition_column_name: The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :paramtype partition_column_name: any + :paramtype partition_column_name: JSON :keyword partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :paramtype partition_upper_bound: any + :paramtype partition_upper_bound: JSON :keyword partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :paramtype partition_lower_bound: any + :paramtype partition_lower_bound: JSON """ - super(OraclePartitionSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.partition_names = partition_names self.partition_column_name = partition_column_name self.partition_upper_bound = partition_upper_bound self.partition_lower_bound = partition_lower_bound -class OracleServiceCloudLinkedService(LinkedService): +class OracleServiceCloudLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Oracle Service Cloud linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -39599,75 +42249,75 @@ class OracleServiceCloudLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar host: Required. The URL of the Oracle Service Cloud instance. - :vartype host: any - :ivar username: Required. 
The user name that you use to access Oracle Service Cloud server. - :vartype username: any - :ivar password: Required. The password corresponding to the user name that you provided in the - username key. + :vartype annotations: list[JSON] + :ivar host: The URL of the Oracle Service Cloud instance. Required. + :vartype host: JSON + :ivar username: The user name that you use to access Oracle Service Cloud server. Required. + :vartype username: JSON + :ivar password: The password corresponding to the user name that you provided in the username + key. Required. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :vartype use_encrypted_endpoints: any + :vartype use_encrypted_endpoints: JSON :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :vartype use_host_verification: any + :vartype use_host_verification: JSON :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :vartype use_peer_verification: any + :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'username': {'required': True}, - 'password': {'required': True}, + "type": {"required": True}, + "host": {"required": True}, + "username": {"required": True}, + "password": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "host": {"key": "typeProperties.host", "type": "object"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", 
"type": "SecretBase"}, + "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, + "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, + "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - host: Any, - username: Any, + host: JSON, + username: JSON, password: "_models.SecretBase", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - use_encrypted_endpoints: Optional[Any] = None, - use_host_verification: Optional[Any] = None, - use_peer_verification: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + use_encrypted_endpoints: Optional[JSON] = None, + use_host_verification: Optional[JSON] = None, + use_peer_verification: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -39675,32 +42325,39 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword host: Required. The URL of the Oracle Service Cloud instance. - :paramtype host: any - :keyword username: Required. The user name that you use to access Oracle Service Cloud server. - :paramtype username: any - :keyword password: Required. The password corresponding to the user name that you provided in - the username key. + :paramtype annotations: list[JSON] + :keyword host: The URL of the Oracle Service Cloud instance. Required. + :paramtype host: JSON + :keyword username: The user name that you use to access Oracle Service Cloud server. Required. + :paramtype username: JSON + :keyword password: The password corresponding to the user name that you provided in the + username key. Required. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :paramtype use_encrypted_endpoints: any + :paramtype use_encrypted_endpoints: JSON :keyword use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :paramtype use_host_verification: any + :paramtype use_host_verification: JSON :keyword use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. 
Type: boolean (or Expression with resultType boolean). - :paramtype use_peer_verification: any + :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(OracleServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'OracleServiceCloud' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "OracleServiceCloud" # type: str self.host = host self.username = username self.password = password @@ -39717,88 +42374,98 @@ class OracleServiceCloudObjectDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. Type: string (or Expression with resultType string). 
- :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'OracleServiceCloudObject' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "OracleServiceCloudObject" # type: str self.table_name = table_name @@ -39809,89 +42476,98 @@ class OracleServiceCloudSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'OracleServiceCloudSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "OracleServiceCloudSource" # type: str self.query = query @@ -39902,197 +42578,213 @@ class OracleSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). 
- :vartype pre_copy_script: any + :vartype pre_copy_script: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "pre_copy_script": {"key": "preCopyScript", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - pre_copy_script: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + pre_copy_script: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. 
Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :paramtype pre_copy_script: any - """ - super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'OracleSink' # type: str + :paramtype pre_copy_script: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "OracleSink" # type: str self.pre_copy_script = pre_copy_script -class OracleSource(CopySource): +class OracleSource(CopySource): # pylint: disable=too-many-instance-attributes """A copy activity Oracle source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType string). - :vartype oracle_reader_query: any + :vartype oracle_reader_query: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar partition_option: The partition mechanism that will be used for Oracle read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :vartype partition_option: any + :vartype partition_option: JSON :ivar partition_settings: The settings that will be leveraged for Oracle source partitioning. :vartype partition_settings: ~azure.mgmt.datafactory.models.OraclePartitionSettings :ivar additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "oracle_reader_query": {"key": "oracleReaderQuery", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "partition_option": {"key": "partitionOption", "type": "object"}, + "partition_settings": {"key": "partitionSettings", "type": "OraclePartitionSettings"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - oracle_reader_query: Optional[Any] = None, - query_timeout: Optional[Any] = None, - partition_option: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + oracle_reader_query: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + partition_option: Optional[JSON] = None, partition_settings: Optional["_models.OraclePartitionSettings"] = None, - additional_columns: Optional[Any] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType string). - :paramtype oracle_reader_query: any + :paramtype oracle_reader_query: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword partition_option: The partition mechanism that will be used for Oracle read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :paramtype partition_option: any + :paramtype partition_option: JSON :keyword partition_settings: The settings that will be leveraged for Oracle source partitioning. :paramtype partition_settings: ~azure.mgmt.datafactory.models.OraclePartitionSettings :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any - """ - super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'OracleSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "OracleSource" # type: str self.oracle_reader_query = oracle_reader_query self.query_timeout = query_timeout self.partition_option = partition_option @@ -40100,142 +42792,152 @@ def __init__( self.additional_columns = additional_columns -class OracleTableDataset(Dataset): +class OracleTableDataset(Dataset): # pylint: disable=too-many-instance-attributes """The on-premises Oracle database dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. 
:vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :vartype table_name: any + :vartype table_name: JSON :ivar schema_type_properties_schema: The schema name of the on-premises Oracle database. Type: string (or Expression with resultType string). - :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON :ivar table: The table name of the on-premises Oracle database. Type: string (or Expression with resultType string). - :vartype table: any + :vartype table: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, - schema_type_properties_schema: Optional[Any] = None, - table: Optional[Any] = None, + table_name: Optional[JSON] = None, + 
schema_type_properties_schema: Optional[JSON] = None, + table: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: This property will be retired. Please consider using schema + table properties instead. - :paramtype table_name: any + :paramtype table_name: JSON :keyword schema_type_properties_schema: The schema name of the on-premises Oracle database. Type: string (or Expression with resultType string). - :paramtype schema_type_properties_schema: any + :paramtype schema_type_properties_schema: JSON :keyword table: The table name of the on-premises Oracle database. Type: string (or Expression with resultType string). - :paramtype table: any - """ - super(OracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'OracleTable' # type: str + :paramtype table: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "OracleTable" # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table -class OrcDataset(Dataset): +class OrcDataset(Dataset): # pylint: disable=too-many-instance-attributes """ORC dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -40243,61 +42945,61 @@ class OrcDataset(Dataset): :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation :ivar orc_compression_codec: The data orcCompressionCodec. Type: string (or Expression with resultType string). - :vartype orc_compression_codec: any + :vartype orc_compression_codec: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "location": {"key": "typeProperties.location", "type": "DatasetLocation"}, + "orc_compression_codec": {"key": "typeProperties.orcCompressionCodec", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, location: Optional["_models.DatasetLocation"] = None, - orc_compression_codec: Optional[Any] = None, + orc_compression_codec: 
Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -40305,10 +43007,20 @@ def __init__( :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation :keyword orc_compression_codec: The data orcCompressionCodec. Type: string (or Expression with resultType string). - :paramtype orc_compression_codec: any - """ - super(OrcDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'Orc' # type: str + :paramtype orc_compression_codec: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "Orc" # type: str self.location = location self.orc_compression_codec = orc_compression_codec @@ -40320,45 +43032,47 @@ class OrcFormat(DatasetStorageFormat): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset storage format.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage format. Required. :vartype type: str :ivar serializer: Serializer. Type: string (or Expression with resultType string). - :vartype serializer: any + :vartype serializer: JSON :ivar deserializer: Deserializer. Type: string (or Expression with resultType string). 
- :vartype deserializer: any + :vartype deserializer: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "serializer": {"key": "serializer", "type": "object"}, + "deserializer": {"key": "deserializer", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - serializer: Optional[Any] = None, - deserializer: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + serializer: Optional[JSON] = None, + deserializer: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword serializer: Serializer. Type: string (or Expression with resultType string). - :paramtype serializer: any + :paramtype serializer: JSON :keyword deserializer: Deserializer. Type: string (or Expression with resultType string). - :paramtype deserializer: any + :paramtype deserializer: JSON """ - super(OrcFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.type = 'OrcFormat' # type: str + super().__init__( + additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs + ) + self.type = "OrcFormat" # type: str class OrcSink(CopySink): @@ -40368,27 +43082,27 @@ class OrcSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar store_settings: ORC store settings. 
:vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :ivar format_settings: ORC format settings. @@ -40396,32 +43110,32 @@ class OrcSink(CopySink): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "store_settings": {"key": "storeSettings", "type": "StoreWriteSettings"}, + "format_settings": {"key": "formatSettings", "type": "OrcWriteSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, store_settings: Optional["_models.StoreWriteSettings"] = None, format_settings: Optional["_models.OrcWriteSettings"] = None, **kwargs @@ -40429,32 +43143,41 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword store_settings: ORC store settings. :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :keyword format_settings: ORC format settings. :paramtype format_settings: ~azure.mgmt.datafactory.models.OrcWriteSettings """ - super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'OrcSink' # type: str + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "OrcSink" # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -40466,79 +43189,86 @@ class OrcSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar store_settings: ORC store settings. :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "store_settings": {"key": "storeSettings", "type": "StoreReadSettings"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, store_settings: Optional["_models.StoreReadSettings"] = None, - additional_columns: Optional[Any] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword store_settings: ORC store settings. :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :paramtype additional_columns: any - """ - super(OrcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'OrcSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "OrcSource" # type: str self.store_settings = store_settings self.additional_columns = additional_columns @@ -40550,158 +43280,146 @@ class OrcWriteSettings(FormatWriteSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The write setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The write setting type. Required. :vartype type: str :ivar max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). - :vartype max_rows_per_file: any + :vartype max_rows_per_file: JSON :ivar file_name_prefix: Specifies the file name pattern :code:``_:code:``.:code:`` when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). - :vartype file_name_prefix: any + :vartype file_name_prefix: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, - 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_rows_per_file": {"key": "maxRowsPerFile", "type": "object"}, + "file_name_prefix": {"key": "fileNamePrefix", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_rows_per_file: Optional[Any] = None, - file_name_prefix: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_rows_per_file: Optional[JSON] = None, + file_name_prefix: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). - :paramtype max_rows_per_file: any + :paramtype max_rows_per_file: JSON :keyword file_name_prefix: Specifies the file name pattern :code:``_:code:``.:code:`` when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). 
- :paramtype file_name_prefix: any + :paramtype file_name_prefix: JSON """ - super(OrcWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'OrcWriteSettings' # type: str + super().__init__(additional_properties=additional_properties, **kwargs) + self.type = "OrcWriteSettings" # type: str self.max_rows_per_file = max_rows_per_file self.file_name_prefix = file_name_prefix -class PackageStore(msrest.serialization.Model): +class PackageStore(_serialization.Model): """Package store for the SSIS integration runtime. All required parameters must be populated in order to send to Azure. - :ivar name: Required. The name of the package store. + :ivar name: The name of the package store. Required. :vartype name: str - :ivar package_store_linked_service: Required. The package store linked service reference. + :ivar package_store_linked_service: The package store linked service reference. Required. :vartype package_store_linked_service: ~azure.mgmt.datafactory.models.EntityReference """ _validation = { - 'name': {'required': True}, - 'package_store_linked_service': {'required': True}, + "name": {"required": True}, + "package_store_linked_service": {"required": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'package_store_linked_service': {'key': 'packageStoreLinkedService', 'type': 'EntityReference'}, + "name": {"key": "name", "type": "str"}, + "package_store_linked_service": {"key": "packageStoreLinkedService", "type": "EntityReference"}, } - def __init__( - self, - *, - name: str, - package_store_linked_service: "_models.EntityReference", - **kwargs - ): + def __init__(self, *, name: str, package_store_linked_service: "_models.EntityReference", **kwargs): """ - :keyword name: Required. The name of the package store. + :keyword name: The name of the package store. Required. :paramtype name: str - :keyword package_store_linked_service: Required. The package store linked service reference. + :keyword package_store_linked_service: The package store linked service reference. Required. :paramtype package_store_linked_service: ~azure.mgmt.datafactory.models.EntityReference """ - super(PackageStore, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name self.package_store_linked_service = package_store_linked_service -class ParameterSpecification(msrest.serialization.Model): +class ParameterSpecification(_serialization.Model): """Definition of a single parameter for an entity. All required parameters must be populated in order to send to Azure. - :ivar type: Required. Parameter type. Known values are: "Object", "String", "Int", "Float", - "Bool", "Array", "SecureString". + :ivar type: Parameter type. Required. Known values are: "Object", "String", "Int", "Float", + "Bool", "Array", and "SecureString". :vartype type: str or ~azure.mgmt.datafactory.models.ParameterType :ivar default_value: Default value of parameter. 
- :vartype default_value: any + :vartype default_value: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'object'}, + "type": {"key": "type", "type": "str"}, + "default_value": {"key": "defaultValue", "type": "object"}, } - def __init__( - self, - *, - type: Union[str, "_models.ParameterType"], - default_value: Optional[Any] = None, - **kwargs - ): + def __init__(self, *, type: Union[str, "_models.ParameterType"], default_value: Optional[JSON] = None, **kwargs): """ - :keyword type: Required. Parameter type. Known values are: "Object", "String", "Int", "Float", - "Bool", "Array", "SecureString". + :keyword type: Parameter type. Required. Known values are: "Object", "String", "Int", "Float", + "Bool", "Array", and "SecureString". :paramtype type: str or ~azure.mgmt.datafactory.models.ParameterType :keyword default_value: Default value of parameter. - :paramtype default_value: any + :paramtype default_value: JSON """ - super(ParameterSpecification, self).__init__(**kwargs) + super().__init__(**kwargs) self.type = type self.default_value = default_value -class ParquetDataset(Dataset): +class ParquetDataset(Dataset): # pylint: disable=too-many-instance-attributes """Parquet dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -40709,61 +43427,61 @@ class ParquetDataset(Dataset): :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation :ivar compression_codec: The data compressionCodec. Type: string (or Expression with resultType string). 
- :vartype compression_codec: any + :vartype compression_codec: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "location": {"key": "typeProperties.location", "type": "DatasetLocation"}, + "compression_codec": {"key": "typeProperties.compressionCodec", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, location: Optional["_models.DatasetLocation"] = None, - compression_codec: Optional[Any] = None, + compression_codec: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. 
- :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -40771,10 +43489,20 @@ def __init__( :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation :keyword compression_codec: The data compressionCodec. Type: string (or Expression with resultType string). - :paramtype compression_codec: any - """ - super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'Parquet' # type: str + :paramtype compression_codec: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "Parquet" # type: str self.location = location self.compression_codec = compression_codec @@ -40786,45 +43514,47 @@ class ParquetFormat(DatasetStorageFormat): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset storage format.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage format. Required. :vartype type: str :ivar serializer: Serializer. Type: string (or Expression with resultType string). - :vartype serializer: any + :vartype serializer: JSON :ivar deserializer: Deserializer. Type: string (or Expression with resultType string). - :vartype deserializer: any + :vartype deserializer: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "serializer": {"key": "serializer", "type": "object"}, + "deserializer": {"key": "deserializer", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - serializer: Optional[Any] = None, - deserializer: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + serializer: Optional[JSON] = None, + deserializer: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword serializer: Serializer. Type: string (or Expression with resultType string). - :paramtype serializer: any + :paramtype serializer: JSON :keyword deserializer: Deserializer. Type: string (or Expression with resultType string). 
- :paramtype deserializer: any + :paramtype deserializer: JSON """ - super(ParquetFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.type = 'ParquetFormat' # type: str + super().__init__( + additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs + ) + self.type = "ParquetFormat" # type: str class ParquetSink(CopySink): @@ -40834,27 +43564,27 @@ class ParquetSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar store_settings: Parquet store settings. :vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :ivar format_settings: Parquet format settings. 
@@ -40862,32 +43592,32 @@ class ParquetSink(CopySink): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "store_settings": {"key": "storeSettings", "type": "StoreWriteSettings"}, + "format_settings": {"key": "formatSettings", "type": "ParquetWriteSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, store_settings: Optional["_models.StoreWriteSettings"] = None, format_settings: Optional["_models.ParquetWriteSettings"] = None, **kwargs @@ -40895,32 +43625,41 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword store_settings: Parquet store settings. :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :keyword format_settings: Parquet format settings. :paramtype format_settings: ~azure.mgmt.datafactory.models.ParquetWriteSettings """ - super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'ParquetSink' # type: str + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "ParquetSink" # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -40932,79 +43671,86 @@ class ParquetSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar store_settings: Parquet store settings. :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "store_settings": {"key": "storeSettings", "type": "StoreReadSettings"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, store_settings: Optional["_models.StoreReadSettings"] = None, - additional_columns: Optional[Any] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword store_settings: Parquet store settings. :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :paramtype additional_columns: any - """ - super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'ParquetSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "ParquetSource" # type: str self.store_settings = store_settings self.additional_columns = additional_columns @@ -41016,64 +43762,64 @@ class ParquetWriteSettings(FormatWriteSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The write setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The write setting type. Required. :vartype type: str :ivar max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). - :vartype max_rows_per_file: any + :vartype max_rows_per_file: JSON :ivar file_name_prefix: Specifies the file name pattern :code:``_:code:``.:code:`` when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). - :vartype file_name_prefix: any + :vartype file_name_prefix: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, - 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_rows_per_file": {"key": "maxRowsPerFile", "type": "object"}, + "file_name_prefix": {"key": "fileNamePrefix", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_rows_per_file: Optional[Any] = None, - file_name_prefix: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_rows_per_file: Optional[JSON] = None, + file_name_prefix: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). - :paramtype max_rows_per_file: any + :paramtype max_rows_per_file: JSON :keyword file_name_prefix: Specifies the file name pattern :code:``_:code:``.:code:`` when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). 
- :paramtype file_name_prefix: any + :paramtype file_name_prefix: JSON """ - super(ParquetWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'ParquetWriteSettings' # type: str + super().__init__(additional_properties=additional_properties, **kwargs) + self.type = "ParquetWriteSettings" # type: str self.max_rows_per_file = max_rows_per_file self.file_name_prefix = file_name_prefix -class PaypalLinkedService(LinkedService): +class PaypalLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Paypal Service linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -41082,72 +43828,72 @@ class PaypalLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar host: Required. The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). - :vartype host: any - :ivar client_id: Required. The client ID associated with your PayPal application. - :vartype client_id: any + :vartype annotations: list[JSON] + :ivar host: The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). Required. + :vartype host: JSON + :ivar client_id: The client ID associated with your PayPal application. Required. + :vartype client_id: JSON :ivar client_secret: The client secret associated with your PayPal application. :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :vartype use_encrypted_endpoints: any + :vartype use_encrypted_endpoints: JSON :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :vartype use_host_verification: any + :vartype use_host_verification: JSON :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :vartype use_peer_verification: any + :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'client_id': {'required': True}, + "type": {"required": True}, + "host": {"required": True}, + "client_id": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "host": {"key": "typeProperties.host", "type": "object"}, + "client_id": {"key": "typeProperties.clientId", "type": "object"}, + "client_secret": {"key": "typeProperties.clientSecret", "type": "SecretBase"}, + "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, + "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, + "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - host: Any, - client_id: Any, - additional_properties: Optional[Dict[str, Any]] = None, + host: JSON, + client_id: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, client_secret: Optional["_models.SecretBase"] = None, - use_encrypted_endpoints: Optional[Any] = None, - use_host_verification: Optional[Any] = None, - use_peer_verification: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + use_encrypted_endpoints: Optional[JSON] = None, + use_host_verification: Optional[JSON] = None, + use_peer_verification: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
@@ -41155,30 +43901,37 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword host: Required. The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). - :paramtype host: any - :keyword client_id: Required. The client ID associated with your PayPal application. - :paramtype client_id: any + :paramtype annotations: list[JSON] + :keyword host: The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). Required. + :paramtype host: JSON + :keyword client_id: The client ID associated with your PayPal application. Required. + :paramtype client_id: JSON :keyword client_secret: The client secret associated with your PayPal application. :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :paramtype use_encrypted_endpoints: any + :paramtype use_encrypted_endpoints: JSON :keyword use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :paramtype use_host_verification: any + :paramtype use_host_verification: JSON :keyword use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :paramtype use_peer_verification: any + :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(PaypalLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Paypal' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Paypal" # type: str self.host = host self.client_id = client_id self.client_secret = client_secret @@ -41195,88 +43948,98 @@ class PaypalObjectDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. 
:vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. Type: string (or Expression with resultType string). - :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(PaypalObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'PaypalObject' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "PaypalObject" # type: str self.table_name = table_name @@ -41287,101 +44050,110 @@ class PaypalSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'PaypalSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "PaypalSource" # type: str self.query = query -class PhoenixLinkedService(LinkedService): +class PhoenixLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Phoenix server linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -41390,100 +44162,100 @@ class PhoenixLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar host: Required. The IP address or host name of the Phoenix server. (i.e. - 192.168.222.160). - :vartype host: any + :vartype annotations: list[JSON] + :ivar host: The IP address or host name of the Phoenix server. (i.e. 192.168.222.160). + Required. + :vartype host: JSON :ivar port: The TCP port that the Phoenix server uses to listen for client connections. The default value is 8765. - :vartype port: any + :vartype port: JSON :ivar http_path: The partial URL corresponding to the Phoenix server. (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using WindowsAzureHDInsightService. - :vartype http_path: any - :ivar authentication_type: Required. The authentication mechanism used to connect to the - Phoenix server. Known values are: "Anonymous", "UsernameAndPassword", + :vartype http_path: JSON + :ivar authentication_type: The authentication mechanism used to connect to the Phoenix server. + Required. Known values are: "Anonymous", "UsernameAndPassword", and "WindowsAzureHDInsightService". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.PhoenixAuthenticationType :ivar username: The user name used to connect to the Phoenix server. - :vartype username: any + :vartype username: JSON :ivar password: The password corresponding to the user name. 
:vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :vartype enable_ssl: any + :vartype enable_ssl: JSON :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :vartype trusted_cert_path: any + :vartype trusted_cert_path: JSON :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :vartype use_system_trust_store: any + :vartype use_system_trust_store: JSON :ivar allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :vartype allow_host_name_cn_mismatch: any + :vartype allow_host_name_cn_mismatch: JSON :ivar allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :vartype allow_self_signed_server_cert: any + :vartype allow_self_signed_server_cert: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, + "type": {"required": True}, + "host": {"required": True}, + "authentication_type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": 
"[object]"}, + "host": {"key": "typeProperties.host", "type": "object"}, + "port": {"key": "typeProperties.port", "type": "object"}, + "http_path": {"key": "typeProperties.httpPath", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "enable_ssl": {"key": "typeProperties.enableSsl", "type": "object"}, + "trusted_cert_path": {"key": "typeProperties.trustedCertPath", "type": "object"}, + "use_system_trust_store": {"key": "typeProperties.useSystemTrustStore", "type": "object"}, + "allow_host_name_cn_mismatch": {"key": "typeProperties.allowHostNameCNMismatch", "type": "object"}, + "allow_self_signed_server_cert": {"key": "typeProperties.allowSelfSignedServerCert", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - host: Any, + host: JSON, authentication_type: Union[str, "_models.PhoenixAuthenticationType"], - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - port: Optional[Any] = None, - http_path: Optional[Any] = None, - username: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + port: Optional[JSON] = None, + http_path: Optional[JSON] = None, + username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - enable_ssl: Optional[Any] = None, - trusted_cert_path: Optional[Any] = None, - use_system_trust_store: Optional[Any] = None, - allow_host_name_cn_mismatch: Optional[Any] = None, - allow_self_signed_server_cert: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + enable_ssl: Optional[JSON] = None, + trusted_cert_path: Optional[JSON] = None, + use_system_trust_store: Optional[JSON] = None, + allow_host_name_cn_mismatch: Optional[JSON] = None, + allow_self_signed_server_cert: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -41491,48 +44263,55 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword host: Required. The IP address or host name of the Phoenix server. (i.e. - 192.168.222.160). - :paramtype host: any + :paramtype annotations: list[JSON] + :keyword host: The IP address or host name of the Phoenix server. (i.e. 192.168.222.160). + Required. + :paramtype host: JSON :keyword port: The TCP port that the Phoenix server uses to listen for client connections. The default value is 8765. - :paramtype port: any + :paramtype port: JSON :keyword http_path: The partial URL corresponding to the Phoenix server. (i.e. 
/gateway/sandbox/phoenix/version). The default value is hbasephoenix if using WindowsAzureHDInsightService. - :paramtype http_path: any - :keyword authentication_type: Required. The authentication mechanism used to connect to the - Phoenix server. Known values are: "Anonymous", "UsernameAndPassword", + :paramtype http_path: JSON + :keyword authentication_type: The authentication mechanism used to connect to the Phoenix + server. Required. Known values are: "Anonymous", "UsernameAndPassword", and "WindowsAzureHDInsightService". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.PhoenixAuthenticationType :keyword username: The user name used to connect to the Phoenix server. - :paramtype username: any + :paramtype username: JSON :keyword password: The password corresponding to the user name. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :paramtype enable_ssl: any + :paramtype enable_ssl: JSON :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :paramtype trusted_cert_path: any + :paramtype trusted_cert_path: JSON :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :paramtype use_system_trust_store: any + :paramtype use_system_trust_store: JSON :keyword allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :paramtype allow_host_name_cn_mismatch: any + :paramtype allow_host_name_cn_mismatch: JSON :keyword allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :paramtype allow_self_signed_server_cert: any + :paramtype allow_self_signed_server_cert: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(PhoenixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Phoenix' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Phoenix" # type: str self.host = host self.port = port self.http_path = http_path @@ -41547,113 +44326,123 @@ def __init__( self.encrypted_credential = encrypted_credential -class PhoenixObjectDataset(Dataset): +class PhoenixObjectDataset(Dataset): # pylint: disable=too-many-instance-attributes """Phoenix server dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. 
+ :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :vartype table_name: any + :vartype table_name: JSON :ivar table: The table name of the Phoenix. Type: string (or Expression with resultType string). - :vartype table: any + :vartype table: JSON :ivar schema_type_properties_schema: The schema name of the Phoenix. Type: string (or Expression with resultType string). - :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, } def __init__( self, *, linked_service_name: 
"_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, - table: Optional[Any] = None, - schema_type_properties_schema: Optional[Any] = None, + table_name: Optional[JSON] = None, + table: Optional[JSON] = None, + schema_type_properties_schema: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: This property will be retired. Please consider using schema + table properties instead. - :paramtype table_name: any + :paramtype table_name: JSON :keyword table: The table name of the Phoenix. Type: string (or Expression with resultType string). - :paramtype table: any + :paramtype table: JSON :keyword schema_type_properties_schema: The schema name of the Phoenix. Type: string (or Expression with resultType string). 
- :paramtype schema_type_properties_schema: any - """ - super(PhoenixObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'PhoenixObject' # type: str + :paramtype schema_type_properties_schema: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "PhoenixObject" # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -41666,118 +44455,122 @@ class PhoenixSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'PhoenixSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "PhoenixSource" # type: str self.query = query -class PipelineElapsedTimeMetricPolicy(msrest.serialization.Model): +class PipelineElapsedTimeMetricPolicy(_serialization.Model): """Pipeline ElapsedTime Metric Policy. :ivar duration: TimeSpan value, after which an Azure Monitoring Metric is fired. - :vartype duration: any + :vartype duration: JSON """ _attribute_map = { - 'duration': {'key': 'duration', 'type': 'object'}, + "duration": {"key": "duration", "type": "object"}, } - def __init__( - self, - *, - duration: Optional[Any] = None, - **kwargs - ): + def __init__(self, *, duration: Optional[JSON] = None, **kwargs): """ :keyword duration: TimeSpan value, after which an Azure Monitoring Metric is fired. - :paramtype duration: any + :paramtype duration: JSON """ - super(PipelineElapsedTimeMetricPolicy, self).__init__(**kwargs) + super().__init__(**kwargs) self.duration = duration -class PipelineFolder(msrest.serialization.Model): +class PipelineFolder(_serialization.Model): """The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. :ivar name: The name of the folder that this Pipeline is in. @@ -41785,62 +44578,51 @@ class PipelineFolder(msrest.serialization.Model): """ _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, } - def __init__( - self, - *, - name: Optional[str] = None, - **kwargs - ): + def __init__(self, *, name: Optional[str] = None, **kwargs): """ :keyword name: The name of the folder that this Pipeline is in. :paramtype name: str """ - super(PipelineFolder, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name -class PipelineListResponse(msrest.serialization.Model): +class PipelineListResponse(_serialization.Model): """A list of pipeline resources. All required parameters must be populated in order to send to Azure. - :ivar value: Required. List of pipelines. + :ivar value: List of pipelines. Required. :vartype value: list[~azure.mgmt.datafactory.models.PipelineResource] :ivar next_link: The link to the next page of results, if any remaining results exist. 
:vartype next_link: str """ _validation = { - 'value': {'required': True}, + "value": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[PipelineResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[PipelineResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - *, - value: List["_models.PipelineResource"], - next_link: Optional[str] = None, - **kwargs - ): + def __init__(self, *, value: List["_models.PipelineResource"], next_link: Optional[str] = None, **kwargs): """ - :keyword value: Required. List of pipelines. + :keyword value: List of pipelines. Required. :paramtype value: list[~azure.mgmt.datafactory.models.PipelineResource] :keyword next_link: The link to the next page of results, if any remaining results exist. :paramtype next_link: str """ - super(PipelineListResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.next_link = next_link -class PipelinePolicy(msrest.serialization.Model): +class PipelinePolicy(_serialization.Model): """Pipeline Policy. :ivar elapsed_time_metric: Pipeline ElapsedTime Metric Policy. @@ -41848,70 +44630,65 @@ class PipelinePolicy(msrest.serialization.Model): """ _attribute_map = { - 'elapsed_time_metric': {'key': 'elapsedTimeMetric', 'type': 'PipelineElapsedTimeMetricPolicy'}, + "elapsed_time_metric": {"key": "elapsedTimeMetric", "type": "PipelineElapsedTimeMetricPolicy"}, } - def __init__( - self, - *, - elapsed_time_metric: Optional["_models.PipelineElapsedTimeMetricPolicy"] = None, - **kwargs - ): + def __init__(self, *, elapsed_time_metric: Optional["_models.PipelineElapsedTimeMetricPolicy"] = None, **kwargs): """ :keyword elapsed_time_metric: Pipeline ElapsedTime Metric Policy. :paramtype elapsed_time_metric: ~azure.mgmt.datafactory.models.PipelineElapsedTimeMetricPolicy """ - super(PipelinePolicy, self).__init__(**kwargs) + super().__init__(**kwargs) self.elapsed_time_metric = elapsed_time_metric -class PipelineReference(msrest.serialization.Model): +class PipelineReference(_serialization.Model): """Pipeline reference type. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar type: Pipeline reference type. Has constant value: "PipelineReference". - :vartype type: str - :ivar reference_name: Required. Reference pipeline name. + :ivar type: Pipeline reference type. Required. "PipelineReference" + :vartype type: str or ~azure.mgmt.datafactory.models.PipelineReferenceType + :ivar reference_name: Reference pipeline name. Required. :vartype reference_name: str :ivar name: Reference name. :vartype name: str """ _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, + "type": {"required": True}, + "reference_name": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + "type": {"key": "type", "type": "str"}, + "reference_name": {"key": "referenceName", "type": "str"}, + "name": {"key": "name", "type": "str"}, } - type = "PipelineReference" - def __init__( self, *, + type: Union[str, "_models.PipelineReferenceType"], reference_name: str, name: Optional[str] = None, **kwargs ): """ - :keyword reference_name: Required. Reference pipeline name. + :keyword type: Pipeline reference type. Required. 
"PipelineReference" + :paramtype type: str or ~azure.mgmt.datafactory.models.PipelineReferenceType + :keyword reference_name: Reference pipeline name. Required. :paramtype reference_name: str :keyword name: Reference name. :paramtype name: str """ - super(PipelineReference, self).__init__(**kwargs) + super().__init__(**kwargs) + self.type = type self.reference_name = reference_name self.name = name -class PipelineResource(SubResource): +class PipelineResource(SubResource): # pylint: disable=too-many-instance-attributes """Pipeline resource type. Variables are only populated by the server, and will be ignored when sending a request. @@ -41926,7 +44703,7 @@ class PipelineResource(SubResource): :vartype etag: str :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar description: The description of the pipeline. :vartype description: str :ivar activities: List of activities in pipeline. @@ -41938,9 +44715,9 @@ class PipelineResource(SubResource): :ivar concurrency: The max number of concurrent runs for the pipeline. :vartype concurrency: int :ivar annotations: List of tags that can be used for describing the Pipeline. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar run_dimensions: Dimensions emitted by Pipeline. - :vartype run_dimensions: dict[str, any] + :vartype run_dimensions: dict[str, JSON] :ivar folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.PipelineFolder @@ -41949,41 +44726,41 @@ class PipelineResource(SubResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'concurrency': {'minimum': 1}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "etag": {"readonly": True}, + "concurrency": {"minimum": 1}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'properties.description', 'type': 'str'}, - 'activities': {'key': 'properties.activities', 'type': '[Activity]'}, - 'parameters': {'key': 'properties.parameters', 'type': '{ParameterSpecification}'}, - 'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'}, - 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, - 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, - 'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'}, - 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, - 'policy': {'key': 'properties.policy', 'type': 'PipelinePolicy'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "etag": {"key": "etag", "type": "str"}, + "additional_properties": {"key": "", "type": "{object}"}, + "description": {"key": "properties.description", "type": "str"}, + "activities": {"key": "properties.activities", "type": "[Activity]"}, + "parameters": {"key": "properties.parameters", "type": "{ParameterSpecification}"}, + "variables": {"key": "properties.variables", "type": "{VariableSpecification}"}, + "concurrency": {"key": "properties.concurrency", "type": 
"int"}, + "annotations": {"key": "properties.annotations", "type": "[object]"}, + "run_dimensions": {"key": "properties.runDimensions", "type": "{object}"}, + "folder": {"key": "properties.folder", "type": "PipelineFolder"}, + "policy": {"key": "properties.policy", "type": "PipelinePolicy"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, activities: Optional[List["_models.Activity"]] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, variables: Optional[Dict[str, "_models.VariableSpecification"]] = None, concurrency: Optional[int] = None, - annotations: Optional[List[Any]] = None, - run_dimensions: Optional[Dict[str, Any]] = None, + annotations: Optional[List[JSON]] = None, + run_dimensions: Optional[Dict[str, JSON]] = None, folder: Optional["_models.PipelineFolder"] = None, policy: Optional["_models.PipelinePolicy"] = None, **kwargs @@ -41991,7 +44768,7 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: The description of the pipeline. :paramtype description: str :keyword activities: List of activities in pipeline. @@ -42003,16 +44780,16 @@ def __init__( :keyword concurrency: The max number of concurrent runs for the pipeline. :paramtype concurrency: int :keyword annotations: List of tags that can be used for describing the Pipeline. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword run_dimensions: Dimensions emitted by Pipeline. - :paramtype run_dimensions: dict[str, any] + :paramtype run_dimensions: dict[str, JSON] :keyword folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.PipelineFolder :keyword policy: Pipeline Policy. :paramtype policy: ~azure.mgmt.datafactory.models.PipelinePolicy """ - super(PipelineResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.description = description self.activities = activities @@ -42025,14 +44802,14 @@ def __init__( self.policy = policy -class PipelineRun(msrest.serialization.Model): +class PipelineRun(_serialization.Model): # pylint: disable=too-many-instance-attributes """Information about a pipeline run. Variables are only populated by the server, and will be ignored when sending a request. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar run_id: Identifier of a run. :vartype run_id: str :ivar run_group_id: Identifier that correlates all the recovery runs of a pipeline run. 
@@ -42064,50 +44841,45 @@ class PipelineRun(msrest.serialization.Model): """ _validation = { - 'run_id': {'readonly': True}, - 'run_group_id': {'readonly': True}, - 'is_latest': {'readonly': True}, - 'pipeline_name': {'readonly': True}, - 'parameters': {'readonly': True}, - 'run_dimensions': {'readonly': True}, - 'invoked_by': {'readonly': True}, - 'last_updated': {'readonly': True}, - 'run_start': {'readonly': True}, - 'run_end': {'readonly': True}, - 'duration_in_ms': {'readonly': True}, - 'status': {'readonly': True}, - 'message': {'readonly': True}, + "run_id": {"readonly": True}, + "run_group_id": {"readonly": True}, + "is_latest": {"readonly": True}, + "pipeline_name": {"readonly": True}, + "parameters": {"readonly": True}, + "run_dimensions": {"readonly": True}, + "invoked_by": {"readonly": True}, + "last_updated": {"readonly": True}, + "run_start": {"readonly": True}, + "run_end": {"readonly": True}, + "duration_in_ms": {"readonly": True}, + "status": {"readonly": True}, + "message": {"readonly": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'run_id': {'key': 'runId', 'type': 'str'}, - 'run_group_id': {'key': 'runGroupId', 'type': 'str'}, - 'is_latest': {'key': 'isLatest', 'type': 'bool'}, - 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{str}'}, - 'run_dimensions': {'key': 'runDimensions', 'type': '{str}'}, - 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, - 'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'}, - 'run_start': {'key': 'runStart', 'type': 'iso-8601'}, - 'run_end': {'key': 'runEnd', 'type': 'iso-8601'}, - 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, - 'status': {'key': 'status', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "run_id": {"key": "runId", "type": "str"}, + "run_group_id": {"key": "runGroupId", "type": "str"}, + "is_latest": {"key": "isLatest", "type": "bool"}, + "pipeline_name": {"key": "pipelineName", "type": "str"}, + "parameters": {"key": "parameters", "type": "{str}"}, + "run_dimensions": {"key": "runDimensions", "type": "{str}"}, + "invoked_by": {"key": "invokedBy", "type": "PipelineRunInvokedBy"}, + "last_updated": {"key": "lastUpdated", "type": "iso-8601"}, + "run_start": {"key": "runStart", "type": "iso-8601"}, + "run_end": {"key": "runEnd", "type": "iso-8601"}, + "duration_in_ms": {"key": "durationInMs", "type": "int"}, + "status": {"key": "status", "type": "str"}, + "message": {"key": "message", "type": "str"}, } - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - **kwargs - ): + def __init__(self, *, additional_properties: Optional[Dict[str, JSON]] = None, **kwargs): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] """ - super(PipelineRun, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.run_id = None self.run_group_id = None @@ -42124,7 +44896,7 @@ def __init__( self.message = None -class PipelineRunInvokedBy(msrest.serialization.Model): +class PipelineRunInvokedBy(_serialization.Model): """Provides entity name and id that started the pipeline run. Variables are only populated by the server, and will be ignored when sending a request. 
@@ -42142,28 +44914,24 @@ class PipelineRunInvokedBy(msrest.serialization.Model): """ _validation = { - 'name': {'readonly': True}, - 'id': {'readonly': True}, - 'invoked_by_type': {'readonly': True}, - 'pipeline_name': {'readonly': True}, - 'pipeline_run_id': {'readonly': True}, + "name": {"readonly": True}, + "id": {"readonly": True}, + "invoked_by_type": {"readonly": True}, + "pipeline_name": {"readonly": True}, + "pipeline_run_id": {"readonly": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'id': {'key': 'id', 'type': 'str'}, - 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, - 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, - 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, + "id": {"key": "id", "type": "str"}, + "invoked_by_type": {"key": "invokedByType", "type": "str"}, + "pipeline_name": {"key": "pipelineName", "type": "str"}, + "pipeline_run_id": {"key": "pipelineRunId", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(PipelineRunInvokedBy, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.name = None self.id = None self.invoked_by_type = None @@ -42171,12 +44939,12 @@ def __init__( self.pipeline_run_id = None -class PipelineRunsQueryResponse(msrest.serialization.Model): +class PipelineRunsQueryResponse(_serialization.Model): """A list pipeline runs. All required parameters must be populated in order to send to Azure. - :ivar value: Required. List of pipeline runs. + :ivar value: List of pipeline runs. Required. :vartype value: list[~azure.mgmt.datafactory.models.PipelineRun] :ivar continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. @@ -42184,91 +44952,85 @@ class PipelineRunsQueryResponse(msrest.serialization.Model): """ _validation = { - 'value': {'required': True}, + "value": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[PipelineRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + "value": {"key": "value", "type": "[PipelineRun]"}, + "continuation_token": {"key": "continuationToken", "type": "str"}, } - def __init__( - self, - *, - value: List["_models.PipelineRun"], - continuation_token: Optional[str] = None, - **kwargs - ): + def __init__(self, *, value: List["_models.PipelineRun"], continuation_token: Optional[str] = None, **kwargs): """ - :keyword value: Required. List of pipeline runs. + :keyword value: List of pipeline runs. Required. :paramtype value: list[~azure.mgmt.datafactory.models.PipelineRun] :keyword continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. :paramtype continuation_token: str """ - super(PipelineRunsQueryResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.continuation_token = continuation_token -class PolybaseSettings(msrest.serialization.Model): +class PolybaseSettings(_serialization.Model): """PolyBase settings. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar reject_type: Reject type. Known values are: "value", "percentage". + :vartype additional_properties: dict[str, JSON] + :ivar reject_type: Reject type. Known values are: "value" and "percentage". 
:vartype reject_type: str or ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType :ivar reject_value: Specifies the value or the percentage of rows that can be rejected before the query fails. Type: number (or Expression with resultType number), minimum: 0. - :vartype reject_value: any + :vartype reject_value: JSON :ivar reject_sample_value: Determines the number of rows to attempt to retrieve before the PolyBase recalculates the percentage of rejected rows. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype reject_sample_value: any + :vartype reject_sample_value: JSON :ivar use_type_default: Specifies how to handle missing values in delimited text files when PolyBase retrieves data from the text file. Type: boolean (or Expression with resultType boolean). - :vartype use_type_default: any + :vartype use_type_default: JSON """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'reject_type': {'key': 'rejectType', 'type': 'str'}, - 'reject_value': {'key': 'rejectValue', 'type': 'object'}, - 'reject_sample_value': {'key': 'rejectSampleValue', 'type': 'object'}, - 'use_type_default': {'key': 'useTypeDefault', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "reject_type": {"key": "rejectType", "type": "str"}, + "reject_value": {"key": "rejectValue", "type": "object"}, + "reject_sample_value": {"key": "rejectSampleValue", "type": "object"}, + "use_type_default": {"key": "useTypeDefault", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, reject_type: Optional[Union[str, "_models.PolybaseSettingsRejectType"]] = None, - reject_value: Optional[Any] = None, - reject_sample_value: Optional[Any] = None, - use_type_default: Optional[Any] = None, + reject_value: Optional[JSON] = None, + reject_sample_value: Optional[JSON] = None, + use_type_default: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword reject_type: Reject type. Known values are: "value", "percentage". + :paramtype additional_properties: dict[str, JSON] + :keyword reject_type: Reject type. Known values are: "value" and "percentage". :paramtype reject_type: str or ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType :keyword reject_value: Specifies the value or the percentage of rows that can be rejected before the query fails. Type: number (or Expression with resultType number), minimum: 0. - :paramtype reject_value: any + :paramtype reject_value: JSON :keyword reject_sample_value: Determines the number of rows to attempt to retrieve before the PolyBase recalculates the percentage of rejected rows. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype reject_sample_value: any + :paramtype reject_sample_value: JSON :keyword use_type_default: Specifies how to handle missing values in delimited text files when PolyBase retrieves data from the text file. Type: boolean (or Expression with resultType boolean). 
- :paramtype use_type_default: any + :paramtype use_type_default: JSON """ - super(PolybaseSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.reject_type = reject_type self.reject_value = reject_value @@ -42283,8 +45045,8 @@ class PostgreSqlLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -42293,51 +45055,51 @@ class PostgreSqlLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar connection_string: Required. The connection string. - :vartype connection_string: any + :vartype annotations: list[JSON] + :ivar connection_string: The connection string. Required. + :vartype connection_string: JSON :ivar password: The Azure key vault secret reference of password in connection string. :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, + "type": {"required": True}, + "connection_string": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "AzureKeyVaultSecretReference"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - connection_string: Any, - additional_properties: Optional[Dict[str, Any]] = None, + connection_string: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, 
description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, password: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -42345,18 +45107,25 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword connection_string: Required. The connection string. - :paramtype connection_string: any + :paramtype annotations: list[JSON] + :keyword connection_string: The connection string. Required. + :paramtype connection_string: JSON :keyword password: The Azure key vault secret reference of password in connection string. :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(PostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'PostgreSql' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "PostgreSql" # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential @@ -42369,195 +45138,214 @@ class PostgreSqlSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. 
Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: Database query. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: Database query. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'PostgreSqlSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "PostgreSqlSource" # type: str self.query = query -class PostgreSqlTableDataset(Dataset): +class PostgreSqlTableDataset(Dataset): # pylint: disable=too-many-instance-attributes """The PostgreSQL table dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :vartype table_name: any + :vartype table_name: JSON :ivar table: The PostgreSQL table name. Type: string (or Expression with resultType string). - :vartype table: any + :vartype table: JSON :ivar schema_type_properties_schema: The PostgreSQL schema name. Type: string (or Expression with resultType string). - :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, - table: Optional[Any] = None, - schema_type_properties_schema: Optional[Any] = None, + table_name: Optional[JSON] = None, + table: Optional[JSON] = None, + schema_type_properties_schema: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: This property will be retired. Please consider using schema + table properties instead. - :paramtype table_name: any + :paramtype table_name: JSON :keyword table: The PostgreSQL table name. Type: string (or Expression with resultType string). - :paramtype table: any + :paramtype table: JSON :keyword schema_type_properties_schema: The PostgreSQL schema name. Type: string (or Expression with resultType string). - :paramtype schema_type_properties_schema: any - """ - super(PostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'PostgreSqlTable' # type: str + :paramtype schema_type_properties_schema: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "PostgreSqlTable" # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -42568,7 +45356,7 @@ class PowerQuerySink(DataFlowSink): All required parameters must be populated in order to send to Azure. - :ivar name: Required. Transformation name. + :ivar name: Transformation name. Required. :vartype name: str :ivar description: Transformation description. 
:vartype description: str @@ -42587,18 +45375,18 @@ class PowerQuerySink(DataFlowSink): """ _validation = { - 'name': {'required': True}, + "name": {"required": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, - 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, - 'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'}, - 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, - 'rejected_data_linked_service': {'key': 'rejectedDataLinkedService', 'type': 'LinkedServiceReference'}, - 'script': {'key': 'script', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "dataset": {"key": "dataset", "type": "DatasetReference"}, + "linked_service": {"key": "linkedService", "type": "LinkedServiceReference"}, + "flowlet": {"key": "flowlet", "type": "DataFlowReference"}, + "schema_linked_service": {"key": "schemaLinkedService", "type": "LinkedServiceReference"}, + "rejected_data_linked_service": {"key": "rejectedDataLinkedService", "type": "LinkedServiceReference"}, + "script": {"key": "script", "type": "str"}, } def __init__( @@ -42615,7 +45403,7 @@ def __init__( **kwargs ): """ - :keyword name: Required. Transformation name. + :keyword name: Transformation name. Required. :paramtype name: str :keyword description: Transformation description. :paramtype description: str @@ -42632,11 +45420,20 @@ def __init__( :keyword script: sink script. :paramtype script: str """ - super(PowerQuerySink, self).__init__(name=name, description=description, dataset=dataset, linked_service=linked_service, flowlet=flowlet, schema_linked_service=schema_linked_service, rejected_data_linked_service=rejected_data_linked_service, **kwargs) + super().__init__( + name=name, + description=description, + dataset=dataset, + linked_service=linked_service, + flowlet=flowlet, + schema_linked_service=schema_linked_service, + rejected_data_linked_service=rejected_data_linked_service, + **kwargs + ) self.script = script -class PowerQuerySinkMapping(msrest.serialization.Model): +class PowerQuerySinkMapping(_serialization.Model): """Map Power Query mashup query to sink dataset(s). :ivar query_name: Name of the query in Power Query mashup document. @@ -42646,8 +45443,8 @@ class PowerQuerySinkMapping(msrest.serialization.Model): """ _attribute_map = { - 'query_name': {'key': 'queryName', 'type': 'str'}, - 'dataflow_sinks': {'key': 'dataflowSinks', 'type': '[PowerQuerySink]'}, + "query_name": {"key": "queryName", "type": "str"}, + "dataflow_sinks": {"key": "dataflowSinks", "type": "[PowerQuerySink]"}, } def __init__( @@ -42663,7 +45460,7 @@ def __init__( :keyword dataflow_sinks: List of sinks mapped to Power Query mashup query. :paramtype dataflow_sinks: list[~azure.mgmt.datafactory.models.PowerQuerySink] """ - super(PowerQuerySinkMapping, self).__init__(**kwargs) + super().__init__(**kwargs) self.query_name = query_name self.dataflow_sinks = dataflow_sinks @@ -42673,7 +45470,7 @@ class PowerQuerySource(DataFlowSource): All required parameters must be populated in order to send to Azure. - :ivar name: Required. Transformation name. + :ivar name: Transformation name. Required. :vartype name: str :ivar description: Transformation description. 
:vartype description: str @@ -42690,17 +45487,17 @@ class PowerQuerySource(DataFlowSource): """ _validation = { - 'name': {'required': True}, + "name": {"required": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, - 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, - 'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'}, - 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, - 'script': {'key': 'script', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "dataset": {"key": "dataset", "type": "DatasetReference"}, + "linked_service": {"key": "linkedService", "type": "LinkedServiceReference"}, + "flowlet": {"key": "flowlet", "type": "DataFlowReference"}, + "schema_linked_service": {"key": "schemaLinkedService", "type": "LinkedServiceReference"}, + "script": {"key": "script", "type": "str"}, } def __init__( @@ -42716,7 +45513,7 @@ def __init__( **kwargs ): """ - :keyword name: Required. Transformation name. + :keyword name: Transformation name. Required. :paramtype name: str :keyword description: Transformation description. :paramtype description: str @@ -42731,19 +45528,27 @@ def __init__( :keyword script: source script. :paramtype script: str """ - super(PowerQuerySource, self).__init__(name=name, description=description, dataset=dataset, linked_service=linked_service, flowlet=flowlet, schema_linked_service=schema_linked_service, **kwargs) + super().__init__( + name=name, + description=description, + dataset=dataset, + linked_service=linked_service, + flowlet=flowlet, + schema_linked_service=schema_linked_service, + **kwargs + ) self.script = script -class PrestoLinkedService(LinkedService): +class PrestoLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Presto server linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -42752,107 +45557,107 @@ class PrestoLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar host: Required. The IP address or host name of the Presto server. (i.e. 192.168.222.160). - :vartype host: any - :ivar server_version: Required. The version of the Presto server. (i.e. 0.148-t). - :vartype server_version: any - :ivar catalog: Required. The catalog context for all request against the server. - :vartype catalog: any + :vartype annotations: list[JSON] + :ivar host: The IP address or host name of the Presto server. (i.e. 192.168.222.160). Required. + :vartype host: JSON + :ivar server_version: The version of the Presto server. (i.e. 0.148-t). Required. 
+ :vartype server_version: JSON + :ivar catalog: The catalog context for all request against the server. Required. + :vartype catalog: JSON :ivar port: The TCP port that the Presto server uses to listen for client connections. The default value is 8080. - :vartype port: any - :ivar authentication_type: Required. The authentication mechanism used to connect to the Presto - server. Known values are: "Anonymous", "LDAP". + :vartype port: JSON + :ivar authentication_type: The authentication mechanism used to connect to the Presto server. + Required. Known values are: "Anonymous" and "LDAP". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.PrestoAuthenticationType :ivar username: The user name used to connect to the Presto server. - :vartype username: any + :vartype username: JSON :ivar password: The password corresponding to the user name. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :vartype enable_ssl: any + :vartype enable_ssl: JSON :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :vartype trusted_cert_path: any + :vartype trusted_cert_path: JSON :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :vartype use_system_trust_store: any + :vartype use_system_trust_store: JSON :ivar allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :vartype allow_host_name_cn_mismatch: any + :vartype allow_host_name_cn_mismatch: JSON :ivar allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :vartype allow_self_signed_server_cert: any + :vartype allow_self_signed_server_cert: JSON :ivar time_zone_id: The local time zone used by the connection. Valid values for this option are specified in the IANA Time Zone Database. The default value is the system time zone. - :vartype time_zone_id: any + :vartype time_zone_id: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'server_version': {'required': True}, - 'catalog': {'required': True}, - 'authentication_type': {'required': True}, + "type": {"required": True}, + "host": {"required": True}, + "server_version": {"required": True}, + "catalog": {"required": True}, + "authentication_type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'server_version': {'key': 'typeProperties.serverVersion', 'type': 'object'}, - 'catalog': {'key': 'typeProperties.catalog', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'time_zone_id': {'key': 'typeProperties.timeZoneID', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "host": {"key": "typeProperties.host", "type": "object"}, + "server_version": {"key": "typeProperties.serverVersion", "type": "object"}, + "catalog": {"key": "typeProperties.catalog", "type": "object"}, + "port": {"key": "typeProperties.port", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "enable_ssl": {"key": "typeProperties.enableSsl", "type": "object"}, + "trusted_cert_path": {"key": "typeProperties.trustedCertPath", "type": "object"}, + "use_system_trust_store": {"key": "typeProperties.useSystemTrustStore", "type": "object"}, + "allow_host_name_cn_mismatch": {"key": "typeProperties.allowHostNameCNMismatch", "type": "object"}, + "allow_self_signed_server_cert": {"key": "typeProperties.allowSelfSignedServerCert", "type": "object"}, + "time_zone_id": {"key": "typeProperties.timeZoneID", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - host: Any, - server_version: Any, - catalog: Any, + host: JSON, + server_version: 
JSON, + catalog: JSON, authentication_type: Union[str, "_models.PrestoAuthenticationType"], - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - port: Optional[Any] = None, - username: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + port: Optional[JSON] = None, + username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - enable_ssl: Optional[Any] = None, - trusted_cert_path: Optional[Any] = None, - use_system_trust_store: Optional[Any] = None, - allow_host_name_cn_mismatch: Optional[Any] = None, - allow_self_signed_server_cert: Optional[Any] = None, - time_zone_id: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + enable_ssl: Optional[JSON] = None, + trusted_cert_path: Optional[JSON] = None, + use_system_trust_store: Optional[JSON] = None, + allow_host_name_cn_mismatch: Optional[JSON] = None, + allow_self_signed_server_cert: Optional[JSON] = None, + time_zone_id: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -42860,50 +45665,57 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword host: Required. The IP address or host name of the Presto server. (i.e. - 192.168.222.160). - :paramtype host: any - :keyword server_version: Required. The version of the Presto server. (i.e. 0.148-t). - :paramtype server_version: any - :keyword catalog: Required. The catalog context for all request against the server. - :paramtype catalog: any + :paramtype annotations: list[JSON] + :keyword host: The IP address or host name of the Presto server. (i.e. 192.168.222.160). + Required. + :paramtype host: JSON + :keyword server_version: The version of the Presto server. (i.e. 0.148-t). Required. + :paramtype server_version: JSON + :keyword catalog: The catalog context for all request against the server. Required. + :paramtype catalog: JSON :keyword port: The TCP port that the Presto server uses to listen for client connections. The default value is 8080. - :paramtype port: any - :keyword authentication_type: Required. The authentication mechanism used to connect to the - Presto server. Known values are: "Anonymous", "LDAP". + :paramtype port: JSON + :keyword authentication_type: The authentication mechanism used to connect to the Presto + server. Required. Known values are: "Anonymous" and "LDAP". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.PrestoAuthenticationType :keyword username: The user name used to connect to the Presto server. - :paramtype username: any + :paramtype username: JSON :keyword password: The password corresponding to the user name. 
:paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :paramtype enable_ssl: any + :paramtype enable_ssl: JSON :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :paramtype trusted_cert_path: any + :paramtype trusted_cert_path: JSON :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :paramtype use_system_trust_store: any + :paramtype use_system_trust_store: JSON :keyword allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :paramtype allow_host_name_cn_mismatch: any + :paramtype allow_host_name_cn_mismatch: JSON :keyword allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :paramtype allow_self_signed_server_cert: any + :paramtype allow_self_signed_server_cert: JSON :keyword time_zone_id: The local time zone used by the connection. Valid values for this option are specified in the IANA Time Zone Database. The default value is the system time zone. - :paramtype time_zone_id: any + :paramtype time_zone_id: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(PrestoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Presto' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Presto" # type: str self.host = host self.server_version = server_version self.catalog = catalog @@ -42920,112 +45732,122 @@ def __init__( self.encrypted_credential = encrypted_credential -class PrestoObjectDataset(Dataset): +class PrestoObjectDataset(Dataset): # pylint: disable=too-many-instance-attributes """Presto server dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
- :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :vartype table_name: any + :vartype table_name: JSON :ivar table: The table name of the Presto. Type: string (or Expression with resultType string). - :vartype table: any + :vartype table: JSON :ivar schema_type_properties_schema: The schema name of the Presto. Type: string (or Expression with resultType string). - :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, - 
table: Optional[Any] = None, - schema_type_properties_schema: Optional[Any] = None, + table_name: Optional[JSON] = None, + table: Optional[JSON] = None, + schema_type_properties_schema: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: This property will be retired. Please consider using schema + table properties instead. - :paramtype table_name: any + :paramtype table_name: JSON :keyword table: The table name of the Presto. Type: string (or Expression with resultType string). - :paramtype table: any + :paramtype table: JSON :keyword schema_type_properties_schema: The schema name of the Presto. Type: string (or Expression with resultType string). - :paramtype schema_type_properties_schema: any - """ - super(PrestoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'PrestoObject' # type: str + :paramtype schema_type_properties_schema: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "PrestoObject" # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -43038,93 +45860,102 @@ class PrestoSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'PrestoSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "PrestoSource" # type: str self.query = query -class PrivateEndpoint(msrest.serialization.Model): +class PrivateEndpoint(_serialization.Model): """Private endpoint which a connection belongs to. :ivar id: The resource Id for private endpoint. @@ -43132,57 +45963,48 @@ class PrivateEndpoint(msrest.serialization.Model): """ _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, } - def __init__( - self, - *, - id: Optional[str] = None, - **kwargs - ): + def __init__(self, *, id: Optional[str] = None, **kwargs): # pylint: disable=redefined-builtin """ :keyword id: The resource Id for private endpoint. :paramtype id: str """ - super(PrivateEndpoint, self).__init__(**kwargs) + super().__init__(**kwargs) self.id = id -class PrivateEndpointConnectionListResponse(msrest.serialization.Model): +class PrivateEndpointConnectionListResponse(_serialization.Model): """A list of linked service resources. All required parameters must be populated in order to send to Azure. - :ivar value: Required. List of Private Endpoint Connections. + :ivar value: List of Private Endpoint Connections. Required. 
:vartype value: list[~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource] :ivar next_link: The link to the next page of results, if any remaining results exist. :vartype next_link: str """ _validation = { - 'value': {'required': True}, + "value": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[PrivateEndpointConnectionResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[PrivateEndpointConnectionResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, } def __init__( - self, - *, - value: List["_models.PrivateEndpointConnectionResource"], - next_link: Optional[str] = None, - **kwargs + self, *, value: List["_models.PrivateEndpointConnectionResource"], next_link: Optional[str] = None, **kwargs ): """ - :keyword value: Required. List of Private Endpoint Connections. + :keyword value: List of Private Endpoint Connections. Required. :paramtype value: list[~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource] :keyword next_link: The link to the next page of results, if any remaining results exist. :paramtype next_link: str """ - super(PrivateEndpointConnectionListResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.next_link = next_link @@ -43205,35 +46027,30 @@ class PrivateEndpointConnectionResource(SubResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "etag": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'RemotePrivateEndpointConnection'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "etag": {"key": "etag", "type": "str"}, + "properties": {"key": "properties", "type": "RemotePrivateEndpointConnection"}, } - def __init__( - self, - *, - properties: Optional["_models.RemotePrivateEndpointConnection"] = None, - **kwargs - ): + def __init__(self, *, properties: Optional["_models.RemotePrivateEndpointConnection"] = None, **kwargs): """ :keyword properties: Core resource properties. :paramtype properties: ~azure.mgmt.datafactory.models.RemotePrivateEndpointConnection """ - super(PrivateEndpointConnectionResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class PrivateLinkConnectionApprovalRequest(msrest.serialization.Model): +class PrivateLinkConnectionApprovalRequest(_serialization.Model): """A request to approve or reject a private endpoint connection. :ivar private_link_service_connection_state: The state of a private link connection. 
@@ -43244,8 +46061,11 @@ class PrivateLinkConnectionApprovalRequest(msrest.serialization.Model): """ _attribute_map = { - 'private_link_service_connection_state': {'key': 'privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'}, - 'private_endpoint': {'key': 'privateEndpoint', 'type': 'PrivateEndpoint'}, + "private_link_service_connection_state": { + "key": "privateLinkServiceConnectionState", + "type": "PrivateLinkConnectionState", + }, + "private_endpoint": {"key": "privateEndpoint", "type": "PrivateEndpoint"}, } def __init__( @@ -43262,7 +46082,7 @@ def __init__( :keyword private_endpoint: The resource of private endpoint. :paramtype private_endpoint: ~azure.mgmt.datafactory.models.PrivateEndpoint """ - super(PrivateLinkConnectionApprovalRequest, self).__init__(**kwargs) + super().__init__(**kwargs) self.private_link_service_connection_state = private_link_service_connection_state self.private_endpoint = private_endpoint @@ -43285,35 +46105,30 @@ class PrivateLinkConnectionApprovalRequestResource(SubResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "etag": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'PrivateLinkConnectionApprovalRequest'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "etag": {"key": "etag", "type": "str"}, + "properties": {"key": "properties", "type": "PrivateLinkConnectionApprovalRequest"}, } - def __init__( - self, - *, - properties: Optional["_models.PrivateLinkConnectionApprovalRequest"] = None, - **kwargs - ): + def __init__(self, *, properties: Optional["_models.PrivateLinkConnectionApprovalRequest"] = None, **kwargs): """ :keyword properties: Core resource properties. :paramtype properties: ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequest """ - super(PrivateLinkConnectionApprovalRequestResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class PrivateLinkConnectionState(msrest.serialization.Model): +class PrivateLinkConnectionState(_serialization.Model): """The state of a private link connection. :ivar status: Status of a private link connection. @@ -43325,9 +46140,9 @@ class PrivateLinkConnectionState(msrest.serialization.Model): """ _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, + "status": {"key": "status", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "actions_required": {"key": "actionsRequired", "type": "str"}, } def __init__( @@ -43346,7 +46161,7 @@ def __init__( :keyword actions_required: ActionsRequired for a private link connection. 
:paramtype actions_required: str """ - super(PrivateLinkConnectionState, self).__init__(**kwargs) + super().__init__(**kwargs) self.status = status self.description = description self.actions_required = actions_required @@ -43370,35 +46185,30 @@ class PrivateLinkResource(SubResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "etag": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'PrivateLinkResourceProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "etag": {"key": "etag", "type": "str"}, + "properties": {"key": "properties", "type": "PrivateLinkResourceProperties"}, } - def __init__( - self, - *, - properties: Optional["_models.PrivateLinkResourceProperties"] = None, - **kwargs - ): + def __init__(self, *, properties: Optional["_models.PrivateLinkResourceProperties"] = None, **kwargs): """ :keyword properties: Core resource properties. :paramtype properties: ~azure.mgmt.datafactory.models.PrivateLinkResourceProperties """ - super(PrivateLinkResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class PrivateLinkResourceProperties(msrest.serialization.Model): +class PrivateLinkResourceProperties(_serialization.Model): """Properties of a private link resource. Variables are only populated by the server, and will be ignored when sending a request. @@ -43412,30 +46222,26 @@ class PrivateLinkResourceProperties(msrest.serialization.Model): """ _validation = { - 'group_id': {'readonly': True}, - 'required_members': {'readonly': True}, - 'required_zone_names': {'readonly': True}, + "group_id": {"readonly": True}, + "required_members": {"readonly": True}, + "required_zone_names": {"readonly": True}, } _attribute_map = { - 'group_id': {'key': 'groupId', 'type': 'str'}, - 'required_members': {'key': 'requiredMembers', 'type': '[str]'}, - 'required_zone_names': {'key': 'requiredZoneNames', 'type': '[str]'}, + "group_id": {"key": "groupId", "type": "str"}, + "required_members": {"key": "requiredMembers", "type": "[str]"}, + "required_zone_names": {"key": "requiredZoneNames", "type": "[str]"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(PrivateLinkResourceProperties, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.group_id = None self.required_members = None self.required_zone_names = None -class PrivateLinkResourcesWrapper(msrest.serialization.Model): +class PrivateLinkResourcesWrapper(_serialization.Model): """Wrapper for a collection of private link resources. All required parameters must be populated in order to send to Azure. 
@@ -43445,28 +46251,23 @@ class PrivateLinkResourcesWrapper(msrest.serialization.Model): """ _validation = { - 'value': {'required': True}, + "value": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, + "value": {"key": "value", "type": "[PrivateLinkResource]"}, } - def __init__( - self, - *, - value: List["_models.PrivateLinkResource"], - **kwargs - ): + def __init__(self, *, value: List["_models.PrivateLinkResource"], **kwargs): """ :keyword value: Required. :paramtype value: list[~azure.mgmt.datafactory.models.PrivateLinkResource] """ - super(PrivateLinkResourcesWrapper, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value -class PurviewConfiguration(msrest.serialization.Model): +class PurviewConfiguration(_serialization.Model): """Purview configuration. :ivar purview_resource_id: Purview resource id. @@ -43474,24 +46275,19 @@ class PurviewConfiguration(msrest.serialization.Model): """ _attribute_map = { - 'purview_resource_id': {'key': 'purviewResourceId', 'type': 'str'}, + "purview_resource_id": {"key": "purviewResourceId", "type": "str"}, } - def __init__( - self, - *, - purview_resource_id: Optional[str] = None, - **kwargs - ): + def __init__(self, *, purview_resource_id: Optional[str] = None, **kwargs): """ :keyword purview_resource_id: Purview resource id. :paramtype purview_resource_id: str """ - super(PurviewConfiguration, self).__init__(**kwargs) + super().__init__(**kwargs) self.purview_resource_id = purview_resource_id -class QueryDataFlowDebugSessionsResponse(msrest.serialization.Model): +class QueryDataFlowDebugSessionsResponse(_serialization.Model): """A list of active debug sessions. :ivar value: Array with all active debug sessions. @@ -43501,8 +46297,8 @@ class QueryDataFlowDebugSessionsResponse(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': '[DataFlowDebugSessionInfo]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[DataFlowDebugSessionInfo]"}, + "next_link": {"key": "nextLink", "type": "str"}, } def __init__( @@ -43518,7 +46314,7 @@ def __init__( :keyword next_link: The link to the next page of results, if any remaining results exist. :paramtype next_link: str """ - super(QueryDataFlowDebugSessionsResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.next_link = next_link @@ -43530,8 +46326,8 @@ class QuickbaseLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -43540,53 +46336,53 @@ class QuickbaseLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar url: Required. The url to connect Quickbase source. Type: string (or Expression with - resultType string). - :vartype url: any - :ivar user_token: Required. The user token for the Quickbase source. 
+ :vartype annotations: list[JSON] + :ivar url: The url to connect Quickbase source. Type: string (or Expression with resultType + string). Required. + :vartype url: JSON + :ivar user_token: The user token for the Quickbase source. Required. :vartype user_token: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - 'user_token': {'required': True}, + "type": {"required": True}, + "url": {"required": True}, + "user_token": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'user_token': {'key': 'typeProperties.userToken', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "url": {"key": "typeProperties.url", "type": "object"}, + "user_token": {"key": "typeProperties.userToken", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - url: Any, + url: JSON, user_token: "_models.SecretBase", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - encrypted_credential: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -43594,33 +46390,40 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword url: Required. The url to connect Quickbase source. Type: string (or Expression with - resultType string). - :paramtype url: any - :keyword user_token: Required. The user token for the Quickbase source. 
+ :paramtype annotations: list[JSON] + :keyword url: The url to connect Quickbase source. Type: string (or Expression with resultType + string). Required. + :paramtype url: JSON + :keyword user_token: The user token for the Quickbase source. Required. :paramtype user_token: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(QuickbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Quickbase' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Quickbase" # type: str self.url = url self.user_token = user_token self.encrypted_credential = encrypted_credential -class QuickBooksLinkedService(LinkedService): +class QuickBooksLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """QuickBooks server linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -43629,16 +46432,16 @@ class QuickBooksLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_properties: Properties used to connect to QuickBooks. It is mutually exclusive with any other properties in the linked service. Type: object. - :vartype connection_properties: any + :vartype connection_properties: JSON :ivar endpoint: The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com). - :vartype endpoint: any + :vartype endpoint: JSON :ivar company_id: The company ID of the QuickBooks company to authorize. - :vartype company_id: any + :vartype company_id: JSON :ivar consumer_key: The consumer key for OAuth 1.0 authentication. - :vartype consumer_key: any + :vartype consumer_key: JSON :ivar consumer_secret: The consumer secret for OAuth 1.0 authentication. :vartype consumer_secret: ~azure.mgmt.datafactory.models.SecretBase :ivar access_token: The access token for OAuth 1.0 authentication. @@ -43647,58 +46450,58 @@ class QuickBooksLinkedService(LinkedService): :vartype access_token_secret: ~azure.mgmt.datafactory.models.SecretBase :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :vartype use_encrypted_endpoints: any + :vartype use_encrypted_endpoints: JSON :ivar encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, - 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, - 'consumer_secret': {'key': 'typeProperties.consumerSecret', 'type': 'SecretBase'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'access_token_secret': {'key': 'typeProperties.accessTokenSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_properties": {"key": "typeProperties.connectionProperties", "type": "object"}, + "endpoint": {"key": "typeProperties.endpoint", "type": "object"}, + "company_id": {"key": "typeProperties.companyId", "type": "object"}, + "consumer_key": {"key": "typeProperties.consumerKey", "type": "object"}, + "consumer_secret": {"key": "typeProperties.consumerSecret", "type": "SecretBase"}, + "access_token": {"key": "typeProperties.accessToken", "type": "SecretBase"}, + "access_token_secret": {"key": "typeProperties.accessTokenSecret", "type": "SecretBase"}, + "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_properties: Optional[Any] = None, - endpoint: Optional[Any] = None, - company_id: Optional[Any] = None, - consumer_key: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_properties: Optional[JSON] = None, + endpoint: Optional[JSON] = None, + company_id: Optional[JSON] = None, + consumer_key: Optional[JSON] = None, consumer_secret: Optional["_models.SecretBase"] = None, access_token: Optional["_models.SecretBase"] = None, access_token_secret: Optional["_models.SecretBase"] = None, - use_encrypted_endpoints: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + 
use_encrypted_endpoints: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -43706,16 +46509,16 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_properties: Properties used to connect to QuickBooks. It is mutually exclusive with any other properties in the linked service. Type: object. - :paramtype connection_properties: any + :paramtype connection_properties: JSON :keyword endpoint: The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com). - :paramtype endpoint: any + :paramtype endpoint: JSON :keyword company_id: The company ID of the QuickBooks company to authorize. - :paramtype company_id: any + :paramtype company_id: JSON :keyword consumer_key: The consumer key for OAuth 1.0 authentication. - :paramtype consumer_key: any + :paramtype consumer_key: JSON :keyword consumer_secret: The consumer secret for OAuth 1.0 authentication. :paramtype consumer_secret: ~azure.mgmt.datafactory.models.SecretBase :keyword access_token: The access token for OAuth 1.0 authentication. @@ -43724,14 +46527,21 @@ def __init__( :paramtype access_token_secret: ~azure.mgmt.datafactory.models.SecretBase :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :paramtype use_encrypted_endpoints: any + :paramtype use_encrypted_endpoints: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(QuickBooksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'QuickBooks' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "QuickBooks" # type: str self.connection_properties = connection_properties self.endpoint = endpoint self.company_id = company_id @@ -43750,88 +46560,98 @@ class QuickBooksObjectDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. Type: string (or Expression with resultType string). - :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(QuickBooksObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'QuickBooksObject' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "QuickBooksObject" # type: str self.table_name = table_name @@ -43842,98 +46662,107 @@ class QuickBooksSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'QuickBooksSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "QuickBooksSource" # type: str self.query = query -class RecurrenceSchedule(msrest.serialization.Model): +class RecurrenceSchedule(_serialization.Model): """The recurrence schedule. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar minutes: The minutes. :vartype minutes: list[int] :ivar hours: The hours. @@ -43947,18 +46776,18 @@ class RecurrenceSchedule(msrest.serialization.Model): """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'minutes': {'key': 'minutes', 'type': '[int]'}, - 'hours': {'key': 'hours', 'type': '[int]'}, - 'week_days': {'key': 'weekDays', 'type': '[str]'}, - 'month_days': {'key': 'monthDays', 'type': '[int]'}, - 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, + "additional_properties": {"key": "", "type": "{object}"}, + "minutes": {"key": "minutes", "type": "[int]"}, + "hours": {"key": "hours", "type": "[int]"}, + "week_days": {"key": "weekDays", "type": "[str]"}, + "month_days": {"key": "monthDays", "type": "[int]"}, + "monthly_occurrences": {"key": "monthlyOccurrences", "type": "[RecurrenceScheduleOccurrence]"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, minutes: Optional[List[int]] = None, hours: Optional[List[int]] = None, week_days: Optional[List[Union[str, "_models.DaysOfWeek"]]] = None, @@ -43969,7 +46798,7 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword minutes: The minutes. :paramtype minutes: list[int] :keyword hours: The hours. 
@@ -43982,7 +46811,7 @@ def __init__( :paramtype monthly_occurrences: list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] """ - super(RecurrenceSchedule, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.minutes = minutes self.hours = hours @@ -43991,29 +46820,29 @@ def __init__( self.monthly_occurrences = monthly_occurrences -class RecurrenceScheduleOccurrence(msrest.serialization.Model): +class RecurrenceScheduleOccurrence(_serialization.Model): """The recurrence schedule occurrence. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar day: The day of the week. Known values are: "Sunday", "Monday", "Tuesday", "Wednesday", - "Thursday", "Friday", "Saturday". + "Thursday", "Friday", and "Saturday". :vartype day: str or ~azure.mgmt.datafactory.models.DayOfWeek :ivar occurrence: The occurrence. :vartype occurrence: int """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'day': {'key': 'day', 'type': 'str'}, - 'occurrence': {'key': 'occurrence', 'type': 'int'}, + "additional_properties": {"key": "", "type": "{object}"}, + "day": {"key": "day", "type": "str"}, + "occurrence": {"key": "occurrence", "type": "int"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, day: Optional[Union[str, "_models.DayOfWeek"]] = None, occurrence: Optional[int] = None, **kwargs @@ -44021,115 +46850,109 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword day: The day of the week. Known values are: "Sunday", "Monday", "Tuesday", - "Wednesday", "Thursday", "Friday", "Saturday". + "Wednesday", "Thursday", "Friday", and "Saturday". :paramtype day: str or ~azure.mgmt.datafactory.models.DayOfWeek :keyword occurrence: The occurrence. :paramtype occurrence: int """ - super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.day = day self.occurrence = occurrence -class RedirectIncompatibleRowSettings(msrest.serialization.Model): +class RedirectIncompatibleRowSettings(_serialization.Model): """Redirect incompatible row settings. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar linked_service_name: Required. Name of the Azure Storage, Storage SAS, or Azure Data Lake - Store linked service used for redirecting incompatible row. Must be specified if + :vartype additional_properties: dict[str, JSON] + :ivar linked_service_name: Name of the Azure Storage, Storage SAS, or Azure Data Lake Store + linked service used for redirecting incompatible row. Must be specified if redirectIncompatibleRowSettings is specified. Type: string (or Expression with resultType - string). - :vartype linked_service_name: any + string). Required. + :vartype linked_service_name: JSON :ivar path: The path for storing the redirect incompatible row data. Type: string (or Expression with resultType string). 
- :vartype path: any + :vartype path: JSON """ _validation = { - 'linked_service_name': {'required': True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'object'}, - 'path': {'key': 'path', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "linked_service_name": {"key": "linkedServiceName", "type": "object"}, + "path": {"key": "path", "type": "object"}, } def __init__( self, *, - linked_service_name: Any, - additional_properties: Optional[Dict[str, Any]] = None, - path: Optional[Any] = None, + linked_service_name: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, + path: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword linked_service_name: Required. Name of the Azure Storage, Storage SAS, or Azure Data - Lake Store linked service used for redirecting incompatible row. Must be specified if + :paramtype additional_properties: dict[str, JSON] + :keyword linked_service_name: Name of the Azure Storage, Storage SAS, or Azure Data Lake Store + linked service used for redirecting incompatible row. Must be specified if redirectIncompatibleRowSettings is specified. Type: string (or Expression with resultType - string). - :paramtype linked_service_name: any + string). Required. + :paramtype linked_service_name: JSON :keyword path: The path for storing the redirect incompatible row data. Type: string (or Expression with resultType string). - :paramtype path: any + :paramtype path: JSON """ - super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.linked_service_name = linked_service_name self.path = path -class RedshiftUnloadSettings(msrest.serialization.Model): +class RedshiftUnloadSettings(_serialization.Model): """The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. All required parameters must be populated in order to send to Azure. - :ivar s3_linked_service_name: Required. The name of the Amazon S3 linked service which will be - used for the unload operation when copying from the Amazon Redshift source. + :ivar s3_linked_service_name: The name of the Amazon S3 linked service which will be used for + the unload operation when copying from the Amazon Redshift source. Required. :vartype s3_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :ivar bucket_name: Required. The bucket of the interim Amazon S3 which will be used to store - the unloaded data from Amazon Redshift source. The bucket must be in the same region as the - Amazon Redshift source. Type: string (or Expression with resultType string). - :vartype bucket_name: any + :ivar bucket_name: The bucket of the interim Amazon S3 which will be used to store the unloaded + data from Amazon Redshift source. The bucket must be in the same region as the Amazon Redshift + source. Type: string (or Expression with resultType string). Required. 
+ :vartype bucket_name: JSON """ _validation = { - 's3_linked_service_name': {'required': True}, - 'bucket_name': {'required': True}, + "s3_linked_service_name": {"required": True}, + "bucket_name": {"required": True}, } _attribute_map = { - 's3_linked_service_name': {'key': 's3LinkedServiceName', 'type': 'LinkedServiceReference'}, - 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + "s3_linked_service_name": {"key": "s3LinkedServiceName", "type": "LinkedServiceReference"}, + "bucket_name": {"key": "bucketName", "type": "object"}, } - def __init__( - self, - *, - s3_linked_service_name: "_models.LinkedServiceReference", - bucket_name: Any, - **kwargs - ): + def __init__(self, *, s3_linked_service_name: "_models.LinkedServiceReference", bucket_name: JSON, **kwargs): """ - :keyword s3_linked_service_name: Required. The name of the Amazon S3 linked service which will - be used for the unload operation when copying from the Amazon Redshift source. + :keyword s3_linked_service_name: The name of the Amazon S3 linked service which will be used + for the unload operation when copying from the Amazon Redshift source. Required. :paramtype s3_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :keyword bucket_name: Required. The bucket of the interim Amazon S3 which will be used to store - the unloaded data from Amazon Redshift source. The bucket must be in the same region as the - Amazon Redshift source. Type: string (or Expression with resultType string). - :paramtype bucket_name: any + :keyword bucket_name: The bucket of the interim Amazon S3 which will be used to store the + unloaded data from Amazon Redshift source. The bucket must be in the same region as the Amazon + Redshift source. Type: string (or Expression with resultType string). Required. + :paramtype bucket_name: JSON """ - super(RedshiftUnloadSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.s3_linked_service_name = s3_linked_service_name self.bucket_name = bucket_name @@ -44141,79 +46964,86 @@ class RelationalSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query: Database query. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query: Optional[Any] = None, - additional_columns: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query: Database query. Type: string (or Expression with resultType string). - :paramtype query: any + :paramtype query: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :paramtype additional_columns: any - """ - super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'RelationalSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "RelationalSource" # type: str self.query = query self.additional_columns = additional_columns @@ -44225,94 +47055,104 @@ class RelationalTableDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The relational table name. Type: string (or Expression with resultType string). 
- :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The relational table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(RelationalTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'RelationalTable' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "RelationalTable" # type: str self.table_name = table_name -class RemotePrivateEndpointConnection(msrest.serialization.Model): +class RemotePrivateEndpointConnection(_serialization.Model): """A remote private endpoint connection. Variables are only populated by the server, and will be ignored when sending a request. @@ -44327,13 +47167,16 @@ class RemotePrivateEndpointConnection(msrest.serialization.Model): """ _validation = { - 'provisioning_state': {'readonly': True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'private_endpoint': {'key': 'privateEndpoint', 'type': 'ArmIdWrapper'}, - 'private_link_service_connection_state': {'key': 'privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "private_endpoint": {"key": "privateEndpoint", "type": "ArmIdWrapper"}, + "private_link_service_connection_state": { + "key": "privateLinkServiceConnectionState", + "type": "PrivateLinkConnectionState", + }, } def __init__( @@ -44350,7 +47193,7 @@ def __init__( :paramtype private_link_service_connection_state: ~azure.mgmt.datafactory.models.PrivateLinkConnectionState """ - super(RemotePrivateEndpointConnection, self).__init__(**kwargs) + super().__init__(**kwargs) self.provisioning_state = None self.private_endpoint = private_endpoint self.private_link_service_connection_state = private_link_service_connection_state @@ -44365,99 +47208,101 @@ class RerunTumblingWindowTrigger(Trigger): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Trigger type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Trigger type. Required. :vartype type: str :ivar description: Trigger description. :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Known values are: "Started", "Stopped", "Disabled". + called on the Trigger. Known values are: "Started", "Stopped", and "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :ivar annotations: List of tags that can be used for describing the trigger. - :vartype annotations: list[any] - :ivar parent_trigger: Required. The parent trigger reference. - :vartype parent_trigger: any - :ivar requested_start_time: Required. The start time for the time period for which restatement - is initiated. Only UTC time is currently supported. 
+ :vartype annotations: list[JSON] + :ivar parent_trigger: The parent trigger reference. Required. + :vartype parent_trigger: JSON + :ivar requested_start_time: The start time for the time period for which restatement is + initiated. Only UTC time is currently supported. Required. :vartype requested_start_time: ~datetime.datetime - :ivar requested_end_time: Required. The end time for the time period for which restatement is - initiated. Only UTC time is currently supported. + :ivar requested_end_time: The end time for the time period for which restatement is initiated. + Only UTC time is currently supported. Required. :vartype requested_end_time: ~datetime.datetime - :ivar rerun_concurrency: Required. The max number of parallel time windows (ready for - execution) for which a rerun is triggered. + :ivar rerun_concurrency: The max number of parallel time windows (ready for execution) for + which a rerun is triggered. Required. :vartype rerun_concurrency: int """ _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, - 'parent_trigger': {'required': True}, - 'requested_start_time': {'required': True}, - 'requested_end_time': {'required': True}, - 'rerun_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + "type": {"required": True}, + "runtime_state": {"readonly": True}, + "parent_trigger": {"required": True}, + "requested_start_time": {"required": True}, + "requested_end_time": {"required": True}, + "rerun_concurrency": {"required": True, "maximum": 50, "minimum": 1}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, - 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, - 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, - 'rerun_concurrency': {'key': 'typeProperties.rerunConcurrency', 'type': 'int'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "runtime_state": {"key": "runtimeState", "type": "str"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "parent_trigger": {"key": "typeProperties.parentTrigger", "type": "object"}, + "requested_start_time": {"key": "typeProperties.requestedStartTime", "type": "iso-8601"}, + "requested_end_time": {"key": "typeProperties.requestedEndTime", "type": "iso-8601"}, + "rerun_concurrency": {"key": "typeProperties.rerunConcurrency", "type": "int"}, } def __init__( self, *, - parent_trigger: Any, + parent_trigger: JSON, requested_start_time: datetime.datetime, requested_end_time: datetime.datetime, rerun_concurrency: int, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Trigger description. 
:paramtype description: str :keyword annotations: List of tags that can be used for describing the trigger. - :paramtype annotations: list[any] - :keyword parent_trigger: Required. The parent trigger reference. - :paramtype parent_trigger: any - :keyword requested_start_time: Required. The start time for the time period for which - restatement is initiated. Only UTC time is currently supported. + :paramtype annotations: list[JSON] + :keyword parent_trigger: The parent trigger reference. Required. + :paramtype parent_trigger: JSON + :keyword requested_start_time: The start time for the time period for which restatement is + initiated. Only UTC time is currently supported. Required. :paramtype requested_start_time: ~datetime.datetime - :keyword requested_end_time: Required. The end time for the time period for which restatement - is initiated. Only UTC time is currently supported. + :keyword requested_end_time: The end time for the time period for which restatement is + initiated. Only UTC time is currently supported. Required. :paramtype requested_end_time: ~datetime.datetime - :keyword rerun_concurrency: Required. The max number of parallel time windows (ready for - execution) for which a rerun is triggered. + :keyword rerun_concurrency: The max number of parallel time windows (ready for execution) for + which a rerun is triggered. Required. :paramtype rerun_concurrency: int """ - super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.type = 'RerunTumblingWindowTrigger' # type: str + super().__init__( + additional_properties=additional_properties, description=description, annotations=annotations, **kwargs + ) + self.type = "RerunTumblingWindowTrigger" # type: str self.parent_trigger = parent_trigger self.requested_start_time = requested_start_time self.requested_end_time = requested_end_time self.rerun_concurrency = rerun_concurrency -class ResponsysLinkedService(LinkedService): +class ResponsysLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Responsys linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -44466,75 +47311,75 @@ class ResponsysLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar endpoint: Required. The endpoint of the Responsys server. - :vartype endpoint: any - :ivar client_id: Required. The client ID associated with the Responsys application. Type: - string (or Expression with resultType string). - :vartype client_id: any + :vartype annotations: list[JSON] + :ivar endpoint: The endpoint of the Responsys server. Required. + :vartype endpoint: JSON + :ivar client_id: The client ID associated with the Responsys application. Type: string (or + Expression with resultType string). Required. 
+ :vartype client_id: JSON :ivar client_secret: The client secret associated with the Responsys application. Type: string (or Expression with resultType string). :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :vartype use_encrypted_endpoints: any + :vartype use_encrypted_endpoints: JSON :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :vartype use_host_verification: any + :vartype use_host_verification: JSON :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :vartype use_peer_verification: any + :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'client_id': {'required': True}, + "type": {"required": True}, + "endpoint": {"required": True}, + "client_id": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "endpoint": {"key": "typeProperties.endpoint", "type": "object"}, + "client_id": {"key": "typeProperties.clientId", "type": "object"}, + "client_secret": {"key": "typeProperties.clientSecret", "type": "SecretBase"}, + "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, + "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, + "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, 
- endpoint: Any, - client_id: Any, - additional_properties: Optional[Dict[str, Any]] = None, + endpoint: JSON, + client_id: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, client_secret: Optional["_models.SecretBase"] = None, - use_encrypted_endpoints: Optional[Any] = None, - use_host_verification: Optional[Any] = None, - use_peer_verification: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + use_encrypted_endpoints: Optional[JSON] = None, + use_host_verification: Optional[JSON] = None, + use_peer_verification: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -44542,33 +47387,40 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword endpoint: Required. The endpoint of the Responsys server. - :paramtype endpoint: any - :keyword client_id: Required. The client ID associated with the Responsys application. Type: - string (or Expression with resultType string). - :paramtype client_id: any + :paramtype annotations: list[JSON] + :keyword endpoint: The endpoint of the Responsys server. Required. + :paramtype endpoint: JSON + :keyword client_id: The client ID associated with the Responsys application. Type: string (or + Expression with resultType string). Required. + :paramtype client_id: JSON :keyword client_secret: The client secret associated with the Responsys application. Type: string (or Expression with resultType string). :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :paramtype use_encrypted_endpoints: any + :paramtype use_encrypted_endpoints: JSON :keyword use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :paramtype use_host_verification: any + :paramtype use_host_verification: JSON :keyword use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :paramtype use_peer_verification: any + :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :paramtype encrypted_credential: any - """ - super(ResponsysLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Responsys' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Responsys" # type: str self.endpoint = endpoint self.client_id = client_id self.client_secret = client_secret @@ -44585,88 +47437,98 @@ class ResponsysObjectDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. Type: string (or Expression with resultType string). 
- :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(ResponsysObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'ResponsysObject' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "ResponsysObject" # type: str self.table_name = table_name @@ -44677,215 +47539,234 @@ class ResponsysSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'ResponsysSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "ResponsysSource" # type: str self.query = query -class RestResourceDataset(Dataset): +class RestResourceDataset(Dataset): # pylint: disable=too-many-instance-attributes """A Rest service dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar relative_url: The relative URL to the resource that the RESTful API provides. Type: string (or Expression with resultType string). - :vartype relative_url: any + :vartype relative_url: JSON :ivar request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). - :vartype request_method: any + :vartype request_method: JSON :ivar request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). - :vartype request_body: any + :vartype request_body: JSON :ivar additional_headers: The additional HTTP headers in the request to the RESTful API. 
Type: string (or Expression with resultType string). - :vartype additional_headers: any + :vartype additional_headers: JSON :ivar pagination_rules: The pagination rules to compose next page requests. Type: string (or Expression with resultType string). - :vartype pagination_rules: any + :vartype pagination_rules: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, - 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, - 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, - 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "relative_url": {"key": "typeProperties.relativeUrl", "type": "object"}, + "request_method": {"key": "typeProperties.requestMethod", "type": "object"}, + "request_body": {"key": "typeProperties.requestBody", "type": "object"}, + "additional_headers": {"key": "typeProperties.additionalHeaders", "type": "object"}, + "pagination_rules": {"key": "typeProperties.paginationRules", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - relative_url: Optional[Any] = None, - request_method: Optional[Any] = None, - request_body: Optional[Any] = None, - additional_headers: Optional[Any] = None, - pagination_rules: Optional[Any] = None, + relative_url: Optional[JSON] = None, + request_method: Optional[JSON] = None, + request_body: Optional[JSON] = None, + additional_headers: Optional[JSON] = None, + pagination_rules: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword relative_url: The relative URL to the resource that the RESTful API provides. Type: string (or Expression with resultType string). - :paramtype relative_url: any + :paramtype relative_url: JSON :keyword request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). - :paramtype request_method: any + :paramtype request_method: JSON :keyword request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). - :paramtype request_body: any + :paramtype request_body: JSON :keyword additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - :paramtype additional_headers: any + :paramtype additional_headers: JSON :keyword pagination_rules: The pagination rules to compose next page requests. Type: string (or Expression with resultType string). - :paramtype pagination_rules: any - """ - super(RestResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'RestResource' # type: str + :paramtype pagination_rules: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "RestResource" # type: str self.relative_url = relative_url self.request_method = request_method self.request_body = request_body @@ -44893,15 +47774,15 @@ def __init__( self.pagination_rules = pagination_rules -class RestServiceLinkedService(LinkedService): +class RestServiceLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Rest Service linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. 
Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -44910,127 +47791,130 @@ class RestServiceLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar url: Required. The base URL of the REST service. - :vartype url: any + :vartype annotations: list[JSON] + :ivar url: The base URL of the REST service. Required. + :vartype url: JSON :ivar enable_server_certificate_validation: Whether to validate server side SSL certificate when connecting to the endpoint.The default value is true. Type: boolean (or Expression with resultType boolean). - :vartype enable_server_certificate_validation: any - :ivar authentication_type: Required. Type of authentication used to connect to the REST - service. Known values are: "Anonymous", "Basic", "AadServicePrincipal", - "ManagedServiceIdentity", "OAuth2ClientCredential". + :vartype enable_server_certificate_validation: JSON + :ivar authentication_type: Type of authentication used to connect to the REST service. + Required. Known values are: "Anonymous", "Basic", "AadServicePrincipal", + "ManagedServiceIdentity", and "OAuth2ClientCredential". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.RestServiceAuthenticationType :ivar user_name: The user name used in Basic authentication type. - :vartype user_name: any + :vartype user_name: JSON :ivar password: The password used in Basic authentication type. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). - :vartype auth_headers: any + :vartype auth_headers: JSON :ivar service_principal_id: The application's client ID used in AadServicePrincipal authentication type. - :vartype service_principal_id: any + :vartype service_principal_id: JSON :ivar service_principal_key: The application's key used in AadServicePrincipal authentication type. :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :ivar tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which your application resides. - :vartype tenant: any + :vartype tenant: JSON :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :vartype azure_cloud_type: any + :vartype azure_cloud_type: JSON :ivar aad_resource_id: The resource you are requesting authorization to use. - :vartype aad_resource_id: any + :vartype aad_resource_id: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON :ivar credential: The credential reference containing authentication information. 
:vartype credential: ~azure.mgmt.datafactory.models.CredentialReference :ivar client_id: The client ID associated with your application. Type: string (or Expression with resultType string). - :vartype client_id: any + :vartype client_id: JSON :ivar client_secret: The client secret associated with your application. :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase :ivar token_endpoint: The token endpoint of the authorization server to acquire access token. Type: string (or Expression with resultType string). - :vartype token_endpoint: any + :vartype token_endpoint: JSON :ivar resource: The target service or resource to which the access will be requested. Type: string (or Expression with resultType string). - :vartype resource: any + :vartype resource: JSON :ivar scope: The scope of the access required. It describes what kind of access will be requested. Type: string (or Expression with resultType string). - :vartype scope: any + :vartype scope: JSON """ _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - 'authentication_type': {'required': True}, + "type": {"required": True}, + "url": {"required": True}, + "authentication_type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'auth_headers': {'key': 'typeProperties.authHeaders', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'token_endpoint': {'key': 'typeProperties.tokenEndpoint', 'type': 'object'}, - 'resource': {'key': 'typeProperties.resource', 'type': 'object'}, - 'scope': {'key': 'typeProperties.scope', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "url": {"key": "typeProperties.url", "type": "object"}, + "enable_server_certificate_validation": { + "key": 
"typeProperties.enableServerCertificateValidation", + "type": "object", + }, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "auth_headers": {"key": "typeProperties.authHeaders", "type": "object"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, + "tenant": {"key": "typeProperties.tenant", "type": "object"}, + "azure_cloud_type": {"key": "typeProperties.azureCloudType", "type": "object"}, + "aad_resource_id": {"key": "typeProperties.aadResourceId", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, + "client_id": {"key": "typeProperties.clientId", "type": "object"}, + "client_secret": {"key": "typeProperties.clientSecret", "type": "SecretBase"}, + "token_endpoint": {"key": "typeProperties.tokenEndpoint", "type": "object"}, + "resource": {"key": "typeProperties.resource", "type": "object"}, + "scope": {"key": "typeProperties.scope", "type": "object"}, } - def __init__( + def __init__( # pylint: disable=too-many-locals self, *, - url: Any, + url: JSON, authentication_type: Union[str, "_models.RestServiceAuthenticationType"], - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - enable_server_certificate_validation: Optional[Any] = None, - user_name: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + enable_server_certificate_validation: Optional[JSON] = None, + user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - auth_headers: Optional[Any] = None, - service_principal_id: Optional[Any] = None, + auth_headers: Optional[JSON] = None, + service_principal_id: Optional[JSON] = None, service_principal_key: Optional["_models.SecretBase"] = None, - tenant: Optional[Any] = None, - azure_cloud_type: Optional[Any] = None, - aad_resource_id: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + tenant: Optional[JSON] = None, + azure_cloud_type: Optional[JSON] = None, + aad_resource_id: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, credential: Optional["_models.CredentialReference"] = None, - client_id: Optional[Any] = None, + client_id: Optional[JSON] = None, client_secret: Optional["_models.SecretBase"] = None, - token_endpoint: Optional[Any] = None, - resource: Optional[Any] = None, - scope: Optional[Any] = None, + token_endpoint: Optional[JSON] = None, + resource: Optional[JSON] = None, + scope: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
@@ -45038,63 +47922,70 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword url: Required. The base URL of the REST service. - :paramtype url: any + :paramtype annotations: list[JSON] + :keyword url: The base URL of the REST service. Required. + :paramtype url: JSON :keyword enable_server_certificate_validation: Whether to validate server side SSL certificate when connecting to the endpoint.The default value is true. Type: boolean (or Expression with resultType boolean). - :paramtype enable_server_certificate_validation: any - :keyword authentication_type: Required. Type of authentication used to connect to the REST - service. Known values are: "Anonymous", "Basic", "AadServicePrincipal", - "ManagedServiceIdentity", "OAuth2ClientCredential". + :paramtype enable_server_certificate_validation: JSON + :keyword authentication_type: Type of authentication used to connect to the REST service. + Required. Known values are: "Anonymous", "Basic", "AadServicePrincipal", + "ManagedServiceIdentity", and "OAuth2ClientCredential". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.RestServiceAuthenticationType :keyword user_name: The user name used in Basic authentication type. - :paramtype user_name: any + :paramtype user_name: JSON :keyword password: The password used in Basic authentication type. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). - :paramtype auth_headers: any + :paramtype auth_headers: JSON :keyword service_principal_id: The application's client ID used in AadServicePrincipal authentication type. - :paramtype service_principal_id: any + :paramtype service_principal_id: JSON :keyword service_principal_key: The application's key used in AadServicePrincipal authentication type. :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :keyword tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which your application resides. - :paramtype tenant: any + :paramtype tenant: JSON :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :paramtype azure_cloud_type: any + :paramtype azure_cloud_type: JSON :keyword aad_resource_id: The resource you are requesting authorization to use. - :paramtype aad_resource_id: any + :paramtype aad_resource_id: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any + :paramtype encrypted_credential: JSON :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference :keyword client_id: The client ID associated with your application. Type: string (or Expression with resultType string). 
- :paramtype client_id: any + :paramtype client_id: JSON :keyword client_secret: The client secret associated with your application. :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase :keyword token_endpoint: The token endpoint of the authorization server to acquire access token. Type: string (or Expression with resultType string). - :paramtype token_endpoint: any + :paramtype token_endpoint: JSON :keyword resource: The target service or resource to which the access will be requested. Type: string (or Expression with resultType string). - :paramtype resource: any + :paramtype resource: JSON :keyword scope: The scope of the access required. It describes what kind of access will be requested. Type: string (or Expression with resultType string). - :paramtype scope: any - """ - super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'RestService' # type: str + :paramtype scope: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "RestService" # type: str self.url = url self.enable_server_certificate_validation = enable_server_certificate_validation self.authentication_type = authentication_type @@ -45115,130 +48006,139 @@ def __init__( self.scope = scope -class RestSink(CopySink): +class RestSink(CopySink): # pylint: disable=too-many-instance-attributes """A copy activity Rest service Sink. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar request_method: The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType string). 
- :vartype request_method: any + :vartype request_method: JSON :ivar additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - :vartype additional_headers: any + :vartype additional_headers: JSON :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype http_request_timeout: any + :vartype http_request_timeout: JSON :ivar request_interval: The time to await before sending next request, in milliseconds. - :vartype request_interval: any + :vartype request_interval: JSON :ivar http_compression_type: Http Compression Type to Send data in compressed format with Optimal Compression Level, Default is None. And The Only Supported option is Gzip. - :vartype http_compression_type: any + :vartype http_compression_type: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'request_method': {'key': 'requestMethod', 'type': 'object'}, - 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - 'request_interval': {'key': 'requestInterval', 'type': 'object'}, - 'http_compression_type': {'key': 'httpCompressionType', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "request_method": {"key": "requestMethod", "type": "object"}, + "additional_headers": {"key": "additionalHeaders", "type": "object"}, + "http_request_timeout": {"key": "httpRequestTimeout", "type": "object"}, + "request_interval": {"key": "requestInterval", "type": "object"}, + "http_compression_type": {"key": "httpCompressionType", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - request_method: Optional[Any] = None, - additional_headers: Optional[Any] = None, - http_request_timeout: Optional[Any] = None, - request_interval: Optional[Any] = None, - http_compression_type: Optional[Any] = None, + 
additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + request_method: Optional[JSON] = None, + additional_headers: Optional[JSON] = None, + http_request_timeout: Optional[JSON] = None, + request_interval: Optional[JSON] = None, + http_compression_type: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword request_method: The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType string). - :paramtype request_method: any + :paramtype request_method: JSON :keyword additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - :paramtype additional_headers: any + :paramtype additional_headers: JSON :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype http_request_timeout: any + :paramtype http_request_timeout: JSON :keyword request_interval: The time to await before sending next request, in milliseconds. - :paramtype request_interval: any + :paramtype request_interval: JSON :keyword http_compression_type: Http Compression Type to Send data in compressed format with Optimal Compression Level, Default is None. And The Only Supported option is Gzip. 
- :paramtype http_compression_type: any - """ - super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'RestSink' # type: str + :paramtype http_compression_type: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "RestSink" # type: str self.request_method = request_method self.additional_headers = additional_headers self.http_request_timeout = http_request_timeout @@ -45246,130 +48146,137 @@ def __init__( self.http_compression_type = http_compression_type -class RestSource(CopySource): +class RestSource(CopySource): # pylint: disable=too-many-instance-attributes """A copy activity Rest service source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). - :vartype request_method: any + :vartype request_method: JSON :ivar request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). - :vartype request_body: any + :vartype request_body: JSON :ivar additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - :vartype additional_headers: any + :vartype additional_headers: JSON :ivar pagination_rules: The pagination rules to compose next page requests. Type: string (or Expression with resultType string). - :vartype pagination_rules: any + :vartype pagination_rules: JSON :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype http_request_timeout: any + :vartype http_request_timeout: JSON :ivar request_interval: The time to await before sending next page request. - :vartype request_interval: any + :vartype request_interval: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'request_method': {'key': 'requestMethod', 'type': 'object'}, - 'request_body': {'key': 'requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, - 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - 'request_interval': {'key': 'requestInterval', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "request_method": {"key": "requestMethod", "type": "object"}, + "request_body": {"key": "requestBody", "type": "object"}, + "additional_headers": {"key": "additionalHeaders", "type": "object"}, + "pagination_rules": {"key": "paginationRules", "type": "object"}, + "http_request_timeout": {"key": "httpRequestTimeout", "type": "object"}, + "request_interval": {"key": "requestInterval", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - request_method: Optional[Any] = None, - request_body: Optional[Any] = None, - additional_headers: Optional[Any] = None, - pagination_rules: Optional[Any] = None, - http_request_timeout: Optional[Any] = None, - request_interval: Optional[Any] = None, - additional_columns: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + request_method: Optional[JSON] = None, + request_body: Optional[JSON] = None, + additional_headers: Optional[JSON] = None, + pagination_rules: Optional[JSON] = None, + http_request_timeout: Optional[JSON] = None, + request_interval: Optional[JSON] = 
None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). - :paramtype request_method: any + :paramtype request_method: JSON :keyword request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). - :paramtype request_body: any + :paramtype request_body: JSON :keyword additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - :paramtype additional_headers: any + :paramtype additional_headers: JSON :keyword pagination_rules: The pagination rules to compose next page requests. Type: string (or Expression with resultType string). - :paramtype pagination_rules: any + :paramtype pagination_rules: JSON :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype http_request_timeout: any + :paramtype http_request_timeout: JSON :keyword request_interval: The time to await before sending next page request. - :paramtype request_interval: any + :paramtype request_interval: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :paramtype additional_columns: any - """ - super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'RestSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "RestSource" # type: str self.request_method = request_method self.request_body = request_body self.additional_headers = additional_headers @@ -45379,45 +48286,39 @@ def __init__( self.additional_columns = additional_columns -class RetryPolicy(msrest.serialization.Model): +class RetryPolicy(_serialization.Model): """Execution policy for an activity. :ivar count: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype count: any + :vartype count: JSON :ivar interval_in_seconds: Interval between retries in seconds. Default is 30. :vartype interval_in_seconds: int """ _validation = { - 'interval_in_seconds': {'maximum': 86400, 'minimum': 30}, + "interval_in_seconds": {"maximum": 86400, "minimum": 30}, } _attribute_map = { - 'count': {'key': 'count', 'type': 'object'}, - 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, + "count": {"key": "count", "type": "object"}, + "interval_in_seconds": {"key": "intervalInSeconds", "type": "int"}, } - def __init__( - self, - *, - count: Optional[Any] = None, - interval_in_seconds: Optional[int] = None, - **kwargs - ): + def __init__(self, *, count: Optional[JSON] = None, interval_in_seconds: Optional[int] = None, **kwargs): """ :keyword count: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype count: any + :paramtype count: JSON :keyword interval_in_seconds: Interval between retries in seconds. Default is 30. :paramtype interval_in_seconds: int """ - super(RetryPolicy, self).__init__(**kwargs) + super().__init__(**kwargs) self.count = count self.interval_in_seconds = interval_in_seconds -class RunFilterParameters(msrest.serialization.Model): +class RunFilterParameters(_serialization.Model): """Query parameters for listing runs. All required parameters must be populated in order to send to Azure. @@ -45425,11 +48326,11 @@ class RunFilterParameters(msrest.serialization.Model): :ivar continuation_token: The continuation token for getting the next page of results. Null for first page. :vartype continuation_token: str - :ivar last_updated_after: Required. The time at or after which the run event was updated in - 'ISO 8601' format. + :ivar last_updated_after: The time at or after which the run event was updated in 'ISO 8601' + format. Required. :vartype last_updated_after: ~datetime.datetime - :ivar last_updated_before: Required. The time at or before which the run event was updated in - 'ISO 8601' format. + :ivar last_updated_before: The time at or before which the run event was updated in 'ISO 8601' + format. Required. :vartype last_updated_before: ~datetime.datetime :ivar filters: List of filters. 
:vartype filters: list[~azure.mgmt.datafactory.models.RunQueryFilter] @@ -45438,16 +48339,16 @@ class RunFilterParameters(msrest.serialization.Model): """ _validation = { - 'last_updated_after': {'required': True}, - 'last_updated_before': {'required': True}, + "last_updated_after": {"required": True}, + "last_updated_before": {"required": True}, } _attribute_map = { - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - 'last_updated_after': {'key': 'lastUpdatedAfter', 'type': 'iso-8601'}, - 'last_updated_before': {'key': 'lastUpdatedBefore', 'type': 'iso-8601'}, - 'filters': {'key': 'filters', 'type': '[RunQueryFilter]'}, - 'order_by': {'key': 'orderBy', 'type': '[RunQueryOrderBy]'}, + "continuation_token": {"key": "continuationToken", "type": "str"}, + "last_updated_after": {"key": "lastUpdatedAfter", "type": "iso-8601"}, + "last_updated_before": {"key": "lastUpdatedBefore", "type": "iso-8601"}, + "filters": {"key": "filters", "type": "[RunQueryFilter]"}, + "order_by": {"key": "orderBy", "type": "[RunQueryOrderBy]"}, } def __init__( @@ -45464,18 +48365,18 @@ def __init__( :keyword continuation_token: The continuation token for getting the next page of results. Null for first page. :paramtype continuation_token: str - :keyword last_updated_after: Required. The time at or after which the run event was updated in - 'ISO 8601' format. + :keyword last_updated_after: The time at or after which the run event was updated in 'ISO 8601' + format. Required. :paramtype last_updated_after: ~datetime.datetime - :keyword last_updated_before: Required. The time at or before which the run event was updated - in 'ISO 8601' format. + :keyword last_updated_before: The time at or before which the run event was updated in 'ISO + 8601' format. Required. :paramtype last_updated_before: ~datetime.datetime :keyword filters: List of filters. :paramtype filters: list[~azure.mgmt.datafactory.models.RunQueryFilter] :keyword order_by: List of OrderBy option. :paramtype order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy] """ - super(RunFilterParameters, self).__init__(**kwargs) + super().__init__(**kwargs) self.continuation_token = continuation_token self.last_updated_after = last_updated_after self.last_updated_before = last_updated_before @@ -45483,35 +48384,35 @@ def __init__( self.order_by = order_by -class RunQueryFilter(msrest.serialization.Model): +class RunQueryFilter(_serialization.Model): """Query filter option for listing runs. All required parameters must be populated in order to send to Azure. - :ivar operand: Required. Parameter name to be used for filter. The allowed operands to query - pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are - ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger - runs are TriggerName, TriggerRunTimestamp and Status. Known values are: "PipelineName", + :ivar operand: Parameter name to be used for filter. The allowed operands to query pipeline + runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are ActivityName, + ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger runs are + TriggerName, TriggerRunTimestamp and Status. Required. Known values are: "PipelineName", "Status", "RunStart", "RunEnd", "ActivityName", "ActivityRunStart", "ActivityRunEnd", - "ActivityType", "TriggerName", "TriggerRunTimestamp", "RunGroupId", "LatestOnly". + "ActivityType", "TriggerName", "TriggerRunTimestamp", "RunGroupId", and "LatestOnly". 
:vartype operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand - :ivar operator: Required. Operator to be used for filter. Known values are: "Equals", - "NotEquals", "In", "NotIn". + :ivar operator: Operator to be used for filter. Required. Known values are: "Equals", + "NotEquals", "In", and "NotIn". :vartype operator: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperator - :ivar values: Required. List of filter values. + :ivar values: List of filter values. Required. :vartype values: list[str] """ _validation = { - 'operand': {'required': True}, - 'operator': {'required': True}, - 'values': {'required': True}, + "operand": {"required": True}, + "operator": {"required": True}, + "values": {"required": True}, } _attribute_map = { - 'operand': {'key': 'operand', 'type': 'str'}, - 'operator': {'key': 'operator', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[str]'}, + "operand": {"key": "operand", "type": "str"}, + "operator": {"key": "operator", "type": "str"}, + "values": {"key": "values", "type": "[str]"}, } def __init__( @@ -45523,49 +48424,49 @@ def __init__( **kwargs ): """ - :keyword operand: Required. Parameter name to be used for filter. The allowed operands to query - pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are - ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger - runs are TriggerName, TriggerRunTimestamp and Status. Known values are: "PipelineName", + :keyword operand: Parameter name to be used for filter. The allowed operands to query pipeline + runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are ActivityName, + ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger runs are + TriggerName, TriggerRunTimestamp and Status. Required. Known values are: "PipelineName", "Status", "RunStart", "RunEnd", "ActivityName", "ActivityRunStart", "ActivityRunEnd", - "ActivityType", "TriggerName", "TriggerRunTimestamp", "RunGroupId", "LatestOnly". + "ActivityType", "TriggerName", "TriggerRunTimestamp", "RunGroupId", and "LatestOnly". :paramtype operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand - :keyword operator: Required. Operator to be used for filter. Known values are: "Equals", - "NotEquals", "In", "NotIn". + :keyword operator: Operator to be used for filter. Required. Known values are: "Equals", + "NotEquals", "In", and "NotIn". :paramtype operator: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperator - :keyword values: Required. List of filter values. + :keyword values: List of filter values. Required. :paramtype values: list[str] """ - super(RunQueryFilter, self).__init__(**kwargs) + super().__init__(**kwargs) self.operand = operand self.operator = operator self.values = values -class RunQueryOrderBy(msrest.serialization.Model): +class RunQueryOrderBy(_serialization.Model): """An object to provide order by options for listing runs. All required parameters must be populated in order to send to Azure. - :ivar order_by: Required. Parameter name to be used for order by. The allowed parameters to - order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are + :ivar order_by: Parameter name to be used for order by. The allowed parameters to order by for + pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are ActivityName, ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, - TriggerRunTimestamp and Status. 
Known values are: "RunStart", "RunEnd", "PipelineName", - "Status", "ActivityName", "ActivityRunStart", "ActivityRunEnd", "TriggerName", - "TriggerRunTimestamp". + TriggerRunTimestamp and Status. Required. Known values are: "RunStart", "RunEnd", + "PipelineName", "Status", "ActivityName", "ActivityRunStart", "ActivityRunEnd", "TriggerName", + and "TriggerRunTimestamp". :vartype order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField - :ivar order: Required. Sorting order of the parameter. Known values are: "ASC", "DESC". + :ivar order: Sorting order of the parameter. Required. Known values are: "ASC" and "DESC". :vartype order: str or ~azure.mgmt.datafactory.models.RunQueryOrder """ _validation = { - 'order_by': {'required': True}, - 'order': {'required': True}, + "order_by": {"required": True}, + "order": {"required": True}, } _attribute_map = { - 'order_by': {'key': 'orderBy', 'type': 'str'}, - 'order': {'key': 'order', 'type': 'str'}, + "order_by": {"key": "orderBy", "type": "str"}, + "order": {"key": "order", "type": "str"}, } def __init__( @@ -45576,30 +48477,30 @@ def __init__( **kwargs ): """ - :keyword order_by: Required. Parameter name to be used for order by. The allowed parameters to - order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are + :keyword order_by: Parameter name to be used for order by. The allowed parameters to order by + for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are ActivityName, ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, - TriggerRunTimestamp and Status. Known values are: "RunStart", "RunEnd", "PipelineName", - "Status", "ActivityName", "ActivityRunStart", "ActivityRunEnd", "TriggerName", - "TriggerRunTimestamp". + TriggerRunTimestamp and Status. Required. Known values are: "RunStart", "RunEnd", + "PipelineName", "Status", "ActivityName", "ActivityRunStart", "ActivityRunEnd", "TriggerName", + and "TriggerRunTimestamp". :paramtype order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField - :keyword order: Required. Sorting order of the parameter. Known values are: "ASC", "DESC". + :keyword order: Sorting order of the parameter. Required. Known values are: "ASC" and "DESC". :paramtype order: str or ~azure.mgmt.datafactory.models.RunQueryOrder """ - super(RunQueryOrderBy, self).__init__(**kwargs) + super().__init__(**kwargs) self.order_by = order_by self.order = order -class SalesforceLinkedService(LinkedService): +class SalesforceLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Linked service for Salesforce. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -45608,67 +48509,67 @@ class SalesforceLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. 
- :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar environment_url: The URL of Salesforce instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). - :vartype environment_url: any + :vartype environment_url: JSON :ivar username: The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). - :vartype username: any + :vartype username: JSON :ivar password: The password for Basic authentication of the Salesforce instance. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar security_token: The security token is optional to remotely access Salesforce instance. :vartype security_token: ~azure.mgmt.datafactory.models.SecretBase :ivar api_version: The Salesforce API version used in ADF. Type: string (or Expression with resultType string). - :vartype api_version: any + :vartype api_version: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, - 'api_version': {'key': 'typeProperties.apiVersion', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "environment_url": {"key": "typeProperties.environmentUrl", "type": "object"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "security_token": {"key": "typeProperties.securityToken", "type": "SecretBase"}, + "api_version": {"key": "typeProperties.apiVersion", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - environment_url: 
Optional[Any] = None, - username: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + environment_url: Optional[JSON] = None, + username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, security_token: Optional["_models.SecretBase"] = None, - api_version: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + api_version: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -45676,29 +48577,36 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword environment_url: The URL of Salesforce instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). - :paramtype environment_url: any + :paramtype environment_url: JSON :keyword username: The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). - :paramtype username: any + :paramtype username: JSON :keyword password: The password for Basic authentication of the Salesforce instance. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword security_token: The security token is optional to remotely access Salesforce instance. :paramtype security_token: ~azure.mgmt.datafactory.models.SecretBase :keyword api_version: The Salesforce API version used in ADF. Type: string (or Expression with resultType string). - :paramtype api_version: any + :paramtype api_version: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(SalesforceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Salesforce' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Salesforce" # type: str self.environment_url = environment_url self.username = username self.password = password @@ -45707,15 +48615,15 @@ def __init__( self.encrypted_credential = encrypted_credential -class SalesforceMarketingCloudLinkedService(LinkedService): +class SalesforceMarketingCloudLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Salesforce Marketing Cloud linked service. All required parameters must be populated in order to send to Azure. 
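For reference, a minimal construction of the regenerated SalesforceLinkedService above might look like the following sketch. It assumes azure-mgmt-datafactory with these models is installed; the sandbox URL and API-version usage follow the docstring above, and the username, password and security token values are placeholders, not values from this patch.

from azure.mgmt.datafactory.models import SalesforceLinkedService, SecureString

# Sketch only: connect to a Salesforce sandbox with Basic authentication.
# All credential values below are placeholder assumptions.
salesforce_ls = SalesforceLinkedService(
    environment_url="https://test.salesforce.com",          # sandbox, per the docstring
    username="integration.user@example.com",
    password=SecureString(value="<password>"),
    security_token=SecureString(value="<security-token>"),  # optional for remote access
    api_version="47.0",
)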
:ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -45724,74 +48632,74 @@ class SalesforceMarketingCloudLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_properties: Properties used to connect to Salesforce Marketing Cloud. It is mutually exclusive with any other properties in the linked service. Type: object. - :vartype connection_properties: any + :vartype connection_properties: JSON :ivar client_id: The client ID associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). - :vartype client_id: any + :vartype client_id: JSON :ivar client_secret: The client secret associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :vartype use_encrypted_endpoints: any + :vartype use_encrypted_endpoints: JSON :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :vartype use_host_verification: any + :vartype use_host_verification: JSON :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :vartype use_peer_verification: any + :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_properties": {"key": "typeProperties.connectionProperties", "type": "object"}, + "client_id": {"key": "typeProperties.clientId", "type": "object"}, + "client_secret": {"key": "typeProperties.clientSecret", "type": "SecretBase"}, + "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, + "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, + "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_properties: Optional[Any] = None, - client_id: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_properties: Optional[JSON] = None, + client_id: Optional[JSON] = None, client_secret: Optional["_models.SecretBase"] = None, - use_encrypted_endpoints: Optional[Any] = None, - use_host_verification: Optional[Any] = None, - use_peer_verification: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + use_encrypted_endpoints: Optional[JSON] = None, + use_host_verification: Optional[JSON] = None, + use_peer_verification: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. 
:paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -45799,34 +48707,41 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_properties: Properties used to connect to Salesforce Marketing Cloud. It is mutually exclusive with any other properties in the linked service. Type: object. - :paramtype connection_properties: any + :paramtype connection_properties: JSON :keyword client_id: The client ID associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). - :paramtype client_id: any + :paramtype client_id: JSON :keyword client_secret: The client secret associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :paramtype use_encrypted_endpoints: any + :paramtype use_encrypted_endpoints: JSON :keyword use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :paramtype use_host_verification: any + :paramtype use_host_verification: JSON :keyword use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :paramtype use_peer_verification: any + :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(SalesforceMarketingCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'SalesforceMarketingCloud' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "SalesforceMarketingCloud" # type: str self.connection_properties = connection_properties self.client_id = client_id self.client_secret = client_secret @@ -45843,88 +48758,98 @@ class SalesforceMarketingCloudObjectDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. Type: string (or Expression with resultType string). - :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. 
:paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(SalesforceMarketingCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'SalesforceMarketingCloudObject' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "SalesforceMarketingCloudObject" # type: str self.table_name = table_name @@ -45935,89 +48860,98 @@ class SalesforceMarketingCloudSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. 
Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'SalesforceMarketingCloudSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "SalesforceMarketingCloudSource" # type: str self.query = query @@ -46028,102 +48962,112 @@ class SalesforceObjectDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar object_api_name: The Salesforce object API name. Type: string (or Expression with resultType string). 
- :vartype object_api_name: any + :vartype object_api_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "object_api_name": {"key": "typeProperties.objectApiName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - object_api_name: Optional[Any] = None, + object_api_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword object_api_name: The Salesforce object API name. Type: string (or Expression with resultType string). - :paramtype object_api_name: any - """ - super(SalesforceObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'SalesforceObject' # type: str + :paramtype object_api_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "SalesforceObject" # type: str self.object_api_name = object_api_name -class SalesforceServiceCloudLinkedService(LinkedService): +class SalesforceServiceCloudLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Linked service for Salesforce Service Cloud. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -46132,72 +49076,72 @@ class SalesforceServiceCloudLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar environment_url: The URL of Salesforce Service Cloud instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). - :vartype environment_url: any + :vartype environment_url: JSON :ivar username: The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). - :vartype username: any + :vartype username: JSON :ivar password: The password for Basic authentication of the Salesforce instance. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar security_token: The security token is optional to remotely access Salesforce instance. :vartype security_token: ~azure.mgmt.datafactory.models.SecretBase :ivar api_version: The Salesforce API version used in ADF. Type: string (or Expression with resultType string). - :vartype api_version: any + :vartype api_version: JSON :ivar extended_properties: Extended properties appended to the connection string. Type: string (or Expression with resultType string). - :vartype extended_properties: any + :vartype extended_properties: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, - 'api_version': {'key': 'typeProperties.apiVersion', 'type': 'object'}, - 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "environment_url": {"key": "typeProperties.environmentUrl", "type": "object"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "security_token": {"key": "typeProperties.securityToken", "type": "SecretBase"}, + "api_version": {"key": "typeProperties.apiVersion", "type": "object"}, + "extended_properties": {"key": "typeProperties.extendedProperties", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - environment_url: Optional[Any] = None, - username: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + environment_url: Optional[JSON] = None, + username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, security_token: Optional["_models.SecretBase"] = None, - api_version: Optional[Any] = None, - extended_properties: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + api_version: Optional[JSON] = None, + extended_properties: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -46205,32 +49149,39 @@ def __init__( :keyword parameters: Parameters for linked service. 
:paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword environment_url: The URL of Salesforce Service Cloud instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). - :paramtype environment_url: any + :paramtype environment_url: JSON :keyword username: The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). - :paramtype username: any + :paramtype username: JSON :keyword password: The password for Basic authentication of the Salesforce instance. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword security_token: The security token is optional to remotely access Salesforce instance. :paramtype security_token: ~azure.mgmt.datafactory.models.SecretBase :keyword api_version: The Salesforce API version used in ADF. Type: string (or Expression with resultType string). - :paramtype api_version: any + :paramtype api_version: JSON :keyword extended_properties: Extended properties appended to the connection string. Type: string (or Expression with resultType string). - :paramtype extended_properties: any + :paramtype extended_properties: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(SalesforceServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'SalesforceServiceCloud' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "SalesforceServiceCloud" # type: str self.environment_url = environment_url self.username = username self.password = password @@ -46247,197 +49198,207 @@ class SalesforceServiceCloudObjectDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. 
:vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar object_api_name: The Salesforce Service Cloud object API name. Type: string (or Expression with resultType string). - :vartype object_api_name: any + :vartype object_api_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "object_api_name": {"key": "typeProperties.objectApiName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - object_api_name: Optional[Any] = None, + object_api_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. 
Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword object_api_name: The Salesforce Service Cloud object API name. Type: string (or Expression with resultType string). - :paramtype object_api_name: any - """ - super(SalesforceServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'SalesforceServiceCloudObject' # type: str + :paramtype object_api_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "SalesforceServiceCloudObject" # type: str self.object_api_name = object_api_name -class SalesforceServiceCloudSink(CopySink): +class SalesforceServiceCloudSink(CopySink): # pylint: disable=too-many-instance-attributes """A copy activity Salesforce Service Cloud sink. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar write_behavior: The write behavior for the operation. Default is Insert. Known values - are: "Insert", "Upsert". 
+ are: "Insert" and "Upsert". :vartype write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior :ivar external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). - :vartype external_id_field_name: any + :vartype external_id_field_name: JSON :ivar ignore_null_values: The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). - :vartype ignore_null_values: any + :vartype ignore_null_values: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "write_behavior": {"key": "writeBehavior", "type": "str"}, + "external_id_field_name": {"key": "externalIdFieldName", "type": "object"}, + "ignore_null_values": {"key": "ignoreNullValues", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, write_behavior: Optional[Union[str, "_models.SalesforceSinkWriteBehavior"]] = None, - external_id_field_name: Optional[Any] = None, - ignore_null_values: Optional[Any] = None, + external_id_field_name: Optional[JSON] = None, + 
ignore_null_values: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword write_behavior: The write behavior for the operation. Default is Insert. Known values - are: "Insert", "Upsert". + are: "Insert" and "Upsert". :paramtype write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior :keyword external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). - :paramtype external_id_field_name: any + :paramtype external_id_field_name: JSON :keyword ignore_null_values: The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing @@ -46445,10 +49406,19 @@ def __init__( ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). 
- :paramtype ignore_null_values: any - """ - super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'SalesforceServiceCloudSink' # type: str + :paramtype ignore_null_values: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "SalesforceServiceCloudSink" # type: str self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name self.ignore_null_values = ignore_null_values @@ -46461,196 +49431,203 @@ class SalesforceServiceCloudSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query: Database query. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON :ivar read_behavior: The read behavior for the operation. Default is Query. Known values are: - "Query", "QueryAll". + "Query" and "QueryAll". :vartype read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
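A small sketch of how the upsert options documented above on SalesforceServiceCloudSink fit together, using only the keyword arguments shown in its constructor; the external ID field name is a hypothetical placeholder.

from azure.mgmt.datafactory.models import SalesforceServiceCloudSink

# Sketch only: upsert by a custom external ID field and keep existing values
# in the destination when the source row has NULLs (ignore_null_values=True).
upsert_sink = SalesforceServiceCloudSink(
    write_behavior="Upsert",                  # default is Insert
    external_id_field_name="External_Id__c",  # hypothetical external ID field
    ignore_null_values=True,
)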
- :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "read_behavior": {"key": "readBehavior", "type": "str"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query: Optional[JSON] = None, read_behavior: Optional[Union[str, "_models.SalesforceSourceReadBehavior"]] = None, - additional_columns: Optional[Any] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query: Database query. Type: string (or Expression with resultType string). - :paramtype query: any + :paramtype query: JSON :keyword read_behavior: The read behavior for the operation. Default is Query. Known values - are: "Query", "QueryAll". + are: "Query" and "QueryAll". 
:paramtype read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any - """ - super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'SalesforceServiceCloudSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "SalesforceServiceCloudSource" # type: str self.query = query self.read_behavior = read_behavior self.additional_columns = additional_columns -class SalesforceSink(CopySink): +class SalesforceSink(CopySink): # pylint: disable=too-many-instance-attributes """A copy activity Salesforce sink. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar write_behavior: The write behavior for the operation. Default is Insert. Known values - are: "Insert", "Upsert". + are: "Insert" and "Upsert". :vartype write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior :ivar external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). 
- :vartype external_id_field_name: any + :vartype external_id_field_name: JSON :ivar ignore_null_values: The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). - :vartype ignore_null_values: any + :vartype ignore_null_values: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "write_behavior": {"key": "writeBehavior", "type": "str"}, + "external_id_field_name": {"key": "externalIdFieldName", "type": "object"}, + "ignore_null_values": {"key": "ignoreNullValues", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, write_behavior: Optional[Union[str, "_models.SalesforceSinkWriteBehavior"]] = None, - external_id_field_name: Optional[Any] = None, - ignore_null_values: Optional[Any] = None, + external_id_field_name: Optional[JSON] = None, + ignore_null_values: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword write_behavior: The write behavior for the operation. Default is Insert. Known values - are: "Insert", "Upsert". + are: "Insert" and "Upsert". :paramtype write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior :keyword external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). - :paramtype external_id_field_name: any + :paramtype external_id_field_name: JSON :keyword ignore_null_values: The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing @@ -46658,10 +49635,19 @@ def __init__( ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). 
- :paramtype ignore_null_values: any - """ - super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'SalesforceSink' # type: str + :paramtype ignore_null_values: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "SalesforceSink" # type: str self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name self.ignore_null_values = ignore_null_values @@ -46674,95 +49660,104 @@ class SalesforceSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: Database query. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON :ivar read_behavior: The read behavior for the operation. Default is Query. Known values are: - "Query", "QueryAll". + "Query" and "QueryAll". 
:vartype read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "read_behavior": {"key": "readBehavior", "type": "str"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, read_behavior: Optional[Union[str, "_models.SalesforceSourceReadBehavior"]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: Database query. Type: string (or Expression with resultType string). - :paramtype query: any + :paramtype query: JSON :keyword read_behavior: The read behavior for the operation. Default is Query. Known values - are: "Query", "QueryAll". + are: "Query" and "QueryAll". :paramtype read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior """ - super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'SalesforceSource' # type: str + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "SalesforceSource" # type: str self.query = query self.read_behavior = read_behavior @@ -46774,93 +49769,103 @@ class SapBwCubeDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder """ - super(SapBwCubeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'SapBwCube' # type: str - - -class SapBWLinkedService(LinkedService): + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "SapBwCube" # type: str + + +class SapBWLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """SAP Business Warehouse Linked Service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -46869,69 +49874,69 @@ class SapBWLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar server: Required. Host name of the SAP BW instance. Type: string (or Expression with - resultType string). - :vartype server: any - :ivar system_number: Required. System number of the BW system. (Usually a two-digit decimal - number represented as a string.) Type: string (or Expression with resultType string). - :vartype system_number: any - :ivar client_id: Required. Client ID of the client on the BW system. (Usually a three-digit - decimal number represented as a string) Type: string (or Expression with resultType string). - :vartype client_id: any + :vartype annotations: list[JSON] + :ivar server: Host name of the SAP BW instance. Type: string (or Expression with resultType + string). Required. + :vartype server: JSON + :ivar system_number: System number of the BW system. (Usually a two-digit decimal number + represented as a string.) Type: string (or Expression with resultType string). Required. + :vartype system_number: JSON + :ivar client_id: Client ID of the client on the BW system. (Usually a three-digit decimal + number represented as a string) Type: string (or Expression with resultType string). Required. + :vartype client_id: JSON :ivar user_name: Username to access the SAP BW server. Type: string (or Expression with resultType string). - :vartype user_name: any + :vartype user_name: JSON :ivar password: Password to access the SAP BW server. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'system_number': {'required': True}, - 'client_id': {'required': True}, + "type": {"required": True}, + "server": {"required": True}, + "system_number": {"required": True}, + "client_id": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "server": {"key": "typeProperties.server", "type": "object"}, + "system_number": {"key": "typeProperties.systemNumber", "type": "object"}, + "client_id": {"key": "typeProperties.clientId", "type": "object"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - server: Any, - system_number: Any, - client_id: Any, - additional_properties: Optional[Dict[str, Any]] = None, + server: JSON, + system_number: JSON, + client_id: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - user_name: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -46939,28 +49944,35 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword server: Required. Host name of the SAP BW instance. 
Type: string (or Expression with - resultType string). - :paramtype server: any - :keyword system_number: Required. System number of the BW system. (Usually a two-digit decimal - number represented as a string.) Type: string (or Expression with resultType string). - :paramtype system_number: any - :keyword client_id: Required. Client ID of the client on the BW system. (Usually a three-digit - decimal number represented as a string) Type: string (or Expression with resultType string). - :paramtype client_id: any + :paramtype annotations: list[JSON] + :keyword server: Host name of the SAP BW instance. Type: string (or Expression with resultType + string). Required. + :paramtype server: JSON + :keyword system_number: System number of the BW system. (Usually a two-digit decimal number + represented as a string.) Type: string (or Expression with resultType string). Required. + :paramtype system_number: JSON + :keyword client_id: Client ID of the client on the BW system. (Usually a three-digit decimal + number represented as a string) Type: string (or Expression with resultType string). Required. + :paramtype client_id: JSON :keyword user_name: Username to access the SAP BW server. Type: string (or Expression with resultType string). - :paramtype user_name: any + :paramtype user_name: JSON :keyword password: Password to access the SAP BW server. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(SapBWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'SapBW' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "SapBW" # type: str self.server = server self.system_number = system_number self.client_id = client_id @@ -46976,87 +49988,96 @@ class SapBwSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: MDX query. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: MDX query. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'SapBwSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "SapBwSource" # type: str self.query = query @@ -47067,8 +50088,8 @@ class SapCloudForCustomerLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -47077,58 +50098,58 @@ class SapCloudForCustomerLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar url: Required. The URL of SAP Cloud for Customer OData API. For example, + :vartype annotations: list[JSON] + :ivar url: The URL of SAP Cloud for Customer OData API. For example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with - resultType string). - :vartype url: any + resultType string). Required. + :vartype url: JSON :ivar username: The username for Basic authentication. Type: string (or Expression with resultType string). - :vartype username: any + :vartype username: JSON :ivar password: The password for Basic authentication. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. 
Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'url': {'required': True}, + "type": {"required": True}, + "url": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "url": {"key": "typeProperties.url", "type": "object"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - url: Any, - additional_properties: Optional[Dict[str, Any]] = None, + url: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - username: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -47136,23 +50157,30 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword url: Required. The URL of SAP Cloud for Customer OData API. For example, + :paramtype annotations: list[JSON] + :keyword url: The URL of SAP Cloud for Customer OData API. For example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with - resultType string). - :paramtype url: any + resultType string). Required. + :paramtype url: JSON :keyword username: The username for Basic authentication. Type: string (or Expression with resultType string). 
- :paramtype username: any + :paramtype username: JSON :keyword password: The password for Basic authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(SapCloudForCustomerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'SapCloudForCustomer' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "SapCloudForCustomer" # type: str self.url = url self.username = username self.password = password @@ -47166,91 +50194,101 @@ class SapCloudForCustomerResourceDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :ivar path: Required. The path of the SAP Cloud for Customer OData entity. Type: string (or - Expression with resultType string). - :vartype path: any + :ivar path: The path of the SAP Cloud for Customer OData entity. Type: string (or Expression + with resultType string). Required. 
+ :vartype path: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'path': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, + "path": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "path": {"key": "typeProperties.path", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - path: Any, - additional_properties: Optional[Dict[str, Any]] = None, + path: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :keyword path: Required. 
The path of the SAP Cloud for Customer OData entity. Type: string (or - Expression with resultType string). - :paramtype path: any - """ - super(SapCloudForCustomerResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'SapCloudForCustomerResource' # type: str + :keyword path: The path of the SAP Cloud for Customer OData entity. Type: string (or Expression + with resultType string). Required. + :paramtype path: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "SapCloudForCustomerResource" # type: str self.path = path @@ -47261,103 +50299,112 @@ class SapCloudForCustomerSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar write_behavior: The write behavior for the operation. Default is 'Insert'. Known values - are: "Insert", "Update". + are: "Insert" and "Update". :vartype write_behavior: str or ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :vartype http_request_timeout: any + :vartype http_request_timeout: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "write_behavior": {"key": "writeBehavior", "type": "str"}, + "http_request_timeout": {"key": "httpRequestTimeout", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, write_behavior: Optional[Union[str, "_models.SapCloudForCustomerSinkWriteBehavior"]] = None, - http_request_timeout: Optional[Any] = None, + http_request_timeout: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword write_behavior: The write behavior for the operation. Default is 'Insert'. Known - values are: "Insert", "Update". + values are: "Insert" and "Update". :paramtype write_behavior: str or ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype http_request_timeout: any - """ - super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'SapCloudForCustomerSink' # type: str + :paramtype http_request_timeout: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "SapCloudForCustomerSink" # type: str self.write_behavior = write_behavior self.http_request_timeout = http_request_timeout @@ -47369,101 +50416,110 @@ class SapCloudForCustomerSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype http_request_timeout: any + :vartype http_request_timeout: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "http_request_timeout": {"key": "httpRequestTimeout", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, - http_request_timeout: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, + http_request_timeout: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). 
- :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). - :paramtype query: any + :paramtype query: JSON :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype http_request_timeout: any - """ - super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'SapCloudForCustomerSource' # type: str + :paramtype http_request_timeout: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "SapCloudForCustomerSource" # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -47475,8 +50531,8 @@ class SapEccLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -47485,10 +50541,10 @@ class SapEccLinkedService(LinkedService): :ivar parameters: Parameters for linked service. 
:vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar url: Required. The URL of SAP ECC OData API. For example, + :vartype annotations: list[JSON] + :ivar url: The URL of SAP ECC OData API. For example, '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with - resultType string). + resultType string). Required. :vartype url: str :ivar username: The username for Basic authentication. Type: string (or Expression with resultType string). @@ -47502,32 +50558,32 @@ class SapEccLinkedService(LinkedService): """ _validation = { - 'type': {'required': True}, - 'url': {'required': True}, + "type": {"required": True}, + "url": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'str'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "url": {"key": "typeProperties.url", "type": "str"}, + "username": {"key": "typeProperties.username", "type": "str"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( self, *, url: str, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, username: Optional[str] = None, password: Optional["_models.SecretBase"] = None, encrypted_credential: Optional[str] = None, @@ -47536,7 +50592,7 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -47544,10 +50600,10 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword url: Required. The URL of SAP ECC OData API. 
For example, + :paramtype annotations: list[JSON] + :keyword url: The URL of SAP ECC OData API. For example, '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with - resultType string). + resultType string). Required. :paramtype url: str :keyword username: The username for Basic authentication. Type: string (or Expression with resultType string). @@ -47559,8 +50615,15 @@ def __init__( username/password must be provided. Type: string (or Expression with resultType string). :paramtype encrypted_credential: str """ - super(SapEccLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'SapEcc' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "SapEcc" # type: str self.url = url self.username = username self.password = password @@ -47574,91 +50637,101 @@ class SapEccResourceDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :ivar path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with - resultType string). - :vartype path: any + :ivar path: The path of the SAP ECC OData entity. Type: string (or Expression with resultType + string). Required. 
+ :vartype path: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'path': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, + "path": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "path": {"key": "typeProperties.path", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - path: Any, - additional_properties: Optional[Dict[str, Any]] = None, + path: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :keyword path: Required. The path of the SAP ECC OData entity. 
Type: string (or Expression with - resultType string). - :paramtype path: any - """ - super(SapEccResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'SapEccResource' # type: str + :keyword path: The path of the SAP ECC OData entity. Type: string (or Expression with + resultType string). Required. + :paramtype path: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "SapEccResource" # type: str self.path = path @@ -47669,114 +50742,123 @@ class SapEccSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :vartype http_request_timeout: any + :vartype http_request_timeout: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "http_request_timeout": {"key": "httpRequestTimeout", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, - http_request_timeout: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, + http_request_timeout: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). - :paramtype query: any + :paramtype query: JSON :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype http_request_timeout: any - """ - super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'SapEccSource' # type: str + :paramtype http_request_timeout: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "SapEccSource" # type: str self.query = query self.http_request_timeout = http_request_timeout -class SapHanaLinkedService(LinkedService): +class SapHanaLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """SAP HANA Linked Service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -47785,66 +50867,66 @@ class SapHanaLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_string: SAP HANA ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype connection_string: JSON :ivar server: Host name of the SAP HANA server. Type: string (or Expression with resultType string). - :vartype server: any + :vartype server: JSON :ivar authentication_type: The authentication type to be used to connect to the SAP HANA - server. Known values are: "Basic", "Windows". + server. Known values are: "Basic" and "Windows". 
:vartype authentication_type: str or ~azure.mgmt.datafactory.models.SapHanaAuthenticationType :ivar user_name: Username to access the SAP HANA server. Type: string (or Expression with resultType string). - :vartype user_name: any + :vartype user_name: JSON :ivar password: Password to access the SAP HANA server. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "server": {"key": "typeProperties.server", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_string: Optional[Any] = None, - server: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_string: Optional[JSON] = None, + server: Optional[JSON] = None, authentication_type: Optional[Union[str, "_models.SapHanaAuthenticationType"]] = None, - user_name: Optional[Any] = None, + user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -47852,28 +50934,35 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_string: SAP HANA ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype connection_string: JSON :keyword server: Host name of the SAP HANA server. Type: string (or Expression with resultType string). - :paramtype server: any + :paramtype server: JSON :keyword authentication_type: The authentication type to be used to connect to the SAP HANA - server. Known values are: "Basic", "Windows". + server. Known values are: "Basic" and "Windows". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.SapHanaAuthenticationType :keyword user_name: Username to access the SAP HANA server. Type: string (or Expression with resultType string). - :paramtype user_name: any + :paramtype user_name: JSON :keyword password: Password to access the SAP HANA server. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'SapHana' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "SapHana" # type: str self.connection_string = connection_string self.server = server self.authentication_type = authentication_type @@ -47882,261 +50971,275 @@ def __init__( self.encrypted_credential = encrypted_credential -class SapHanaPartitionSettings(msrest.serialization.Model): +class SapHanaPartitionSettings(_serialization.Model): """The settings that will be leveraged for SAP HANA source partitioning. :ivar partition_column_name: The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :vartype partition_column_name: any + :vartype partition_column_name: JSON """ _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + "partition_column_name": {"key": "partitionColumnName", "type": "object"}, } - def __init__( - self, - *, - partition_column_name: Optional[Any] = None, - **kwargs - ): + def __init__(self, *, partition_column_name: Optional[JSON] = None, **kwargs): """ :keyword partition_column_name: The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). 
- :paramtype partition_column_name: any + :paramtype partition_column_name: JSON """ - super(SapHanaPartitionSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.partition_column_name = partition_column_name -class SapHanaSource(TabularSource): +class SapHanaSource(TabularSource): # pylint: disable=too-many-instance-attributes """A copy activity source for SAP HANA source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: SAP HANA Sql query. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON :ivar packet_size: The packet size of data read from SAP HANA. Type: integer(or Expression with resultType integer). - :vartype packet_size: any + :vartype packet_size: JSON :ivar partition_option: The partition mechanism that will be used for SAP HANA read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange". - :vartype partition_option: any + :vartype partition_option: JSON :ivar partition_settings: The settings that will be leveraged for SAP HANA source partitioning. 
:vartype partition_settings: ~azure.mgmt.datafactory.models.SapHanaPartitionSettings """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'packet_size': {'key': 'packetSize', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SapHanaPartitionSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "packet_size": {"key": "packetSize", "type": "object"}, + "partition_option": {"key": "partitionOption", "type": "object"}, + "partition_settings": {"key": "partitionSettings", "type": "SapHanaPartitionSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, - packet_size: Optional[Any] = None, - partition_option: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, + packet_size: Optional[JSON] = None, + partition_option: Optional[JSON] = None, partition_settings: Optional["_models.SapHanaPartitionSettings"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: SAP HANA Sql query. Type: string (or Expression with resultType string). - :paramtype query: any + :paramtype query: JSON :keyword packet_size: The packet size of data read from SAP HANA. Type: integer(or Expression with resultType integer). - :paramtype packet_size: any + :paramtype packet_size: JSON :keyword partition_option: The partition mechanism that will be used for SAP HANA read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange". - :paramtype partition_option: any + :paramtype partition_option: JSON :keyword partition_settings: The settings that will be leveraged for SAP HANA source partitioning. :paramtype partition_settings: ~azure.mgmt.datafactory.models.SapHanaPartitionSettings """ - super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'SapHanaSource' # type: str + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "SapHanaSource" # type: str self.query = query self.packet_size = packet_size self.partition_option = partition_option self.partition_settings = partition_settings -class SapHanaTableDataset(Dataset): +class SapHanaTableDataset(Dataset): # pylint: disable=too-many-instance-attributes """SAP HANA Table properties. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
- :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar schema_type_properties_schema: The schema name of SAP HANA. Type: string (or Expression with resultType string). - :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON :ivar table: The table name of SAP HANA. Type: string (or Expression with resultType string). - :vartype table: any + :vartype table: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - schema_type_properties_schema: Optional[Any] = None, - table: Optional[Any] = None, + schema_type_properties_schema: Optional[JSON] = None, + table: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword schema_type_properties_schema: The schema name of SAP HANA. Type: string (or Expression with resultType string). - :paramtype schema_type_properties_schema: any + :paramtype schema_type_properties_schema: JSON :keyword table: The table name of SAP HANA. Type: string (or Expression with resultType string). - :paramtype table: any - """ - super(SapHanaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'SapHanaTable' # type: str + :paramtype table: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "SapHanaTable" # type: str self.schema_type_properties_schema = schema_type_properties_schema self.table = table -class SapOdpLinkedService(LinkedService): +class SapOdpLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """SAP ODP Linked Service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -48145,128 +51248,128 @@ class SapOdpLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar server: Host name of the SAP instance where the table is located. Type: string (or Expression with resultType string). 
- :vartype server: any + :vartype server: JSON :ivar system_number: System number of the SAP system where the table is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). - :vartype system_number: any + :vartype system_number: JSON :ivar client_id: Client ID of the client on the SAP system where the table is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). - :vartype client_id: any + :vartype client_id: JSON :ivar language: Language of the SAP system where the table is located. The default value is EN. Type: string (or Expression with resultType string). - :vartype language: any + :vartype language: JSON :ivar system_id: SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). - :vartype system_id: any + :vartype system_id: JSON :ivar user_name: Username to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :vartype user_name: any + :vartype user_name: JSON :ivar password: Password to access the SAP server where the table is located. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar message_server: The hostname of the SAP Message Server. Type: string (or Expression with resultType string). - :vartype message_server: any + :vartype message_server: JSON :ivar message_server_service: The service name or port number of the Message Server. Type: string (or Expression with resultType string). - :vartype message_server_service: any + :vartype message_server_service: JSON :ivar snc_mode: SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). - :vartype snc_mode: any + :vartype snc_mode: JSON :ivar snc_my_name: Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :vartype snc_my_name: any + :vartype snc_my_name: JSON :ivar snc_partner_name: Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :vartype snc_partner_name: any + :vartype snc_partner_name: JSON :ivar snc_library_path: External security product's library to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :vartype snc_library_path: any + :vartype snc_library_path: JSON :ivar snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType string). - :vartype snc_qop: any + :vartype snc_qop: JSON :ivar x509_certificate_path: SNC X509 certificate file path. Type: string (or Expression with resultType string). - :vartype x509_certificate_path: any + :vartype x509_certificate_path: JSON :ivar logon_group: The Logon Group for the SAP System. Type: string (or Expression with resultType string). - :vartype logon_group: any + :vartype logon_group: JSON :ivar subscriber_name: The subscriber name. Type: string (or Expression with resultType string). - :vartype subscriber_name: any + :vartype subscriber_name: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'language': {'key': 'typeProperties.language', 'type': 'object'}, - 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, - 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, - 'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'}, - 'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'}, - 'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'}, - 'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'}, - 'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'}, - 'x509_certificate_path': {'key': 'typeProperties.x509CertificatePath', 'type': 'object'}, - 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, - 'subscriber_name': {'key': 'typeProperties.subscriberName', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "server": {"key": "typeProperties.server", "type": "object"}, + "system_number": {"key": "typeProperties.systemNumber", "type": "object"}, + "client_id": {"key": "typeProperties.clientId", "type": "object"}, + "language": {"key": "typeProperties.language", "type": "object"}, + "system_id": {"key": "typeProperties.systemId", "type": "object"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "message_server": {"key": "typeProperties.messageServer", "type": "object"}, + "message_server_service": {"key": "typeProperties.messageServerService", "type": "object"}, + "snc_mode": {"key": "typeProperties.sncMode", "type": "object"}, + "snc_my_name": {"key": "typeProperties.sncMyName", "type": "object"}, + "snc_partner_name": {"key": "typeProperties.sncPartnerName", "type": "object"}, + "snc_library_path": {"key": "typeProperties.sncLibraryPath", "type": "object"}, + "snc_qop": {"key": "typeProperties.sncQop", "type": "object"}, + "x509_certificate_path": {"key": "typeProperties.x509CertificatePath", "type": "object"}, + "logon_group": {"key": "typeProperties.logonGroup", "type": "object"}, + "subscriber_name": {"key": "typeProperties.subscriberName", "type": 
"object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } - def __init__( + def __init__( # pylint: disable=too-many-locals self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - server: Optional[Any] = None, - system_number: Optional[Any] = None, - client_id: Optional[Any] = None, - language: Optional[Any] = None, - system_id: Optional[Any] = None, - user_name: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + server: Optional[JSON] = None, + system_number: Optional[JSON] = None, + client_id: Optional[JSON] = None, + language: Optional[JSON] = None, + system_id: Optional[JSON] = None, + user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - message_server: Optional[Any] = None, - message_server_service: Optional[Any] = None, - snc_mode: Optional[Any] = None, - snc_my_name: Optional[Any] = None, - snc_partner_name: Optional[Any] = None, - snc_library_path: Optional[Any] = None, - snc_qop: Optional[Any] = None, - x509_certificate_path: Optional[Any] = None, - logon_group: Optional[Any] = None, - subscriber_name: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + message_server: Optional[JSON] = None, + message_server_service: Optional[JSON] = None, + snc_mode: Optional[JSON] = None, + snc_my_name: Optional[JSON] = None, + snc_partner_name: Optional[JSON] = None, + snc_library_path: Optional[JSON] = None, + snc_qop: Optional[JSON] = None, + x509_certificate_path: Optional[JSON] = None, + logon_group: Optional[JSON] = None, + subscriber_name: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -48274,66 +51377,73 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword server: Host name of the SAP instance where the table is located. Type: string (or Expression with resultType string). - :paramtype server: any + :paramtype server: JSON :keyword system_number: System number of the SAP system where the table is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). - :paramtype system_number: any + :paramtype system_number: JSON :keyword client_id: Client ID of the client on the SAP system where the table is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). - :paramtype client_id: any + :paramtype client_id: JSON :keyword language: Language of the SAP system where the table is located. The default value is EN. Type: string (or Expression with resultType string). 
- :paramtype language: any + :paramtype language: JSON :keyword system_id: SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). - :paramtype system_id: any + :paramtype system_id: JSON :keyword user_name: Username to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :paramtype user_name: any + :paramtype user_name: JSON :keyword password: Password to access the SAP server where the table is located. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword message_server: The hostname of the SAP Message Server. Type: string (or Expression with resultType string). - :paramtype message_server: any + :paramtype message_server: JSON :keyword message_server_service: The service name or port number of the Message Server. Type: string (or Expression with resultType string). - :paramtype message_server_service: any + :paramtype message_server_service: JSON :keyword snc_mode: SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). - :paramtype snc_mode: any + :paramtype snc_mode: JSON :keyword snc_my_name: Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :paramtype snc_my_name: any + :paramtype snc_my_name: JSON :keyword snc_partner_name: Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :paramtype snc_partner_name: any + :paramtype snc_partner_name: JSON :keyword snc_library_path: External security product's library to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :paramtype snc_library_path: any + :paramtype snc_library_path: JSON :keyword snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType string). - :paramtype snc_qop: any + :paramtype snc_qop: JSON :keyword x509_certificate_path: SNC X509 certificate file path. Type: string (or Expression with resultType string). - :paramtype x509_certificate_path: any + :paramtype x509_certificate_path: JSON :keyword logon_group: The Logon Group for the SAP System. Type: string (or Expression with resultType string). - :paramtype logon_group: any + :paramtype logon_group: JSON :keyword subscriber_name: The subscriber name. Type: string (or Expression with resultType string). - :paramtype subscriber_name: any + :paramtype subscriber_name: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :paramtype encrypted_credential: any - """ - super(SapOdpLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'SapOdp' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "SapOdp" # type: str self.server = server self.system_number = system_number self.client_id = client_id @@ -48354,240 +51464,259 @@ def __init__( self.encrypted_credential = encrypted_credential -class SapOdpResourceDataset(Dataset): +class SapOdpResourceDataset(Dataset): # pylint: disable=too-many-instance-attributes """SAP ODP Resource properties. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :ivar context: Required. The context of the SAP ODP Object. Type: string (or Expression with - resultType string). - :vartype context: any - :ivar object_name: Required. The name of the SAP ODP Object. Type: string (or Expression with - resultType string). - :vartype object_name: any + :ivar context: The context of the SAP ODP Object. Type: string (or Expression with resultType + string). Required. + :vartype context: JSON + :ivar object_name: The name of the SAP ODP Object. Type: string (or Expression with resultType + string). Required. 
+ :vartype object_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'context': {'required': True}, - 'object_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, + "context": {"required": True}, + "object_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'context': {'key': 'typeProperties.context', 'type': 'object'}, - 'object_name': {'key': 'typeProperties.objectName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "context": {"key": "typeProperties.context", "type": "object"}, + "object_name": {"key": "typeProperties.objectName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - context: Any, - object_name: Any, - additional_properties: Optional[Dict[str, Any]] = None, + context: JSON, + object_name: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. 
- :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :keyword context: Required. The context of the SAP ODP Object. Type: string (or Expression with - resultType string). - :paramtype context: any - :keyword object_name: Required. The name of the SAP ODP Object. Type: string (or Expression - with resultType string). - :paramtype object_name: any - """ - super(SapOdpResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'SapOdpResource' # type: str + :keyword context: The context of the SAP ODP Object. Type: string (or Expression with + resultType string). Required. + :paramtype context: JSON + :keyword object_name: The name of the SAP ODP Object. Type: string (or Expression with + resultType string). Required. + :paramtype object_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "SapOdpResource" # type: str self.context = context self.object_name = object_name -class SapOdpSource(TabularSource): +class SapOdpSource(TabularSource): # pylint: disable=too-many-instance-attributes """A copy activity source for SAP ODP source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar extraction_mode: The extraction mode. Allowed value include: Full, Delta and Recovery. The default value is Full. 
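# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the patch: constructing the SapOdpLinkedService
# and SapOdpResourceDataset models introduced above. The host, credentials,
# reference name, ODP context and object name are hypothetical placeholders.
# ---------------------------------------------------------------------------
from azure.mgmt.datafactory import models as df_models

# Linked service pointing at a hypothetical SAP system that exposes ODP.
sap_odp_ls = df_models.SapOdpLinkedService(
    server="sap.example.com",
    system_number="00",
    client_id="100",
    user_name="odp_user",
    password=df_models.SecureString(value="<secret>"),
    subscriber_name="adf-odp-subscriber",
)

# Dataset selecting a single ODP object through a reference to that linked service
# ("type" is a required field on the newer generated LinkedServiceReference model).
sap_odp_ds = df_models.SapOdpResourceDataset(
    linked_service_name=df_models.LinkedServiceReference(
        type="LinkedServiceReference", reference_name="SapOdpLinkedService1"
    ),
    context="SAPI",             # hypothetical ODP context
    object_name="2LIS_02_ITM",  # hypothetical extractor / ODP object name
)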
Type: string (or Expression with resultType string). - :vartype extraction_mode: any + :vartype extraction_mode: JSON :ivar subscriber_process: The subscriber process to manage the delta process. Type: string (or Expression with resultType string). - :vartype subscriber_process: any + :vartype subscriber_process: JSON :ivar selection: Specifies the selection conditions from source data. Type: array of objects(selection) (or Expression with resultType array of objects). - :vartype selection: any + :vartype selection: JSON :ivar projection: Specifies the columns to be selected from source data. Type: array of objects(projection) (or Expression with resultType array of objects). - :vartype projection: any + :vartype projection: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'extraction_mode': {'key': 'extractionMode', 'type': 'object'}, - 'subscriber_process': {'key': 'subscriberProcess', 'type': 'object'}, - 'selection': {'key': 'selection', 'type': 'object'}, - 'projection': {'key': 'projection', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "extraction_mode": {"key": "extractionMode", "type": "object"}, + "subscriber_process": {"key": "subscriberProcess", "type": "object"}, + "selection": {"key": "selection", "type": "object"}, + "projection": {"key": "projection", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - extraction_mode: Optional[Any] = None, - subscriber_process: Optional[Any] = None, - selection: Optional[Any] = None, - projection: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + extraction_mode: Optional[JSON] = None, + subscriber_process: Optional[JSON] = None, + selection: Optional[JSON] = None, + projection: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are 
deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword extraction_mode: The extraction mode. Allowed value include: Full, Delta and Recovery. The default value is Full. Type: string (or Expression with resultType string). - :paramtype extraction_mode: any + :paramtype extraction_mode: JSON :keyword subscriber_process: The subscriber process to manage the delta process. Type: string (or Expression with resultType string). - :paramtype subscriber_process: any + :paramtype subscriber_process: JSON :keyword selection: Specifies the selection conditions from source data. Type: array of objects(selection) (or Expression with resultType array of objects). - :paramtype selection: any + :paramtype selection: JSON :keyword projection: Specifies the columns to be selected from source data. Type: array of objects(projection) (or Expression with resultType array of objects). - :paramtype projection: any - """ - super(SapOdpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'SapOdpSource' # type: str + :paramtype projection: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "SapOdpSource" # type: str self.extraction_mode = extraction_mode self.subscriber_process = subscriber_process self.selection = selection self.projection = projection -class SapOpenHubLinkedService(LinkedService): +class SapOpenHubLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """SAP Business Warehouse Open Hub Destination Linked Service. 
All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -48596,93 +51725,93 @@ class SapOpenHubLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar server: Host name of the SAP BW instance where the open hub destination is located. Type: string (or Expression with resultType string). - :vartype server: any + :vartype server: JSON :ivar system_number: System number of the BW system where the open hub destination is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). - :vartype system_number: any + :vartype system_number: JSON :ivar client_id: Client ID of the client on the BW system where the open hub destination is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). - :vartype client_id: any + :vartype client_id: JSON :ivar language: Language of the BW system where the open hub destination is located. The default value is EN. Type: string (or Expression with resultType string). - :vartype language: any + :vartype language: JSON :ivar system_id: SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). - :vartype system_id: any + :vartype system_id: JSON :ivar user_name: Username to access the SAP BW server where the open hub destination is located. Type: string (or Expression with resultType string). - :vartype user_name: any + :vartype user_name: JSON :ivar password: Password to access the SAP BW server where the open hub destination is located. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar message_server: The hostname of the SAP Message Server. Type: string (or Expression with resultType string). - :vartype message_server: any + :vartype message_server: JSON :ivar message_server_service: The service name or port number of the Message Server. Type: string (or Expression with resultType string). - :vartype message_server_service: any + :vartype message_server_service: JSON :ivar logon_group: The Logon Group for the SAP System. Type: string (or Expression with resultType string). - :vartype logon_group: any + :vartype logon_group: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'language': {'key': 'typeProperties.language', 'type': 'object'}, - 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, - 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, - 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "server": {"key": "typeProperties.server", "type": "object"}, + "system_number": {"key": "typeProperties.systemNumber", "type": "object"}, + "client_id": {"key": "typeProperties.clientId", "type": "object"}, + "language": {"key": "typeProperties.language", "type": "object"}, + "system_id": {"key": "typeProperties.systemId", "type": "object"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "message_server": {"key": "typeProperties.messageServer", "type": "object"}, + "message_server_service": {"key": "typeProperties.messageServerService", "type": "object"}, + "logon_group": {"key": "typeProperties.logonGroup", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - server: Optional[Any] = None, - system_number: Optional[Any] = None, - client_id: Optional[Any] = None, - language: Optional[Any] = None, - system_id: Optional[Any] = None, - user_name: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + server: Optional[JSON] = None, + system_number: Optional[JSON] = None, + client_id: Optional[JSON] = None, + language: Optional[JSON] = None, + system_id: Optional[JSON] = None, + user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - message_server: Optional[Any] = None, - message_server_service: Optional[Any] = None, - 
logon_group: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + message_server: Optional[JSON] = None, + message_server_service: Optional[JSON] = None, + logon_group: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -48690,46 +51819,53 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword server: Host name of the SAP BW instance where the open hub destination is located. Type: string (or Expression with resultType string). - :paramtype server: any + :paramtype server: JSON :keyword system_number: System number of the BW system where the open hub destination is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). - :paramtype system_number: any + :paramtype system_number: JSON :keyword client_id: Client ID of the client on the BW system where the open hub destination is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). - :paramtype client_id: any + :paramtype client_id: JSON :keyword language: Language of the BW system where the open hub destination is located. The default value is EN. Type: string (or Expression with resultType string). - :paramtype language: any + :paramtype language: JSON :keyword system_id: SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). - :paramtype system_id: any + :paramtype system_id: JSON :keyword user_name: Username to access the SAP BW server where the open hub destination is located. Type: string (or Expression with resultType string). - :paramtype user_name: any + :paramtype user_name: JSON :keyword password: Password to access the SAP BW server where the open hub destination is located. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword message_server: The hostname of the SAP Message Server. Type: string (or Expression with resultType string). - :paramtype message_server: any + :paramtype message_server: JSON :keyword message_server_service: The service name or port number of the Message Server. Type: string (or Expression with resultType string). - :paramtype message_server_service: any + :paramtype message_server_service: JSON :keyword logon_group: The Logon Group for the SAP System. Type: string (or Expression with resultType string). - :paramtype logon_group: any + :paramtype logon_group: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :paramtype encrypted_credential: any - """ - super(SapOpenHubLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'SapOpenHub' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "SapOpenHub" # type: str self.server = server self.system_number = system_number self.client_id = client_id @@ -48743,254 +51879,273 @@ def __init__( self.encrypted_credential = encrypted_credential -class SapOpenHubSource(TabularSource): +class SapOpenHubSource(TabularSource): # pylint: disable=too-many-instance-attributes """A copy activity source for SAP Business Warehouse Open Hub Destination source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). - :vartype exclude_last_request: any + :vartype exclude_last_request: JSON :ivar base_request_id: The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). - :vartype base_request_id: any + :vartype base_request_id: JSON :ivar custom_rfc_read_table_function_module: Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). 
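# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the patch: a SapOdpSource (defined earlier in
# this diff) configured for a delta extraction, as it might be attached to a copy
# activity. The subscriber process name is a hypothetical placeholder; the shapes
# of the selection/projection payloads are intentionally omitted rather than guessed.
# ---------------------------------------------------------------------------
from azure.mgmt.datafactory import models as df_models

odp_source = df_models.SapOdpSource(
    extraction_mode="Delta",                # docstring allows Full, Delta, Recovery
    subscriber_process="adf-delta-run-01",  # hypothetical subscriber process
    query_timeout="02:00:00",               # inherited TabularSource setting
)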
- :vartype custom_rfc_read_table_function_module: any + :vartype custom_rfc_read_table_function_module: JSON :ivar sap_data_column_delimiter: The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). - :vartype sap_data_column_delimiter: any + :vartype sap_data_column_delimiter: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, - 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, - 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, - 'sap_data_column_delimiter': {'key': 'sapDataColumnDelimiter', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "exclude_last_request": {"key": "excludeLastRequest", "type": "object"}, + "base_request_id": {"key": "baseRequestId", "type": "object"}, + "custom_rfc_read_table_function_module": {"key": "customRfcReadTableFunctionModule", "type": "object"}, + "sap_data_column_delimiter": {"key": "sapDataColumnDelimiter", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - exclude_last_request: Optional[Any] = None, - base_request_id: Optional[Any] = None, - custom_rfc_read_table_function_module: Optional[Any] = None, - sap_data_column_delimiter: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + exclude_last_request: Optional[JSON] = None, + base_request_id: Optional[JSON] = None, + custom_rfc_read_table_function_module: Optional[JSON] = None, + sap_data_column_delimiter: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). - :paramtype exclude_last_request: any + :paramtype exclude_last_request: JSON :keyword base_request_id: The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). - :paramtype base_request_id: any + :paramtype base_request_id: JSON :keyword custom_rfc_read_table_function_module: Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). - :paramtype custom_rfc_read_table_function_module: any + :paramtype custom_rfc_read_table_function_module: JSON :keyword sap_data_column_delimiter: The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). 
- :paramtype sap_data_column_delimiter: any - """ - super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'SapOpenHubSource' # type: str + :paramtype sap_data_column_delimiter: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "SapOpenHubSource" # type: str self.exclude_last_request = exclude_last_request self.base_request_id = base_request_id self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module self.sap_data_column_delimiter = sap_data_column_delimiter -class SapOpenHubTableDataset(Dataset): +class SapOpenHubTableDataset(Dataset): # pylint: disable=too-many-instance-attributes """Sap Business Warehouse Open Hub Destination Table properties. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :ivar open_hub_destination_name: Required. The name of the Open Hub Destination with - destination type as Database Table. Type: string (or Expression with resultType string). - :vartype open_hub_destination_name: any + :ivar open_hub_destination_name: The name of the Open Hub Destination with destination type as + Database Table. Type: string (or Expression with resultType string). Required. + :vartype open_hub_destination_name: JSON :ivar exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). 
- :vartype exclude_last_request: any + :vartype exclude_last_request: JSON :ivar base_request_id: The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). - :vartype base_request_id: any + :vartype base_request_id: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'open_hub_destination_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, + "open_hub_destination_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, - 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, - 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "open_hub_destination_name": {"key": "typeProperties.openHubDestinationName", "type": "object"}, + "exclude_last_request": {"key": "typeProperties.excludeLastRequest", "type": "object"}, + "base_request_id": {"key": "typeProperties.baseRequestId", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - open_hub_destination_name: Any, - additional_properties: Optional[Dict[str, Any]] = None, + open_hub_destination_name: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - exclude_last_request: Optional[Any] = None, - base_request_id: Optional[Any] = None, + exclude_last_request: Optional[JSON] = None, + base_request_id: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :keyword open_hub_destination_name: Required. The name of the Open Hub Destination with - destination type as Database Table. Type: string (or Expression with resultType string). - :paramtype open_hub_destination_name: any + :keyword open_hub_destination_name: The name of the Open Hub Destination with destination type + as Database Table. Type: string (or Expression with resultType string). Required. + :paramtype open_hub_destination_name: JSON :keyword exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). - :paramtype exclude_last_request: any + :paramtype exclude_last_request: JSON :keyword base_request_id: The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). - :paramtype base_request_id: any - """ - super(SapOpenHubTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'SapOpenHubTable' # type: str + :paramtype base_request_id: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "SapOpenHubTable" # type: str self.open_hub_destination_name = open_hub_destination_name self.exclude_last_request = exclude_last_request self.base_request_id = base_request_id -class SapTableLinkedService(LinkedService): +class SapTableLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """SAP Table Linked Service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. 
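# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the patch: an Open Hub table dataset built with
# the reformatted SapOpenHubTableDataset model above. The reference name and Open
# Hub destination name are hypothetical placeholders.
# ---------------------------------------------------------------------------
from azure.mgmt.datafactory import models as df_models

open_hub_ds = df_models.SapOpenHubTableDataset(
    linked_service_name=df_models.LinkedServiceReference(
        type="LinkedServiceReference", reference_name="SapOpenHubLinkedService1"
    ),
    open_hub_destination_name="ZOH_SALES",  # hypothetical Open Hub destination
    exclude_last_request=True,
    base_request_id=0,
)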
:vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -48999,118 +52154,118 @@ class SapTableLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar server: Host name of the SAP instance where the table is located. Type: string (or Expression with resultType string). - :vartype server: any + :vartype server: JSON :ivar system_number: System number of the SAP system where the table is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). - :vartype system_number: any + :vartype system_number: JSON :ivar client_id: Client ID of the client on the SAP system where the table is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). - :vartype client_id: any + :vartype client_id: JSON :ivar language: Language of the SAP system where the table is located. The default value is EN. Type: string (or Expression with resultType string). - :vartype language: any + :vartype language: JSON :ivar system_id: SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). - :vartype system_id: any + :vartype system_id: JSON :ivar user_name: Username to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :vartype user_name: any + :vartype user_name: JSON :ivar password: Password to access the SAP server where the table is located. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar message_server: The hostname of the SAP Message Server. Type: string (or Expression with resultType string). - :vartype message_server: any + :vartype message_server: JSON :ivar message_server_service: The service name or port number of the Message Server. Type: string (or Expression with resultType string). - :vartype message_server_service: any + :vartype message_server_service: JSON :ivar snc_mode: SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). - :vartype snc_mode: any + :vartype snc_mode: JSON :ivar snc_my_name: Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :vartype snc_my_name: any + :vartype snc_my_name: JSON :ivar snc_partner_name: Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :vartype snc_partner_name: any + :vartype snc_partner_name: JSON :ivar snc_library_path: External security product's library to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :vartype snc_library_path: any + :vartype snc_library_path: JSON :ivar snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType string). - :vartype snc_qop: any + :vartype snc_qop: JSON :ivar logon_group: The Logon Group for the SAP System. Type: string (or Expression with resultType string). - :vartype logon_group: any + :vartype logon_group: JSON :ivar encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'language': {'key': 'typeProperties.language', 'type': 'object'}, - 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, - 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, - 'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'}, - 'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'}, - 'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'}, - 'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'}, - 'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'}, - 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "server": {"key": "typeProperties.server", "type": "object"}, + "system_number": {"key": "typeProperties.systemNumber", "type": "object"}, + "client_id": {"key": "typeProperties.clientId", "type": "object"}, + "language": {"key": "typeProperties.language", "type": "object"}, + "system_id": {"key": "typeProperties.systemId", "type": "object"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "message_server": {"key": "typeProperties.messageServer", "type": "object"}, + "message_server_service": {"key": "typeProperties.messageServerService", "type": "object"}, + "snc_mode": {"key": "typeProperties.sncMode", "type": "object"}, + "snc_my_name": {"key": "typeProperties.sncMyName", "type": "object"}, + "snc_partner_name": {"key": "typeProperties.sncPartnerName", "type": "object"}, + "snc_library_path": {"key": "typeProperties.sncLibraryPath", "type": "object"}, + "snc_qop": {"key": "typeProperties.sncQop", "type": "object"}, + "logon_group": {"key": "typeProperties.logonGroup", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: 
Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - server: Optional[Any] = None, - system_number: Optional[Any] = None, - client_id: Optional[Any] = None, - language: Optional[Any] = None, - system_id: Optional[Any] = None, - user_name: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + server: Optional[JSON] = None, + system_number: Optional[JSON] = None, + client_id: Optional[JSON] = None, + language: Optional[JSON] = None, + system_id: Optional[JSON] = None, + user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - message_server: Optional[Any] = None, - message_server_service: Optional[Any] = None, - snc_mode: Optional[Any] = None, - snc_my_name: Optional[Any] = None, - snc_partner_name: Optional[Any] = None, - snc_library_path: Optional[Any] = None, - snc_qop: Optional[Any] = None, - logon_group: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + message_server: Optional[JSON] = None, + message_server_service: Optional[JSON] = None, + snc_mode: Optional[JSON] = None, + snc_my_name: Optional[JSON] = None, + snc_partner_name: Optional[JSON] = None, + snc_library_path: Optional[JSON] = None, + snc_qop: Optional[JSON] = None, + logon_group: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -49118,60 +52273,67 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword server: Host name of the SAP instance where the table is located. Type: string (or Expression with resultType string). - :paramtype server: any + :paramtype server: JSON :keyword system_number: System number of the SAP system where the table is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). - :paramtype system_number: any + :paramtype system_number: JSON :keyword client_id: Client ID of the client on the SAP system where the table is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). - :paramtype client_id: any + :paramtype client_id: JSON :keyword language: Language of the SAP system where the table is located. The default value is EN. Type: string (or Expression with resultType string). - :paramtype language: any + :paramtype language: JSON :keyword system_id: SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). - :paramtype system_id: any + :paramtype system_id: JSON :keyword user_name: Username to access the SAP server where the table is located. Type: string (or Expression with resultType string). 
- :paramtype user_name: any + :paramtype user_name: JSON :keyword password: Password to access the SAP server where the table is located. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword message_server: The hostname of the SAP Message Server. Type: string (or Expression with resultType string). - :paramtype message_server: any + :paramtype message_server: JSON :keyword message_server_service: The service name or port number of the Message Server. Type: string (or Expression with resultType string). - :paramtype message_server_service: any + :paramtype message_server_service: JSON :keyword snc_mode: SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). - :paramtype snc_mode: any + :paramtype snc_mode: JSON :keyword snc_my_name: Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :paramtype snc_my_name: any + :paramtype snc_my_name: JSON :keyword snc_partner_name: Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :paramtype snc_partner_name: any + :paramtype snc_partner_name: JSON :keyword snc_library_path: External security product's library to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :paramtype snc_library_path: any + :paramtype snc_library_path: JSON :keyword snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType string). - :paramtype snc_qop: any + :paramtype snc_qop: JSON :keyword logon_group: The Logon Group for the SAP System. Type: string (or Expression with resultType string). - :paramtype logon_group: any + :paramtype logon_group: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(SapTableLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'SapTable' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "SapTable" # type: str self.server = server self.system_number = system_number self.client_id = client_id @@ -49190,58 +52352,58 @@ def __init__( self.encrypted_credential = encrypted_credential -class SapTablePartitionSettings(msrest.serialization.Model): +class SapTablePartitionSettings(_serialization.Model): """The settings that will be leveraged for SAP table source partitioning. :ivar partition_column_name: The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :vartype partition_column_name: any + :vartype partition_column_name: JSON :ivar partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). 
- :vartype partition_upper_bound: any + :vartype partition_upper_bound: JSON :ivar partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :vartype partition_lower_bound: any + :vartype partition_lower_bound: JSON :ivar max_partitions_number: The maximum value of partitions the table will be split into. Type: integer (or Expression with resultType string). - :vartype max_partitions_number: any + :vartype max_partitions_number: JSON """ _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, + "partition_column_name": {"key": "partitionColumnName", "type": "object"}, + "partition_upper_bound": {"key": "partitionUpperBound", "type": "object"}, + "partition_lower_bound": {"key": "partitionLowerBound", "type": "object"}, + "max_partitions_number": {"key": "maxPartitionsNumber", "type": "object"}, } def __init__( self, *, - partition_column_name: Optional[Any] = None, - partition_upper_bound: Optional[Any] = None, - partition_lower_bound: Optional[Any] = None, - max_partitions_number: Optional[Any] = None, + partition_column_name: Optional[JSON] = None, + partition_upper_bound: Optional[JSON] = None, + partition_lower_bound: Optional[JSON] = None, + max_partitions_number: Optional[JSON] = None, **kwargs ): """ :keyword partition_column_name: The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :paramtype partition_column_name: any + :paramtype partition_column_name: JSON :keyword partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :paramtype partition_upper_bound: any + :paramtype partition_upper_bound: JSON :keyword partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :paramtype partition_lower_bound: any + :paramtype partition_lower_bound: JSON :keyword max_partitions_number: The maximum value of partitions the table will be split into. Type: integer (or Expression with resultType string). - :paramtype max_partitions_number: any + :paramtype max_partitions_number: JSON """ - super(SapTablePartitionSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.partition_column_name = partition_column_name self.partition_upper_bound = partition_upper_bound self.partition_lower_bound = partition_lower_bound @@ -49255,252 +52417,271 @@ class SapTableResourceDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :ivar table_name: Required. The name of the SAP Table. Type: string (or Expression with - resultType string). - :vartype table_name: any + :ivar table_name: The name of the SAP Table. Type: string (or Expression with resultType + string). Required. + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'table_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, + "table_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - table_name: Any, - additional_properties: Optional[Dict[str, Any]] = None, + table_name: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :keyword table_name: Required. The name of the SAP Table. Type: string (or Expression with - resultType string). - :paramtype table_name: any - """ - super(SapTableResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'SapTableResource' # type: str + :keyword table_name: The name of the SAP Table. Type: string (or Expression with resultType + string). Required. + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "SapTableResource" # type: str self.table_name = table_name -class SapTableSource(TabularSource): +class SapTableSource(TabularSource): # pylint: disable=too-many-instance-attributes """A copy activity source for SAP Table source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. 
Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar row_count: The number of rows to be retrieved. Type: integer(or Expression with resultType integer). - :vartype row_count: any + :vartype row_count: JSON :ivar row_skips: The number of rows that will be skipped. Type: integer (or Expression with resultType integer). - :vartype row_skips: any + :vartype row_skips: JSON :ivar rfc_table_fields: The fields of the SAP table that will be retrieved. For example, column0, column1. Type: string (or Expression with resultType string). - :vartype rfc_table_fields: any + :vartype rfc_table_fields: JSON :ivar rfc_table_options: The options for the filtering of the SAP Table. For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with resultType string). - :vartype rfc_table_options: any + :vartype rfc_table_options: JSON :ivar batch_size: Specifies the maximum number of rows that will be retrieved at a time when retrieving data from SAP Table. Type: integer (or Expression with resultType integer). - :vartype batch_size: any + :vartype batch_size: JSON :ivar custom_rfc_read_table_function_module: Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). - :vartype custom_rfc_read_table_function_module: any + :vartype custom_rfc_read_table_function_module: JSON :ivar sap_data_column_delimiter: The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). - :vartype sap_data_column_delimiter: any + :vartype sap_data_column_delimiter: JSON :ivar partition_option: The partition mechanism that will be used for SAP table read in parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". - :vartype partition_option: any + :vartype partition_option: JSON :ivar partition_settings: The settings that will be leveraged for SAP table source partitioning. 
:vartype partition_settings: ~azure.mgmt.datafactory.models.SapTablePartitionSettings """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'row_count': {'key': 'rowCount', 'type': 'object'}, - 'row_skips': {'key': 'rowSkips', 'type': 'object'}, - 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, - 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, - 'batch_size': {'key': 'batchSize', 'type': 'object'}, - 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, - 'sap_data_column_delimiter': {'key': 'sapDataColumnDelimiter', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "row_count": {"key": "rowCount", "type": "object"}, + "row_skips": {"key": "rowSkips", "type": "object"}, + "rfc_table_fields": {"key": "rfcTableFields", "type": "object"}, + "rfc_table_options": {"key": "rfcTableOptions", "type": "object"}, + "batch_size": {"key": "batchSize", "type": "object"}, + "custom_rfc_read_table_function_module": {"key": "customRfcReadTableFunctionModule", "type": "object"}, + "sap_data_column_delimiter": {"key": "sapDataColumnDelimiter", "type": "object"}, + "partition_option": {"key": "partitionOption", "type": "object"}, + "partition_settings": {"key": "partitionSettings", "type": "SapTablePartitionSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - row_count: Optional[Any] = None, - row_skips: Optional[Any] = None, - rfc_table_fields: Optional[Any] = None, - rfc_table_options: Optional[Any] = None, - batch_size: Optional[Any] = None, - custom_rfc_read_table_function_module: Optional[Any] = None, - sap_data_column_delimiter: Optional[Any] = None, - partition_option: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + 
query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + row_count: Optional[JSON] = None, + row_skips: Optional[JSON] = None, + rfc_table_fields: Optional[JSON] = None, + rfc_table_options: Optional[JSON] = None, + batch_size: Optional[JSON] = None, + custom_rfc_read_table_function_module: Optional[JSON] = None, + sap_data_column_delimiter: Optional[JSON] = None, + partition_option: Optional[JSON] = None, partition_settings: Optional["_models.SapTablePartitionSettings"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword row_count: The number of rows to be retrieved. Type: integer(or Expression with resultType integer). - :paramtype row_count: any + :paramtype row_count: JSON :keyword row_skips: The number of rows that will be skipped. Type: integer (or Expression with resultType integer). - :paramtype row_skips: any + :paramtype row_skips: JSON :keyword rfc_table_fields: The fields of the SAP table that will be retrieved. For example, column0, column1. Type: string (or Expression with resultType string). - :paramtype rfc_table_fields: any + :paramtype rfc_table_fields: JSON :keyword rfc_table_options: The options for the filtering of the SAP Table. For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with resultType string). - :paramtype rfc_table_options: any + :paramtype rfc_table_options: JSON :keyword batch_size: Specifies the maximum number of rows that will be retrieved at a time when retrieving data from SAP Table. Type: integer (or Expression with resultType integer). - :paramtype batch_size: any + :paramtype batch_size: JSON :keyword custom_rfc_read_table_function_module: Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). 
- :paramtype custom_rfc_read_table_function_module: any + :paramtype custom_rfc_read_table_function_module: JSON :keyword sap_data_column_delimiter: The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). - :paramtype sap_data_column_delimiter: any + :paramtype sap_data_column_delimiter: JSON :keyword partition_option: The partition mechanism that will be used for SAP table read in parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". - :paramtype partition_option: any + :paramtype partition_option: JSON :keyword partition_settings: The settings that will be leveraged for SAP table source partitioning. :paramtype partition_settings: ~azure.mgmt.datafactory.models.SapTablePartitionSettings """ - super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'SapTableSource' # type: str + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "SapTableSource" # type: str self.row_count = row_count self.row_skips = row_skips self.rfc_table_fields = rfc_table_fields @@ -49521,74 +52702,80 @@ class ScheduleTrigger(MultiplePipelineTrigger): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Trigger type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Trigger type. Required. :vartype type: str :ivar description: Trigger description. :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Known values are: "Started", "Stopped", "Disabled". + called on the Trigger. Known values are: "Started", "Stopped", and "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :ivar annotations: List of tags that can be used for describing the trigger. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar pipelines: Pipelines that need to be started. :vartype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :ivar recurrence: Required. Recurrence schedule configuration. + :ivar recurrence: Recurrence schedule configuration. Required. 
:vartype recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence """ _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, - 'recurrence': {'required': True}, + "type": {"required": True}, + "runtime_state": {"readonly": True}, + "recurrence": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "runtime_state": {"key": "runtimeState", "type": "str"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "pipelines": {"key": "pipelines", "type": "[TriggerPipelineReference]"}, + "recurrence": {"key": "typeProperties.recurrence", "type": "ScheduleTriggerRecurrence"}, } def __init__( self, *, recurrence: "_models.ScheduleTriggerRecurrence", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, pipelines: Optional[List["_models.TriggerPipelineReference"]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Trigger description. :paramtype description: str :keyword annotations: List of tags that can be used for describing the trigger. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword pipelines: Pipelines that need to be started. :paramtype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :keyword recurrence: Required. Recurrence schedule configuration. + :keyword recurrence: Recurrence schedule configuration. Required. :paramtype recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence """ - super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) - self.type = 'ScheduleTrigger' # type: str + super().__init__( + additional_properties=additional_properties, + description=description, + annotations=annotations, + pipelines=pipelines, + **kwargs + ) + self.type = "ScheduleTrigger" # type: str self.recurrence = recurrence -class ScheduleTriggerRecurrence(msrest.serialization.Model): +class ScheduleTriggerRecurrence(_serialization.Model): """The workflow trigger recurrence. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar frequency: The frequency. Known values are: "NotSpecified", "Minute", "Hour", "Day", - "Week", "Month", "Year". + "Week", "Month", and "Year". :vartype frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency :ivar interval: The interval. 
:vartype interval: int @@ -49603,19 +52790,19 @@ class ScheduleTriggerRecurrence(msrest.serialization.Model): """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'frequency': {'key': 'frequency', 'type': 'str'}, - 'interval': {'key': 'interval', 'type': 'int'}, - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, - 'time_zone': {'key': 'timeZone', 'type': 'str'}, - 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, + "additional_properties": {"key": "", "type": "{object}"}, + "frequency": {"key": "frequency", "type": "str"}, + "interval": {"key": "interval", "type": "int"}, + "start_time": {"key": "startTime", "type": "iso-8601"}, + "end_time": {"key": "endTime", "type": "iso-8601"}, + "time_zone": {"key": "timeZone", "type": "str"}, + "schedule": {"key": "schedule", "type": "RecurrenceSchedule"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, frequency: Optional[Union[str, "_models.RecurrenceFrequency"]] = None, interval: Optional[int] = None, start_time: Optional[datetime.datetime] = None, @@ -49627,9 +52814,9 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword frequency: The frequency. Known values are: "NotSpecified", "Minute", "Hour", "Day", - "Week", "Month", "Year". + "Week", "Month", and "Year". :paramtype frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency :keyword interval: The interval. :paramtype interval: int @@ -49642,7 +52829,7 @@ def __init__( :keyword schedule: The recurrence schedule. :paramtype schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule """ - super(ScheduleTriggerRecurrence, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.frequency = frequency self.interval = interval @@ -49652,54 +52839,46 @@ def __init__( self.schedule = schedule -class ScriptAction(msrest.serialization.Model): +class ScriptAction(_serialization.Model): """Custom script action to run on HDI ondemand cluster once it's up. All required parameters must be populated in order to send to Azure. - :ivar name: Required. The user provided name of the script action. + :ivar name: The user provided name of the script action. Required. :vartype name: str - :ivar uri: Required. The URI for the script action. + :ivar uri: The URI for the script action. Required. :vartype uri: str - :ivar roles: Required. The node types on which the script action should be executed. - :vartype roles: any + :ivar roles: The node types on which the script action should be executed. Required. + :vartype roles: JSON :ivar parameters: The parameters for the script action. 
:vartype parameters: str """ _validation = { - 'name': {'required': True}, - 'uri': {'required': True}, - 'roles': {'required': True}, + "name": {"required": True}, + "uri": {"required": True}, + "roles": {"required": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'roles': {'key': 'roles', 'type': 'object'}, - 'parameters': {'key': 'parameters', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + "roles": {"key": "roles", "type": "object"}, + "parameters": {"key": "parameters", "type": "str"}, } - def __init__( - self, - *, - name: str, - uri: str, - roles: Any, - parameters: Optional[str] = None, - **kwargs - ): + def __init__(self, *, name: str, uri: str, roles: JSON, parameters: Optional[str] = None, **kwargs): """ - :keyword name: Required. The user provided name of the script action. + :keyword name: The user provided name of the script action. Required. :paramtype name: str - :keyword uri: Required. The URI for the script action. + :keyword uri: The URI for the script action. Required. :paramtype uri: str - :keyword roles: Required. The node types on which the script action should be executed. - :paramtype roles: any + :keyword roles: The node types on which the script action should be executed. Required. + :paramtype roles: JSON :keyword parameters: The parameters for the script action. :paramtype parameters: str """ - super(ScriptAction, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name self.uri = uri self.roles = roles @@ -49713,10 +52892,10 @@ class ScriptActivity(ExecutionActivity): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. 
:vartype description: str @@ -49735,28 +52914,28 @@ class ScriptActivity(ExecutionActivity): """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'scripts': {'key': 'typeProperties.scripts', 'type': '[ScriptActivityScriptBlock]'}, - 'log_settings': {'key': 'typeProperties.logSettings', 'type': 'ScriptActivityTypePropertiesLogSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "scripts": {"key": "typeProperties.scripts", "type": "[ScriptActivityScriptBlock]"}, + "log_settings": {"key": "typeProperties.logSettings", "type": "ScriptActivityTypePropertiesLogSettings"}, } def __init__( self, *, name: str, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, @@ -49769,8 +52948,8 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -49787,24 +52966,33 @@ def __init__( :keyword log_settings: Log settings of script activity. :paramtype log_settings: ~azure.mgmt.datafactory.models.ScriptActivityTypePropertiesLogSettings """ - super(ScriptActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'Script' # type: str + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "Script" # type: str self.scripts = scripts self.log_settings = log_settings -class ScriptActivityParameter(msrest.serialization.Model): +class ScriptActivityParameter(_serialization.Model): """Parameters of a script block. :ivar name: The name of the parameter. Type: string (or Expression with resultType string). - :vartype name: any + :vartype name: JSON :ivar type: The type of the parameter. 
Known values are: "Boolean", "DateTime", "DateTimeOffset", "Decimal", "Double", "Guid", "Int16", "Int32", "Int64", "Single", "String", - "Timespan". + and "Timespan". :vartype type: str or ~azure.mgmt.datafactory.models.ScriptActivityParameterType :ivar value: The value of the parameter. - :vartype value: any - :ivar direction: The direction of the parameter. Known values are: "Input", "Output", + :vartype value: JSON + :ivar direction: The direction of the parameter. Known values are: "Input", "Output", and "InputOutput". :vartype direction: str or ~azure.mgmt.datafactory.models.ScriptActivityParameterDirection :ivar size: The size of the output direction parameter. @@ -49812,39 +53000,39 @@ class ScriptActivityParameter(msrest.serialization.Model): """ _attribute_map = { - 'name': {'key': 'name', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'object'}, - 'direction': {'key': 'direction', 'type': 'str'}, - 'size': {'key': 'size', 'type': 'int'}, + "name": {"key": "name", "type": "object"}, + "type": {"key": "type", "type": "str"}, + "value": {"key": "value", "type": "object"}, + "direction": {"key": "direction", "type": "str"}, + "size": {"key": "size", "type": "int"}, } def __init__( self, *, - name: Optional[Any] = None, + name: Optional[JSON] = None, type: Optional[Union[str, "_models.ScriptActivityParameterType"]] = None, - value: Optional[Any] = None, + value: Optional[JSON] = None, direction: Optional[Union[str, "_models.ScriptActivityParameterDirection"]] = None, size: Optional[int] = None, **kwargs ): """ :keyword name: The name of the parameter. Type: string (or Expression with resultType string). - :paramtype name: any + :paramtype name: JSON :keyword type: The type of the parameter. Known values are: "Boolean", "DateTime", "DateTimeOffset", "Decimal", "Double", "Guid", "Int16", "Int32", "Int64", "Single", "String", - "Timespan". + and "Timespan". :paramtype type: str or ~azure.mgmt.datafactory.models.ScriptActivityParameterType :keyword value: The value of the parameter. - :paramtype value: any - :keyword direction: The direction of the parameter. Known values are: "Input", "Output", + :paramtype value: JSON + :keyword direction: The direction of the parameter. Known values are: "Input", "Output", and "InputOutput". :paramtype direction: str or ~azure.mgmt.datafactory.models.ScriptActivityParameterDirection :keyword size: The size of the output direction parameter. :paramtype size: int """ - super(ScriptActivityParameter, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name self.type = type self.value = value @@ -49852,14 +53040,14 @@ def __init__( self.size = size -class ScriptActivityScriptBlock(msrest.serialization.Model): +class ScriptActivityScriptBlock(_serialization.Model): """Script block of scripts. All required parameters must be populated in order to send to Azure. - :ivar text: Required. The query text. Type: string (or Expression with resultType string). - :vartype text: any - :ivar type: Required. The type of the query. Type: string. Known values are: "Query", + :ivar text: The query text. Type: string (or Expression with resultType string). Required. + :vartype text: JSON + :ivar type: The type of the query. Type: string. Required. Known values are: "Query" and "NonQuery". :vartype type: str or ~azure.mgmt.datafactory.models.ScriptType :ivar parameters: Array of script parameters. Type: array. 
@@ -49867,58 +53055,58 @@ class ScriptActivityScriptBlock(msrest.serialization.Model): """ _validation = { - 'text': {'required': True}, - 'type': {'required': True}, + "text": {"required": True}, + "type": {"required": True}, } _attribute_map = { - 'text': {'key': 'text', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '[ScriptActivityParameter]'}, + "text": {"key": "text", "type": "object"}, + "type": {"key": "type", "type": "str"}, + "parameters": {"key": "parameters", "type": "[ScriptActivityParameter]"}, } def __init__( self, *, - text: Any, + text: JSON, type: Union[str, "_models.ScriptType"], parameters: Optional[List["_models.ScriptActivityParameter"]] = None, **kwargs ): """ - :keyword text: Required. The query text. Type: string (or Expression with resultType string). - :paramtype text: any - :keyword type: Required. The type of the query. Type: string. Known values are: "Query", + :keyword text: The query text. Type: string (or Expression with resultType string). Required. + :paramtype text: JSON + :keyword type: The type of the query. Type: string. Required. Known values are: "Query" and "NonQuery". :paramtype type: str or ~azure.mgmt.datafactory.models.ScriptType :keyword parameters: Array of script parameters. Type: array. :paramtype parameters: list[~azure.mgmt.datafactory.models.ScriptActivityParameter] """ - super(ScriptActivityScriptBlock, self).__init__(**kwargs) + super().__init__(**kwargs) self.text = text self.type = type self.parameters = parameters -class ScriptActivityTypePropertiesLogSettings(msrest.serialization.Model): +class ScriptActivityTypePropertiesLogSettings(_serialization.Model): """Log settings of script activity. All required parameters must be populated in order to send to Azure. - :ivar log_destination: Required. The destination of logs. Type: string. Known values are: - "ActivityOutput", "ExternalStore". + :ivar log_destination: The destination of logs. Type: string. Required. Known values are: + "ActivityOutput" and "ExternalStore". :vartype log_destination: str or ~azure.mgmt.datafactory.models.ScriptActivityLogDestination :ivar log_location_settings: Log location settings customer needs to provide when enabling log. :vartype log_location_settings: ~azure.mgmt.datafactory.models.LogLocationSettings """ _validation = { - 'log_destination': {'required': True}, + "log_destination": {"required": True}, } _attribute_map = { - 'log_destination': {'key': 'logDestination', 'type': 'str'}, - 'log_location_settings': {'key': 'logLocationSettings', 'type': 'LogLocationSettings'}, + "log_destination": {"key": "logDestination", "type": "str"}, + "log_location_settings": {"key": "logLocationSettings", "type": "LogLocationSettings"}, } def __init__( @@ -49929,14 +53117,14 @@ def __init__( **kwargs ): """ - :keyword log_destination: Required. The destination of logs. Type: string. Known values are: - "ActivityOutput", "ExternalStore". + :keyword log_destination: The destination of logs. Type: string. Required. Known values are: + "ActivityOutput" and "ExternalStore". :paramtype log_destination: str or ~azure.mgmt.datafactory.models.ScriptActivityLogDestination :keyword log_location_settings: Log location settings customer needs to provide when enabling log. 
:paramtype log_location_settings: ~azure.mgmt.datafactory.models.LogLocationSettings """ - super(ScriptActivityTypePropertiesLogSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.log_destination = log_destination self.log_location_settings = log_location_settings @@ -49946,34 +53134,29 @@ class SecureString(SecretBase): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Type of the secret.Constant filled by server. + :ivar type: Type of the secret. Required. :vartype type: str - :ivar value: Required. Value of secure string. + :ivar value: Value of secure string. Required. :vartype value: str """ _validation = { - 'type': {'required': True}, - 'value': {'required': True}, + "type": {"required": True}, + "value": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + "type": {"key": "type", "type": "str"}, + "value": {"key": "value", "type": "str"}, } - def __init__( - self, - *, - value: str, - **kwargs - ): + def __init__(self, *, value: str, **kwargs): """ - :keyword value: Required. Value of secure string. + :keyword value: Value of secure string. Required. :paramtype value: str """ - super(SecureString, self).__init__(**kwargs) - self.type = 'SecureString' # type: str + super().__init__(**kwargs) + self.type = "SecureString" # type: str self.value = value @@ -49982,10 +53165,10 @@ class SelfDependencyTumblingWindowTriggerReference(DependencyReference): All required parameters must be populated in order to send to Azure. - :ivar type: Required. The type of dependency reference.Constant filled by server. + :ivar type: The type of dependency reference. Required. :vartype type: str - :ivar offset: Required. Timespan applied to the start time of a tumbling window when evaluating - dependency. + :ivar offset: Timespan applied to the start time of a tumbling window when evaluating + dependency. Required. :vartype offset: str :ivar size: The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be used. @@ -49993,34 +53176,33 @@ class SelfDependencyTumblingWindowTriggerReference(DependencyReference): """ _validation = { - 'type': {'required': True}, - 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'-((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + "type": {"required": True}, + "offset": { + "required": True, + "max_length": 15, + "min_length": 8, + "pattern": r"-((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))", + }, + "size": {"max_length": 15, "min_length": 8, "pattern": r"((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))"}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'offset': {'key': 'offset', 'type': 'str'}, - 'size': {'key': 'size', 'type': 'str'}, + "type": {"key": "type", "type": "str"}, + "offset": {"key": "offset", "type": "str"}, + "size": {"key": "size", "type": "str"}, } - def __init__( - self, - *, - offset: str, - size: Optional[str] = None, - **kwargs - ): + def __init__(self, *, offset: str, size: Optional[str] = None, **kwargs): """ - :keyword offset: Required. Timespan applied to the start time of a tumbling window when - evaluating dependency. + :keyword offset: Timespan applied to the start time of a tumbling window when evaluating + dependency. Required. 
:paramtype offset: str :keyword size: The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be used. :paramtype size: str """ - super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) - self.type = 'SelfDependencyTumblingWindowTriggerReference' # type: str + super().__init__(**kwargs) + self.type = "SelfDependencyTumblingWindowTriggerReference" # type: str self.offset = offset self.size = size @@ -50032,9 +53214,9 @@ class SelfHostedIntegrationRuntime(IntegrationRuntime): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of integration runtime.Constant filled by server. Known values are: - "Managed", "SelfHosted". + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of integration runtime. Required. Known values are: "Managed" and + "SelfHosted". :vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :ivar description: Integration runtime description. :vartype description: str @@ -50043,20 +53225,20 @@ class SelfHostedIntegrationRuntime(IntegrationRuntime): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "linked_info": {"key": "typeProperties.linkedInfo", "type": "LinkedIntegrationRuntimeType"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, linked_info: Optional["_models.LinkedIntegrationRuntimeType"] = None, **kwargs @@ -50064,25 +53246,25 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Integration runtime description. :paramtype description: str :keyword linked_info: The base definition of a linked integration runtime. :paramtype linked_info: ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType """ - super(SelfHostedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) - self.type = 'SelfHosted' # type: str + super().__init__(additional_properties=additional_properties, description=description, **kwargs) + self.type = "SelfHosted" # type: str self.linked_info = linked_info -class SelfHostedIntegrationRuntimeNode(msrest.serialization.Model): +class SelfHostedIntegrationRuntimeNode(_serialization.Model): # pylint: disable=too-many-instance-attributes """Properties of Self-hosted integration runtime node. Variables are only populated by the server, and will be ignored when sending a request. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar node_name: Name of the integration runtime node. 
:vartype node_name: str :ivar machine_name: Machine name of the integration runtime node. @@ -50090,7 +53272,7 @@ class SelfHostedIntegrationRuntimeNode(msrest.serialization.Model): :ivar host_service_uri: URI for the host machine of the integration runtime. :vartype host_service_uri: str :ivar status: Status of the integration runtime node. Known values are: "NeedRegistration", - "Online", "Limited", "Offline", "Upgrading", "Initializing", "InitializeFailed". + "Online", "Limited", "Offline", "Upgrading", "Initializing", and "InitializeFailed". :vartype status: str or ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus :ivar capabilities: The integration runtime capabilities dictionary. :vartype capabilities: dict[str, str] @@ -50111,7 +53293,7 @@ class SelfHostedIntegrationRuntimeNode(msrest.serialization.Model): :ivar last_stop_time: The integration runtime node last stop time. :vartype last_stop_time: ~datetime.datetime :ivar last_update_result: The result of the last integration runtime node update. Known values - are: "None", "Succeed", "Fail". + are: "None", "Succeed", and "Fail". :vartype last_update_result: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeUpdateResult :ivar last_start_update_time: The last time for the integration runtime node update start. @@ -50128,60 +53310,55 @@ class SelfHostedIntegrationRuntimeNode(msrest.serialization.Model): """ _validation = { - 'node_name': {'readonly': True}, - 'machine_name': {'readonly': True}, - 'host_service_uri': {'readonly': True}, - 'status': {'readonly': True}, - 'capabilities': {'readonly': True}, - 'version_status': {'readonly': True}, - 'version': {'readonly': True}, - 'register_time': {'readonly': True}, - 'last_connect_time': {'readonly': True}, - 'expiry_time': {'readonly': True}, - 'last_start_time': {'readonly': True}, - 'last_stop_time': {'readonly': True}, - 'last_update_result': {'readonly': True}, - 'last_start_update_time': {'readonly': True}, - 'last_end_update_time': {'readonly': True}, - 'is_active_dispatcher': {'readonly': True}, - 'concurrent_jobs_limit': {'readonly': True}, - 'max_concurrent_jobs': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'node_name': {'key': 'nodeName', 'type': 'str'}, - 'machine_name': {'key': 'machineName', 'type': 'str'}, - 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'capabilities': {'key': 'capabilities', 'type': '{str}'}, - 'version_status': {'key': 'versionStatus', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, - 'register_time': {'key': 'registerTime', 'type': 'iso-8601'}, - 'last_connect_time': {'key': 'lastConnectTime', 'type': 'iso-8601'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'last_start_time': {'key': 'lastStartTime', 'type': 'iso-8601'}, - 'last_stop_time': {'key': 'lastStopTime', 'type': 'iso-8601'}, - 'last_update_result': {'key': 'lastUpdateResult', 'type': 'str'}, - 'last_start_update_time': {'key': 'lastStartUpdateTime', 'type': 'iso-8601'}, - 'last_end_update_time': {'key': 'lastEndUpdateTime', 'type': 'iso-8601'}, - 'is_active_dispatcher': {'key': 'isActiveDispatcher', 'type': 'bool'}, - 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, - 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - **kwargs - ): - """ - :keyword additional_properties: 
Unmatched properties from the message are deserialized to this - collection. - :paramtype additional_properties: dict[str, any] - """ - super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs) + "node_name": {"readonly": True}, + "machine_name": {"readonly": True}, + "host_service_uri": {"readonly": True}, + "status": {"readonly": True}, + "capabilities": {"readonly": True}, + "version_status": {"readonly": True}, + "version": {"readonly": True}, + "register_time": {"readonly": True}, + "last_connect_time": {"readonly": True}, + "expiry_time": {"readonly": True}, + "last_start_time": {"readonly": True}, + "last_stop_time": {"readonly": True}, + "last_update_result": {"readonly": True}, + "last_start_update_time": {"readonly": True}, + "last_end_update_time": {"readonly": True}, + "is_active_dispatcher": {"readonly": True}, + "concurrent_jobs_limit": {"readonly": True}, + "max_concurrent_jobs": {"readonly": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "node_name": {"key": "nodeName", "type": "str"}, + "machine_name": {"key": "machineName", "type": "str"}, + "host_service_uri": {"key": "hostServiceUri", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "capabilities": {"key": "capabilities", "type": "{str}"}, + "version_status": {"key": "versionStatus", "type": "str"}, + "version": {"key": "version", "type": "str"}, + "register_time": {"key": "registerTime", "type": "iso-8601"}, + "last_connect_time": {"key": "lastConnectTime", "type": "iso-8601"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "last_start_time": {"key": "lastStartTime", "type": "iso-8601"}, + "last_stop_time": {"key": "lastStopTime", "type": "iso-8601"}, + "last_update_result": {"key": "lastUpdateResult", "type": "str"}, + "last_start_update_time": {"key": "lastStartUpdateTime", "type": "iso-8601"}, + "last_end_update_time": {"key": "lastEndUpdateTime", "type": "iso-8601"}, + "is_active_dispatcher": {"key": "isActiveDispatcher", "type": "bool"}, + "concurrent_jobs_limit": {"key": "concurrentJobsLimit", "type": "int"}, + "max_concurrent_jobs": {"key": "maxConcurrentJobs", "type": "int"}, + } + + def __init__(self, *, additional_properties: Optional[Dict[str, JSON]] = None, **kwargs): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + """ + super().__init__(**kwargs) self.additional_properties = additional_properties self.node_name = None self.machine_name = None @@ -50203,7 +53380,7 @@ def __init__( self.max_concurrent_jobs = None -class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): +class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): # pylint: disable=too-many-instance-attributes """Self-hosted integration runtime status. Variables are only populated by the server, and will be ignored when sending a request. @@ -50212,14 +53389,14 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of integration runtime.Constant filled by server. Known values are: - "Managed", "SelfHosted". + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of integration runtime. Required. Known values are: "Managed" and + "SelfHosted". 
:vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :ivar data_factory_name: The data factory name which the integration runtime belong to. :vartype data_factory_name: str :ivar state: The state of integration runtime. Known values are: "Initial", "Stopped", - "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", + "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", and "AccessDenied". :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState :ivar create_time: The time at which the integration runtime was created, in ISO8601 format. @@ -50228,7 +53405,7 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): :vartype task_queue_id: str :ivar internal_channel_encryption: It is used to set the encryption mode for node-node communication channel (when more than 2 self-hosted integration runtime nodes exist). Known - values are: "NotSet", "SslEncrypted", "NotEncrypted". + values are: "NotSet", "SslEncrypted", and "NotEncrypted". :vartype internal_channel_encryption: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode :ivar version: Version of the integration runtime. @@ -50248,7 +53425,7 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): :ivar service_urls: The URLs for the services used in integration runtime backend service. :vartype service_urls: list[str] :ivar auto_update: Whether Self-hosted integration runtime auto update has been turned on. - Known values are: "On", "Off". + Known values are: "On" and "Off". :vartype auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate :ivar version_status: Status of the integration runtime version. :vartype version_status: str @@ -50265,52 +53442,52 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): """ _validation = { - 'type': {'required': True}, - 'data_factory_name': {'readonly': True}, - 'state': {'readonly': True}, - 'create_time': {'readonly': True}, - 'task_queue_id': {'readonly': True}, - 'internal_channel_encryption': {'readonly': True}, - 'version': {'readonly': True}, - 'scheduled_update_date': {'readonly': True}, - 'update_delay_offset': {'readonly': True}, - 'local_time_zone_offset': {'readonly': True}, - 'capabilities': {'readonly': True}, - 'service_urls': {'readonly': True}, - 'auto_update': {'readonly': True}, - 'version_status': {'readonly': True}, - 'pushed_version': {'readonly': True}, - 'latest_version': {'readonly': True}, - 'auto_update_eta': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, - 'task_queue_id': {'key': 'typeProperties.taskQueueId', 'type': 'str'}, - 'internal_channel_encryption': {'key': 'typeProperties.internalChannelEncryption', 'type': 'str'}, - 'version': {'key': 'typeProperties.version', 'type': 'str'}, - 'nodes': {'key': 'typeProperties.nodes', 'type': '[SelfHostedIntegrationRuntimeNode]'}, - 'scheduled_update_date': {'key': 'typeProperties.scheduledUpdateDate', 'type': 'iso-8601'}, - 'update_delay_offset': {'key': 'typeProperties.updateDelayOffset', 'type': 'str'}, - 'local_time_zone_offset': {'key': 'typeProperties.localTimeZoneOffset', 'type': 'str'}, - 'capabilities': {'key': 
'typeProperties.capabilities', 'type': '{str}'}, - 'service_urls': {'key': 'typeProperties.serviceUrls', 'type': '[str]'}, - 'auto_update': {'key': 'typeProperties.autoUpdate', 'type': 'str'}, - 'version_status': {'key': 'typeProperties.versionStatus', 'type': 'str'}, - 'links': {'key': 'typeProperties.links', 'type': '[LinkedIntegrationRuntime]'}, - 'pushed_version': {'key': 'typeProperties.pushedVersion', 'type': 'str'}, - 'latest_version': {'key': 'typeProperties.latestVersion', 'type': 'str'}, - 'auto_update_eta': {'key': 'typeProperties.autoUpdateETA', 'type': 'iso-8601'}, - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, + "type": {"required": True}, + "data_factory_name": {"readonly": True}, + "state": {"readonly": True}, + "create_time": {"readonly": True}, + "task_queue_id": {"readonly": True}, + "internal_channel_encryption": {"readonly": True}, + "version": {"readonly": True}, + "scheduled_update_date": {"readonly": True}, + "update_delay_offset": {"readonly": True}, + "local_time_zone_offset": {"readonly": True}, + "capabilities": {"readonly": True}, + "service_urls": {"readonly": True}, + "auto_update": {"readonly": True}, + "version_status": {"readonly": True}, + "pushed_version": {"readonly": True}, + "latest_version": {"readonly": True}, + "auto_update_eta": {"readonly": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "data_factory_name": {"key": "dataFactoryName", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "create_time": {"key": "typeProperties.createTime", "type": "iso-8601"}, + "task_queue_id": {"key": "typeProperties.taskQueueId", "type": "str"}, + "internal_channel_encryption": {"key": "typeProperties.internalChannelEncryption", "type": "str"}, + "version": {"key": "typeProperties.version", "type": "str"}, + "nodes": {"key": "typeProperties.nodes", "type": "[SelfHostedIntegrationRuntimeNode]"}, + "scheduled_update_date": {"key": "typeProperties.scheduledUpdateDate", "type": "iso-8601"}, + "update_delay_offset": {"key": "typeProperties.updateDelayOffset", "type": "str"}, + "local_time_zone_offset": {"key": "typeProperties.localTimeZoneOffset", "type": "str"}, + "capabilities": {"key": "typeProperties.capabilities", "type": "{str}"}, + "service_urls": {"key": "typeProperties.serviceUrls", "type": "[str]"}, + "auto_update": {"key": "typeProperties.autoUpdate", "type": "str"}, + "version_status": {"key": "typeProperties.versionStatus", "type": "str"}, + "links": {"key": "typeProperties.links", "type": "[LinkedIntegrationRuntime]"}, + "pushed_version": {"key": "typeProperties.pushedVersion", "type": "str"}, + "latest_version": {"key": "typeProperties.latestVersion", "type": "str"}, + "auto_update_eta": {"key": "typeProperties.autoUpdateETA", "type": "iso-8601"}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, JSON]] = None, nodes: Optional[List["_models.SelfHostedIntegrationRuntimeNode"]] = None, links: Optional[List["_models.LinkedIntegrationRuntime"]] = None, **kwargs @@ -50318,15 +53495,15 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword nodes: The list of nodes for this integration runtime. 
:paramtype nodes: list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode] :keyword links: The list of linked integration runtimes that are created to share with this integration runtime. :paramtype links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime] """ - super(SelfHostedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'SelfHosted' # type: str + super().__init__(additional_properties=additional_properties, **kwargs) + self.type = "SelfHosted" # type: str self.create_time = None self.task_queue_id = None self.internal_channel_encryption = None @@ -50345,15 +53522,15 @@ def __init__( self.auto_update_eta = None -class ServiceNowLinkedService(LinkedService): +class ServiceNowLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """ServiceNow server linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -50362,89 +53539,89 @@ class ServiceNowLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar endpoint: Required. The endpoint of the ServiceNow server. (i.e. - :code:``.service-now.com). - :vartype endpoint: any - :ivar authentication_type: Required. The authentication type to use. Known values are: "Basic", - "OAuth2". + :vartype annotations: list[JSON] + :ivar endpoint: The endpoint of the ServiceNow server. (i.e. + :code:``.service-now.com). Required. + :vartype endpoint: JSON + :ivar authentication_type: The authentication type to use. Required. Known values are: "Basic" + and "OAuth2". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType :ivar username: The user name used to connect to the ServiceNow server for Basic and OAuth2 authentication. - :vartype username: any + :vartype username: JSON :ivar password: The password corresponding to the user name for Basic and OAuth2 authentication. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar client_id: The client id for OAuth2 authentication. - :vartype client_id: any + :vartype client_id: JSON :ivar client_secret: The client secret for OAuth2 authentication. :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :vartype use_encrypted_endpoints: any + :vartype use_encrypted_endpoints: JSON :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :vartype use_host_verification: any + :vartype use_host_verification: JSON :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. 
- :vartype use_peer_verification: any + :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'authentication_type': {'required': True}, + "type": {"required": True}, + "endpoint": {"required": True}, + "authentication_type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "endpoint": {"key": "typeProperties.endpoint", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "client_id": {"key": "typeProperties.clientId", "type": "object"}, + "client_secret": {"key": "typeProperties.clientSecret", "type": "SecretBase"}, + "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, + "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, + "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - endpoint: Any, + endpoint: JSON, authentication_type: Union[str, "_models.ServiceNowAuthenticationType"], - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - username: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + username: 
Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - client_id: Optional[Any] = None, + client_id: Optional[JSON] = None, client_secret: Optional["_models.SecretBase"] = None, - use_encrypted_endpoints: Optional[Any] = None, - use_host_verification: Optional[Any] = None, - use_peer_verification: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + use_encrypted_endpoints: Optional[JSON] = None, + use_host_verification: Optional[JSON] = None, + use_peer_verification: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -50452,41 +53629,48 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword endpoint: Required. The endpoint of the ServiceNow server. (i.e. - :code:``.service-now.com). - :paramtype endpoint: any - :keyword authentication_type: Required. The authentication type to use. Known values are: - "Basic", "OAuth2". + :paramtype annotations: list[JSON] + :keyword endpoint: The endpoint of the ServiceNow server. (i.e. + :code:``.service-now.com). Required. + :paramtype endpoint: JSON + :keyword authentication_type: The authentication type to use. Required. Known values are: + "Basic" and "OAuth2". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType :keyword username: The user name used to connect to the ServiceNow server for Basic and OAuth2 authentication. - :paramtype username: any + :paramtype username: JSON :keyword password: The password corresponding to the user name for Basic and OAuth2 authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword client_id: The client id for OAuth2 authentication. - :paramtype client_id: any + :paramtype client_id: JSON :keyword client_secret: The client secret for OAuth2 authentication. :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :paramtype use_encrypted_endpoints: any + :paramtype use_encrypted_endpoints: JSON :keyword use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :paramtype use_host_verification: any + :paramtype use_host_verification: JSON :keyword use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :paramtype use_peer_verification: any + :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :paramtype encrypted_credential: any - """ - super(ServiceNowLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'ServiceNow' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "ServiceNow" # type: str self.endpoint = endpoint self.authentication_type = authentication_type self.username = username @@ -50506,88 +53690,98 @@ class ServiceNowObjectDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. Type: string (or Expression with resultType string). 
- :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(ServiceNowObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'ServiceNowObject' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "ServiceNowObject" # type: str self.table_name = table_name @@ -50598,89 +53792,98 @@ class ServiceNowSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'ServiceNowSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "ServiceNowSource" # type: str self.query = query @@ -50691,63 +53894,65 @@ class ServicePrincipalCredential(Credential): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of credential.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of credential. Required. :vartype type: str :ivar description: Credential description. :vartype description: str :ivar annotations: List of tags that can be used for describing the Credential. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar service_principal_id: The app ID of the service principal used to authenticate. - :vartype service_principal_id: any + :vartype service_principal_id: JSON :ivar service_principal_key: The key of the service principal used to authenticate. :vartype service_principal_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar tenant: The ID of the tenant to which the service principal belongs. 
- :vartype tenant: any + :vartype tenant: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'AzureKeyVaultSecretReference'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "AzureKeyVaultSecretReference"}, + "tenant": {"key": "typeProperties.tenant", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - annotations: Optional[List[Any]] = None, - service_principal_id: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + service_principal_id: Optional[JSON] = None, service_principal_key: Optional["_models.AzureKeyVaultSecretReference"] = None, - tenant: Optional[Any] = None, + tenant: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Credential description. :paramtype description: str :keyword annotations: List of tags that can be used for describing the Credential. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword service_principal_id: The app ID of the service principal used to authenticate. - :paramtype service_principal_id: any + :paramtype service_principal_id: JSON :keyword service_principal_key: The key of the service principal used to authenticate. :paramtype service_principal_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword tenant: The ID of the tenant to which the service principal belongs. - :paramtype tenant: any + :paramtype tenant: JSON """ - super(ServicePrincipalCredential, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.type = 'ServicePrincipal' # type: str + super().__init__( + additional_properties=additional_properties, description=description, annotations=annotations, **kwargs + ) + self.type = "ServicePrincipal" # type: str self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant @@ -50760,10 +53965,10 @@ class SetVariableActivity(ControlActivity): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. 
:vartype type: str :ivar description: Activity description. :vartype description: str @@ -50774,42 +53979,42 @@ class SetVariableActivity(ControlActivity): :ivar variable_name: Name of the variable whose value needs to be set. :vartype variable_name: str :ivar value: Value to be set. Could be a static value or Expression. - :vartype value: any + :vartype value: JSON """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'value': {'key': 'typeProperties.value', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "variable_name": {"key": "typeProperties.variableName", "type": "str"}, + "value": {"key": "typeProperties.value", "type": "object"}, } def __init__( self, *, name: str, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, variable_name: Optional[str] = None, - value: Optional[Any] = None, + value: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -50820,10 +54025,17 @@ def __init__( :keyword variable_name: Name of the variable whose value needs to be set. :paramtype variable_name: str :keyword value: Value to be set. Could be a static value or Expression. - :paramtype value: any - """ - super(SetVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'SetVariable' # type: str + :paramtype value: JSON + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + **kwargs + ) + self.type = "SetVariable" # type: str self.variable_name = variable_name self.value = value @@ -50835,181 +54047,188 @@ class SftpLocation(DatasetLocation): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage location. Required. 
:vartype type: str :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :vartype folder_path: any + :vartype folder_path: JSON :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :vartype file_name: any + :vartype file_name: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "folder_path": {"key": "folderPath", "type": "object"}, + "file_name": {"key": "fileName", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - folder_path: Optional[Any] = None, - file_name: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + folder_path: Optional[JSON] = None, + file_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :paramtype folder_path: any + :paramtype folder_path: JSON :keyword file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :paramtype file_name: any + :paramtype file_name: JSON """ - super(SftpLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type = 'SftpLocation' # type: str + super().__init__( + additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs + ) + self.type = "SftpLocation" # type: str -class SftpReadSettings(StoreReadSettings): +class SftpReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes """Sftp read settings. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The read setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. :vartype type: str :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :vartype recursive: any + :vartype recursive: JSON :ivar wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or Expression with resultType string). 
- :vartype wildcard_folder_path: any + :vartype wildcard_folder_path: JSON :ivar wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType string). - :vartype wildcard_file_name: any + :vartype wildcard_file_name: JSON :ivar enable_partition_discovery: Indicates whether to enable partition discovery. :vartype enable_partition_discovery: bool :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :vartype partition_root_path: any + :vartype partition_root_path: JSON :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :vartype file_list_path: any + :vartype file_list_path: JSON :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype delete_files_after_completion: any + :vartype delete_files_after_completion: JSON :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_start: any + :vartype modified_datetime_start: JSON :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :vartype modified_datetime_end: any + :vartype modified_datetime_end: JSON :ivar disable_chunking: If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_chunking: any + :vartype disable_chunking: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, - 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, - 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - 'disable_chunking': {'key': 'disableChunking', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "recursive": {"key": "recursive", "type": "object"}, + "wildcard_folder_path": {"key": "wildcardFolderPath", "type": "object"}, + "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "partition_root_path": {"key": "partitionRootPath", "type": "object"}, + "file_list_path": {"key": "fileListPath", "type": "object"}, 
+ "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, + "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, + "modified_datetime_end": {"key": "modifiedDatetimeEnd", "type": "object"}, + "disable_chunking": {"key": "disableChunking", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - recursive: Optional[Any] = None, - wildcard_folder_path: Optional[Any] = None, - wildcard_file_name: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + recursive: Optional[JSON] = None, + wildcard_folder_path: Optional[JSON] = None, + wildcard_file_name: Optional[JSON] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[Any] = None, - file_list_path: Optional[Any] = None, - delete_files_after_completion: Optional[Any] = None, - modified_datetime_start: Optional[Any] = None, - modified_datetime_end: Optional[Any] = None, - disable_chunking: Optional[Any] = None, + partition_root_path: Optional[JSON] = None, + file_list_path: Optional[JSON] = None, + delete_files_after_completion: Optional[JSON] = None, + modified_datetime_start: Optional[JSON] = None, + modified_datetime_end: Optional[JSON] = None, + disable_chunking: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :paramtype recursive: any + :paramtype recursive: JSON :keyword wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or Expression with resultType string). - :paramtype wildcard_folder_path: any + :paramtype wildcard_folder_path: JSON :keyword wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType string). - :paramtype wildcard_file_name: any + :paramtype wildcard_file_name: JSON :keyword enable_partition_discovery: Indicates whether to enable partition discovery. :paramtype enable_partition_discovery: bool :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :paramtype partition_root_path: any + :paramtype partition_root_path: JSON :keyword file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). 
- :paramtype file_list_path: any + :paramtype file_list_path: JSON :keyword delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype delete_files_after_completion: any + :paramtype delete_files_after_completion: JSON :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_start: any + :paramtype modified_datetime_start: JSON :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :paramtype modified_datetime_end: any + :paramtype modified_datetime_end: JSON :keyword disable_chunking: If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_chunking: any - """ - super(SftpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'SftpReadSettings' # type: str + :paramtype disable_chunking: JSON + """ + super().__init__( + additional_properties=additional_properties, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "SftpReadSettings" # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name @@ -51022,15 +54241,15 @@ def __init__( self.disable_chunking = disable_chunking -class SftpServerLinkedService(LinkedService): +class SftpServerLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """A linked service for an SSH File Transfer Protocol (SFTP) server. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -51039,30 +54258,30 @@ class SftpServerLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar host: Required. The SFTP server host name. Type: string (or Expression with resultType - string). - :vartype host: any + :vartype annotations: list[JSON] + :ivar host: The SFTP server host name. Type: string (or Expression with resultType string). + Required. + :vartype host: JSON :ivar port: The TCP port number that the SFTP server uses to listen for client connections. Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype port: any + :vartype port: JSON :ivar authentication_type: The authentication type to be used to connect to the FTP server. - Known values are: "Basic", "SshPublicKey", "MultiFactor". + Known values are: "Basic", "SshPublicKey", and "MultiFactor". 
:vartype authentication_type: str or ~azure.mgmt.datafactory.models.SftpAuthenticationType :ivar user_name: The username used to log on to the SFTP server. Type: string (or Expression with resultType string). - :vartype user_name: any + :vartype user_name: JSON :ivar password: Password to logon the SFTP server for Basic authentication. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON :ivar private_key_path: The SSH private key file path for SshPublicKey authentication. Only valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression with resultType string). - :vartype private_key_path: any + :vartype private_key_path: JSON :ivar private_key_content: Base64 encoded SSH private key content for SshPublicKey authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. @@ -51072,63 +54291,63 @@ class SftpServerLinkedService(LinkedService): :vartype pass_phrase: ~azure.mgmt.datafactory.models.SecretBase :ivar skip_host_key_validation: If true, skip the SSH host key validation. Default value is false. Type: boolean (or Expression with resultType boolean). - :vartype skip_host_key_validation: any + :vartype skip_host_key_validation: JSON :ivar host_key_fingerprint: The host key finger-print of the SFTP server. When SkipHostKeyValidation is false, HostKeyFingerprint should be specified. Type: string (or Expression with resultType string). 
- :vartype host_key_fingerprint: any + :vartype host_key_fingerprint: JSON """ _validation = { - 'type': {'required': True}, - 'host': {'required': True}, + "type": {"required": True}, + "host": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'private_key_path': {'key': 'typeProperties.privateKeyPath', 'type': 'object'}, - 'private_key_content': {'key': 'typeProperties.privateKeyContent', 'type': 'SecretBase'}, - 'pass_phrase': {'key': 'typeProperties.passPhrase', 'type': 'SecretBase'}, - 'skip_host_key_validation': {'key': 'typeProperties.skipHostKeyValidation', 'type': 'object'}, - 'host_key_fingerprint': {'key': 'typeProperties.hostKeyFingerprint', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "host": {"key": "typeProperties.host", "type": "object"}, + "port": {"key": "typeProperties.port", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "private_key_path": {"key": "typeProperties.privateKeyPath", "type": "object"}, + "private_key_content": {"key": "typeProperties.privateKeyContent", "type": "SecretBase"}, + "pass_phrase": {"key": "typeProperties.passPhrase", "type": "SecretBase"}, + "skip_host_key_validation": {"key": "typeProperties.skipHostKeyValidation", "type": "object"}, + "host_key_fingerprint": {"key": "typeProperties.hostKeyFingerprint", "type": "object"}, } def __init__( self, *, - host: Any, - additional_properties: Optional[Dict[str, Any]] = None, + host: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - port: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + port: Optional[JSON] = None, authentication_type: Optional[Union[str, "_models.SftpAuthenticationType"]] = None, - user_name: Optional[Any] = None, + user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, - private_key_path: Optional[Any] = None, + encrypted_credential: 
Optional[JSON] = None, + private_key_path: Optional[JSON] = None, private_key_content: Optional["_models.SecretBase"] = None, pass_phrase: Optional["_models.SecretBase"] = None, - skip_host_key_validation: Optional[Any] = None, - host_key_fingerprint: Optional[Any] = None, + skip_host_key_validation: Optional[JSON] = None, + host_key_fingerprint: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -51136,30 +54355,30 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword host: Required. The SFTP server host name. Type: string (or Expression with resultType - string). - :paramtype host: any + :paramtype annotations: list[JSON] + :keyword host: The SFTP server host name. Type: string (or Expression with resultType string). + Required. + :paramtype host: JSON :keyword port: The TCP port number that the SFTP server uses to listen for client connections. Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype port: any + :paramtype port: JSON :keyword authentication_type: The authentication type to be used to connect to the FTP server. - Known values are: "Basic", "SshPublicKey", "MultiFactor". + Known values are: "Basic", "SshPublicKey", and "MultiFactor". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.SftpAuthenticationType :keyword user_name: The username used to log on to the SFTP server. Type: string (or Expression with resultType string). - :paramtype user_name: any + :paramtype user_name: JSON :keyword password: Password to logon the SFTP server for Basic authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any + :paramtype encrypted_credential: JSON :keyword private_key_path: The SSH private key file path for SshPublicKey authentication. Only valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression with resultType string). - :paramtype private_key_path: any + :paramtype private_key_path: JSON :keyword private_key_content: Base64 encoded SSH private key content for SshPublicKey authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. @@ -51169,14 +54388,21 @@ def __init__( :paramtype pass_phrase: ~azure.mgmt.datafactory.models.SecretBase :keyword skip_host_key_validation: If true, skip the SSH host key validation. Default value is false. Type: boolean (or Expression with resultType boolean). 
- :paramtype skip_host_key_validation: any + :paramtype skip_host_key_validation: JSON :keyword host_key_fingerprint: The host key finger-print of the SFTP server. When SkipHostKeyValidation is false, HostKeyFingerprint should be specified. Type: string (or Expression with resultType string). - :paramtype host_key_fingerprint: any - """ - super(SftpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Sftp' # type: str + :paramtype host_key_fingerprint: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Sftp" # type: str self.host = host self.port = port self.authentication_type = authentication_type @@ -51197,86 +54423,92 @@ class SftpWriteSettings(StoreWriteSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The write setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The write setting type. Required. :vartype type: str :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar copy_behavior: The type of copy behavior for copy sink. - :vartype copy_behavior: any + :vartype copy_behavior: JSON :ivar operation_timeout: Specifies the timeout for writing each chunk to SFTP server. Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). - :vartype operation_timeout: any + :vartype operation_timeout: JSON :ivar use_temp_file_rename: Upload to temporary file(s) and rename. Disable this option if your SFTP server doesn't support rename operation. Type: boolean (or Expression with resultType boolean). 
- :vartype use_temp_file_rename: any + :vartype use_temp_file_rename: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'operation_timeout': {'key': 'operationTimeout', 'type': 'object'}, - 'use_temp_file_rename': {'key': 'useTempFileRename', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "copy_behavior": {"key": "copyBehavior", "type": "object"}, + "operation_timeout": {"key": "operationTimeout", "type": "object"}, + "use_temp_file_rename": {"key": "useTempFileRename", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - copy_behavior: Optional[Any] = None, - operation_timeout: Optional[Any] = None, - use_temp_file_rename: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + copy_behavior: Optional[JSON] = None, + operation_timeout: Optional[JSON] = None, + use_temp_file_rename: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword copy_behavior: The type of copy behavior for copy sink. - :paramtype copy_behavior: any + :paramtype copy_behavior: JSON :keyword operation_timeout: Specifies the timeout for writing each chunk to SFTP server. Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). - :paramtype operation_timeout: any + :paramtype operation_timeout: JSON :keyword use_temp_file_rename: Upload to temporary file(s) and rename. Disable this option if your SFTP server doesn't support rename operation. Type: boolean (or Expression with resultType boolean). 
- :paramtype use_temp_file_rename: any - """ - super(SftpWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) - self.type = 'SftpWriteSettings' # type: str + :paramtype use_temp_file_rename: JSON + """ + super().__init__( + additional_properties=additional_properties, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + copy_behavior=copy_behavior, + **kwargs + ) + self.type = "SftpWriteSettings" # type: str self.operation_timeout = operation_timeout self.use_temp_file_rename = use_temp_file_rename -class SharePointOnlineListLinkedService(LinkedService): +class SharePointOnlineListLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """SharePoint Online List linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -51285,69 +54517,69 @@ class SharePointOnlineListLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar site_url: Required. The URL of the SharePoint Online site. For example, + :vartype annotations: list[JSON] + :ivar site_url: The URL of the SharePoint Online site. For example, https://contoso.sharepoint.com/sites/siteName. Type: string (or Expression with resultType - string). - :vartype site_url: any - :ivar tenant_id: Required. The tenant ID under which your application resides. You can find it - from Azure portal Active Directory overview page. Type: string (or Expression with resultType - string). - :vartype tenant_id: any - :ivar service_principal_id: Required. The application (client) ID of your application - registered in Azure Active Directory. Make sure to grant SharePoint site permission to this - application. Type: string (or Expression with resultType string). - :vartype service_principal_id: any - :ivar service_principal_key: Required. The client secret of your application registered in - Azure Active Directory. Type: string (or Expression with resultType string). + string). Required. + :vartype site_url: JSON + :ivar tenant_id: The tenant ID under which your application resides. You can find it from Azure + portal Active Directory overview page. Type: string (or Expression with resultType string). + Required. + :vartype tenant_id: JSON + :ivar service_principal_id: The application (client) ID of your application registered in Azure + Active Directory. Make sure to grant SharePoint site permission to this application. Type: + string (or Expression with resultType string). Required. + :vartype service_principal_id: JSON + :ivar service_principal_key: The client secret of your application registered in Azure Active + Directory. Type: string (or Expression with resultType string). 
Required. :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'site_url': {'required': True}, - 'tenant_id': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, + "type": {"required": True}, + "site_url": {"required": True}, + "tenant_id": {"required": True}, + "service_principal_id": {"required": True}, + "service_principal_key": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'site_url': {'key': 'typeProperties.siteUrl', 'type': 'object'}, - 'tenant_id': {'key': 'typeProperties.tenantId', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "site_url": {"key": "typeProperties.siteUrl", "type": "object"}, + "tenant_id": {"key": "typeProperties.tenantId", "type": "object"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - site_url: Any, - tenant_id: Any, - service_principal_id: Any, + site_url: JSON, + tenant_id: JSON, + service_principal_id: JSON, service_principal_key: "_models.SecretBase", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - encrypted_credential: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -51355,29 +54587,36 @@ def __init__( :keyword parameters: Parameters for linked service. 
:paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword site_url: Required. The URL of the SharePoint Online site. For example, + :paramtype annotations: list[JSON] + :keyword site_url: The URL of the SharePoint Online site. For example, https://contoso.sharepoint.com/sites/siteName. Type: string (or Expression with resultType - string). - :paramtype site_url: any - :keyword tenant_id: Required. The tenant ID under which your application resides. You can find - it from Azure portal Active Directory overview page. Type: string (or Expression with - resultType string). - :paramtype tenant_id: any - :keyword service_principal_id: Required. The application (client) ID of your application - registered in Azure Active Directory. Make sure to grant SharePoint site permission to this - application. Type: string (or Expression with resultType string). - :paramtype service_principal_id: any - :keyword service_principal_key: Required. The client secret of your application registered in - Azure Active Directory. Type: string (or Expression with resultType string). + string). Required. + :paramtype site_url: JSON + :keyword tenant_id: The tenant ID under which your application resides. You can find it from + Azure portal Active Directory overview page. Type: string (or Expression with resultType + string). Required. + :paramtype tenant_id: JSON + :keyword service_principal_id: The application (client) ID of your application registered in + Azure Active Directory. Make sure to grant SharePoint site permission to this application. + Type: string (or Expression with resultType string). Required. + :paramtype service_principal_id: JSON + :keyword service_principal_key: The client secret of your application registered in Azure + Active Directory. Type: string (or Expression with resultType string). Required. :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(SharePointOnlineListLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'SharePointOnlineList' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "SharePointOnlineList" # type: str self.site_url = site_url self.tenant_id = tenant_id self.service_principal_id = service_principal_id @@ -51392,90 +54631,100 @@ class SharePointOnlineListResourceDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar list_name: The name of the SharePoint Online list. Type: string (or Expression with resultType string). - :vartype list_name: any + :vartype list_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'list_name': {'key': 'typeProperties.listName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "list_name": {"key": "typeProperties.listName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - list_name: Optional[Any] = None, + list_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. 
:paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword list_name: The name of the SharePoint Online list. Type: string (or Expression with resultType string). - :paramtype list_name: any - """ - super(SharePointOnlineListResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'SharePointOnlineListResource' # type: str + :paramtype list_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "SharePointOnlineListResource" # type: str self.list_name = list_name @@ -51486,96 +54735,103 @@ class SharePointOnlineListSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query: The OData query to filter the data in SharePoint Online list. For example, "$top=1". Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON :ivar http_request_timeout: The wait time to get a response from SharePoint Online. Default value is 5 minutes (00:05:00). Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype http_request_timeout: any + :vartype http_request_timeout: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "http_request_timeout": {"key": "httpRequestTimeout", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query: Optional[Any] = None, - http_request_timeout: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query: Optional[JSON] = None, + http_request_timeout: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query: The OData query to filter the data in SharePoint Online list. For example, "$top=1". Type: string (or Expression with resultType string). 
- :paramtype query: any + :paramtype query: JSON :keyword http_request_timeout: The wait time to get a response from SharePoint Online. Default value is 5 minutes (00:05:00). Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype http_request_timeout: any - """ - super(SharePointOnlineListSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'SharePointOnlineListSource' # type: str + :paramtype http_request_timeout: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "SharePointOnlineListSource" # type: str self.query = query self.http_request_timeout = http_request_timeout -class ShopifyLinkedService(LinkedService): +class ShopifyLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Shopify Service linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -51584,68 +54840,68 @@ class ShopifyLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar host: Required. The endpoint of the Shopify server. (i.e. mystore.myshopify.com). - :vartype host: any + :vartype annotations: list[JSON] + :ivar host: The endpoint of the Shopify server. (i.e. mystore.myshopify.com). Required. + :vartype host: JSON :ivar access_token: The API access token that can be used to access Shopify’s data. The token won't expire if it is offline mode. :vartype access_token: ~azure.mgmt.datafactory.models.SecretBase :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :vartype use_encrypted_endpoints: any + :vartype use_encrypted_endpoints: JSON :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :vartype use_host_verification: any + :vartype use_host_verification: JSON :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :vartype use_peer_verification: any + :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'host': {'required': True}, + "type": {"required": True}, + "host": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "host": {"key": "typeProperties.host", "type": "object"}, + "access_token": {"key": "typeProperties.accessToken", "type": "SecretBase"}, + "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, + "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, + "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - host: Any, - additional_properties: Optional[Dict[str, Any]] = None, + host: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, access_token: Optional["_models.SecretBase"] = None, - use_encrypted_endpoints: Optional[Any] = None, - use_host_verification: Optional[Any] = None, - use_peer_verification: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + use_encrypted_endpoints: Optional[JSON] = None, + use_host_verification: Optional[JSON] = None, + use_peer_verification: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -51653,29 +54909,36 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. 
- :paramtype annotations: list[any] - :keyword host: Required. The endpoint of the Shopify server. (i.e. mystore.myshopify.com). - :paramtype host: any + :paramtype annotations: list[JSON] + :keyword host: The endpoint of the Shopify server. (i.e. mystore.myshopify.com). Required. + :paramtype host: JSON :keyword access_token: The API access token that can be used to access Shopify’s data. The token won't expire if it is offline mode. :paramtype access_token: ~azure.mgmt.datafactory.models.SecretBase :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :paramtype use_encrypted_endpoints: any + :paramtype use_encrypted_endpoints: JSON :keyword use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :paramtype use_host_verification: any + :paramtype use_host_verification: JSON :keyword use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :paramtype use_peer_verification: any + :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(ShopifyLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Shopify' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Shopify" # type: str self.host = host self.access_token = access_token self.use_encrypted_endpoints = use_encrypted_endpoints @@ -51691,88 +54954,98 @@ class ShopifyObjectDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. Type: string (or Expression with resultType string). - :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. 
- :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'ShopifyObject' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "ShopifyObject" # type: str self.table_name = table_name @@ -51783,124 +55056,127 @@ class ShopifySource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'ShopifySource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "ShopifySource" # type: str self.query = query -class SkipErrorFile(msrest.serialization.Model): +class SkipErrorFile(_serialization.Model): """Skip error file. :ivar file_missing: Skip if file is deleted by other client during copy. Default is true. Type: boolean (or Expression with resultType boolean). - :vartype file_missing: any + :vartype file_missing: JSON :ivar data_inconsistency: Skip if source/sink file changed by other concurrent write. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype data_inconsistency: any + :vartype data_inconsistency: JSON """ _attribute_map = { - 'file_missing': {'key': 'fileMissing', 'type': 'object'}, - 'data_inconsistency': {'key': 'dataInconsistency', 'type': 'object'}, + "file_missing": {"key": "fileMissing", "type": "object"}, + "data_inconsistency": {"key": "dataInconsistency", "type": "object"}, } - def __init__( - self, - *, - file_missing: Optional[Any] = None, - data_inconsistency: Optional[Any] = None, - **kwargs - ): + def __init__(self, *, file_missing: Optional[JSON] = None, data_inconsistency: Optional[JSON] = None, **kwargs): """ :keyword file_missing: Skip if file is deleted by other client during copy. Default is true. Type: boolean (or Expression with resultType boolean). - :paramtype file_missing: any + :paramtype file_missing: JSON :keyword data_inconsistency: Skip if source/sink file changed by other concurrent write. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype data_inconsistency: any + :paramtype data_inconsistency: JSON """ - super(SkipErrorFile, self).__init__(**kwargs) + super().__init__(**kwargs) self.file_missing = file_missing self.data_inconsistency = data_inconsistency @@ -51912,8 +55188,8 @@ class SmartsheetLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -51922,47 +55198,47 @@ class SmartsheetLinkedService(LinkedService): :ivar parameters: Parameters for linked service. 
:vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar api_token: Required. The api token for the Smartsheet source. + :vartype annotations: list[JSON] + :ivar api_token: The api token for the Smartsheet source. Required. :vartype api_token: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'api_token': {'required': True}, + "type": {"required": True}, + "api_token": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'api_token': {'key': 'typeProperties.apiToken', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "api_token": {"key": "typeProperties.apiToken", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, api_token: "_models.SecretBase", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - encrypted_credential: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -51970,119 +55246,136 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword api_token: Required. The api token for the Smartsheet source. + :paramtype annotations: list[JSON] + :keyword api_token: The api token for the Smartsheet source. Required. :paramtype api_token: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(SmartsheetLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Smartsheet' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Smartsheet" # type: str self.api_token = api_token self.encrypted_credential = encrypted_credential -class SnowflakeDataset(Dataset): +class SnowflakeDataset(Dataset): # pylint: disable=too-many-instance-attributes """The snowflake dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar schema_type_properties_schema: The schema name of the Snowflake database. Type: string (or Expression with resultType string). - :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON :ivar table: The table name of the Snowflake database. Type: string (or Expression with resultType string). 
- :vartype table: any + :vartype table: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - schema_type_properties_schema: Optional[Any] = None, - table: Optional[Any] = None, + schema_type_properties_schema: Optional[JSON] = None, + table: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. 
- :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword schema_type_properties_schema: The schema name of the Snowflake database. Type: string (or Expression with resultType string). - :paramtype schema_type_properties_schema: any + :paramtype schema_type_properties_schema: JSON :keyword table: The table name of the Snowflake database. Type: string (or Expression with resultType string). - :paramtype table: any - """ - super(SnowflakeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'SnowflakeTable' # type: str + :paramtype table: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "SnowflakeTable" # type: str self.schema_type_properties_schema = schema_type_properties_schema self.table = table @@ -52094,57 +55387,57 @@ class SnowflakeExportCopyCommand(ExportSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The export setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The export setting type. Required. :vartype type: str :ivar additional_copy_options: Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" }. - :vartype additional_copy_options: dict[str, any] + :vartype additional_copy_options: dict[str, JSON] :ivar additional_format_options: Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'" }. 
- :vartype additional_format_options: dict[str, any] + :vartype additional_format_options: dict[str, JSON] """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'additional_copy_options': {'key': 'additionalCopyOptions', 'type': '{object}'}, - 'additional_format_options': {'key': 'additionalFormatOptions', 'type': '{object}'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "additional_copy_options": {"key": "additionalCopyOptions", "type": "{object}"}, + "additional_format_options": {"key": "additionalFormatOptions", "type": "{object}"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - additional_copy_options: Optional[Dict[str, Any]] = None, - additional_format_options: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + additional_copy_options: Optional[Dict[str, JSON]] = None, + additional_format_options: Optional[Dict[str, JSON]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword additional_copy_options: Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" }. - :paramtype additional_copy_options: dict[str, any] + :paramtype additional_copy_options: dict[str, JSON] :keyword additional_format_options: Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'" }. - :paramtype additional_format_options: dict[str, any] + :paramtype additional_format_options: dict[str, JSON] """ - super(SnowflakeExportCopyCommand, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'SnowflakeExportCopyCommand' # type: str + super().__init__(additional_properties=additional_properties, **kwargs) + self.type = "SnowflakeExportCopyCommand" # type: str self.additional_copy_options = additional_copy_options self.additional_format_options = additional_format_options @@ -52156,57 +55449,57 @@ class SnowflakeImportCopyCommand(ImportSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The import setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The import setting type. Required. :vartype type: str :ivar additional_copy_options: Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" }. - :vartype additional_copy_options: dict[str, any] + :vartype additional_copy_options: dict[str, JSON] :ivar additional_format_options: Additional format options directly passed to snowflake Copy Command. 
Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": "'FALSE'" }. - :vartype additional_format_options: dict[str, any] + :vartype additional_format_options: dict[str, JSON] """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'additional_copy_options': {'key': 'additionalCopyOptions', 'type': '{object}'}, - 'additional_format_options': {'key': 'additionalFormatOptions', 'type': '{object}'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "additional_copy_options": {"key": "additionalCopyOptions", "type": "{object}"}, + "additional_format_options": {"key": "additionalFormatOptions", "type": "{object}"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - additional_copy_options: Optional[Dict[str, Any]] = None, - additional_format_options: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + additional_copy_options: Optional[Dict[str, JSON]] = None, + additional_format_options: Optional[Dict[str, JSON]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword additional_copy_options: Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" }. - :paramtype additional_copy_options: dict[str, any] + :paramtype additional_copy_options: dict[str, JSON] :keyword additional_format_options: Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": "'FALSE'" }. - :paramtype additional_format_options: dict[str, any] + :paramtype additional_format_options: dict[str, JSON] """ - super(SnowflakeImportCopyCommand, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'SnowflakeImportCopyCommand' # type: str + super().__init__(additional_properties=additional_properties, **kwargs) + self.type = "SnowflakeImportCopyCommand" # type: str self.additional_copy_options = additional_copy_options self.additional_format_options = additional_format_options @@ -52218,8 +55511,8 @@ class SnowflakeLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -52228,52 +55521,52 @@ class SnowflakeLinkedService(LinkedService): :ivar parameters: Parameters for linked service. 
:vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar connection_string: Required. The connection string of snowflake. Type: string, - SecureString. - :vartype connection_string: any + :vartype annotations: list[JSON] + :ivar connection_string: The connection string of snowflake. Type: string, SecureString. + Required. + :vartype connection_string: JSON :ivar password: The Azure key vault secret reference of password in connection string. :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, + "type": {"required": True}, + "connection_string": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "AzureKeyVaultSecretReference"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - connection_string: Any, - additional_properties: Optional[Dict[str, Any]] = None, + connection_string: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, password: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -52281,19 +55574,26 @@ def __init__( :keyword parameters: Parameters for linked service. 
:paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword connection_string: Required. The connection string of snowflake. Type: string, - SecureString. - :paramtype connection_string: any + :paramtype annotations: list[JSON] + :keyword connection_string: The connection string of snowflake. Type: string, SecureString. + Required. + :paramtype connection_string: JSON :keyword password: The Azure key vault secret reference of password in connection string. :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(SnowflakeLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Snowflake' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Snowflake" # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential @@ -52306,95 +55606,104 @@ class SnowflakeSink(CopySink): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :vartype pre_copy_script: any + :vartype pre_copy_script: JSON :ivar import_settings: Snowflake import settings. 
:vartype import_settings: ~azure.mgmt.datafactory.models.SnowflakeImportCopyCommand """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'import_settings': {'key': 'importSettings', 'type': 'SnowflakeImportCopyCommand'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "pre_copy_script": {"key": "preCopyScript", "type": "object"}, + "import_settings": {"key": "importSettings", "type": "SnowflakeImportCopyCommand"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - pre_copy_script: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + pre_copy_script: Optional[JSON] = None, import_settings: Optional["_models.SnowflakeImportCopyCommand"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :paramtype pre_copy_script: any + :paramtype pre_copy_script: JSON :keyword import_settings: Snowflake import settings. :paramtype import_settings: ~azure.mgmt.datafactory.models.SnowflakeImportCopyCommand """ - super(SnowflakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'SnowflakeSink' # type: str + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "SnowflakeSink" # type: str self.pre_copy_script = pre_copy_script self.import_settings = import_settings @@ -52406,90 +55715,97 @@ class SnowflakeSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query: Snowflake Sql query. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON :ivar export_settings: Snowflake export settings. 
:vartype export_settings: ~azure.mgmt.datafactory.models.SnowflakeExportCopyCommand """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'export_settings': {'key': 'exportSettings', 'type': 'SnowflakeExportCopyCommand'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "export_settings": {"key": "exportSettings", "type": "SnowflakeExportCopyCommand"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query: Optional[JSON] = None, export_settings: Optional["_models.SnowflakeExportCopyCommand"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query: Snowflake Sql query. Type: string (or Expression with resultType string). - :paramtype query: any + :paramtype query: JSON :keyword export_settings: Snowflake export settings. 
:paramtype export_settings: ~azure.mgmt.datafactory.models.SnowflakeExportCopyCommand """ - super(SnowflakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'SnowflakeSource' # type: str + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "SnowflakeSource" # type: str self.query = query self.export_settings = export_settings -class SparkLinkedService(LinkedService): +class SparkLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Spark Server linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -52498,109 +55814,109 @@ class SparkLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar host: Required. IP address or host name of the Spark server. - :vartype host: any - :ivar port: Required. The TCP port that the Spark server uses to listen for client connections. - :vartype port: any + :vartype annotations: list[JSON] + :ivar host: IP address or host name of the Spark server. Required. + :vartype host: JSON + :ivar port: The TCP port that the Spark server uses to listen for client connections. Required. + :vartype port: JSON :ivar server_type: The type of Spark server. Known values are: "SharkServer", "SharkServer2", - "SparkThriftServer". + and "SparkThriftServer". :vartype server_type: str or ~azure.mgmt.datafactory.models.SparkServerType :ivar thrift_transport_protocol: The transport protocol to use in the Thrift layer. Known - values are: "Binary", "SASL", "HTTP ". + values are: "Binary", "SASL", and "HTTP ". :vartype thrift_transport_protocol: str or ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol - :ivar authentication_type: Required. The authentication method used to access the Spark server. - Known values are: "Anonymous", "Username", "UsernameAndPassword", + :ivar authentication_type: The authentication method used to access the Spark server. Required. + Known values are: "Anonymous", "Username", "UsernameAndPassword", and "WindowsAzureHDInsightService". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.SparkAuthenticationType :ivar username: The user name that you use to access Spark Server. - :vartype username: any + :vartype username: JSON :ivar password: The password corresponding to the user name that you provided in the Username field. 
:vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar http_path: The partial URL corresponding to the Spark server. - :vartype http_path: any + :vartype http_path: JSON :ivar enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :vartype enable_ssl: any + :vartype enable_ssl: JSON :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :vartype trusted_cert_path: any + :vartype trusted_cert_path: JSON :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :vartype use_system_trust_store: any + :vartype use_system_trust_store: JSON :ivar allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :vartype allow_host_name_cn_mismatch: any + :vartype allow_host_name_cn_mismatch: JSON :ivar allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :vartype allow_self_signed_server_cert: any + :vartype allow_self_signed_server_cert: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'port': {'required': True}, - 'authentication_type': {'required': True}, + "type": {"required": True}, + "host": {"required": True}, + "port": {"required": True}, + "authentication_type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, - 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 
"additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "host": {"key": "typeProperties.host", "type": "object"}, + "port": {"key": "typeProperties.port", "type": "object"}, + "server_type": {"key": "typeProperties.serverType", "type": "str"}, + "thrift_transport_protocol": {"key": "typeProperties.thriftTransportProtocol", "type": "str"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "http_path": {"key": "typeProperties.httpPath", "type": "object"}, + "enable_ssl": {"key": "typeProperties.enableSsl", "type": "object"}, + "trusted_cert_path": {"key": "typeProperties.trustedCertPath", "type": "object"}, + "use_system_trust_store": {"key": "typeProperties.useSystemTrustStore", "type": "object"}, + "allow_host_name_cn_mismatch": {"key": "typeProperties.allowHostNameCNMismatch", "type": "object"}, + "allow_self_signed_server_cert": {"key": "typeProperties.allowSelfSignedServerCert", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - host: Any, - port: Any, + host: JSON, + port: JSON, authentication_type: Union[str, "_models.SparkAuthenticationType"], - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, server_type: Optional[Union[str, "_models.SparkServerType"]] = None, thrift_transport_protocol: Optional[Union[str, "_models.SparkThriftTransportProtocol"]] = None, - username: Optional[Any] = None, + username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - http_path: Optional[Any] = None, - enable_ssl: Optional[Any] = None, - trusted_cert_path: Optional[Any] = None, - use_system_trust_store: Optional[Any] = None, - allow_host_name_cn_mismatch: Optional[Any] = None, - allow_self_signed_server_cert: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + http_path: Optional[JSON] = None, + enable_ssl: Optional[JSON] = None, + trusted_cert_path: Optional[JSON] = None, + use_system_trust_store: Optional[JSON] = None, + allow_host_name_cn_mismatch: Optional[JSON] = None, + allow_self_signed_server_cert: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -52608,53 +55924,60 @@ def __init__( :keyword parameters: Parameters for linked service. 
:paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword host: Required. IP address or host name of the Spark server. - :paramtype host: any - :keyword port: Required. The TCP port that the Spark server uses to listen for client - connections. - :paramtype port: any + :paramtype annotations: list[JSON] + :keyword host: IP address or host name of the Spark server. Required. + :paramtype host: JSON + :keyword port: The TCP port that the Spark server uses to listen for client connections. + Required. + :paramtype port: JSON :keyword server_type: The type of Spark server. Known values are: "SharkServer", - "SharkServer2", "SparkThriftServer". + "SharkServer2", and "SparkThriftServer". :paramtype server_type: str or ~azure.mgmt.datafactory.models.SparkServerType :keyword thrift_transport_protocol: The transport protocol to use in the Thrift layer. Known - values are: "Binary", "SASL", "HTTP ". + values are: "Binary", "SASL", and "HTTP ". :paramtype thrift_transport_protocol: str or ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol - :keyword authentication_type: Required. The authentication method used to access the Spark - server. Known values are: "Anonymous", "Username", "UsernameAndPassword", + :keyword authentication_type: The authentication method used to access the Spark server. + Required. Known values are: "Anonymous", "Username", "UsernameAndPassword", and "WindowsAzureHDInsightService". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.SparkAuthenticationType :keyword username: The user name that you use to access Spark Server. - :paramtype username: any + :paramtype username: JSON :keyword password: The password corresponding to the user name that you provided in the Username field. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword http_path: The partial URL corresponding to the Spark server. - :paramtype http_path: any + :paramtype http_path: JSON :keyword enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :paramtype enable_ssl: any + :paramtype enable_ssl: JSON :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :paramtype trusted_cert_path: any + :paramtype trusted_cert_path: JSON :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :paramtype use_system_trust_store: any + :paramtype use_system_trust_store: JSON :keyword allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :paramtype allow_host_name_cn_mismatch: any + :paramtype allow_host_name_cn_mismatch: JSON :keyword allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :paramtype allow_self_signed_server_cert: any + :paramtype allow_self_signed_server_cert: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(SparkLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Spark' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Spark" # type: str self.host = host self.port = port self.server_type = server_type @@ -52671,112 +55994,122 @@ def __init__( self.encrypted_credential = encrypted_credential -class SparkObjectDataset(Dataset): +class SparkObjectDataset(Dataset): # pylint: disable=too-many-instance-attributes """Spark Server dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :vartype table_name: any + :vartype table_name: JSON :ivar table: The table name of the Spark. Type: string (or Expression with resultType string). - :vartype table: any + :vartype table: JSON :ivar schema_type_properties_schema: The schema name of the Spark. Type: string (or Expression with resultType string). 
- :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, - table: Optional[Any] = None, - schema_type_properties_schema: Optional[Any] = None, + table_name: Optional[JSON] = None, + table: Optional[JSON] = None, + schema_type_properties_schema: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. 
:paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: This property will be retired. Please consider using schema + table properties instead. - :paramtype table_name: any + :paramtype table_name: JSON :keyword table: The table name of the Spark. Type: string (or Expression with resultType string). - :paramtype table: any + :paramtype table: JSON :keyword schema_type_properties_schema: The schema name of the Spark. Type: string (or Expression with resultType string). - :paramtype schema_type_properties_schema: any - """ - super(SparkObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'SparkObject' # type: str + :paramtype schema_type_properties_schema: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "SparkObject" # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -52789,105 +56122,114 @@ class SparkSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'SparkSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "SparkSource" # type: str self.query = query -class SqlAlwaysEncryptedProperties(msrest.serialization.Model): +class SqlAlwaysEncryptedProperties(_serialization.Model): """Sql always encrypted properties. All required parameters must be populated in order to send to Azure. - :ivar always_encrypted_akv_auth_type: Required. Sql always encrypted AKV authentication type. - Type: string (or Expression with resultType string). Known values are: "ServicePrincipal", - "ManagedIdentity", "UserAssignedManagedIdentity". + :ivar always_encrypted_akv_auth_type: Sql always encrypted AKV authentication type. Type: + string (or Expression with resultType string). Required. Known values are: "ServicePrincipal", + "ManagedIdentity", and "UserAssignedManagedIdentity". :vartype always_encrypted_akv_auth_type: str or ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedAkvAuthType :ivar service_principal_id: The client ID of the application in Azure Active Directory used for Azure Key Vault authentication. Type: string (or Expression with resultType string). - :vartype service_principal_id: any + :vartype service_principal_id: JSON :ivar service_principal_key: The key of the service principal used to authenticate against Azure Key Vault. 
:vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase @@ -52896,197 +56238,206 @@ class SqlAlwaysEncryptedProperties(msrest.serialization.Model): """ _validation = { - 'always_encrypted_akv_auth_type': {'required': True}, + "always_encrypted_akv_auth_type": {"required": True}, } _attribute_map = { - 'always_encrypted_akv_auth_type': {'key': 'alwaysEncryptedAkvAuthType', 'type': 'str'}, - 'service_principal_id': {'key': 'servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'servicePrincipalKey', 'type': 'SecretBase'}, - 'credential': {'key': 'credential', 'type': 'CredentialReference'}, + "always_encrypted_akv_auth_type": {"key": "alwaysEncryptedAkvAuthType", "type": "str"}, + "service_principal_id": {"key": "servicePrincipalId", "type": "object"}, + "service_principal_key": {"key": "servicePrincipalKey", "type": "SecretBase"}, + "credential": {"key": "credential", "type": "CredentialReference"}, } def __init__( self, *, always_encrypted_akv_auth_type: Union[str, "_models.SqlAlwaysEncryptedAkvAuthType"], - service_principal_id: Optional[Any] = None, + service_principal_id: Optional[JSON] = None, service_principal_key: Optional["_models.SecretBase"] = None, credential: Optional["_models.CredentialReference"] = None, **kwargs ): """ - :keyword always_encrypted_akv_auth_type: Required. Sql always encrypted AKV authentication - type. Type: string (or Expression with resultType string). Known values are: - "ServicePrincipal", "ManagedIdentity", "UserAssignedManagedIdentity". + :keyword always_encrypted_akv_auth_type: Sql always encrypted AKV authentication type. Type: + string (or Expression with resultType string). Required. Known values are: "ServicePrincipal", + "ManagedIdentity", and "UserAssignedManagedIdentity". :paramtype always_encrypted_akv_auth_type: str or ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedAkvAuthType :keyword service_principal_id: The client ID of the application in Azure Active Directory used for Azure Key Vault authentication. Type: string (or Expression with resultType string). - :paramtype service_principal_id: any + :paramtype service_principal_id: JSON :keyword service_principal_key: The key of the service principal used to authenticate against Azure Key Vault. :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ - super(SqlAlwaysEncryptedProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.always_encrypted_akv_auth_type = always_encrypted_akv_auth_type self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.credential = credential -class SqlDWSink(CopySink): +class SqlDWSink(CopySink): # pylint: disable=too-many-instance-attributes """A copy activity SQL Data Warehouse sink. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. 
- :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :vartype pre_copy_script: any + :vartype pre_copy_script: JSON :ivar allow_poly_base: Indicates to use PolyBase to copy data into SQL Data Warehouse when applicable. Type: boolean (or Expression with resultType boolean). - :vartype allow_poly_base: any + :vartype allow_poly_base: JSON :ivar poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. :vartype poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings :ivar allow_copy_command: Indicates to use Copy Command to copy data into SQL Data Warehouse. Type: boolean (or Expression with resultType boolean). - :vartype allow_copy_command: any + :vartype allow_copy_command: JSON :ivar copy_command_settings: Specifies Copy Command related settings when allowCopyCommand is true. :vartype copy_command_settings: ~azure.mgmt.datafactory.models.DWCopyCommandSettings :ivar table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :vartype table_option: any + :vartype table_option: JSON :ivar sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - :vartype sql_writer_use_table_lock: any + :vartype sql_writer_use_table_lock: JSON :ivar write_behavior: Write behavior when copying data into azure SQL DW. Type: SqlDWWriteBehaviorEnum (or Expression with resultType SqlDWWriteBehaviorEnum). - :vartype write_behavior: any + :vartype write_behavior: JSON :ivar upsert_settings: SQL DW upsert settings. 
:vartype upsert_settings: ~azure.mgmt.datafactory.models.SqlDWUpsertSettings """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, - 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, - 'allow_copy_command': {'key': 'allowCopyCommand', 'type': 'object'}, - 'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DWCopyCommandSettings'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, - 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlDWUpsertSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "pre_copy_script": {"key": "preCopyScript", "type": "object"}, + "allow_poly_base": {"key": "allowPolyBase", "type": "object"}, + "poly_base_settings": {"key": "polyBaseSettings", "type": "PolybaseSettings"}, + "allow_copy_command": {"key": "allowCopyCommand", "type": "object"}, + "copy_command_settings": {"key": "copyCommandSettings", "type": "DWCopyCommandSettings"}, + "table_option": {"key": "tableOption", "type": "object"}, + "sql_writer_use_table_lock": {"key": "sqlWriterUseTableLock", "type": "object"}, + "write_behavior": {"key": "writeBehavior", "type": "object"}, + "upsert_settings": {"key": "upsertSettings", "type": "SqlDWUpsertSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - pre_copy_script: Optional[Any] = None, - allow_poly_base: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + pre_copy_script: Optional[JSON] = None, + allow_poly_base: Optional[JSON] = None, poly_base_settings: Optional["_models.PolybaseSettings"] = None, - allow_copy_command: 
Optional[Any] = None, + allow_copy_command: Optional[JSON] = None, copy_command_settings: Optional["_models.DWCopyCommandSettings"] = None, - table_option: Optional[Any] = None, - sql_writer_use_table_lock: Optional[Any] = None, - write_behavior: Optional[Any] = None, + table_option: Optional[JSON] = None, + sql_writer_use_table_lock: Optional[JSON] = None, + write_behavior: Optional[JSON] = None, upsert_settings: Optional["_models.SqlDWUpsertSettings"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :paramtype pre_copy_script: any + :paramtype pre_copy_script: JSON :keyword allow_poly_base: Indicates to use PolyBase to copy data into SQL Data Warehouse when applicable. Type: boolean (or Expression with resultType boolean). - :paramtype allow_poly_base: any + :paramtype allow_poly_base: JSON :keyword poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. :paramtype poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings :keyword allow_copy_command: Indicates to use Copy Command to copy data into SQL Data Warehouse. Type: boolean (or Expression with resultType boolean). - :paramtype allow_copy_command: any + :paramtype allow_copy_command: JSON :keyword copy_command_settings: Specifies Copy Command related settings when allowCopyCommand is true. :paramtype copy_command_settings: ~azure.mgmt.datafactory.models.DWCopyCommandSettings :keyword table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :paramtype table_option: any + :paramtype table_option: JSON :keyword sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - :paramtype sql_writer_use_table_lock: any + :paramtype sql_writer_use_table_lock: JSON :keyword write_behavior: Write behavior when copying data into azure SQL DW. 
Type: SqlDWWriteBehaviorEnum (or Expression with resultType SqlDWWriteBehaviorEnum). - :paramtype write_behavior: any + :paramtype write_behavior: JSON :keyword upsert_settings: SQL DW upsert settings. :paramtype upsert_settings: ~azure.mgmt.datafactory.models.SqlDWUpsertSettings """ - super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'SqlDWSink' # type: str + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "SqlDWSink" # type: str self.pre_copy_script = pre_copy_script self.allow_poly_base = allow_poly_base self.poly_base_settings = poly_base_settings @@ -53098,130 +56449,139 @@ def __init__( self.upsert_settings = upsert_settings -class SqlDWSource(TabularSource): +class SqlDWSource(TabularSource): # pylint: disable=too-many-instance-attributes """A copy activity SQL Data Warehouse source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with resultType string). - :vartype sql_reader_query: any + :vartype sql_reader_query: JSON :ivar sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Data Warehouse source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). 
- :vartype sql_reader_stored_procedure_name: any + :vartype sql_reader_stored_procedure_name: JSON :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. - :vartype stored_procedure_parameters: any + :vartype stored_procedure_parameters: JSON :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :vartype partition_option: any + :vartype partition_option: JSON :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. :vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "sql_reader_query": {"key": "sqlReaderQuery", "type": "object"}, + "sql_reader_stored_procedure_name": {"key": "sqlReaderStoredProcedureName", "type": "object"}, + "stored_procedure_parameters": {"key": "storedProcedureParameters", "type": "object"}, + "partition_option": {"key": "partitionOption", "type": "object"}, + "partition_settings": {"key": "partitionSettings", "type": "SqlPartitionSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - sql_reader_query: Optional[Any] = None, - sql_reader_stored_procedure_name: Optional[Any] = None, - stored_procedure_parameters: Optional[Any] = None, - partition_option: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + 
max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + sql_reader_query: Optional[JSON] = None, + sql_reader_stored_procedure_name: Optional[JSON] = None, + stored_procedure_parameters: Optional[JSON] = None, + partition_option: Optional[JSON] = None, partition_settings: Optional["_models.SqlPartitionSettings"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with resultType string). - :paramtype sql_reader_query: any + :paramtype sql_reader_query: JSON :keyword sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Data Warehouse source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :paramtype sql_reader_stored_procedure_name: any + :paramtype sql_reader_stored_procedure_name: JSON :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. - :paramtype stored_procedure_parameters: any + :paramtype stored_procedure_parameters: JSON :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :paramtype partition_option: any + :paramtype partition_option: JSON :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. 
:paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ - super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'SqlDWSource' # type: str + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "SqlDWSource" # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -53229,194 +56589,200 @@ def __init__( self.partition_settings = partition_settings -class SqlDWUpsertSettings(msrest.serialization.Model): +class SqlDWUpsertSettings(_serialization.Model): """Sql DW upsert option settings. :ivar interim_schema_name: Schema name for interim table. Type: string (or Expression with resultType string). - :vartype interim_schema_name: any + :vartype interim_schema_name: JSON :ivar keys: Key column names for unique row identification. Type: array of strings (or Expression with resultType array of strings). - :vartype keys: any + :vartype keys: JSON """ _attribute_map = { - 'interim_schema_name': {'key': 'interimSchemaName', 'type': 'object'}, - 'keys': {'key': 'keys', 'type': 'object'}, + "interim_schema_name": {"key": "interimSchemaName", "type": "object"}, + "keys": {"key": "keys", "type": "object"}, } - def __init__( - self, - *, - interim_schema_name: Optional[Any] = None, - keys: Optional[Any] = None, - **kwargs - ): + def __init__(self, *, interim_schema_name: Optional[JSON] = None, keys: Optional[JSON] = None, **kwargs): """ :keyword interim_schema_name: Schema name for interim table. Type: string (or Expression with resultType string). - :paramtype interim_schema_name: any + :paramtype interim_schema_name: JSON :keyword keys: Key column names for unique row identification. Type: array of strings (or Expression with resultType array of strings). - :paramtype keys: any + :paramtype keys: JSON """ - super(SqlDWUpsertSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.interim_schema_name = interim_schema_name self.keys = keys -class SqlMISink(CopySink): +class SqlMISink(CopySink): # pylint: disable=too-many-instance-attributes """A copy activity Azure SQL Managed Instance sink. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :vartype sql_writer_stored_procedure_name: any + :vartype sql_writer_stored_procedure_name: JSON :ivar sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType string). - :vartype sql_writer_table_type: any + :vartype sql_writer_table_type: JSON :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :vartype pre_copy_script: any + :vartype pre_copy_script: JSON :ivar stored_procedure_parameters: SQL stored procedure parameters. :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :ivar stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :vartype stored_procedure_table_type_parameter_name: any + :vartype stored_procedure_table_type_parameter_name: JSON :ivar table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :vartype table_option: any + :vartype table_option: JSON :ivar sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - :vartype sql_writer_use_table_lock: any + :vartype sql_writer_use_table_lock: JSON :ivar write_behavior: White behavior when copying data into azure SQL MI. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). - :vartype write_behavior: any + :vartype write_behavior: JSON :ivar upsert_settings: SQL upsert settings. 
:vartype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, - 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "sql_writer_stored_procedure_name": {"key": "sqlWriterStoredProcedureName", "type": "object"}, + "sql_writer_table_type": {"key": "sqlWriterTableType", "type": "object"}, + "pre_copy_script": {"key": "preCopyScript", "type": "object"}, + "stored_procedure_parameters": {"key": "storedProcedureParameters", "type": "{StoredProcedureParameter}"}, + "stored_procedure_table_type_parameter_name": { + "key": "storedProcedureTableTypeParameterName", + "type": "object", + }, + "table_option": {"key": "tableOption", "type": "object"}, + "sql_writer_use_table_lock": {"key": "sqlWriterUseTableLock", "type": "object"}, + "write_behavior": {"key": "writeBehavior", "type": "object"}, + "upsert_settings": {"key": "upsertSettings", "type": "SqlUpsertSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - sql_writer_stored_procedure_name: Optional[Any] = None, - sql_writer_table_type: Optional[Any] = None, - pre_copy_script: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: 
Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + sql_writer_stored_procedure_name: Optional[JSON] = None, + sql_writer_table_type: Optional[JSON] = None, + pre_copy_script: Optional[JSON] = None, stored_procedure_parameters: Optional[Dict[str, "_models.StoredProcedureParameter"]] = None, - stored_procedure_table_type_parameter_name: Optional[Any] = None, - table_option: Optional[Any] = None, - sql_writer_use_table_lock: Optional[Any] = None, - write_behavior: Optional[Any] = None, + stored_procedure_table_type_parameter_name: Optional[JSON] = None, + table_option: Optional[JSON] = None, + sql_writer_use_table_lock: Optional[JSON] = None, + write_behavior: Optional[JSON] = None, upsert_settings: Optional["_models.SqlUpsertSettings"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :paramtype sql_writer_stored_procedure_name: any + :paramtype sql_writer_stored_procedure_name: JSON :keyword sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType string). - :paramtype sql_writer_table_type: any + :paramtype sql_writer_table_type: JSON :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :paramtype pre_copy_script: any + :paramtype pre_copy_script: JSON :keyword stored_procedure_parameters: SQL stored procedure parameters. :paramtype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :keyword stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :paramtype stored_procedure_table_type_parameter_name: any + :paramtype stored_procedure_table_type_parameter_name: JSON :keyword table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). 
- :paramtype table_option: any + :paramtype table_option: JSON :keyword sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - :paramtype sql_writer_use_table_lock: any + :paramtype sql_writer_use_table_lock: JSON :keyword write_behavior: White behavior when copying data into azure SQL MI. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). - :paramtype write_behavior: any + :paramtype write_behavior: JSON :keyword upsert_settings: SQL upsert settings. :paramtype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings """ - super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'SqlMISink' # type: str + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "SqlMISink" # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script @@ -53428,135 +56794,144 @@ def __init__( self.upsert_settings = upsert_settings -class SqlMISource(TabularSource): +class SqlMISource(TabularSource): # pylint: disable=too-many-instance-attributes """A copy activity Azure SQL Managed Instance source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :vartype additional_columns: any + :vartype additional_columns: JSON :ivar sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :vartype sql_reader_query: any + :vartype sql_reader_query: JSON :ivar sql_reader_stored_procedure_name: Name of the stored procedure for a Azure SQL Managed Instance source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :vartype sql_reader_stored_procedure_name: any + :vartype sql_reader_stored_procedure_name: JSON :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :ivar produce_additional_types: Which additional types to produce. - :vartype produce_additional_types: any + :vartype produce_additional_types: JSON :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :vartype partition_option: any + :vartype partition_option: JSON :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. :vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "sql_reader_query": {"key": "sqlReaderQuery", "type": "object"}, + "sql_reader_stored_procedure_name": {"key": "sqlReaderStoredProcedureName", "type": "object"}, + "stored_procedure_parameters": {"key": "storedProcedureParameters", "type": "{StoredProcedureParameter}"}, + "produce_additional_types": {"key": "produceAdditionalTypes", "type": "object"}, + "partition_option": {"key": "partitionOption", "type": "object"}, + "partition_settings": {"key": "partitionSettings", "type": 
"SqlPartitionSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - sql_reader_query: Optional[Any] = None, - sql_reader_stored_procedure_name: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + sql_reader_query: Optional[JSON] = None, + sql_reader_stored_procedure_name: Optional[JSON] = None, stored_procedure_parameters: Optional[Dict[str, "_models.StoredProcedureParameter"]] = None, - produce_additional_types: Optional[Any] = None, - partition_option: Optional[Any] = None, + produce_additional_types: Optional[JSON] = None, + partition_option: Optional[JSON] = None, partition_settings: Optional["_models.SqlPartitionSettings"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :paramtype sql_reader_query: any + :paramtype sql_reader_query: JSON :keyword sql_reader_stored_procedure_name: Name of the stored procedure for a Azure SQL Managed Instance source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :paramtype sql_reader_stored_procedure_name: any + :paramtype sql_reader_stored_procedure_name: JSON :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
:paramtype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :keyword produce_additional_types: Which additional types to produce. - :paramtype produce_additional_types: any + :paramtype produce_additional_types: JSON :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :paramtype partition_option: any + :paramtype partition_option: JSON :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. :paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ - super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'SqlMISource' # type: str + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "SqlMISource" # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -53565,38 +56940,38 @@ def __init__( self.partition_settings = partition_settings -class SqlPartitionSettings(msrest.serialization.Model): +class SqlPartitionSettings(_serialization.Model): """The settings that will be leveraged for Sql source partitioning. :ivar partition_column_name: The name of the column in integer or datetime type that will be used for proceeding partitioning. If not specified, the primary key of the table is auto-detected and used as the partition column. Type: string (or Expression with resultType string). - :vartype partition_column_name: any + :vartype partition_column_name: JSON :ivar partition_upper_bound: The maximum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). - :vartype partition_upper_bound: any + :vartype partition_upper_bound: JSON :ivar partition_lower_bound: The minimum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). 
- :vartype partition_lower_bound: any + :vartype partition_lower_bound: JSON """ _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + "partition_column_name": {"key": "partitionColumnName", "type": "object"}, + "partition_upper_bound": {"key": "partitionUpperBound", "type": "object"}, + "partition_lower_bound": {"key": "partitionLowerBound", "type": "object"}, } def __init__( self, *, - partition_column_name: Optional[Any] = None, - partition_upper_bound: Optional[Any] = None, - partition_lower_bound: Optional[Any] = None, + partition_column_name: Optional[JSON] = None, + partition_upper_bound: Optional[JSON] = None, + partition_lower_bound: Optional[JSON] = None, **kwargs ): """ @@ -53604,33 +56979,33 @@ def __init__( used for proceeding partitioning. If not specified, the primary key of the table is auto-detected and used as the partition column. Type: string (or Expression with resultType string). - :paramtype partition_column_name: any + :paramtype partition_column_name: JSON :keyword partition_upper_bound: The maximum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). - :paramtype partition_upper_bound: any + :paramtype partition_upper_bound: JSON :keyword partition_lower_bound: The minimum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). - :paramtype partition_lower_bound: any + :paramtype partition_lower_bound: JSON """ - super(SqlPartitionSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.partition_column_name = partition_column_name self.partition_upper_bound = partition_upper_bound self.partition_lower_bound = partition_lower_bound -class SqlServerLinkedService(LinkedService): +class SqlServerLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """SQL Server linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -53639,61 +57014,64 @@ class SqlServerLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype annotations: list[JSON] + :ivar connection_string: The connection string. 
Type: string, SecureString or + AzureKeyVaultSecretReference. Required. + :vartype connection_string: JSON :ivar user_name: The on-premises Windows authentication user name. Type: string (or Expression with resultType string). - :vartype user_name: any + :vartype user_name: JSON :ivar password: The on-premises Windows authentication password. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON :ivar always_encrypted_settings: Sql always encrypted properties. :vartype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, + "type": {"required": True}, + "connection_string": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "always_encrypted_settings": { + "key": "typeProperties.alwaysEncryptedSettings", + "type": "SqlAlwaysEncryptedProperties", + }, } def __init__( self, *, - connection_string: Any, - additional_properties: Optional[Dict[str, Any]] = None, + connection_string: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - user_name: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, always_encrypted_settings: Optional["_models.SqlAlwaysEncryptedProperties"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -53701,25 +57079,32 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype annotations: list[JSON] + :keyword connection_string: The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Required. + :paramtype connection_string: JSON :keyword user_name: The on-premises Windows authentication user name. Type: string (or Expression with resultType string). - :paramtype user_name: any + :paramtype user_name: JSON :keyword password: The on-premises Windows authentication password. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any + :paramtype encrypted_credential: JSON :keyword always_encrypted_settings: Sql always encrypted properties. :paramtype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ - super(SqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'SqlServer' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "SqlServer" # type: str self.connection_string = connection_string self.user_name = user_name self.password = password @@ -53727,158 +57112,170 @@ def __init__( self.always_encrypted_settings = always_encrypted_settings -class SqlServerSink(CopySink): +class SqlServerSink(CopySink): # pylint: disable=too-many-instance-attributes """A copy activity SQL server sink. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :vartype sql_writer_stored_procedure_name: any + :vartype sql_writer_stored_procedure_name: JSON :ivar sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType string). - :vartype sql_writer_table_type: any + :vartype sql_writer_table_type: JSON :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :vartype pre_copy_script: any + :vartype pre_copy_script: JSON :ivar stored_procedure_parameters: SQL stored procedure parameters. :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :ivar stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :vartype stored_procedure_table_type_parameter_name: any + :vartype stored_procedure_table_type_parameter_name: JSON :ivar table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :vartype table_option: any + :vartype table_option: JSON :ivar sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - :vartype sql_writer_use_table_lock: any + :vartype sql_writer_use_table_lock: JSON :ivar write_behavior: Write behavior when copying data into sql server. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). - :vartype write_behavior: any + :vartype write_behavior: JSON :ivar upsert_settings: SQL upsert settings. 
:vartype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, - 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "sql_writer_stored_procedure_name": {"key": "sqlWriterStoredProcedureName", "type": "object"}, + "sql_writer_table_type": {"key": "sqlWriterTableType", "type": "object"}, + "pre_copy_script": {"key": "preCopyScript", "type": "object"}, + "stored_procedure_parameters": {"key": "storedProcedureParameters", "type": "{StoredProcedureParameter}"}, + "stored_procedure_table_type_parameter_name": { + "key": "storedProcedureTableTypeParameterName", + "type": "object", + }, + "table_option": {"key": "tableOption", "type": "object"}, + "sql_writer_use_table_lock": {"key": "sqlWriterUseTableLock", "type": "object"}, + "write_behavior": {"key": "writeBehavior", "type": "object"}, + "upsert_settings": {"key": "upsertSettings", "type": "SqlUpsertSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - sql_writer_stored_procedure_name: Optional[Any] = None, - sql_writer_table_type: Optional[Any] = None, - pre_copy_script: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: 
Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + sql_writer_stored_procedure_name: Optional[JSON] = None, + sql_writer_table_type: Optional[JSON] = None, + pre_copy_script: Optional[JSON] = None, stored_procedure_parameters: Optional[Dict[str, "_models.StoredProcedureParameter"]] = None, - stored_procedure_table_type_parameter_name: Optional[Any] = None, - table_option: Optional[Any] = None, - sql_writer_use_table_lock: Optional[Any] = None, - write_behavior: Optional[Any] = None, + stored_procedure_table_type_parameter_name: Optional[JSON] = None, + table_option: Optional[JSON] = None, + sql_writer_use_table_lock: Optional[JSON] = None, + write_behavior: Optional[JSON] = None, upsert_settings: Optional["_models.SqlUpsertSettings"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :paramtype sql_writer_stored_procedure_name: any + :paramtype sql_writer_stored_procedure_name: JSON :keyword sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType string). - :paramtype sql_writer_table_type: any + :paramtype sql_writer_table_type: JSON :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :paramtype pre_copy_script: any + :paramtype pre_copy_script: JSON :keyword stored_procedure_parameters: SQL stored procedure parameters. :paramtype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :keyword stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :paramtype stored_procedure_table_type_parameter_name: any + :paramtype stored_procedure_table_type_parameter_name: JSON :keyword table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). 
- :paramtype table_option: any + :paramtype table_option: JSON :keyword sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - :paramtype sql_writer_use_table_lock: any + :paramtype sql_writer_use_table_lock: JSON :keyword write_behavior: Write behavior when copying data into sql server. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). - :paramtype write_behavior: any + :paramtype write_behavior: JSON :keyword upsert_settings: SQL upsert settings. :paramtype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings """ - super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'SqlServerSink' # type: str + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "SqlServerSink" # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script @@ -53890,135 +57287,144 @@ def __init__( self.upsert_settings = upsert_settings -class SqlServerSource(TabularSource): +class SqlServerSource(TabularSource): # pylint: disable=too-many-instance-attributes """A copy activity SQL server source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :vartype additional_columns: any + :vartype additional_columns: JSON :ivar sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :vartype sql_reader_query: any + :vartype sql_reader_query: JSON :ivar sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :vartype sql_reader_stored_procedure_name: any + :vartype sql_reader_stored_procedure_name: JSON :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :ivar produce_additional_types: Which additional types to produce. - :vartype produce_additional_types: any + :vartype produce_additional_types: JSON :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :vartype partition_option: any + :vartype partition_option: JSON :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. :vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "sql_reader_query": {"key": "sqlReaderQuery", "type": "object"}, + "sql_reader_stored_procedure_name": {"key": "sqlReaderStoredProcedureName", "type": "object"}, + "stored_procedure_parameters": {"key": "storedProcedureParameters", "type": "{StoredProcedureParameter}"}, + "produce_additional_types": {"key": "produceAdditionalTypes", "type": "object"}, + "partition_option": {"key": "partitionOption", "type": "object"}, + "partition_settings": {"key": "partitionSettings", "type": 
"SqlPartitionSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - sql_reader_query: Optional[Any] = None, - sql_reader_stored_procedure_name: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + sql_reader_query: Optional[JSON] = None, + sql_reader_stored_procedure_name: Optional[JSON] = None, stored_procedure_parameters: Optional[Dict[str, "_models.StoredProcedureParameter"]] = None, - produce_additional_types: Optional[Any] = None, - partition_option: Optional[Any] = None, + produce_additional_types: Optional[JSON] = None, + partition_option: Optional[JSON] = None, partition_settings: Optional["_models.SqlPartitionSettings"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :paramtype sql_reader_query: any + :paramtype sql_reader_query: JSON :keyword sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :paramtype sql_reader_stored_procedure_name: any + :paramtype sql_reader_stored_procedure_name: JSON :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
:paramtype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :keyword produce_additional_types: Which additional types to produce. - :paramtype produce_additional_types: any + :paramtype produce_additional_types: JSON :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :paramtype partition_option: any + :paramtype partition_option: JSON :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. :paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ - super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'SqlServerSource' # type: str + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "SqlServerSource" # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -54034,10 +57440,10 @@ class SqlServerStoredProcedureActivity(ExecutionActivity): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -54049,52 +57455,52 @@ class SqlServerStoredProcedureActivity(ExecutionActivity): :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar policy: Activity policy. :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :ivar stored_procedure_name: Required. Stored procedure name. Type: string (or Expression with - resultType string). - :vartype stored_procedure_name: any + :ivar stored_procedure_name: Stored procedure name. Type: string (or Expression with resultType + string). Required. + :vartype stored_procedure_name: JSON :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
- :vartype stored_procedure_parameters: any + :vartype stored_procedure_parameters: JSON """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'stored_procedure_name': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "stored_procedure_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'stored_procedure_name': {'key': 'typeProperties.storedProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "stored_procedure_name": {"key": "typeProperties.storedProcedureName", "type": "object"}, + "stored_procedure_parameters": {"key": "typeProperties.storedProcedureParameters", "type": "object"}, } def __init__( self, *, name: str, - stored_procedure_name: Any, - additional_properties: Optional[Dict[str, Any]] = None, + stored_procedure_name: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, - stored_procedure_parameters: Optional[Any] = None, + stored_procedure_parameters: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -54106,283 +57512,314 @@ def __init__( :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword policy: Activity policy. :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :keyword stored_procedure_name: Required. Stored procedure name. Type: string (or Expression - with resultType string). - :paramtype stored_procedure_name: any + :keyword stored_procedure_name: Stored procedure name. Type: string (or Expression with + resultType string). Required. + :paramtype stored_procedure_name: JSON :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
- :paramtype stored_procedure_parameters: any - """ - super(SqlServerStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'SqlServerStoredProcedure' # type: str + :paramtype stored_procedure_parameters: JSON + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "SqlServerStoredProcedure" # type: str self.stored_procedure_name = stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters -class SqlServerTableDataset(Dataset): +class SqlServerTableDataset(Dataset): # pylint: disable=too-many-instance-attributes """The on-premises SQL Server dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :vartype table_name: any + :vartype table_name: JSON :ivar schema_type_properties_schema: The schema name of the SQL Server dataset. Type: string (or Expression with resultType string). - :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON :ivar table: The table name of the SQL Server dataset. Type: string (or Expression with resultType string). 
- :vartype table: any + :vartype table: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, - schema_type_properties_schema: Optional[Any] = None, - table: Optional[Any] = None, + table_name: Optional[JSON] = None, + schema_type_properties_schema: Optional[JSON] = None, + table: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. 
:paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: This property will be retired. Please consider using schema + table properties instead. - :paramtype table_name: any + :paramtype table_name: JSON :keyword schema_type_properties_schema: The schema name of the SQL Server dataset. Type: string (or Expression with resultType string). - :paramtype schema_type_properties_schema: any + :paramtype schema_type_properties_schema: JSON :keyword table: The table name of the SQL Server dataset. Type: string (or Expression with resultType string). - :paramtype table: any - """ - super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'SqlServerTable' # type: str + :paramtype table: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "SqlServerTable" # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table -class SqlSink(CopySink): +class SqlSink(CopySink): # pylint: disable=too-many-instance-attributes """A copy activity SQL sink. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy sink type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. :vartype type: str :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :vartype write_batch_size: any + :vartype write_batch_size: JSON :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype write_batch_timeout: any + :vartype write_batch_timeout: JSON :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :vartype sink_retry_count: any + :vartype sink_retry_count: JSON :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype sink_retry_wait: any + :vartype sink_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. 
Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :vartype sql_writer_stored_procedure_name: any + :vartype sql_writer_stored_procedure_name: JSON :ivar sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType string). - :vartype sql_writer_table_type: any + :vartype sql_writer_table_type: JSON :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :vartype pre_copy_script: any + :vartype pre_copy_script: JSON :ivar stored_procedure_parameters: SQL stored procedure parameters. :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :ivar stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :vartype stored_procedure_table_type_parameter_name: any + :vartype stored_procedure_table_type_parameter_name: JSON :ivar table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :vartype table_option: any + :vartype table_option: JSON :ivar sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - :vartype sql_writer_use_table_lock: any + :vartype sql_writer_use_table_lock: JSON :ivar write_behavior: Write behavior when copying data into sql. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). - :vartype write_behavior: any + :vartype write_behavior: JSON :ivar upsert_settings: SQL upsert settings. 
:vartype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, - 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "sql_writer_stored_procedure_name": {"key": "sqlWriterStoredProcedureName", "type": "object"}, + "sql_writer_table_type": {"key": "sqlWriterTableType", "type": "object"}, + "pre_copy_script": {"key": "preCopyScript", "type": "object"}, + "stored_procedure_parameters": {"key": "storedProcedureParameters", "type": "{StoredProcedureParameter}"}, + "stored_procedure_table_type_parameter_name": { + "key": "storedProcedureTableTypeParameterName", + "type": "object", + }, + "table_option": {"key": "tableOption", "type": "object"}, + "sql_writer_use_table_lock": {"key": "sqlWriterUseTableLock", "type": "object"}, + "write_behavior": {"key": "writeBehavior", "type": "object"}, + "upsert_settings": {"key": "upsertSettings", "type": "SqlUpsertSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - write_batch_size: Optional[Any] = None, - write_batch_timeout: Optional[Any] = None, - sink_retry_count: Optional[Any] = None, - sink_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - sql_writer_stored_procedure_name: Optional[Any] = None, - sql_writer_table_type: Optional[Any] = None, - pre_copy_script: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: 
Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + sql_writer_stored_procedure_name: Optional[JSON] = None, + sql_writer_table_type: Optional[JSON] = None, + pre_copy_script: Optional[JSON] = None, stored_procedure_parameters: Optional[Dict[str, "_models.StoredProcedureParameter"]] = None, - stored_procedure_table_type_parameter_name: Optional[Any] = None, - table_option: Optional[Any] = None, - sql_writer_use_table_lock: Optional[Any] = None, - write_behavior: Optional[Any] = None, + stored_procedure_table_type_parameter_name: Optional[JSON] = None, + table_option: Optional[JSON] = None, + sql_writer_use_table_lock: Optional[JSON] = None, + write_behavior: Optional[JSON] = None, upsert_settings: Optional["_models.SqlUpsertSettings"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :paramtype write_batch_size: any + :paramtype write_batch_size: JSON :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype write_batch_timeout: any + :paramtype write_batch_timeout: JSON :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :paramtype sink_retry_count: any + :paramtype sink_retry_count: JSON :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype sink_retry_wait: any + :paramtype sink_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :paramtype sql_writer_stored_procedure_name: any + :paramtype sql_writer_stored_procedure_name: JSON :keyword sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType string). - :paramtype sql_writer_table_type: any + :paramtype sql_writer_table_type: JSON :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :paramtype pre_copy_script: any + :paramtype pre_copy_script: JSON :keyword stored_procedure_parameters: SQL stored procedure parameters. :paramtype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :keyword stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :paramtype stored_procedure_table_type_parameter_name: any + :paramtype stored_procedure_table_type_parameter_name: JSON :keyword table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). 
- :paramtype table_option: any + :paramtype table_option: JSON :keyword sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - :paramtype sql_writer_use_table_lock: any + :paramtype sql_writer_use_table_lock: JSON :keyword write_behavior: Write behavior when copying data into sql. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). - :paramtype write_behavior: any + :paramtype write_behavior: JSON :keyword upsert_settings: SQL upsert settings. :paramtype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings """ - super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'SqlSink' # type: str + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "SqlSink" # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script @@ -54394,40 +57831,40 @@ def __init__( self.upsert_settings = upsert_settings -class SqlSource(TabularSource): +class SqlSource(TabularSource): # pylint: disable=too-many-instance-attributes """A copy activity SQL source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :vartype additional_columns: any + :vartype additional_columns: JSON :ivar sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :vartype sql_reader_query: any + :vartype sql_reader_query: JSON :ivar sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :vartype sql_reader_stored_procedure_name: any + :vartype sql_reader_stored_procedure_name: JSON :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :vartype stored_procedure_parameters: dict[str, @@ -54435,82 +57872,82 @@ class SqlSource(TabularSource): :ivar isolation_level: Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). - :vartype isolation_level: any + :vartype isolation_level: JSON :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :vartype partition_option: any + :vartype partition_option: JSON :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. :vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'isolation_level': {'key': 'isolationLevel', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "sql_reader_query": {"key": "sqlReaderQuery", "type": "object"}, + "sql_reader_stored_procedure_name": {"key": "sqlReaderStoredProcedureName", "type": "object"}, + "stored_procedure_parameters": {"key": "storedProcedureParameters", "type": "{StoredProcedureParameter}"}, + "isolation_level": {"key": "isolationLevel", "type": "object"}, + 
"partition_option": {"key": "partitionOption", "type": "object"}, + "partition_settings": {"key": "partitionSettings", "type": "SqlPartitionSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - sql_reader_query: Optional[Any] = None, - sql_reader_stored_procedure_name: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + sql_reader_query: Optional[JSON] = None, + sql_reader_stored_procedure_name: Optional[JSON] = None, stored_procedure_parameters: Optional[Dict[str, "_models.StoredProcedureParameter"]] = None, - isolation_level: Optional[Any] = None, - partition_option: Optional[Any] = None, + isolation_level: Optional[JSON] = None, + partition_option: Optional[JSON] = None, partition_settings: Optional["_models.SqlPartitionSettings"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :paramtype sql_reader_query: any + :paramtype sql_reader_query: JSON :keyword sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). 
- :paramtype sql_reader_stored_procedure_name: any + :paramtype sql_reader_stored_procedure_name: JSON :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :paramtype stored_procedure_parameters: dict[str, @@ -54518,15 +57955,24 @@ def __init__( :keyword isolation_level: Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). - :paramtype isolation_level: any + :paramtype isolation_level: JSON :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :paramtype partition_option: any + :paramtype partition_option: JSON :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. :paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ - super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'SqlSource' # type: str + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "SqlSource" # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -54535,60 +57981,60 @@ def __init__( self.partition_settings = partition_settings -class SqlUpsertSettings(msrest.serialization.Model): +class SqlUpsertSettings(_serialization.Model): """Sql upsert option settings. :ivar use_temp_db: Specifies whether to use temp db for upsert interim table. Type: boolean (or Expression with resultType boolean). - :vartype use_temp_db: any + :vartype use_temp_db: JSON :ivar interim_schema_name: Schema name for interim table. Type: string (or Expression with resultType string). - :vartype interim_schema_name: any + :vartype interim_schema_name: JSON :ivar keys: Key column names for unique row identification. Type: array of strings (or Expression with resultType array of strings). - :vartype keys: any + :vartype keys: JSON """ _attribute_map = { - 'use_temp_db': {'key': 'useTempDB', 'type': 'object'}, - 'interim_schema_name': {'key': 'interimSchemaName', 'type': 'object'}, - 'keys': {'key': 'keys', 'type': 'object'}, + "use_temp_db": {"key": "useTempDB", "type": "object"}, + "interim_schema_name": {"key": "interimSchemaName", "type": "object"}, + "keys": {"key": "keys", "type": "object"}, } def __init__( self, *, - use_temp_db: Optional[Any] = None, - interim_schema_name: Optional[Any] = None, - keys: Optional[Any] = None, + use_temp_db: Optional[JSON] = None, + interim_schema_name: Optional[JSON] = None, + keys: Optional[JSON] = None, **kwargs ): """ :keyword use_temp_db: Specifies whether to use temp db for upsert interim table. 
Type: boolean (or Expression with resultType boolean). - :paramtype use_temp_db: any + :paramtype use_temp_db: JSON :keyword interim_schema_name: Schema name for interim table. Type: string (or Expression with resultType string). - :paramtype interim_schema_name: any + :paramtype interim_schema_name: JSON :keyword keys: Key column names for unique row identification. Type: array of strings (or Expression with resultType array of strings). - :paramtype keys: any + :paramtype keys: JSON """ - super(SqlUpsertSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.use_temp_db = use_temp_db self.interim_schema_name = interim_schema_name self.keys = keys -class SquareLinkedService(LinkedService): +class SquareLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Square Service linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -54597,80 +58043,80 @@ class SquareLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_properties: Properties used to connect to Square. It is mutually exclusive with any other properties in the linked service. Type: object. - :vartype connection_properties: any + :vartype connection_properties: JSON :ivar host: The URL of the Square instance. (i.e. mystore.mysquare.com). - :vartype host: any + :vartype host: JSON :ivar client_id: The client ID associated with your Square application. - :vartype client_id: any + :vartype client_id: JSON :ivar client_secret: The client secret associated with your Square application. :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase :ivar redirect_uri: The redirect URL assigned in the Square application dashboard. (i.e. http://localhost:2500). - :vartype redirect_uri: any + :vartype redirect_uri: JSON :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :vartype use_encrypted_endpoints: any + :vartype use_encrypted_endpoints: JSON :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :vartype use_host_verification: any + :vartype use_host_verification: JSON :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :vartype use_peer_verification: any + :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'redirect_uri': {'key': 'typeProperties.redirectUri', 'type': 'object'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_properties": {"key": "typeProperties.connectionProperties", "type": "object"}, + "host": {"key": "typeProperties.host", "type": "object"}, + "client_id": {"key": "typeProperties.clientId", "type": "object"}, + "client_secret": {"key": "typeProperties.clientSecret", "type": "SecretBase"}, + "redirect_uri": {"key": "typeProperties.redirectUri", "type": "object"}, + "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, + "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, + "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_properties: Optional[Any] = None, - host: Optional[Any] = None, - client_id: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_properties: Optional[JSON] = None, + host: Optional[JSON] = None, + client_id: Optional[JSON] = None, client_secret: Optional["_models.SecretBase"] = None, - redirect_uri: Optional[Any] = None, - use_encrypted_endpoints: Optional[Any] = None, - use_host_verification: Optional[Any] = None, - use_peer_verification: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + redirect_uri: Optional[JSON] = None, + use_encrypted_endpoints: Optional[JSON] = None, + use_host_verification: Optional[JSON] = None, + use_peer_verification: Optional[JSON] = None, + encrypted_credential: 
Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -54678,36 +58124,43 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_properties: Properties used to connect to Square. It is mutually exclusive with any other properties in the linked service. Type: object. - :paramtype connection_properties: any + :paramtype connection_properties: JSON :keyword host: The URL of the Square instance. (i.e. mystore.mysquare.com). - :paramtype host: any + :paramtype host: JSON :keyword client_id: The client ID associated with your Square application. - :paramtype client_id: any + :paramtype client_id: JSON :keyword client_secret: The client secret associated with your Square application. :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase :keyword redirect_uri: The redirect URL assigned in the Square application dashboard. (i.e. http://localhost:2500). - :paramtype redirect_uri: any + :paramtype redirect_uri: JSON :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :paramtype use_encrypted_endpoints: any + :paramtype use_encrypted_endpoints: JSON :keyword use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :paramtype use_host_verification: any + :paramtype use_host_verification: JSON :keyword use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :paramtype use_peer_verification: any + :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(SquareLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Square' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Square" # type: str self.connection_properties = connection_properties self.host = host self.client_id = client_id @@ -54726,88 +58179,98 @@ class SquareObjectDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. 
:vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. Type: string (or Expression with resultType string). - :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(SquareObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'SquareObject' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "SquareObject" # type: str self.table_name = table_name @@ -54818,209 +58281,211 @@ class SquareSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. 
Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'SquareSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "SquareSource" # type: str self.query = query -class SSISAccessCredential(msrest.serialization.Model): +class SSISAccessCredential(_serialization.Model): """SSIS access credential. All required parameters must be populated in order to send to Azure. - :ivar domain: Required. Domain for windows authentication. - :vartype domain: any - :ivar user_name: Required. UseName for windows authentication. - :vartype user_name: any - :ivar password: Required. Password for windows authentication. + :ivar domain: Domain for windows authentication. Required. + :vartype domain: JSON + :ivar user_name: UseName for windows authentication. Required. + :vartype user_name: JSON + :ivar password: Password for windows authentication. Required. :vartype password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { - 'domain': {'required': True}, - 'user_name': {'required': True}, - 'password': {'required': True}, + "domain": {"required": True}, + "user_name": {"required": True}, + "password": {"required": True}, } _attribute_map = { - 'domain': {'key': 'domain', 'type': 'object'}, - 'user_name': {'key': 'userName', 'type': 'object'}, - 'password': {'key': 'password', 'type': 'SecretBase'}, + "domain": {"key": "domain", "type": "object"}, + "user_name": {"key": "userName", "type": "object"}, + "password": {"key": "password", "type": "SecretBase"}, } - def __init__( - self, - *, - domain: Any, - user_name: Any, - password: "_models.SecretBase", - **kwargs - ): + def __init__(self, *, domain: JSON, user_name: JSON, password: "_models.SecretBase", **kwargs): """ - :keyword domain: Required. Domain for windows authentication. - :paramtype domain: any - :keyword user_name: Required. UseName for windows authentication. - :paramtype user_name: any - :keyword password: Required. Password for windows authentication. + :keyword domain: Domain for windows authentication. Required. + :paramtype domain: JSON + :keyword user_name: UseName for windows authentication. Required. 
+ :paramtype user_name: JSON + :keyword password: Password for windows authentication. Required. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase """ - super(SSISAccessCredential, self).__init__(**kwargs) + super().__init__(**kwargs) self.domain = domain self.user_name = user_name self.password = password -class SSISChildPackage(msrest.serialization.Model): +class SSISChildPackage(_serialization.Model): """SSIS embedded child package. All required parameters must be populated in order to send to Azure. - :ivar package_path: Required. Path for embedded child package. Type: string (or Expression with - resultType string). - :vartype package_path: any + :ivar package_path: Path for embedded child package. Type: string (or Expression with + resultType string). Required. + :vartype package_path: JSON :ivar package_name: Name for embedded child package. :vartype package_name: str - :ivar package_content: Required. Content for embedded child package. Type: string (or - Expression with resultType string). - :vartype package_content: any + :ivar package_content: Content for embedded child package. Type: string (or Expression with + resultType string). Required. + :vartype package_content: JSON :ivar package_last_modified_date: Last modified date for embedded child package. :vartype package_last_modified_date: str """ _validation = { - 'package_path': {'required': True}, - 'package_content': {'required': True}, + "package_path": {"required": True}, + "package_content": {"required": True}, } _attribute_map = { - 'package_path': {'key': 'packagePath', 'type': 'object'}, - 'package_name': {'key': 'packageName', 'type': 'str'}, - 'package_content': {'key': 'packageContent', 'type': 'object'}, - 'package_last_modified_date': {'key': 'packageLastModifiedDate', 'type': 'str'}, + "package_path": {"key": "packagePath", "type": "object"}, + "package_name": {"key": "packageName", "type": "str"}, + "package_content": {"key": "packageContent", "type": "object"}, + "package_last_modified_date": {"key": "packageLastModifiedDate", "type": "str"}, } def __init__( self, *, - package_path: Any, - package_content: Any, + package_path: JSON, + package_content: JSON, package_name: Optional[str] = None, package_last_modified_date: Optional[str] = None, **kwargs ): """ - :keyword package_path: Required. Path for embedded child package. Type: string (or Expression - with resultType string). - :paramtype package_path: any + :keyword package_path: Path for embedded child package. Type: string (or Expression with + resultType string). Required. + :paramtype package_path: JSON :keyword package_name: Name for embedded child package. :paramtype package_name: str - :keyword package_content: Required. Content for embedded child package. Type: string (or - Expression with resultType string). - :paramtype package_content: any + :keyword package_content: Content for embedded child package. Type: string (or Expression with + resultType string). Required. + :paramtype package_content: JSON :keyword package_last_modified_date: Last modified date for embedded child package. :paramtype package_last_modified_date: str """ - super(SSISChildPackage, self).__init__(**kwargs) + super().__init__(**kwargs) self.package_path = package_path self.package_name = package_name self.package_content = package_content self.package_last_modified_date = package_last_modified_date -class SsisObjectMetadata(msrest.serialization.Model): +class SsisObjectMetadata(_serialization.Model): """SSIS object metadata. 
- You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SsisEnvironment, SsisFolder, SsisPackage, SsisProject. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + SsisEnvironment, SsisFolder, SsisPackage, SsisProject All required parameters must be populated in order to send to Azure. - :ivar type: Required. Type of metadata.Constant filled by server. Known values are: "Folder", - "Project", "Package", "Environment". + :ivar type: Type of metadata. Required. Known values are: "Folder", "Project", "Package", and + "Environment". :vartype type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType :ivar id: Metadata id. - :vartype id: long + :vartype id: int :ivar name: Metadata name. :vartype name: str :ivar description: Metadata description. @@ -55028,37 +58493,42 @@ class SsisObjectMetadata(msrest.serialization.Model): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, + "type": {"key": "type", "type": "str"}, + "id": {"key": "id", "type": "int"}, + "name": {"key": "name", "type": "str"}, + "description": {"key": "description", "type": "str"}, } _subtype_map = { - 'type': {'Environment': 'SsisEnvironment', 'Folder': 'SsisFolder', 'Package': 'SsisPackage', 'Project': 'SsisProject'} + "type": { + "Environment": "SsisEnvironment", + "Folder": "SsisFolder", + "Package": "SsisPackage", + "Project": "SsisProject", + } } def __init__( self, *, - id: Optional[int] = None, + id: Optional[int] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, description: Optional[str] = None, **kwargs ): """ :keyword id: Metadata id. - :paramtype id: long + :paramtype id: int :keyword name: Metadata name. :paramtype name: str :keyword description: Metadata description. :paramtype description: str """ - super(SsisObjectMetadata, self).__init__(**kwargs) + super().__init__(**kwargs) self.type = None # type: Optional[str] self.id = id self.name = name @@ -55070,38 +58540,38 @@ class SsisEnvironment(SsisObjectMetadata): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Type of metadata.Constant filled by server. Known values are: "Folder", - "Project", "Package", "Environment". + :ivar type: Type of metadata. Required. Known values are: "Folder", "Project", "Package", and + "Environment". :vartype type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType :ivar id: Metadata id. - :vartype id: long + :vartype id: int :ivar name: Metadata name. :vartype name: str :ivar description: Metadata description. :vartype description: str :ivar folder_id: Folder id which contains environment. - :vartype folder_id: long + :vartype folder_id: int :ivar variables: Variable in environment. 
:vartype variables: list[~azure.mgmt.datafactory.models.SsisVariable] """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'folder_id': {'key': 'folderId', 'type': 'long'}, - 'variables': {'key': 'variables', 'type': '[SsisVariable]'}, + "type": {"key": "type", "type": "str"}, + "id": {"key": "id", "type": "int"}, + "name": {"key": "name", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "folder_id": {"key": "folderId", "type": "int"}, + "variables": {"key": "variables", "type": "[SsisVariable]"}, } def __init__( self, *, - id: Optional[int] = None, + id: Optional[int] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, description: Optional[str] = None, folder_id: Optional[int] = None, @@ -55110,27 +58580,27 @@ def __init__( ): """ :keyword id: Metadata id. - :paramtype id: long + :paramtype id: int :keyword name: Metadata name. :paramtype name: str :keyword description: Metadata description. :paramtype description: str :keyword folder_id: Folder id which contains environment. - :paramtype folder_id: long + :paramtype folder_id: int :keyword variables: Variable in environment. :paramtype variables: list[~azure.mgmt.datafactory.models.SsisVariable] """ - super(SsisEnvironment, self).__init__(id=id, name=name, description=description, **kwargs) - self.type = 'Environment' # type: str + super().__init__(id=id, name=name, description=description, **kwargs) + self.type = "Environment" # type: str self.folder_id = folder_id self.variables = variables -class SsisEnvironmentReference(msrest.serialization.Model): +class SsisEnvironmentReference(_serialization.Model): """Ssis environment reference. :ivar id: Environment reference id. - :vartype id: long + :vartype id: int :ivar environment_folder_name: Environment folder name. :vartype environment_folder_name: str :ivar environment_name: Environment name. @@ -55140,16 +58610,16 @@ class SsisEnvironmentReference(msrest.serialization.Model): """ _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'}, - 'environment_name': {'key': 'environmentName', 'type': 'str'}, - 'reference_type': {'key': 'referenceType', 'type': 'str'}, + "id": {"key": "id", "type": "int"}, + "environment_folder_name": {"key": "environmentFolderName", "type": "str"}, + "environment_name": {"key": "environmentName", "type": "str"}, + "reference_type": {"key": "referenceType", "type": "str"}, } def __init__( self, *, - id: Optional[int] = None, + id: Optional[int] = None, # pylint: disable=redefined-builtin environment_folder_name: Optional[str] = None, environment_name: Optional[str] = None, reference_type: Optional[str] = None, @@ -55157,7 +58627,7 @@ def __init__( ): """ :keyword id: Environment reference id. - :paramtype id: long + :paramtype id: int :keyword environment_folder_name: Environment folder name. :paramtype environment_folder_name: str :keyword environment_name: Environment name. @@ -55165,90 +58635,78 @@ def __init__( :keyword reference_type: Reference type. 
:paramtype reference_type: str """ - super(SsisEnvironmentReference, self).__init__(**kwargs) + super().__init__(**kwargs) self.id = id self.environment_folder_name = environment_folder_name self.environment_name = environment_name self.reference_type = reference_type -class SSISExecutionCredential(msrest.serialization.Model): +class SSISExecutionCredential(_serialization.Model): """SSIS package execution credential. All required parameters must be populated in order to send to Azure. - :ivar domain: Required. Domain for windows authentication. - :vartype domain: any - :ivar user_name: Required. UseName for windows authentication. - :vartype user_name: any - :ivar password: Required. Password for windows authentication. + :ivar domain: Domain for windows authentication. Required. + :vartype domain: JSON + :ivar user_name: UseName for windows authentication. Required. + :vartype user_name: JSON + :ivar password: Password for windows authentication. Required. :vartype password: ~azure.mgmt.datafactory.models.SecureString """ _validation = { - 'domain': {'required': True}, - 'user_name': {'required': True}, - 'password': {'required': True}, + "domain": {"required": True}, + "user_name": {"required": True}, + "password": {"required": True}, } _attribute_map = { - 'domain': {'key': 'domain', 'type': 'object'}, - 'user_name': {'key': 'userName', 'type': 'object'}, - 'password': {'key': 'password', 'type': 'SecureString'}, + "domain": {"key": "domain", "type": "object"}, + "user_name": {"key": "userName", "type": "object"}, + "password": {"key": "password", "type": "SecureString"}, } - def __init__( - self, - *, - domain: Any, - user_name: Any, - password: "_models.SecureString", - **kwargs - ): + def __init__(self, *, domain: JSON, user_name: JSON, password: "_models.SecureString", **kwargs): """ - :keyword domain: Required. Domain for windows authentication. - :paramtype domain: any - :keyword user_name: Required. UseName for windows authentication. - :paramtype user_name: any - :keyword password: Required. Password for windows authentication. + :keyword domain: Domain for windows authentication. Required. + :paramtype domain: JSON + :keyword user_name: UseName for windows authentication. Required. + :paramtype user_name: JSON + :keyword password: Password for windows authentication. Required. :paramtype password: ~azure.mgmt.datafactory.models.SecureString """ - super(SSISExecutionCredential, self).__init__(**kwargs) + super().__init__(**kwargs) self.domain = domain self.user_name = user_name self.password = password -class SSISExecutionParameter(msrest.serialization.Model): +class SSISExecutionParameter(_serialization.Model): """SSIS execution parameter. All required parameters must be populated in order to send to Azure. - :ivar value: Required. SSIS package execution parameter value. Type: string (or Expression with - resultType string). - :vartype value: any + :ivar value: SSIS package execution parameter value. Type: string (or Expression with + resultType string). Required. + :vartype value: JSON """ _validation = { - 'value': {'required': True}, + "value": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': 'object'}, + "value": {"key": "value", "type": "object"}, } - def __init__( - self, - *, - value: Any, - **kwargs - ): + def __init__(self, *, value: JSON, **kwargs): """ - :keyword value: Required. SSIS package execution parameter value. Type: string (or Expression - with resultType string). 
- :paramtype value: any + :keyword value: SSIS package execution parameter value. Type: string (or Expression with + resultType string). Required. + :paramtype value: JSON """ - super(SSISExecutionParameter, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value @@ -55257,11 +58715,11 @@ class SsisFolder(SsisObjectMetadata): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Type of metadata.Constant filled by server. Known values are: "Folder", - "Project", "Package", "Environment". + :ivar type: Type of metadata. Required. Known values are: "Folder", "Project", "Package", and + "Environment". :vartype type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType :ivar id: Metadata id. - :vartype id: long + :vartype id: int :ivar name: Metadata name. :vartype name: str :ivar description: Metadata description. @@ -55269,96 +58727,96 @@ class SsisFolder(SsisObjectMetadata): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, + "type": {"key": "type", "type": "str"}, + "id": {"key": "id", "type": "int"}, + "name": {"key": "name", "type": "str"}, + "description": {"key": "description", "type": "str"}, } def __init__( self, *, - id: Optional[int] = None, + id: Optional[int] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, description: Optional[str] = None, **kwargs ): """ :keyword id: Metadata id. - :paramtype id: long + :paramtype id: int :keyword name: Metadata name. :paramtype name: str :keyword description: Metadata description. :paramtype description: str """ - super(SsisFolder, self).__init__(id=id, name=name, description=description, **kwargs) - self.type = 'Folder' # type: str + super().__init__(id=id, name=name, description=description, **kwargs) + self.type = "Folder" # type: str -class SSISLogLocation(msrest.serialization.Model): +class SSISLogLocation(_serialization.Model): """SSIS package execution log location. All required parameters must be populated in order to send to Azure. - :ivar log_path: Required. The SSIS package execution log path. Type: string (or Expression with - resultType string). - :vartype log_path: any - :ivar type: Required. The type of SSIS log location. Known values are: "File". + :ivar log_path: The SSIS package execution log path. Type: string (or Expression with + resultType string). Required. + :vartype log_path: JSON + :ivar type: The type of SSIS log location. Required. "File" :vartype type: str or ~azure.mgmt.datafactory.models.SsisLogLocationType :ivar access_credential: The package execution log access credential. :vartype access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential :ivar log_refresh_interval: Specifies the interval to refresh log. The default interval is 5 minutes. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :vartype log_refresh_interval: any + :vartype log_refresh_interval: JSON """ _validation = { - 'log_path': {'required': True}, - 'type': {'required': True}, + "log_path": {"required": True}, + "type": {"required": True}, } _attribute_map = { - 'log_path': {'key': 'logPath', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, - 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, + "log_path": {"key": "logPath", "type": "object"}, + "type": {"key": "type", "type": "str"}, + "access_credential": {"key": "typeProperties.accessCredential", "type": "SSISAccessCredential"}, + "log_refresh_interval": {"key": "typeProperties.logRefreshInterval", "type": "object"}, } def __init__( self, *, - log_path: Any, + log_path: JSON, type: Union[str, "_models.SsisLogLocationType"], access_credential: Optional["_models.SSISAccessCredential"] = None, - log_refresh_interval: Optional[Any] = None, + log_refresh_interval: Optional[JSON] = None, **kwargs ): """ - :keyword log_path: Required. The SSIS package execution log path. Type: string (or Expression - with resultType string). - :paramtype log_path: any - :keyword type: Required. The type of SSIS log location. Known values are: "File". + :keyword log_path: The SSIS package execution log path. Type: string (or Expression with + resultType string). Required. + :paramtype log_path: JSON + :keyword type: The type of SSIS log location. Required. "File" :paramtype type: str or ~azure.mgmt.datafactory.models.SsisLogLocationType :keyword access_credential: The package execution log access credential. :paramtype access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential :keyword log_refresh_interval: Specifies the interval to refresh log. The default interval is 5 minutes. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype log_refresh_interval: any + :paramtype log_refresh_interval: JSON """ - super(SSISLogLocation, self).__init__(**kwargs) + super().__init__(**kwargs) self.log_path = log_path self.type = type self.access_credential = access_credential self.log_refresh_interval = log_refresh_interval -class SsisObjectMetadataListResponse(msrest.serialization.Model): +class SsisObjectMetadataListResponse(_serialization.Model): """A list of SSIS object metadata. :ivar value: List of SSIS object metadata. @@ -55368,16 +58826,12 @@ class SsisObjectMetadataListResponse(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': '[SsisObjectMetadata]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[SsisObjectMetadata]"}, + "next_link": {"key": "nextLink", "type": "str"}, } def __init__( - self, - *, - value: Optional[List["_models.SsisObjectMetadata"]] = None, - next_link: Optional[str] = None, - **kwargs + self, *, value: Optional[List["_models.SsisObjectMetadata"]] = None, next_link: Optional[str] = None, **kwargs ): """ :keyword value: List of SSIS object metadata. @@ -55385,12 +58839,12 @@ def __init__( :keyword next_link: The link to the next page of results, if any remaining results exist. 
:paramtype next_link: str """ - super(SsisObjectMetadataListResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.next_link = next_link -class SsisObjectMetadataStatusResponse(msrest.serialization.Model): +class SsisObjectMetadataStatusResponse(_serialization.Model): """The status of the operation. :ivar status: The status of the operation. @@ -55404,10 +58858,10 @@ class SsisObjectMetadataStatusResponse(msrest.serialization.Model): """ _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'str'}, + "status": {"key": "status", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "properties": {"key": "properties", "type": "str"}, + "error": {"key": "error", "type": "str"}, } def __init__( @@ -55429,7 +58883,7 @@ def __init__( :keyword error: The operation error message. :paramtype error: str """ - super(SsisObjectMetadataStatusResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.status = status self.name = name self.properties = properties @@ -55441,44 +58895,44 @@ class SsisPackage(SsisObjectMetadata): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Type of metadata.Constant filled by server. Known values are: "Folder", - "Project", "Package", "Environment". + :ivar type: Type of metadata. Required. Known values are: "Folder", "Project", "Package", and + "Environment". :vartype type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType :ivar id: Metadata id. - :vartype id: long + :vartype id: int :ivar name: Metadata name. :vartype name: str :ivar description: Metadata description. :vartype description: str :ivar folder_id: Folder id which contains package. - :vartype folder_id: long + :vartype folder_id: int :ivar project_version: Project version which contains package. - :vartype project_version: long + :vartype project_version: int :ivar project_id: Project id which contains package. - :vartype project_id: long + :vartype project_id: int :ivar parameters: Parameters in package. :vartype parameters: list[~azure.mgmt.datafactory.models.SsisParameter] """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'folder_id': {'key': 'folderId', 'type': 'long'}, - 'project_version': {'key': 'projectVersion', 'type': 'long'}, - 'project_id': {'key': 'projectId', 'type': 'long'}, - 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + "type": {"key": "type", "type": "str"}, + "id": {"key": "id", "type": "int"}, + "name": {"key": "name", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "folder_id": {"key": "folderId", "type": "int"}, + "project_version": {"key": "projectVersion", "type": "int"}, + "project_id": {"key": "projectId", "type": "int"}, + "parameters": {"key": "parameters", "type": "[SsisParameter]"}, } def __init__( self, *, - id: Optional[int] = None, + id: Optional[int] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, description: Optional[str] = None, folder_id: Optional[int] = None, @@ -55489,35 +58943,35 @@ def __init__( ): """ :keyword id: Metadata id. - :paramtype id: long + :paramtype id: int :keyword name: Metadata name. 
:paramtype name: str :keyword description: Metadata description. :paramtype description: str :keyword folder_id: Folder id which contains package. - :paramtype folder_id: long + :paramtype folder_id: int :keyword project_version: Project version which contains package. - :paramtype project_version: long + :paramtype project_version: int :keyword project_id: Project id which contains package. - :paramtype project_id: long + :paramtype project_id: int :keyword parameters: Parameters in package. :paramtype parameters: list[~azure.mgmt.datafactory.models.SsisParameter] """ - super(SsisPackage, self).__init__(id=id, name=name, description=description, **kwargs) - self.type = 'Package' # type: str + super().__init__(id=id, name=name, description=description, **kwargs) + self.type = "Package" # type: str self.folder_id = folder_id self.project_version = project_version self.project_id = project_id self.parameters = parameters -class SSISPackageLocation(msrest.serialization.Model): +class SSISPackageLocation(_serialization.Model): """SSIS package location. :ivar package_path: The SSIS package path. Type: string (or Expression with resultType string). - :vartype package_path: any + :vartype package_path: JSON :ivar type: The type of SSIS package location. Known values are: "SSISDB", "File", - "InlinePackage", "PackageStore". + "InlinePackage", and "PackageStore". :vartype type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType :ivar package_password: Password of the package. :vartype package_password: ~azure.mgmt.datafactory.models.SecretBase @@ -55525,14 +58979,14 @@ class SSISPackageLocation(msrest.serialization.Model): :vartype access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential :ivar configuration_path: The configuration file of the package execution. Type: string (or Expression with resultType string). - :vartype configuration_path: any + :vartype configuration_path: JSON :ivar configuration_access_credential: The configuration file access credential. :vartype configuration_access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential :ivar package_name: The package name. :vartype package_name: str :ivar package_content: The embedded package content. Type: string (or Expression with resultType string). - :vartype package_content: any + :vartype package_content: JSON :ivar package_last_modified_date: The embedded package last modified date. :vartype package_last_modified_date: str :ivar child_packages: The embedded child package list. 
@@ -55540,29 +58994,32 @@ class SSISPackageLocation(msrest.serialization.Model): """ _attribute_map = { - 'package_path': {'key': 'packagePath', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecretBase'}, - 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, - 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, - 'configuration_access_credential': {'key': 'typeProperties.configurationAccessCredential', 'type': 'SSISAccessCredential'}, - 'package_name': {'key': 'typeProperties.packageName', 'type': 'str'}, - 'package_content': {'key': 'typeProperties.packageContent', 'type': 'object'}, - 'package_last_modified_date': {'key': 'typeProperties.packageLastModifiedDate', 'type': 'str'}, - 'child_packages': {'key': 'typeProperties.childPackages', 'type': '[SSISChildPackage]'}, + "package_path": {"key": "packagePath", "type": "object"}, + "type": {"key": "type", "type": "str"}, + "package_password": {"key": "typeProperties.packagePassword", "type": "SecretBase"}, + "access_credential": {"key": "typeProperties.accessCredential", "type": "SSISAccessCredential"}, + "configuration_path": {"key": "typeProperties.configurationPath", "type": "object"}, + "configuration_access_credential": { + "key": "typeProperties.configurationAccessCredential", + "type": "SSISAccessCredential", + }, + "package_name": {"key": "typeProperties.packageName", "type": "str"}, + "package_content": {"key": "typeProperties.packageContent", "type": "object"}, + "package_last_modified_date": {"key": "typeProperties.packageLastModifiedDate", "type": "str"}, + "child_packages": {"key": "typeProperties.childPackages", "type": "[SSISChildPackage]"}, } def __init__( self, *, - package_path: Optional[Any] = None, + package_path: Optional[JSON] = None, type: Optional[Union[str, "_models.SsisPackageLocationType"]] = None, package_password: Optional["_models.SecretBase"] = None, access_credential: Optional["_models.SSISAccessCredential"] = None, - configuration_path: Optional[Any] = None, + configuration_path: Optional[JSON] = None, configuration_access_credential: Optional["_models.SSISAccessCredential"] = None, package_name: Optional[str] = None, - package_content: Optional[Any] = None, + package_content: Optional[JSON] = None, package_last_modified_date: Optional[str] = None, child_packages: Optional[List["_models.SSISChildPackage"]] = None, **kwargs @@ -55570,9 +59027,9 @@ def __init__( """ :keyword package_path: The SSIS package path. Type: string (or Expression with resultType string). - :paramtype package_path: any + :paramtype package_path: JSON :keyword type: The type of SSIS package location. Known values are: "SSISDB", "File", - "InlinePackage", "PackageStore". + "InlinePackage", and "PackageStore". :paramtype type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType :keyword package_password: Password of the package. :paramtype package_password: ~azure.mgmt.datafactory.models.SecretBase @@ -55580,20 +59037,20 @@ def __init__( :paramtype access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential :keyword configuration_path: The configuration file of the package execution. Type: string (or Expression with resultType string). - :paramtype configuration_path: any + :paramtype configuration_path: JSON :keyword configuration_access_credential: The configuration file access credential. 
:paramtype configuration_access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential :keyword package_name: The package name. :paramtype package_name: str :keyword package_content: The embedded package content. Type: string (or Expression with resultType string). - :paramtype package_content: any + :paramtype package_content: JSON :keyword package_last_modified_date: The embedded package last modified date. :paramtype package_last_modified_date: str :keyword child_packages: The embedded child package list. :paramtype child_packages: list[~azure.mgmt.datafactory.models.SSISChildPackage] """ - super(SSISPackageLocation, self).__init__(**kwargs) + super().__init__(**kwargs) self.package_path = package_path self.type = type self.package_password = package_password @@ -55606,11 +59063,11 @@ def __init__( self.child_packages = child_packages -class SsisParameter(msrest.serialization.Model): +class SsisParameter(_serialization.Model): # pylint: disable=too-many-instance-attributes """Ssis parameter. :ivar id: Parameter id. - :vartype id: long + :vartype id: int :ivar name: Parameter name. :vartype name: str :ivar description: Parameter description. @@ -55636,24 +59093,24 @@ class SsisParameter(msrest.serialization.Model): """ _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'required': {'key': 'required', 'type': 'bool'}, - 'sensitive': {'key': 'sensitive', 'type': 'bool'}, - 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'str'}, - 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, - 'value_type': {'key': 'valueType', 'type': 'str'}, - 'value_set': {'key': 'valueSet', 'type': 'bool'}, - 'variable': {'key': 'variable', 'type': 'str'}, + "id": {"key": "id", "type": "int"}, + "name": {"key": "name", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "data_type": {"key": "dataType", "type": "str"}, + "required": {"key": "required", "type": "bool"}, + "sensitive": {"key": "sensitive", "type": "bool"}, + "design_default_value": {"key": "designDefaultValue", "type": "str"}, + "default_value": {"key": "defaultValue", "type": "str"}, + "sensitive_default_value": {"key": "sensitiveDefaultValue", "type": "str"}, + "value_type": {"key": "valueType", "type": "str"}, + "value_set": {"key": "valueSet", "type": "bool"}, + "variable": {"key": "variable", "type": "str"}, } def __init__( self, *, - id: Optional[int] = None, + id: Optional[int] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, description: Optional[str] = None, data_type: Optional[str] = None, @@ -55669,7 +59126,7 @@ def __init__( ): """ :keyword id: Parameter id. - :paramtype id: long + :paramtype id: int :keyword name: Parameter name. :paramtype name: str :keyword description: Parameter description. @@ -55693,7 +59150,7 @@ def __init__( :keyword variable: Parameter reference variable. :paramtype variable: str """ - super(SsisParameter, self).__init__(**kwargs) + super().__init__(**kwargs) self.id = id self.name = name self.description = description @@ -55713,19 +59170,19 @@ class SsisProject(SsisObjectMetadata): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Type of metadata.Constant filled by server. Known values are: "Folder", - "Project", "Package", "Environment". 
+ :ivar type: Type of metadata. Required. Known values are: "Folder", "Project", "Package", and + "Environment". :vartype type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType :ivar id: Metadata id. - :vartype id: long + :vartype id: int :ivar name: Metadata name. :vartype name: str :ivar description: Metadata description. :vartype description: str :ivar folder_id: Folder id which contains project. - :vartype folder_id: long + :vartype folder_id: int :ivar version: Project version. - :vartype version: long + :vartype version: int :ivar environment_refs: Environment reference in project. :vartype environment_refs: list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] :ivar parameters: Parameters in project. @@ -55733,24 +59190,24 @@ class SsisProject(SsisObjectMetadata): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'folder_id': {'key': 'folderId', 'type': 'long'}, - 'version': {'key': 'version', 'type': 'long'}, - 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, - 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + "type": {"key": "type", "type": "str"}, + "id": {"key": "id", "type": "int"}, + "name": {"key": "name", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "folder_id": {"key": "folderId", "type": "int"}, + "version": {"key": "version", "type": "int"}, + "environment_refs": {"key": "environmentRefs", "type": "[SsisEnvironmentReference]"}, + "parameters": {"key": "parameters", "type": "[SsisParameter]"}, } def __init__( self, *, - id: Optional[int] = None, + id: Optional[int] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, description: Optional[str] = None, folder_id: Optional[int] = None, @@ -55761,75 +59218,69 @@ def __init__( ): """ :keyword id: Metadata id. - :paramtype id: long + :paramtype id: int :keyword name: Metadata name. :paramtype name: str :keyword description: Metadata description. :paramtype description: str :keyword folder_id: Folder id which contains project. - :paramtype folder_id: long + :paramtype folder_id: int :keyword version: Project version. - :paramtype version: long + :paramtype version: int :keyword environment_refs: Environment reference in project. :paramtype environment_refs: list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] :keyword parameters: Parameters in project. :paramtype parameters: list[~azure.mgmt.datafactory.models.SsisParameter] """ - super(SsisProject, self).__init__(id=id, name=name, description=description, **kwargs) - self.type = 'Project' # type: str + super().__init__(id=id, name=name, description=description, **kwargs) + self.type = "Project" # type: str self.folder_id = folder_id self.version = version self.environment_refs = environment_refs self.parameters = parameters -class SSISPropertyOverride(msrest.serialization.Model): +class SSISPropertyOverride(_serialization.Model): """SSIS property override. All required parameters must be populated in order to send to Azure. - :ivar value: Required. SSIS package property override value. Type: string (or Expression with - resultType string). - :vartype value: any + :ivar value: SSIS package property override value. Type: string (or Expression with resultType + string). Required. 
+ :vartype value: JSON :ivar is_sensitive: Whether SSIS package property override value is sensitive data. Value will be encrypted in SSISDB if it is true. :vartype is_sensitive: bool """ _validation = { - 'value': {'required': True}, + "value": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': 'object'}, - 'is_sensitive': {'key': 'isSensitive', 'type': 'bool'}, + "value": {"key": "value", "type": "object"}, + "is_sensitive": {"key": "isSensitive", "type": "bool"}, } - def __init__( - self, - *, - value: Any, - is_sensitive: Optional[bool] = None, - **kwargs - ): + def __init__(self, *, value: JSON, is_sensitive: Optional[bool] = None, **kwargs): """ - :keyword value: Required. SSIS package property override value. Type: string (or Expression - with resultType string). - :paramtype value: any + :keyword value: SSIS package property override value. Type: string (or Expression with + resultType string). Required. + :paramtype value: JSON :keyword is_sensitive: Whether SSIS package property override value is sensitive data. Value will be encrypted in SSISDB if it is true. :paramtype is_sensitive: bool """ - super(SSISPropertyOverride, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.is_sensitive = is_sensitive -class SsisVariable(msrest.serialization.Model): +class SsisVariable(_serialization.Model): """Ssis variable. :ivar id: Variable id. - :vartype id: long + :vartype id: int :ivar name: Variable name. :vartype name: str :ivar description: Variable description. @@ -55845,19 +59296,19 @@ class SsisVariable(msrest.serialization.Model): """ _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'sensitive': {'key': 'sensitive', 'type': 'bool'}, - 'value': {'key': 'value', 'type': 'str'}, - 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, + "id": {"key": "id", "type": "int"}, + "name": {"key": "name", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "data_type": {"key": "dataType", "type": "str"}, + "sensitive": {"key": "sensitive", "type": "bool"}, + "value": {"key": "value", "type": "str"}, + "sensitive_value": {"key": "sensitiveValue", "type": "str"}, } def __init__( self, *, - id: Optional[int] = None, + id: Optional[int] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, description: Optional[str] = None, data_type: Optional[str] = None, @@ -55868,7 +59319,7 @@ def __init__( ): """ :keyword id: Variable id. - :paramtype id: long + :paramtype id: int :keyword name: Variable name. :paramtype name: str :keyword description: Variable description. @@ -55882,7 +59333,7 @@ def __init__( :keyword sensitive_value: Variable sensitive value. :paramtype sensitive_value: str """ - super(SsisVariable, self).__init__(**kwargs) + super().__init__(**kwargs) self.id = id self.name = name self.description = description @@ -55892,96 +59343,96 @@ def __init__( self.sensitive_value = sensitive_value -class StagingSettings(msrest.serialization.Model): +class StagingSettings(_serialization.Model): """Staging settings. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar linked_service_name: Required. Staging linked service reference. 
+ :vartype additional_properties: dict[str, JSON] + :ivar linked_service_name: Staging linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar path: The path to storage for storing the interim data. Type: string (or Expression with resultType string). - :vartype path: any + :vartype path: JSON :ivar enable_compression: Specifies whether to use compression when copying data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). - :vartype enable_compression: any + :vartype enable_compression: JSON """ _validation = { - 'linked_service_name': {'required': True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'path': {'key': 'path', 'type': 'object'}, - 'enable_compression': {'key': 'enableCompression', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "path": {"key": "path", "type": "object"}, + "enable_compression": {"key": "enableCompression", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, - path: Optional[Any] = None, - enable_compression: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + path: Optional[JSON] = None, + enable_compression: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword linked_service_name: Required. Staging linked service reference. + :paramtype additional_properties: dict[str, JSON] + :keyword linked_service_name: Staging linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword path: The path to storage for storing the interim data. Type: string (or Expression with resultType string). - :paramtype path: any + :paramtype path: JSON :keyword enable_compression: Specifies whether to use compression when copying data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). - :paramtype enable_compression: any + :paramtype enable_compression: JSON """ - super(StagingSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.linked_service_name = linked_service_name self.path = path self.enable_compression = enable_compression -class StoredProcedureParameter(msrest.serialization.Model): +class StoredProcedureParameter(_serialization.Model): """SQL stored procedure parameter. :ivar value: Stored procedure parameter value. Type: string (or Expression with resultType string). - :vartype value: any + :vartype value: JSON :ivar type: Stored procedure parameter type. Known values are: "String", "Int", "Int64", - "Decimal", "Guid", "Boolean", "Date". + "Decimal", "Guid", "Boolean", and "Date". 
:vartype type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType """ _attribute_map = { - 'value': {'key': 'value', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, + "value": {"key": "value", "type": "object"}, + "type": {"key": "type", "type": "str"}, } def __init__( self, *, - value: Optional[Any] = None, + value: Optional[JSON] = None, type: Optional[Union[str, "_models.StoredProcedureParameterType"]] = None, **kwargs ): """ :keyword value: Stored procedure parameter value. Type: string (or Expression with resultType string). - :paramtype value: any + :paramtype value: JSON :keyword type: Stored procedure parameter type. Known values are: "String", "Int", "Int64", - "Decimal", "Guid", "Boolean", "Date". + "Decimal", "Guid", "Boolean", and "Date". :paramtype type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType """ - super(StoredProcedureParameter, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.type = type @@ -55993,10 +59444,10 @@ class SwitchActivity(ControlActivity): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -56004,8 +59455,8 @@ class SwitchActivity(ControlActivity): :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :ivar on: Required. An expression that would evaluate to a string or integer. This is used to - determine the block of activities in cases that will be executed. + :ivar on: An expression that would evaluate to a string or integer. This is used to determine + the block of activities in cases that will be executed. Required. :vartype on: ~azure.mgmt.datafactory.models.Expression :ivar cases: List of cases that correspond to expected values of the 'on' property. 
This is an optional property and if not provided, the activity will execute activities provided in @@ -56017,21 +59468,21 @@ class SwitchActivity(ControlActivity): """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'on': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "on": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'on': {'key': 'typeProperties.on', 'type': 'Expression'}, - 'cases': {'key': 'typeProperties.cases', 'type': '[SwitchCase]'}, - 'default_activities': {'key': 'typeProperties.defaultActivities', 'type': '[Activity]'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "on": {"key": "typeProperties.on", "type": "Expression"}, + "cases": {"key": "typeProperties.cases", "type": "[SwitchCase]"}, + "default_activities": {"key": "typeProperties.defaultActivities", "type": "[Activity]"}, } def __init__( @@ -56039,7 +59490,7 @@ def __init__( *, name: str, on: "_models.Expression", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, @@ -56050,8 +59501,8 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -56059,8 +59510,8 @@ def __init__( :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :keyword on: Required. An expression that would evaluate to a string or integer. This is used - to determine the block of activities in cases that will be executed. + :keyword on: An expression that would evaluate to a string or integer. This is used to + determine the block of activities in cases that will be executed. Required. :paramtype on: ~azure.mgmt.datafactory.models.Expression :keyword cases: List of cases that correspond to expected values of the 'on' property. This is an optional property and if not provided, the activity will execute activities provided in @@ -56070,14 +59521,21 @@ def __init__( This is an optional property and if not provided, the activity will exit without any action. 
:paramtype default_activities: list[~azure.mgmt.datafactory.models.Activity] """ - super(SwitchActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'Switch' # type: str + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + **kwargs + ) + self.type = "Switch" # type: str self.on = on self.cases = cases self.default_activities = default_activities -class SwitchCase(msrest.serialization.Model): +class SwitchCase(_serialization.Model): """Switch cases with have a value and corresponding activities. :ivar value: Expected value that satisfies the expression result of the 'on' property. @@ -56087,37 +59545,31 @@ class SwitchCase(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': 'str'}, - 'activities': {'key': 'activities', 'type': '[Activity]'}, + "value": {"key": "value", "type": "str"}, + "activities": {"key": "activities", "type": "[Activity]"}, } - def __init__( - self, - *, - value: Optional[str] = None, - activities: Optional[List["_models.Activity"]] = None, - **kwargs - ): + def __init__(self, *, value: Optional[str] = None, activities: Optional[List["_models.Activity"]] = None, **kwargs): """ :keyword value: Expected value that satisfies the expression result of the 'on' property. :paramtype value: str :keyword activities: List of activities to execute for satisfied case condition. :paramtype activities: list[~azure.mgmt.datafactory.models.Activity] """ - super(SwitchCase, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.activities = activities -class SybaseLinkedService(LinkedService): +class SybaseLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Linked service for Sybase data source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -56126,72 +59578,72 @@ class SybaseLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar server: Required. Server name for connection. Type: string (or Expression with resultType - string). - :vartype server: any - :ivar database: Required. Database name for connection. Type: string (or Expression with - resultType string). - :vartype database: any + :vartype annotations: list[JSON] + :ivar server: Server name for connection. Type: string (or Expression with resultType string). + Required. + :vartype server: JSON + :ivar database: Database name for connection. Type: string (or Expression with resultType + string). Required. + :vartype database: JSON :ivar schema: Schema name for connection. Type: string (or Expression with resultType string). 
- :vartype schema: any + :vartype schema: JSON :ivar authentication_type: AuthenticationType to be used for connection. Known values are: - "Basic", "Windows". + "Basic" and "Windows". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.SybaseAuthenticationType :ivar username: Username for authentication. Type: string (or Expression with resultType string). - :vartype username: any + :vartype username: JSON :ivar password: Password for authentication. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, + "type": {"required": True}, + "server": {"required": True}, + "database": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "server": {"key": "typeProperties.server", "type": "object"}, + "database": {"key": "typeProperties.database", "type": "object"}, + "schema": {"key": "typeProperties.schema", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - server: Any, - database: Any, - additional_properties: Optional[Dict[str, Any]] = None, + server: JSON, + database: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - schema: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + schema: Optional[JSON] = None, authentication_type: Optional[Union[str, "_models.SybaseAuthenticationType"]] = None, - username: Optional[Any] = None, + username: Optional[JSON] = None, password: Optional["_models.SecretBase"] 
= None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -56199,31 +59651,38 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword server: Required. Server name for connection. Type: string (or Expression with - resultType string). - :paramtype server: any - :keyword database: Required. Database name for connection. Type: string (or Expression with - resultType string). - :paramtype database: any + :paramtype annotations: list[JSON] + :keyword server: Server name for connection. Type: string (or Expression with resultType + string). Required. + :paramtype server: JSON + :keyword database: Database name for connection. Type: string (or Expression with resultType + string). Required. + :paramtype database: JSON :keyword schema: Schema name for connection. Type: string (or Expression with resultType string). - :paramtype schema: any + :paramtype schema: JSON :keyword authentication_type: AuthenticationType to be used for connection. Known values are: - "Basic", "Windows". + "Basic" and "Windows". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.SybaseAuthenticationType :keyword username: Username for authentication. Type: string (or Expression with resultType string). - :paramtype username: any + :paramtype username: JSON :keyword password: Password for authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(SybaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Sybase' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Sybase" # type: str self.server = server self.database = database self.schema = schema @@ -56240,87 +59699,96 @@ class SybaseSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: Database query. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: Database query. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'SybaseSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "SybaseSource" # type: str self.query = query @@ -56331,92 +59799,502 @@ class SybaseTableDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. 
:vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The Sybase table name. Type: string (or Expression with resultType string). - :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The Sybase table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(SybaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'SybaseTable' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "SybaseTable" # type: str self.table_name = table_name +class SynapseNotebookActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes + """Execute Synapse notebook activity. + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. + :vartype name: str + :ivar type: Type of activity. Required. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar notebook: Synapse notebook reference. Required. + :vartype notebook: ~azure.mgmt.datafactory.models.SynapseNotebookReference + :ivar spark_pool: The name of the big data pool which will be used to execute the notebook. + :vartype spark_pool: ~azure.mgmt.datafactory.models.BigDataPoolParametrizationReference + :ivar parameters: Notebook parameters. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.NotebookParameter] + :ivar executor_size: Number of core and memory to be used for executors allocated in the + specified Spark pool for the session, which will be used for overriding 'executorCores' and + 'executorMemory' of the notebook you provide. Type: string (or Expression with resultType + string). + :vartype executor_size: JSON + :ivar conf: Spark configuration properties, which will override the 'conf' of the notebook you + provide. 
+ :vartype conf: JSON + :ivar driver_size: Number of core and memory to be used for driver allocated in the specified + Spark pool for the session, which will be used for overriding 'driverCores' and 'driverMemory' + of the notebook you provide. Type: string (or Expression with resultType string). + :vartype driver_size: JSON + :ivar num_executors: Number of executors to launch for this session, which will override the + 'numExecutors' of the notebook you provide. + :vartype num_executors: int + """ + + _validation = { + "name": {"required": True}, + "type": {"required": True}, + "notebook": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "notebook": {"key": "typeProperties.notebook", "type": "SynapseNotebookReference"}, + "spark_pool": {"key": "typeProperties.sparkPool", "type": "BigDataPoolParametrizationReference"}, + "parameters": {"key": "typeProperties.parameters", "type": "{NotebookParameter}"}, + "executor_size": {"key": "typeProperties.executorSize", "type": "object"}, + "conf": {"key": "typeProperties.conf", "type": "object"}, + "driver_size": {"key": "typeProperties.driverSize", "type": "object"}, + "num_executors": {"key": "typeProperties.numExecutors", "type": "int"}, + } + + def __init__( + self, + *, + name: str, + notebook: "_models.SynapseNotebookReference", + additional_properties: Optional[Dict[str, JSON]] = None, + description: Optional[str] = None, + depends_on: Optional[List["_models.ActivityDependency"]] = None, + user_properties: Optional[List["_models.UserProperty"]] = None, + linked_service_name: Optional["_models.LinkedServiceReference"] = None, + policy: Optional["_models.ActivityPolicy"] = None, + spark_pool: Optional["_models.BigDataPoolParametrizationReference"] = None, + parameters: Optional[Dict[str, "_models.NotebookParameter"]] = None, + executor_size: Optional[JSON] = None, + conf: Optional[JSON] = None, + driver_size: Optional[JSON] = None, + num_executors: Optional[int] = None, + **kwargs + ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword notebook: Synapse notebook reference. Required. + :paramtype notebook: ~azure.mgmt.datafactory.models.SynapseNotebookReference + :keyword spark_pool: The name of the big data pool which will be used to execute the notebook. 
+ :paramtype spark_pool: ~azure.mgmt.datafactory.models.BigDataPoolParametrizationReference + :keyword parameters: Notebook parameters. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.NotebookParameter] + :keyword executor_size: Number of core and memory to be used for executors allocated in the + specified Spark pool for the session, which will be used for overriding 'executorCores' and + 'executorMemory' of the notebook you provide. Type: string (or Expression with resultType + string). + :paramtype executor_size: JSON + :keyword conf: Spark configuration properties, which will override the 'conf' of the notebook + you provide. + :paramtype conf: JSON + :keyword driver_size: Number of core and memory to be used for driver allocated in the + specified Spark pool for the session, which will be used for overriding 'driverCores' and + 'driverMemory' of the notebook you provide. Type: string (or Expression with resultType + string). + :paramtype driver_size: JSON + :keyword num_executors: Number of executors to launch for this session, which will override the + 'numExecutors' of the notebook you provide. + :paramtype num_executors: int + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "SynapseNotebook" # type: str + self.notebook = notebook + self.spark_pool = spark_pool + self.parameters = parameters + self.executor_size = executor_size + self.conf = conf + self.driver_size = driver_size + self.num_executors = num_executors + + +class SynapseNotebookReference(_serialization.Model): + """Synapse notebook reference type. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Synapse notebook reference type. Required. "NotebookReference" + :vartype type: str or ~azure.mgmt.datafactory.models.NotebookReferenceType + :ivar reference_name: Reference notebook name. Type: string (or Expression with resultType + string). Required. + :vartype reference_name: JSON + """ + + _validation = { + "type": {"required": True}, + "reference_name": {"required": True}, + } + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "reference_name": {"key": "referenceName", "type": "object"}, + } + + def __init__(self, *, type: Union[str, "_models.NotebookReferenceType"], reference_name: JSON, **kwargs): + """ + :keyword type: Synapse notebook reference type. Required. "NotebookReference" + :paramtype type: str or ~azure.mgmt.datafactory.models.NotebookReferenceType + :keyword reference_name: Reference notebook name. Type: string (or Expression with resultType + string). Required. + :paramtype reference_name: JSON + """ + super().__init__(**kwargs) + self.type = type + self.reference_name = reference_name + + +class SynapseSparkJobDefinitionActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes + """Execute spark job activity. + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. + :vartype name: str + :ivar type: Type of activity. Required. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. 
+ :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar spark_job: Synapse spark job reference. Required. + :vartype spark_job: ~azure.mgmt.datafactory.models.SynapseSparkJobReference + :ivar arguments: User specified arguments to SynapseSparkJobDefinitionActivity. + :vartype arguments: list[any] + :ivar file: The main file used for the job, which will override the 'file' of the spark job + definition you provide. Type: string (or Expression with resultType string). + :vartype file: JSON + :ivar class_name: The fully-qualified identifier or the main class that is in the main + definition file, which will override the 'className' of the spark job definition you provide. + Type: string (or Expression with resultType string). + :vartype class_name: JSON + :ivar files: Additional files used for reference in the main definition file, which will + override the 'files' of the spark job definition you provide. + :vartype files: list[any] + :ivar target_big_data_pool: The name of the big data pool which will be used to execute the + spark batch job, which will override the 'targetBigDataPool' of the spark job definition you + provide. + :vartype target_big_data_pool: + ~azure.mgmt.datafactory.models.BigDataPoolParametrizationReference + :ivar executor_size: Number of core and memory to be used for executors allocated in the + specified Spark pool for the job, which will be used for overriding 'executorCores' and + 'executorMemory' of the spark job definition you provide. Type: string (or Expression with + resultType string). + :vartype executor_size: JSON + :ivar conf: Spark configuration properties, which will override the 'conf' of the spark job + definition you provide. + :vartype conf: JSON + :ivar driver_size: Number of core and memory to be used for driver allocated in the specified + Spark pool for the job, which will be used for overriding 'driverCores' and 'driverMemory' of + the spark job definition you provide. Type: string (or Expression with resultType string). + :vartype driver_size: JSON + :ivar num_executors: Number of executors to launch for this job, which will override the + 'numExecutors' of the spark job definition you provide. 
+ :vartype num_executors: int + """ + + _validation = { + "name": {"required": True}, + "type": {"required": True}, + "spark_job": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "spark_job": {"key": "typeProperties.sparkJob", "type": "SynapseSparkJobReference"}, + "arguments": {"key": "typeProperties.args", "type": "[object]"}, + "file": {"key": "typeProperties.file", "type": "object"}, + "class_name": {"key": "typeProperties.className", "type": "object"}, + "files": {"key": "typeProperties.files", "type": "[object]"}, + "target_big_data_pool": { + "key": "typeProperties.targetBigDataPool", + "type": "BigDataPoolParametrizationReference", + }, + "executor_size": {"key": "typeProperties.executorSize", "type": "object"}, + "conf": {"key": "typeProperties.conf", "type": "object"}, + "driver_size": {"key": "typeProperties.driverSize", "type": "object"}, + "num_executors": {"key": "typeProperties.numExecutors", "type": "int"}, + } + + def __init__( + self, + *, + name: str, + spark_job: "_models.SynapseSparkJobReference", + additional_properties: Optional[Dict[str, JSON]] = None, + description: Optional[str] = None, + depends_on: Optional[List["_models.ActivityDependency"]] = None, + user_properties: Optional[List["_models.UserProperty"]] = None, + linked_service_name: Optional["_models.LinkedServiceReference"] = None, + policy: Optional["_models.ActivityPolicy"] = None, + arguments: Optional[List[Any]] = None, + file: Optional[JSON] = None, + class_name: Optional[JSON] = None, + files: Optional[List[Any]] = None, + target_big_data_pool: Optional["_models.BigDataPoolParametrizationReference"] = None, + executor_size: Optional[JSON] = None, + conf: Optional[JSON] = None, + driver_size: Optional[JSON] = None, + num_executors: Optional[int] = None, + **kwargs + ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword spark_job: Synapse spark job reference. Required. + :paramtype spark_job: ~azure.mgmt.datafactory.models.SynapseSparkJobReference + :keyword arguments: User specified arguments to SynapseSparkJobDefinitionActivity. + :paramtype arguments: list[any] + :keyword file: The main file used for the job, which will override the 'file' of the spark job + definition you provide. Type: string (or Expression with resultType string). 
+ :paramtype file: JSON + :keyword class_name: The fully-qualified identifier or the main class that is in the main + definition file, which will override the 'className' of the spark job definition you provide. + Type: string (or Expression with resultType string). + :paramtype class_name: JSON + :keyword files: Additional files used for reference in the main definition file, which will + override the 'files' of the spark job definition you provide. + :paramtype files: list[any] + :keyword target_big_data_pool: The name of the big data pool which will be used to execute the + spark batch job, which will override the 'targetBigDataPool' of the spark job definition you + provide. + :paramtype target_big_data_pool: + ~azure.mgmt.datafactory.models.BigDataPoolParametrizationReference + :keyword executor_size: Number of core and memory to be used for executors allocated in the + specified Spark pool for the job, which will be used for overriding 'executorCores' and + 'executorMemory' of the spark job definition you provide. Type: string (or Expression with + resultType string). + :paramtype executor_size: JSON + :keyword conf: Spark configuration properties, which will override the 'conf' of the spark job + definition you provide. + :paramtype conf: JSON + :keyword driver_size: Number of core and memory to be used for driver allocated in the + specified Spark pool for the job, which will be used for overriding 'driverCores' and + 'driverMemory' of the spark job definition you provide. Type: string (or Expression with + resultType string). + :paramtype driver_size: JSON + :keyword num_executors: Number of executors to launch for this job, which will override the + 'numExecutors' of the spark job definition you provide. + :paramtype num_executors: int + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "SparkJob" # type: str + self.spark_job = spark_job + self.arguments = arguments + self.file = file + self.class_name = class_name + self.files = files + self.target_big_data_pool = target_big_data_pool + self.executor_size = executor_size + self.conf = conf + self.driver_size = driver_size + self.num_executors = num_executors + + +class SynapseSparkJobReference(_serialization.Model): + """Synapse spark job reference type. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Synapse spark job reference type. Required. "SparkJobDefinitionReference" + :vartype type: str or ~azure.mgmt.datafactory.models.SparkJobReferenceType + :ivar reference_name: Reference spark job name. Required. + :vartype reference_name: str + """ + + _validation = { + "type": {"required": True}, + "reference_name": {"required": True}, + } + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "reference_name": {"key": "referenceName", "type": "str"}, + } + + def __init__(self, *, type: Union[str, "_models.SparkJobReferenceType"], reference_name: str, **kwargs): + """ + :keyword type: Synapse spark job reference type. Required. "SparkJobDefinitionReference" + :paramtype type: str or ~azure.mgmt.datafactory.models.SparkJobReferenceType + :keyword reference_name: Reference spark job name. Required. 
+ :paramtype reference_name: str + """ + super().__init__(**kwargs) + self.type = type + self.reference_name = reference_name + + class TabularTranslator(CopyTranslator): """A copy activity tabular translator. @@ -56424,99 +60302,99 @@ class TabularTranslator(CopyTranslator): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy translator type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy translator type. Required. :vartype type: str :ivar column_mappings: Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: MyName" Type: string (or Expression with resultType string). This property will be retired. Please use mappings property. - :vartype column_mappings: any + :vartype column_mappings: JSON :ivar schema_mapping: The schema mapping to map between tabular data and hierarchical data. Example: {"Column1": "$.Column1", "Column2": "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or Expression with resultType object). This property will be retired. Please use mappings property. - :vartype schema_mapping: any + :vartype schema_mapping: JSON :ivar collection_reference: The JSON Path of the Nested Array that is going to do cross-apply. Type: object (or Expression with resultType object). - :vartype collection_reference: any + :vartype collection_reference: JSON :ivar map_complex_values_to_string: Whether to map complex (array and object) values to simple strings in json format. Type: boolean (or Expression with resultType boolean). - :vartype map_complex_values_to_string: any + :vartype map_complex_values_to_string: JSON :ivar mappings: Column mappings with logical types. Tabular->tabular example: [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Hierarchical->tabular example: [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Type: object (or Expression with resultType object). - :vartype mappings: any + :vartype mappings: JSON :ivar type_conversion: Whether to enable the advanced type conversion feature in the Copy activity. Type: boolean (or Expression with resultType boolean). - :vartype type_conversion: any + :vartype type_conversion: JSON :ivar type_conversion_settings: Type conversion settings. 
:vartype type_conversion_settings: ~azure.mgmt.datafactory.models.TypeConversionSettings """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, - 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, - 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, - 'map_complex_values_to_string': {'key': 'mapComplexValuesToString', 'type': 'object'}, - 'mappings': {'key': 'mappings', 'type': 'object'}, - 'type_conversion': {'key': 'typeConversion', 'type': 'object'}, - 'type_conversion_settings': {'key': 'typeConversionSettings', 'type': 'TypeConversionSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "column_mappings": {"key": "columnMappings", "type": "object"}, + "schema_mapping": {"key": "schemaMapping", "type": "object"}, + "collection_reference": {"key": "collectionReference", "type": "object"}, + "map_complex_values_to_string": {"key": "mapComplexValuesToString", "type": "object"}, + "mappings": {"key": "mappings", "type": "object"}, + "type_conversion": {"key": "typeConversion", "type": "object"}, + "type_conversion_settings": {"key": "typeConversionSettings", "type": "TypeConversionSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - column_mappings: Optional[Any] = None, - schema_mapping: Optional[Any] = None, - collection_reference: Optional[Any] = None, - map_complex_values_to_string: Optional[Any] = None, - mappings: Optional[Any] = None, - type_conversion: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + column_mappings: Optional[JSON] = None, + schema_mapping: Optional[JSON] = None, + collection_reference: Optional[JSON] = None, + map_complex_values_to_string: Optional[JSON] = None, + mappings: Optional[JSON] = None, + type_conversion: Optional[JSON] = None, type_conversion_settings: Optional["_models.TypeConversionSettings"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword column_mappings: Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: MyName" Type: string (or Expression with resultType string). This property will be retired. Please use mappings property. - :paramtype column_mappings: any + :paramtype column_mappings: JSON :keyword schema_mapping: The schema mapping to map between tabular data and hierarchical data. Example: {"Column1": "$.Column1", "Column2": "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or Expression with resultType object). This property will be retired. Please use mappings property. - :paramtype schema_mapping: any + :paramtype schema_mapping: JSON :keyword collection_reference: The JSON Path of the Nested Array that is going to do cross-apply. Type: object (or Expression with resultType object). - :paramtype collection_reference: any + :paramtype collection_reference: JSON :keyword map_complex_values_to_string: Whether to map complex (array and object) values to simple strings in json format. Type: boolean (or Expression with resultType boolean). 
- :paramtype map_complex_values_to_string: any + :paramtype map_complex_values_to_string: JSON :keyword mappings: Column mappings with logical types. Tabular->tabular example: [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Hierarchical->tabular example: [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Type: object (or Expression with resultType object). - :paramtype mappings: any + :paramtype mappings: JSON :keyword type_conversion: Whether to enable the advanced type conversion feature in the Copy activity. Type: boolean (or Expression with resultType boolean). - :paramtype type_conversion: any + :paramtype type_conversion: JSON :keyword type_conversion_settings: Type conversion settings. :paramtype type_conversion_settings: ~azure.mgmt.datafactory.models.TypeConversionSettings """ - super(TabularTranslator, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'TabularTranslator' # type: str + super().__init__(additional_properties=additional_properties, **kwargs) + self.type = "TabularTranslator" # type: str self.column_mappings = column_mappings self.schema_mapping = schema_mapping self.collection_reference = collection_reference @@ -56533,41 +60411,41 @@ class TarGZipReadSettings(CompressionReadSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The Compression setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The Compression setting type. Required. :vartype type: str :ivar preserve_compression_file_name_as_folder: Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). - :vartype preserve_compression_file_name_as_folder: any + :vartype preserve_compression_file_name_as_folder: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "preserve_compression_file_name_as_folder": {"key": "preserveCompressionFileNameAsFolder", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - preserve_compression_file_name_as_folder: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + preserve_compression_file_name_as_folder: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword preserve_compression_file_name_as_folder: Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). 
- :paramtype preserve_compression_file_name_as_folder: any + :paramtype preserve_compression_file_name_as_folder: JSON """ - super(TarGZipReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'TarGZipReadSettings' # type: str + super().__init__(additional_properties=additional_properties, **kwargs) + self.type = "TarGZipReadSettings" # type: str self.preserve_compression_file_name_as_folder = preserve_compression_file_name_as_folder @@ -56578,53 +60456,53 @@ class TarReadSettings(CompressionReadSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The Compression setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The Compression setting type. Required. :vartype type: str :ivar preserve_compression_file_name_as_folder: Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). - :vartype preserve_compression_file_name_as_folder: any + :vartype preserve_compression_file_name_as_folder: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "preserve_compression_file_name_as_folder": {"key": "preserveCompressionFileNameAsFolder", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - preserve_compression_file_name_as_folder: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + preserve_compression_file_name_as_folder: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword preserve_compression_file_name_as_folder: Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). - :paramtype preserve_compression_file_name_as_folder: any + :paramtype preserve_compression_file_name_as_folder: JSON """ - super(TarReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'TarReadSettings' # type: str + super().__init__(additional_properties=additional_properties, **kwargs) + self.type = "TarReadSettings" # type: str self.preserve_compression_file_name_as_folder = preserve_compression_file_name_as_folder -class TeamDeskLinkedService(LinkedService): +class TeamDeskLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Linked service for TeamDesk. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. 
:vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -56633,16 +60511,16 @@ class TeamDeskLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar authentication_type: Required. The authentication type to use. Known values are: "Basic", - "Token". + :vartype annotations: list[JSON] + :ivar authentication_type: The authentication type to use. Required. Known values are: "Basic" + and "Token". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.TeamDeskAuthenticationType - :ivar url: Required. The url to connect TeamDesk source. Type: string (or Expression with - resultType string). - :vartype url: any + :ivar url: The url to connect TeamDesk source. Type: string (or Expression with resultType + string). Required. + :vartype url: JSON :ivar user_name: The username of the TeamDesk source. Type: string (or Expression with resultType string). - :vartype user_name: any + :vartype user_name: JSON :ivar password: The password of the TeamDesk source. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar api_token: The api token for the TeamDesk source. @@ -56650,50 +60528,50 @@ class TeamDeskLinkedService(LinkedService): :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'authentication_type': {'required': True}, - 'url': {'required': True}, + "type": {"required": True}, + "authentication_type": {"required": True}, + "url": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'api_token': {'key': 'typeProperties.apiToken', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, + "url": {"key": "typeProperties.url", "type": "object"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "api_token": {"key": "typeProperties.apiToken", "type": "SecretBase"}, + "encrypted_credential": {"key": 
"typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, authentication_type: Union[str, "_models.TeamDeskAuthenticationType"], - url: Any, - additional_properties: Optional[Dict[str, Any]] = None, + url: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - user_name: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, api_token: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -56701,17 +60579,17 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword authentication_type: Required. The authentication type to use. Known values are: - "Basic", "Token". + :paramtype annotations: list[JSON] + :keyword authentication_type: The authentication type to use. Required. Known values are: + "Basic" and "Token". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.TeamDeskAuthenticationType - :keyword url: Required. The url to connect TeamDesk source. Type: string (or Expression with - resultType string). - :paramtype url: any + :keyword url: The url to connect TeamDesk source. Type: string (or Expression with resultType + string). Required. + :paramtype url: JSON :keyword user_name: The username of the TeamDesk source. Type: string (or Expression with resultType string). - :paramtype user_name: any + :paramtype user_name: JSON :keyword password: The password of the TeamDesk source. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword api_token: The api token for the TeamDesk source. @@ -56719,10 +60597,17 @@ def __init__( :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :paramtype encrypted_credential: any - """ - super(TeamDeskLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'TeamDesk' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "TeamDesk" # type: str self.authentication_type = authentication_type self.url = url self.user_name = user_name @@ -56731,15 +60616,15 @@ def __init__( self.encrypted_credential = encrypted_credential -class TeradataLinkedService(LinkedService): +class TeradataLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Linked service for Teradata data source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -56748,65 +60633,65 @@ class TeradataLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_string: Teradata ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype connection_string: JSON :ivar server: Server name for connection. Type: string (or Expression with resultType string). - :vartype server: any + :vartype server: JSON :ivar authentication_type: AuthenticationType to be used for connection. Known values are: - "Basic", "Windows". + "Basic" and "Windows". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.TeradataAuthenticationType :ivar username: Username for authentication. Type: string (or Expression with resultType string). - :vartype username: any + :vartype username: JSON :ivar password: Password for authentication. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "server": {"key": "typeProperties.server", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_string: Optional[Any] = None, - server: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_string: Optional[JSON] = None, + server: Optional[JSON] = None, authentication_type: Optional[Union[str, "_models.TeradataAuthenticationType"]] = None, - username: Optional[Any] = None, + username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -56814,29 +60699,36 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_string: Teradata ODBC connection string. 
Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype connection_string: JSON :keyword server: Server name for connection. Type: string (or Expression with resultType string). - :paramtype server: any + :paramtype server: JSON :keyword authentication_type: AuthenticationType to be used for connection. Known values are: - "Basic", "Windows". + "Basic" and "Windows". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.TeradataAuthenticationType :keyword username: Username for authentication. Type: string (or Expression with resultType string). - :paramtype username: any + :paramtype username: JSON :keyword password: Password for authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(TeradataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Teradata' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Teradata" # type: str self.connection_string = connection_string self.server = server self.authentication_type = authentication_type @@ -56845,385 +60737,406 @@ def __init__( self.encrypted_credential = encrypted_credential -class TeradataPartitionSettings(msrest.serialization.Model): +class TeradataPartitionSettings(_serialization.Model): """The settings that will be leveraged for teradata source partitioning. :ivar partition_column_name: The name of the column that will be used for proceeding range or hash partitioning. Type: string (or Expression with resultType string). - :vartype partition_column_name: any + :vartype partition_column_name: JSON :ivar partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :vartype partition_upper_bound: any + :vartype partition_upper_bound: JSON :ivar partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). 
- :vartype partition_lower_bound: any + :vartype partition_lower_bound: JSON """ _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + "partition_column_name": {"key": "partitionColumnName", "type": "object"}, + "partition_upper_bound": {"key": "partitionUpperBound", "type": "object"}, + "partition_lower_bound": {"key": "partitionLowerBound", "type": "object"}, } def __init__( self, *, - partition_column_name: Optional[Any] = None, - partition_upper_bound: Optional[Any] = None, - partition_lower_bound: Optional[Any] = None, + partition_column_name: Optional[JSON] = None, + partition_upper_bound: Optional[JSON] = None, + partition_lower_bound: Optional[JSON] = None, **kwargs ): """ :keyword partition_column_name: The name of the column that will be used for proceeding range or hash partitioning. Type: string (or Expression with resultType string). - :paramtype partition_column_name: any + :paramtype partition_column_name: JSON :keyword partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :paramtype partition_upper_bound: any + :paramtype partition_upper_bound: JSON :keyword partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :paramtype partition_lower_bound: any + :paramtype partition_lower_bound: JSON """ - super(TeradataPartitionSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.partition_column_name = partition_column_name self.partition_upper_bound = partition_upper_bound self.partition_lower_bound = partition_lower_bound -class TeradataSource(TabularSource): +class TeradataSource(TabularSource): # pylint: disable=too-many-instance-attributes """A copy activity Teradata source. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: Teradata query. Type: string (or Expression with resultType string). - :vartype query: any + :vartype query: JSON :ivar partition_option: The partition mechanism that will be used for teradata read in parallel. Possible values include: "None", "Hash", "DynamicRange". - :vartype partition_option: any + :vartype partition_option: JSON :ivar partition_settings: The settings that will be leveraged for teradata source partitioning. :vartype partition_settings: ~azure.mgmt.datafactory.models.TeradataPartitionSettings """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "partition_option": {"key": "partitionOption", "type": "object"}, + "partition_settings": {"key": "partitionSettings", "type": "TeradataPartitionSettings"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, - partition_option: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, + partition_option: Optional[JSON] = None, partition_settings: Optional["_models.TeradataPartitionSettings"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: Teradata query. Type: string (or Expression with resultType string). - :paramtype query: any + :paramtype query: JSON :keyword partition_option: The partition mechanism that will be used for teradata read in parallel. Possible values include: "None", "Hash", "DynamicRange". - :paramtype partition_option: any + :paramtype partition_option: JSON :keyword partition_settings: The settings that will be leveraged for teradata source partitioning. :paramtype partition_settings: ~azure.mgmt.datafactory.models.TeradataPartitionSettings """ - super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'TeradataSource' # type: str + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "TeradataSource" # type: str self.query = query self.partition_option = partition_option self.partition_settings = partition_settings -class TeradataTableDataset(Dataset): +class TeradataTableDataset(Dataset): # pylint: disable=too-many-instance-attributes """The Teradata database dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. 
:vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar database: The database name of Teradata. Type: string (or Expression with resultType string). - :vartype database: any + :vartype database: JSON :ivar table: The table name of Teradata. Type: string (or Expression with resultType string). - :vartype table: any + :vartype table: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "database": {"key": "typeProperties.database", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - database: Optional[Any] = None, - table: 
Optional[Any] = None, + database: Optional[JSON] = None, + table: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword database: The database name of Teradata. Type: string (or Expression with resultType string). - :paramtype database: any + :paramtype database: JSON :keyword table: The table name of Teradata. Type: string (or Expression with resultType string). - :paramtype table: any - """ - super(TeradataTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'TeradataTable' # type: str + :paramtype table: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "TeradataTable" # type: str self.database = database self.table = table -class TextFormat(DatasetStorageFormat): +class TextFormat(DatasetStorageFormat): # pylint: disable=too-many-instance-attributes """The data stored in text format. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset storage format.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage format. Required. :vartype type: str :ivar serializer: Serializer. Type: string (or Expression with resultType string). - :vartype serializer: any + :vartype serializer: JSON :ivar deserializer: Deserializer. Type: string (or Expression with resultType string). - :vartype deserializer: any + :vartype deserializer: JSON :ivar column_delimiter: The column delimiter. Type: string (or Expression with resultType string). - :vartype column_delimiter: any + :vartype column_delimiter: JSON :ivar row_delimiter: The row delimiter. 
Type: string (or Expression with resultType string). - :vartype row_delimiter: any + :vartype row_delimiter: JSON :ivar escape_char: The escape character. Type: string (or Expression with resultType string). - :vartype escape_char: any + :vartype escape_char: JSON :ivar quote_char: The quote character. Type: string (or Expression with resultType string). - :vartype quote_char: any + :vartype quote_char: JSON :ivar null_value: The null value string. Type: string (or Expression with resultType string). - :vartype null_value: any + :vartype null_value: JSON :ivar encoding_name: The code page name of the preferred encoding. If miss, the default value is “utf-8”, unless BOM denotes another Unicode encoding. Refer to the “Name” column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :vartype encoding_name: any + :vartype encoding_name: JSON :ivar treat_empty_as_null: Treat empty column values in the text file as null. The default value is true. Type: boolean (or Expression with resultType boolean). - :vartype treat_empty_as_null: any + :vartype treat_empty_as_null: JSON :ivar skip_line_count: The number of lines/rows to be skipped when parsing text files. The default value is 0. Type: integer (or Expression with resultType integer). - :vartype skip_line_count: any + :vartype skip_line_count: JSON :ivar first_row_as_header: When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). - :vartype first_row_as_header: any + :vartype first_row_as_header: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'column_delimiter': {'key': 'columnDelimiter', 'type': 'object'}, - 'row_delimiter': {'key': 'rowDelimiter', 'type': 'object'}, - 'escape_char': {'key': 'escapeChar', 'type': 'object'}, - 'quote_char': {'key': 'quoteChar', 'type': 'object'}, - 'null_value': {'key': 'nullValue', 'type': 'object'}, - 'encoding_name': {'key': 'encodingName', 'type': 'object'}, - 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, - 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, - 'first_row_as_header': {'key': 'firstRowAsHeader', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "serializer": {"key": "serializer", "type": "object"}, + "deserializer": {"key": "deserializer", "type": "object"}, + "column_delimiter": {"key": "columnDelimiter", "type": "object"}, + "row_delimiter": {"key": "rowDelimiter", "type": "object"}, + "escape_char": {"key": "escapeChar", "type": "object"}, + "quote_char": {"key": "quoteChar", "type": "object"}, + "null_value": {"key": "nullValue", "type": "object"}, + "encoding_name": {"key": "encodingName", "type": "object"}, + "treat_empty_as_null": {"key": "treatEmptyAsNull", "type": "object"}, + "skip_line_count": {"key": "skipLineCount", "type": "object"}, + "first_row_as_header": {"key": "firstRowAsHeader", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - serializer: 
Optional[Any] = None, - deserializer: Optional[Any] = None, - column_delimiter: Optional[Any] = None, - row_delimiter: Optional[Any] = None, - escape_char: Optional[Any] = None, - quote_char: Optional[Any] = None, - null_value: Optional[Any] = None, - encoding_name: Optional[Any] = None, - treat_empty_as_null: Optional[Any] = None, - skip_line_count: Optional[Any] = None, - first_row_as_header: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + serializer: Optional[JSON] = None, + deserializer: Optional[JSON] = None, + column_delimiter: Optional[JSON] = None, + row_delimiter: Optional[JSON] = None, + escape_char: Optional[JSON] = None, + quote_char: Optional[JSON] = None, + null_value: Optional[JSON] = None, + encoding_name: Optional[JSON] = None, + treat_empty_as_null: Optional[JSON] = None, + skip_line_count: Optional[JSON] = None, + first_row_as_header: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword serializer: Serializer. Type: string (or Expression with resultType string). - :paramtype serializer: any + :paramtype serializer: JSON :keyword deserializer: Deserializer. Type: string (or Expression with resultType string). - :paramtype deserializer: any + :paramtype deserializer: JSON :keyword column_delimiter: The column delimiter. Type: string (or Expression with resultType string). - :paramtype column_delimiter: any + :paramtype column_delimiter: JSON :keyword row_delimiter: The row delimiter. Type: string (or Expression with resultType string). - :paramtype row_delimiter: any + :paramtype row_delimiter: JSON :keyword escape_char: The escape character. Type: string (or Expression with resultType string). - :paramtype escape_char: any + :paramtype escape_char: JSON :keyword quote_char: The quote character. Type: string (or Expression with resultType string). - :paramtype quote_char: any + :paramtype quote_char: JSON :keyword null_value: The null value string. Type: string (or Expression with resultType string). - :paramtype null_value: any + :paramtype null_value: JSON :keyword encoding_name: The code page name of the preferred encoding. If miss, the default value is “utf-8”, unless BOM denotes another Unicode encoding. Refer to the “Name” column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :paramtype encoding_name: any + :paramtype encoding_name: JSON :keyword treat_empty_as_null: Treat empty column values in the text file as null. The default value is true. Type: boolean (or Expression with resultType boolean). - :paramtype treat_empty_as_null: any + :paramtype treat_empty_as_null: JSON :keyword skip_line_count: The number of lines/rows to be skipped when parsing text files. The default value is 0. Type: integer (or Expression with resultType integer). - :paramtype skip_line_count: any + :paramtype skip_line_count: JSON :keyword first_row_as_header: When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). 
- :paramtype first_row_as_header: any + :paramtype first_row_as_header: JSON """ - super(TextFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.type = 'TextFormat' # type: str + super().__init__( + additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs + ) + self.type = "TextFormat" # type: str self.column_delimiter = column_delimiter self.row_delimiter = row_delimiter self.escape_char = escape_char @@ -57238,47 +61151,40 @@ def __init__( class TriggerDependencyReference(DependencyReference): """Trigger referenced dependency. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: TumblingWindowTriggerDependencyReference. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + TumblingWindowTriggerDependencyReference All required parameters must be populated in order to send to Azure. - :ivar type: Required. The type of dependency reference.Constant filled by server. + :ivar type: The type of dependency reference. Required. :vartype type: str - :ivar reference_trigger: Required. Referenced trigger. + :ivar reference_trigger: Referenced trigger. Required. :vartype reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference """ _validation = { - 'type': {'required': True}, - 'reference_trigger': {'required': True}, + "type": {"required": True}, + "reference_trigger": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, + "type": {"key": "type", "type": "str"}, + "reference_trigger": {"key": "referenceTrigger", "type": "TriggerReference"}, } - _subtype_map = { - 'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'} - } + _subtype_map = {"type": {"TumblingWindowTriggerDependencyReference": "TumblingWindowTriggerDependencyReference"}} - def __init__( - self, - *, - reference_trigger: "_models.TriggerReference", - **kwargs - ): + def __init__(self, *, reference_trigger: "_models.TriggerReference", **kwargs): """ - :keyword reference_trigger: Required. Referenced trigger. + :keyword reference_trigger: Referenced trigger. Required. :paramtype reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference """ - super(TriggerDependencyReference, self).__init__(**kwargs) - self.type = 'TriggerDependencyReference' # type: str + super().__init__(**kwargs) + self.type = "TriggerDependencyReference" # type: str self.reference_trigger = reference_trigger -class TriggerFilterParameters(msrest.serialization.Model): +class TriggerFilterParameters(_serialization.Model): """Query parameters for triggers. :ivar continuation_token: The continuation token for getting the next page of results. 
Null for @@ -57290,16 +61196,12 @@ class TriggerFilterParameters(msrest.serialization.Model): """ _attribute_map = { - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - 'parent_trigger_name': {'key': 'parentTriggerName', 'type': 'str'}, + "continuation_token": {"key": "continuationToken", "type": "str"}, + "parent_trigger_name": {"key": "parentTriggerName", "type": "str"}, } def __init__( - self, - *, - continuation_token: Optional[str] = None, - parent_trigger_name: Optional[str] = None, - **kwargs + self, *, continuation_token: Optional[str] = None, parent_trigger_name: Optional[str] = None, **kwargs ): """ :keyword continuation_token: The continuation token for getting the next page of results. Null @@ -57309,87 +61211,81 @@ def __init__( rerun triggers. :paramtype parent_trigger_name: str """ - super(TriggerFilterParameters, self).__init__(**kwargs) + super().__init__(**kwargs) self.continuation_token = continuation_token self.parent_trigger_name = parent_trigger_name -class TriggerListResponse(msrest.serialization.Model): +class TriggerListResponse(_serialization.Model): """A list of trigger resources. All required parameters must be populated in order to send to Azure. - :ivar value: Required. List of triggers. + :ivar value: List of triggers. Required. :vartype value: list[~azure.mgmt.datafactory.models.TriggerResource] :ivar next_link: The link to the next page of results, if any remaining results exist. :vartype next_link: str """ _validation = { - 'value': {'required': True}, + "value": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[TriggerResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[TriggerResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - *, - value: List["_models.TriggerResource"], - next_link: Optional[str] = None, - **kwargs - ): + def __init__(self, *, value: List["_models.TriggerResource"], next_link: Optional[str] = None, **kwargs): """ - :keyword value: Required. List of triggers. + :keyword value: List of triggers. Required. :paramtype value: list[~azure.mgmt.datafactory.models.TriggerResource] :keyword next_link: The link to the next page of results, if any remaining results exist. :paramtype next_link: str """ - super(TriggerListResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.next_link = next_link -class TriggerPipelineReference(msrest.serialization.Model): +class TriggerPipelineReference(_serialization.Model): """Pipeline that needs to be triggered with the given parameters. :ivar pipeline_reference: Pipeline reference. :vartype pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference :ivar parameters: Pipeline parameters. - :vartype parameters: dict[str, any] + :vartype parameters: dict[str, JSON] """ _attribute_map = { - 'pipeline_reference': {'key': 'pipelineReference', 'type': 'PipelineReference'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, + "pipeline_reference": {"key": "pipelineReference", "type": "PipelineReference"}, + "parameters": {"key": "parameters", "type": "{object}"}, } def __init__( self, *, pipeline_reference: Optional["_models.PipelineReference"] = None, - parameters: Optional[Dict[str, Any]] = None, + parameters: Optional[Dict[str, JSON]] = None, **kwargs ): """ :keyword pipeline_reference: Pipeline reference. :paramtype pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference :keyword parameters: Pipeline parameters. 
- :paramtype parameters: dict[str, any] + :paramtype parameters: dict[str, JSON] """ - super(TriggerPipelineReference, self).__init__(**kwargs) + super().__init__(**kwargs) self.pipeline_reference = pipeline_reference self.parameters = parameters -class TriggerQueryResponse(msrest.serialization.Model): +class TriggerQueryResponse(_serialization.Model): """A query of triggers. All required parameters must be populated in order to send to Azure. - :ivar value: Required. List of triggers. + :ivar value: List of triggers. Required. :vartype value: list[~azure.mgmt.datafactory.models.TriggerResource] :ivar continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. @@ -57397,68 +61293,56 @@ class TriggerQueryResponse(msrest.serialization.Model): """ _validation = { - 'value': {'required': True}, + "value": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[TriggerResource]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + "value": {"key": "value", "type": "[TriggerResource]"}, + "continuation_token": {"key": "continuationToken", "type": "str"}, } - def __init__( - self, - *, - value: List["_models.TriggerResource"], - continuation_token: Optional[str] = None, - **kwargs - ): + def __init__(self, *, value: List["_models.TriggerResource"], continuation_token: Optional[str] = None, **kwargs): """ - :keyword value: Required. List of triggers. + :keyword value: List of triggers. Required. :paramtype value: list[~azure.mgmt.datafactory.models.TriggerResource] :keyword continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. :paramtype continuation_token: str """ - super(TriggerQueryResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.continuation_token = continuation_token -class TriggerReference(msrest.serialization.Model): +class TriggerReference(_serialization.Model): """Trigger reference type. All required parameters must be populated in order to send to Azure. - :ivar type: Required. Trigger reference type. Known values are: "TriggerReference". + :ivar type: Trigger reference type. Required. "TriggerReference" :vartype type: str or ~azure.mgmt.datafactory.models.TriggerReferenceType - :ivar reference_name: Required. Reference trigger name. + :ivar reference_name: Reference trigger name. Required. :vartype reference_name: str """ _validation = { - 'type': {'required': True}, - 'reference_name': {'required': True}, + "type": {"required": True}, + "reference_name": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, + "type": {"key": "type", "type": "str"}, + "reference_name": {"key": "referenceName", "type": "str"}, } - def __init__( - self, - *, - type: Union[str, "_models.TriggerReferenceType"], - reference_name: str, - **kwargs - ): + def __init__(self, *, type: Union[str, "_models.TriggerReferenceType"], reference_name: str, **kwargs): """ - :keyword type: Required. Trigger reference type. Known values are: "TriggerReference". + :keyword type: Trigger reference type. Required. "TriggerReference" :paramtype type: str or ~azure.mgmt.datafactory.models.TriggerReferenceType - :keyword reference_name: Required. Reference trigger name. + :keyword reference_name: Reference trigger name. Required. 
:paramtype reference_name: str """ - super(TriggerReference, self).__init__(**kwargs) + super().__init__(**kwargs) self.type = type self.reference_name = reference_name @@ -57478,48 +61362,43 @@ class TriggerResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :ivar properties: Required. Properties of the trigger. + :ivar properties: Properties of the trigger. Required. :vartype properties: ~azure.mgmt.datafactory.models.Trigger """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "etag": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Trigger'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "etag": {"key": "etag", "type": "str"}, + "properties": {"key": "properties", "type": "Trigger"}, } - def __init__( - self, - *, - properties: "_models.Trigger", - **kwargs - ): + def __init__(self, *, properties: "_models.Trigger", **kwargs): """ - :keyword properties: Required. Properties of the trigger. + :keyword properties: Properties of the trigger. Required. :paramtype properties: ~azure.mgmt.datafactory.models.Trigger """ - super(TriggerResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class TriggerRun(msrest.serialization.Model): +class TriggerRun(_serialization.Model): # pylint: disable=too-many-instance-attributes """Trigger runs. Variables are only populated by the server, and will be ignored when sending a request. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] + :vartype additional_properties: dict[str, JSON] :ivar trigger_run_id: Trigger run id. :vartype trigger_run_id: str :ivar trigger_name: Trigger name. @@ -57528,7 +61407,7 @@ class TriggerRun(msrest.serialization.Model): :vartype trigger_type: str :ivar trigger_run_timestamp: Trigger run start time. :vartype trigger_run_timestamp: ~datetime.datetime - :ivar status: Trigger run status. Known values are: "Succeeded", "Failed", "Inprogress". + :ivar status: Trigger run status. Known values are: "Succeeded", "Failed", and "Inprogress". :vartype status: str or ~azure.mgmt.datafactory.models.TriggerRunStatus :ivar message: Trigger error message. :vartype message: str @@ -57540,48 +61419,43 @@ class TriggerRun(msrest.serialization.Model): :ivar run_dimension: Run dimension for which trigger was fired. :vartype run_dimension: dict[str, str] :ivar dependency_status: Status of the upstream pipelines. 
- :vartype dependency_status: dict[str, any] + :vartype dependency_status: dict[str, JSON] """ _validation = { - 'trigger_run_id': {'readonly': True}, - 'trigger_name': {'readonly': True}, - 'trigger_type': {'readonly': True}, - 'trigger_run_timestamp': {'readonly': True}, - 'status': {'readonly': True}, - 'message': {'readonly': True}, - 'properties': {'readonly': True}, - 'triggered_pipelines': {'readonly': True}, - 'run_dimension': {'readonly': True}, - 'dependency_status': {'readonly': True}, + "trigger_run_id": {"readonly": True}, + "trigger_name": {"readonly": True}, + "trigger_type": {"readonly": True}, + "trigger_run_timestamp": {"readonly": True}, + "status": {"readonly": True}, + "message": {"readonly": True}, + "properties": {"readonly": True}, + "triggered_pipelines": {"readonly": True}, + "run_dimension": {"readonly": True}, + "dependency_status": {"readonly": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'trigger_run_id': {'key': 'triggerRunId', 'type': 'str'}, - 'trigger_name': {'key': 'triggerName', 'type': 'str'}, - 'trigger_type': {'key': 'triggerType', 'type': 'str'}, - 'trigger_run_timestamp': {'key': 'triggerRunTimestamp', 'type': 'iso-8601'}, - 'status': {'key': 'status', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'triggered_pipelines': {'key': 'triggeredPipelines', 'type': '{str}'}, - 'run_dimension': {'key': 'runDimension', 'type': '{str}'}, - 'dependency_status': {'key': 'dependencyStatus', 'type': '{object}'}, + "additional_properties": {"key": "", "type": "{object}"}, + "trigger_run_id": {"key": "triggerRunId", "type": "str"}, + "trigger_name": {"key": "triggerName", "type": "str"}, + "trigger_type": {"key": "triggerType", "type": "str"}, + "trigger_run_timestamp": {"key": "triggerRunTimestamp", "type": "iso-8601"}, + "status": {"key": "status", "type": "str"}, + "message": {"key": "message", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "triggered_pipelines": {"key": "triggeredPipelines", "type": "{str}"}, + "run_dimension": {"key": "runDimension", "type": "{str}"}, + "dependency_status": {"key": "dependencyStatus", "type": "{object}"}, } - def __init__( - self, - *, - additional_properties: Optional[Dict[str, Any]] = None, - **kwargs - ): + def __init__(self, *, additional_properties: Optional[Dict[str, JSON]] = None, **kwargs): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] """ - super(TriggerRun, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.trigger_run_id = None self.trigger_name = None @@ -57595,12 +61469,12 @@ def __init__( self.dependency_status = None -class TriggerRunsQueryResponse(msrest.serialization.Model): +class TriggerRunsQueryResponse(_serialization.Model): """A list of trigger runs. All required parameters must be populated in order to send to Azure. - :ivar value: Required. List of trigger runs. + :ivar value: List of trigger runs. Required. :vartype value: list[~azure.mgmt.datafactory.models.TriggerRun] :ivar continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. 
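A minimal usage sketch for the trigger models touched above, assuming the standard DataFactoryManagementClient entry point, a DefaultAzureCredential, and a triggers.query_by_factory operation — none of which are defined by this diff; only the model constructors and fields follow the signatures shown in the hunks above.

    from azure.identity import DefaultAzureCredential  # assumed credential type, not part of this diff
    from azure.mgmt.datafactory import DataFactoryManagementClient
    from azure.mgmt.datafactory.models import TriggerFilterParameters

    client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")

    # Page through triggers; TriggerQueryResponse.value is required, continuation_token is optional.
    filters = TriggerFilterParameters()
    while True:
        page = client.triggers.query_by_factory("my-rg", "my-factory", filters)
        for trigger in page.value:  # each item is a TriggerResource with a required .properties
            print(trigger.name, trigger.properties.runtime_state)
        if not page.continuation_token:
            break
        filters = TriggerFilterParameters(continuation_token=page.continuation_token)
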
@@ -57608,34 +61482,28 @@ class TriggerRunsQueryResponse(msrest.serialization.Model): """ _validation = { - 'value': {'required': True}, + "value": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[TriggerRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + "value": {"key": "value", "type": "[TriggerRun]"}, + "continuation_token": {"key": "continuationToken", "type": "str"}, } - def __init__( - self, - *, - value: List["_models.TriggerRun"], - continuation_token: Optional[str] = None, - **kwargs - ): + def __init__(self, *, value: List["_models.TriggerRun"], continuation_token: Optional[str] = None, **kwargs): """ - :keyword value: Required. List of trigger runs. + :keyword value: List of trigger runs. Required. :paramtype value: list[~azure.mgmt.datafactory.models.TriggerRun] :keyword continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. :paramtype continuation_token: str """ - super(TriggerRunsQueryResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.continuation_token = continuation_token -class TriggerSubscriptionOperationStatus(msrest.serialization.Model): +class TriggerSubscriptionOperationStatus(_serialization.Model): """Defines the response of a trigger subscription operation. Variables are only populated by the server, and will be ignored when sending a request. @@ -57643,32 +61511,28 @@ class TriggerSubscriptionOperationStatus(msrest.serialization.Model): :ivar trigger_name: Trigger name. :vartype trigger_name: str :ivar status: Event Subscription Status. Known values are: "Enabled", "Provisioning", - "Deprovisioning", "Disabled", "Unknown". + "Deprovisioning", "Disabled", and "Unknown". :vartype status: str or ~azure.mgmt.datafactory.models.EventSubscriptionStatus """ _validation = { - 'trigger_name': {'readonly': True}, - 'status': {'readonly': True}, + "trigger_name": {"readonly": True}, + "status": {"readonly": True}, } _attribute_map = { - 'trigger_name': {'key': 'triggerName', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, + "trigger_name": {"key": "triggerName", "type": "str"}, + "status": {"key": "status", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.trigger_name = None self.status = None -class TumblingWindowTrigger(Trigger): +class TumblingWindowTrigger(Trigger): # pylint: disable=too-many-instance-attributes """Trigger that schedules pipeline runs for all fixed time interval windows from a start time without gaps and also supports backfill scenarios (when start time is in the past). Variables are only populated by the server, and will be ignored when sending a request. @@ -57677,27 +61541,27 @@ class TumblingWindowTrigger(Trigger): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Trigger type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Trigger type. Required. :vartype type: str :ivar description: Trigger description. :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Known values are: "Started", "Stopped", "Disabled". + called on the Trigger. 
Known values are: "Started", "Stopped", and "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :ivar annotations: List of tags that can be used for describing the trigger. - :vartype annotations: list[any] - :ivar pipeline: Required. Pipeline for which runs are created when an event is fired for - trigger window that is ready. + :vartype annotations: list[JSON] + :ivar pipeline: Pipeline for which runs are created when an event is fired for trigger window + that is ready. Required. :vartype pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference - :ivar frequency: Required. The frequency of the time windows. Known values are: "Minute", - "Hour", "Month". + :ivar frequency: The frequency of the time windows. Required. Known values are: "Minute", + "Hour", and "Month". :vartype frequency: str or ~azure.mgmt.datafactory.models.TumblingWindowFrequency - :ivar interval: Required. The interval of the time windows. The minimum interval allowed is 15 - Minutes. + :ivar interval: The interval of the time windows. The minimum interval allowed is 15 Minutes. + Required. :vartype interval: int - :ivar start_time: Required. The start time for the time period for the trigger during which - events are fired for windows that are ready. Only UTC time is currently supported. + :ivar start_time: The start time for the time period for the trigger during which events are + fired for windows that are ready. Only UTC time is currently supported. Required. :vartype start_time: ~datetime.datetime :ivar end_time: The end time for the time period for the trigger during which events are fired for windows that are ready. Only UTC time is currently supported. @@ -57705,9 +61569,9 @@ class TumblingWindowTrigger(Trigger): :ivar delay: Specifies how long the trigger waits past due time before triggering new run. It doesn't alter window start and end time. The default is 0. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype delay: any - :ivar max_concurrency: Required. The max number of parallel time windows (ready for execution) - for which a new run is triggered. + :vartype delay: JSON + :ivar max_concurrency: The max number of parallel time windows (ready for execution) for which + a new run is triggered. Required. :vartype max_concurrency: int :ivar retry_policy: Retry policy that will be applied for failed pipeline runs. 
:vartype retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy @@ -57717,30 +61581,30 @@ class TumblingWindowTrigger(Trigger): """ _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, - 'pipeline': {'required': True}, - 'frequency': {'required': True}, - 'interval': {'required': True}, - 'start_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + "type": {"required": True}, + "runtime_state": {"readonly": True}, + "pipeline": {"required": True}, + "frequency": {"required": True}, + "interval": {"required": True}, + "start_time": {"required": True}, + "max_concurrency": {"required": True, "maximum": 50, "minimum": 1}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, - 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, - 'interval': {'key': 'typeProperties.interval', 'type': 'int'}, - 'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'typeProperties.endTime', 'type': 'iso-8601'}, - 'delay': {'key': 'typeProperties.delay', 'type': 'object'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, - 'retry_policy': {'key': 'typeProperties.retryPolicy', 'type': 'RetryPolicy'}, - 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "runtime_state": {"key": "runtimeState", "type": "str"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "pipeline": {"key": "pipeline", "type": "TriggerPipelineReference"}, + "frequency": {"key": "typeProperties.frequency", "type": "str"}, + "interval": {"key": "typeProperties.interval", "type": "int"}, + "start_time": {"key": "typeProperties.startTime", "type": "iso-8601"}, + "end_time": {"key": "typeProperties.endTime", "type": "iso-8601"}, + "delay": {"key": "typeProperties.delay", "type": "object"}, + "max_concurrency": {"key": "typeProperties.maxConcurrency", "type": "int"}, + "retry_policy": {"key": "typeProperties.retryPolicy", "type": "RetryPolicy"}, + "depends_on": {"key": "typeProperties.dependsOn", "type": "[DependencyReference]"}, } def __init__( @@ -57751,11 +61615,11 @@ def __init__( interval: int, start_time: datetime.datetime, max_concurrency: int, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, end_time: Optional[datetime.datetime] = None, - delay: Optional[Any] = None, + delay: Optional[JSON] = None, retry_policy: Optional["_models.RetryPolicy"] = None, depends_on: Optional[List["_models.DependencyReference"]] = None, **kwargs @@ -57763,22 +61627,22 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Trigger description. 
:paramtype description: str :keyword annotations: List of tags that can be used for describing the trigger. - :paramtype annotations: list[any] - :keyword pipeline: Required. Pipeline for which runs are created when an event is fired for - trigger window that is ready. + :paramtype annotations: list[JSON] + :keyword pipeline: Pipeline for which runs are created when an event is fired for trigger + window that is ready. Required. :paramtype pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference - :keyword frequency: Required. The frequency of the time windows. Known values are: "Minute", - "Hour", "Month". + :keyword frequency: The frequency of the time windows. Required. Known values are: "Minute", + "Hour", and "Month". :paramtype frequency: str or ~azure.mgmt.datafactory.models.TumblingWindowFrequency - :keyword interval: Required. The interval of the time windows. The minimum interval allowed is - 15 Minutes. + :keyword interval: The interval of the time windows. The minimum interval allowed is 15 + Minutes. Required. :paramtype interval: int - :keyword start_time: Required. The start time for the time period for the trigger during which - events are fired for windows that are ready. Only UTC time is currently supported. + :keyword start_time: The start time for the time period for the trigger during which events are + fired for windows that are ready. Only UTC time is currently supported. Required. :paramtype start_time: ~datetime.datetime :keyword end_time: The end time for the time period for the trigger during which events are fired for windows that are ready. Only UTC time is currently supported. @@ -57786,9 +61650,9 @@ def __init__( :keyword delay: Specifies how long the trigger waits past due time before triggering new run. It doesn't alter window start and end time. The default is 0. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype delay: any - :keyword max_concurrency: Required. The max number of parallel time windows (ready for - execution) for which a new run is triggered. + :paramtype delay: JSON + :keyword max_concurrency: The max number of parallel time windows (ready for execution) for + which a new run is triggered. Required. :paramtype max_concurrency: int :keyword retry_policy: Retry policy that will be applied for failed pipeline runs. :paramtype retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy @@ -57796,8 +61660,10 @@ def __init__( supported. :paramtype depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] """ - super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.type = 'TumblingWindowTrigger' # type: str + super().__init__( + additional_properties=additional_properties, description=description, annotations=annotations, **kwargs + ) + self.type = "TumblingWindowTrigger" # type: str self.pipeline = pipeline self.frequency = frequency self.interval = interval @@ -57814,9 +61680,9 @@ class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): All required parameters must be populated in order to send to Azure. - :ivar type: Required. The type of dependency reference.Constant filled by server. + :ivar type: The type of dependency reference. Required. :vartype type: str - :ivar reference_trigger: Required. Referenced trigger. + :ivar reference_trigger: Referenced trigger. Required. 
:vartype reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference :ivar offset: Timespan applied to the start time of a tumbling window when evaluating dependency. @@ -57827,17 +61693,21 @@ class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): """ _validation = { - 'type': {'required': True}, - 'reference_trigger': {'required': True}, - 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'-?((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + "type": {"required": True}, + "reference_trigger": {"required": True}, + "offset": { + "max_length": 15, + "min_length": 8, + "pattern": r"-?((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))", + }, + "size": {"max_length": 15, "min_length": 8, "pattern": r"((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))"}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, - 'offset': {'key': 'offset', 'type': 'str'}, - 'size': {'key': 'size', 'type': 'str'}, + "type": {"key": "type", "type": "str"}, + "reference_trigger": {"key": "referenceTrigger", "type": "TriggerReference"}, + "offset": {"key": "offset", "type": "str"}, + "size": {"key": "size", "type": "str"}, } def __init__( @@ -57849,7 +61719,7 @@ def __init__( **kwargs ): """ - :keyword reference_trigger: Required. Referenced trigger. + :keyword reference_trigger: Referenced trigger. Required. :paramtype reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference :keyword offset: Timespan applied to the start time of a tumbling window when evaluating dependency. @@ -57858,8 +61728,8 @@ def __init__( frequency of the tumbling window will be used. :paramtype size: str """ - super(TumblingWindowTriggerDependencyReference, self).__init__(reference_trigger=reference_trigger, **kwargs) - self.type = 'TumblingWindowTriggerDependencyReference' # type: str + super().__init__(reference_trigger=reference_trigger, **kwargs) + self.type = "TumblingWindowTriggerDependencyReference" # type: str self.offset = offset self.size = size @@ -57871,8 +61741,8 @@ class TwilioLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -57881,46 +61751,46 @@ class TwilioLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar user_name: Required. The Account SID of Twilio service. - :vartype user_name: any - :ivar password: Required. The auth token of Twilio service. + :vartype annotations: list[JSON] + :ivar user_name: The Account SID of Twilio service. Required. + :vartype user_name: JSON + :ivar password: The auth token of Twilio service. Required. 
:vartype password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { - 'type': {'required': True}, - 'user_name': {'required': True}, - 'password': {'required': True}, + "type": {"required": True}, + "user_name": {"required": True}, + "password": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, } def __init__( self, *, - user_name: Any, + user_name: JSON, password: "_models.SecretBase", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -57928,82 +61798,89 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword user_name: Required. The Account SID of Twilio service. - :paramtype user_name: any - :keyword password: Required. The auth token of Twilio service. + :paramtype annotations: list[JSON] + :keyword user_name: The Account SID of Twilio service. Required. + :paramtype user_name: JSON + :keyword password: The auth token of Twilio service. Required. 
:paramtype password: ~azure.mgmt.datafactory.models.SecretBase """ - super(TwilioLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Twilio' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Twilio" # type: str self.user_name = user_name self.password = password -class TypeConversionSettings(msrest.serialization.Model): +class TypeConversionSettings(_serialization.Model): """Type conversion settings. :ivar allow_data_truncation: Whether to allow data truncation when converting the data. Type: boolean (or Expression with resultType boolean). - :vartype allow_data_truncation: any + :vartype allow_data_truncation: JSON :ivar treat_boolean_as_number: Whether to treat boolean values as numbers. Type: boolean (or Expression with resultType boolean). - :vartype treat_boolean_as_number: any + :vartype treat_boolean_as_number: JSON :ivar date_time_format: The format for DateTime values. Type: string (or Expression with resultType string). - :vartype date_time_format: any + :vartype date_time_format: JSON :ivar date_time_offset_format: The format for DateTimeOffset values. Type: string (or Expression with resultType string). - :vartype date_time_offset_format: any + :vartype date_time_offset_format: JSON :ivar time_span_format: The format for TimeSpan values. Type: string (or Expression with resultType string). - :vartype time_span_format: any + :vartype time_span_format: JSON :ivar culture: The culture used to convert data from/to string. Type: string (or Expression with resultType string). - :vartype culture: any + :vartype culture: JSON """ _attribute_map = { - 'allow_data_truncation': {'key': 'allowDataTruncation', 'type': 'object'}, - 'treat_boolean_as_number': {'key': 'treatBooleanAsNumber', 'type': 'object'}, - 'date_time_format': {'key': 'dateTimeFormat', 'type': 'object'}, - 'date_time_offset_format': {'key': 'dateTimeOffsetFormat', 'type': 'object'}, - 'time_span_format': {'key': 'timeSpanFormat', 'type': 'object'}, - 'culture': {'key': 'culture', 'type': 'object'}, + "allow_data_truncation": {"key": "allowDataTruncation", "type": "object"}, + "treat_boolean_as_number": {"key": "treatBooleanAsNumber", "type": "object"}, + "date_time_format": {"key": "dateTimeFormat", "type": "object"}, + "date_time_offset_format": {"key": "dateTimeOffsetFormat", "type": "object"}, + "time_span_format": {"key": "timeSpanFormat", "type": "object"}, + "culture": {"key": "culture", "type": "object"}, } def __init__( self, *, - allow_data_truncation: Optional[Any] = None, - treat_boolean_as_number: Optional[Any] = None, - date_time_format: Optional[Any] = None, - date_time_offset_format: Optional[Any] = None, - time_span_format: Optional[Any] = None, - culture: Optional[Any] = None, + allow_data_truncation: Optional[JSON] = None, + treat_boolean_as_number: Optional[JSON] = None, + date_time_format: Optional[JSON] = None, + date_time_offset_format: Optional[JSON] = None, + time_span_format: Optional[JSON] = None, + culture: Optional[JSON] = None, **kwargs ): """ :keyword allow_data_truncation: Whether to allow data truncation when converting the data. Type: boolean (or Expression with resultType boolean). 
- :paramtype allow_data_truncation: any + :paramtype allow_data_truncation: JSON :keyword treat_boolean_as_number: Whether to treat boolean values as numbers. Type: boolean (or Expression with resultType boolean). - :paramtype treat_boolean_as_number: any + :paramtype treat_boolean_as_number: JSON :keyword date_time_format: The format for DateTime values. Type: string (or Expression with resultType string). - :paramtype date_time_format: any + :paramtype date_time_format: JSON :keyword date_time_offset_format: The format for DateTimeOffset values. Type: string (or Expression with resultType string). - :paramtype date_time_offset_format: any + :paramtype date_time_offset_format: JSON :keyword time_span_format: The format for TimeSpan values. Type: string (or Expression with resultType string). - :paramtype time_span_format: any + :paramtype time_span_format: JSON :keyword culture: The culture used to convert data from/to string. Type: string (or Expression with resultType string). - :paramtype culture: any + :paramtype culture: JSON """ - super(TypeConversionSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.allow_data_truncation = allow_data_truncation self.treat_boolean_as_number = treat_boolean_as_number self.date_time_format = date_time_format @@ -58019,10 +61896,10 @@ class UntilActivity(ControlActivity): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -58030,36 +61907,36 @@ class UntilActivity(ControlActivity): :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :ivar expression: Required. An expression that would evaluate to Boolean. The loop will - continue until this expression evaluates to true. + :ivar expression: An expression that would evaluate to Boolean. The loop will continue until + this expression evaluates to true. Required. :vartype expression: ~azure.mgmt.datafactory.models.Expression :ivar timeout: Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype timeout: any - :ivar activities: Required. List of activities to execute. + :vartype timeout: JSON + :ivar activities: List of activities to execute. Required. 
:vartype activities: list[~azure.mgmt.datafactory.models.Activity] """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'expression': {'required': True}, - 'activities': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "expression": {"required": True}, + "activities": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, - 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, - 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "expression": {"key": "typeProperties.expression", "type": "Expression"}, + "timeout": {"key": "typeProperties.timeout", "type": "object"}, + "activities": {"key": "typeProperties.activities", "type": "[Activity]"}, } def __init__( @@ -58068,18 +61945,18 @@ def __init__( name: str, expression: "_models.Expression", activities: List["_models.Activity"], - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, - timeout: Optional[Any] = None, + timeout: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -58087,26 +61964,33 @@ def __init__( :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :keyword expression: Required. An expression that would evaluate to Boolean. The loop will - continue until this expression evaluates to true. + :keyword expression: An expression that would evaluate to Boolean. The loop will continue until + this expression evaluates to true. Required. :paramtype expression: ~azure.mgmt.datafactory.models.Expression :keyword timeout: Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype timeout: any - :keyword activities: Required. List of activities to execute. 
+ :paramtype timeout: JSON + :keyword activities: List of activities to execute. Required. :paramtype activities: list[~azure.mgmt.datafactory.models.Activity] """ - super(UntilActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'Until' # type: str + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + **kwargs + ) + self.type = "Until" # type: str self.expression = expression self.timeout = timeout self.activities = activities -class UpdateIntegrationRuntimeNodeRequest(msrest.serialization.Model): +class UpdateIntegrationRuntimeNodeRequest(_serialization.Model): """Update integration runtime node request. :ivar concurrent_jobs_limit: The number of concurrent jobs permitted to run on the integration @@ -58115,33 +61999,28 @@ class UpdateIntegrationRuntimeNodeRequest(msrest.serialization.Model): """ _validation = { - 'concurrent_jobs_limit': {'minimum': 1}, + "concurrent_jobs_limit": {"minimum": 1}, } _attribute_map = { - 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, + "concurrent_jobs_limit": {"key": "concurrentJobsLimit", "type": "int"}, } - def __init__( - self, - *, - concurrent_jobs_limit: Optional[int] = None, - **kwargs - ): + def __init__(self, *, concurrent_jobs_limit: Optional[int] = None, **kwargs): """ :keyword concurrent_jobs_limit: The number of concurrent jobs permitted to run on the integration runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed. :paramtype concurrent_jobs_limit: int """ - super(UpdateIntegrationRuntimeNodeRequest, self).__init__(**kwargs) + super().__init__(**kwargs) self.concurrent_jobs_limit = concurrent_jobs_limit -class UpdateIntegrationRuntimeRequest(msrest.serialization.Model): +class UpdateIntegrationRuntimeRequest(_serialization.Model): """Update integration runtime request. :ivar auto_update: Enables or disables the auto-update feature of the self-hosted integration - runtime. See https://go.microsoft.com/fwlink/?linkid=854189. Known values are: "On", "Off". + runtime. See https://go.microsoft.com/fwlink/?linkid=854189. Known values are: "On" and "Off". :vartype auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate :ivar update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on that time. @@ -58149,8 +62028,8 @@ class UpdateIntegrationRuntimeRequest(msrest.serialization.Model): """ _attribute_map = { - 'auto_update': {'key': 'autoUpdate', 'type': 'str'}, - 'update_delay_offset': {'key': 'updateDelayOffset', 'type': 'str'}, + "auto_update": {"key": "autoUpdate", "type": "str"}, + "update_delay_offset": {"key": "updateDelayOffset", "type": "str"}, } def __init__( @@ -58162,19 +62041,19 @@ def __init__( ): """ :keyword auto_update: Enables or disables the auto-update feature of the self-hosted - integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189. Known values are: - "On", "Off". + integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189. Known values are: "On" + and "Off". :paramtype auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate :keyword update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on that time. 
:paramtype update_delay_offset: str """ - super(UpdateIntegrationRuntimeRequest, self).__init__(**kwargs) + super().__init__(**kwargs) self.auto_update = auto_update self.update_delay_offset = update_delay_offset -class UserAccessPolicy(msrest.serialization.Model): +class UserAccessPolicy(_serialization.Model): """Get Data Plane read only token request definition. :ivar permissions: The string with permissions for Data Plane access. Currently only 'r' is @@ -58194,11 +62073,11 @@ class UserAccessPolicy(msrest.serialization.Model): """ _attribute_map = { - 'permissions': {'key': 'permissions', 'type': 'str'}, - 'access_resource_path': {'key': 'accessResourcePath', 'type': 'str'}, - 'profile_name': {'key': 'profileName', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'expire_time': {'key': 'expireTime', 'type': 'str'}, + "permissions": {"key": "permissions", "type": "str"}, + "access_resource_path": {"key": "accessResourcePath", "type": "str"}, + "profile_name": {"key": "profileName", "type": "str"}, + "start_time": {"key": "startTime", "type": "str"}, + "expire_time": {"key": "expireTime", "type": "str"}, } def __init__( @@ -58227,7 +62106,7 @@ def __init__( hours and by default the token will expire in eight hours. :paramtype expire_time: str """ - super(UserAccessPolicy, self).__init__(**kwargs) + super().__init__(**kwargs) self.permissions = permissions self.access_resource_path = access_resource_path self.profile_name = profile_name @@ -58235,58 +62114,52 @@ def __init__( self.expire_time = expire_time -class UserProperty(msrest.serialization.Model): +class UserProperty(_serialization.Model): """User property. All required parameters must be populated in order to send to Azure. - :ivar name: Required. User property name. + :ivar name: User property name. Required. :vartype name: str - :ivar value: Required. User property value. Type: string (or Expression with resultType - string). - :vartype value: any + :ivar value: User property value. Type: string (or Expression with resultType string). + Required. + :vartype value: JSON """ _validation = { - 'name': {'required': True}, - 'value': {'required': True}, + "name": {"required": True}, + "value": {"required": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'object'}, + "name": {"key": "name", "type": "str"}, + "value": {"key": "value", "type": "object"}, } - def __init__( - self, - *, - name: str, - value: Any, - **kwargs - ): + def __init__(self, *, name: str, value: JSON, **kwargs): """ - :keyword name: Required. User property name. + :keyword name: User property name. Required. :paramtype name: str - :keyword value: Required. User property value. Type: string (or Expression with resultType - string). - :paramtype value: any + :keyword value: User property value. Type: string (or Expression with resultType string). + Required. + :paramtype value: JSON """ - super(UserProperty, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name self.value = value -class ValidationActivity(ControlActivity): +class ValidationActivity(ControlActivity): # pylint: disable=too-many-instance-attributes """This activity verifies that an external resource exists. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. 
+ :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -58298,39 +62171,39 @@ class ValidationActivity(ControlActivity): it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype timeout: any + :vartype timeout: JSON :ivar sleep: A delay in seconds between validation attempts. If no value is specified, 10 seconds will be used as the default. Type: integer (or Expression with resultType integer). - :vartype sleep: any + :vartype sleep: JSON :ivar minimum_size: Can be used if dataset points to a file. The file must be greater than or equal in size to the value specified. Type: integer (or Expression with resultType integer). - :vartype minimum_size: any + :vartype minimum_size: JSON :ivar child_items: Can be used if dataset points to a folder. If set to true, the folder must have at least one file. If set to false, the folder must be empty. Type: boolean (or Expression with resultType boolean). - :vartype child_items: any - :ivar dataset: Required. Validation activity dataset reference. + :vartype child_items: JSON + :ivar dataset: Validation activity dataset reference. Required. :vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'dataset': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "dataset": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, - 'sleep': {'key': 'typeProperties.sleep', 'type': 'object'}, - 'minimum_size': {'key': 'typeProperties.minimumSize', 'type': 'object'}, - 'child_items': {'key': 'typeProperties.childItems', 'type': 'object'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "timeout": {"key": "typeProperties.timeout", "type": "object"}, + "sleep": {"key": "typeProperties.sleep", "type": "object"}, + "minimum_size": {"key": "typeProperties.minimumSize", "type": "object"}, + "child_items": {"key": "typeProperties.childItems", "type": "object"}, + "dataset": {"key": "typeProperties.dataset", "type": "DatasetReference"}, } def __init__( @@ -58338,21 +62211,21 @@ def __init__( *, name: str, dataset: "_models.DatasetReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = 
None, - timeout: Optional[Any] = None, - sleep: Optional[Any] = None, - minimum_size: Optional[Any] = None, - child_items: Optional[Any] = None, + timeout: Optional[JSON] = None, + sleep: Optional[JSON] = None, + minimum_size: Optional[JSON] = None, + child_items: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -58364,22 +62237,29 @@ def __init__( specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype timeout: any + :paramtype timeout: JSON :keyword sleep: A delay in seconds between validation attempts. If no value is specified, 10 seconds will be used as the default. Type: integer (or Expression with resultType integer). - :paramtype sleep: any + :paramtype sleep: JSON :keyword minimum_size: Can be used if dataset points to a file. The file must be greater than or equal in size to the value specified. Type: integer (or Expression with resultType integer). - :paramtype minimum_size: any + :paramtype minimum_size: JSON :keyword child_items: Can be used if dataset points to a folder. If set to true, the folder must have at least one file. If set to false, the folder must be empty. Type: boolean (or Expression with resultType boolean). - :paramtype child_items: any - :keyword dataset: Required. Validation activity dataset reference. + :paramtype child_items: JSON + :keyword dataset: Validation activity dataset reference. Required. :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference """ - super(ValidationActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'Validation' # type: str + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + **kwargs + ) + self.type = "Validation" # type: str self.timeout = timeout self.sleep = sleep self.minimum_size = minimum_size @@ -58387,40 +62267,34 @@ def __init__( self.dataset = dataset -class VariableSpecification(msrest.serialization.Model): +class VariableSpecification(_serialization.Model): """Definition of a single variable for a Pipeline. All required parameters must be populated in order to send to Azure. - :ivar type: Required. Variable type. Known values are: "String", "Bool", "Array". + :ivar type: Variable type. Required. Known values are: "String", "Bool", and "Array". :vartype type: str or ~azure.mgmt.datafactory.models.VariableType :ivar default_value: Default value of variable. 
- :vartype default_value: any + :vartype default_value: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'object'}, + "type": {"key": "type", "type": "str"}, + "default_value": {"key": "defaultValue", "type": "object"}, } - def __init__( - self, - *, - type: Union[str, "_models.VariableType"], - default_value: Optional[Any] = None, - **kwargs - ): + def __init__(self, *, type: Union[str, "_models.VariableType"], default_value: Optional[JSON] = None, **kwargs): """ - :keyword type: Required. Variable type. Known values are: "String", "Bool", "Array". + :keyword type: Variable type. Required. Known values are: "String", "Bool", and "Array". :paramtype type: str or ~azure.mgmt.datafactory.models.VariableType :keyword default_value: Default value of variable. - :paramtype default_value: any + :paramtype default_value: JSON """ - super(VariableSpecification, self).__init__(**kwargs) + super().__init__(**kwargs) self.type = type self.default_value = default_value @@ -58432,8 +62306,8 @@ class VerticaLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -58442,51 +62316,51 @@ class VerticaLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :vartype connection_string: any + :vartype connection_string: JSON :ivar pwd: The Azure key vault secret reference of password in connection string. :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "pwd": {"key": "typeProperties.pwd", "type": "AzureKeyVaultSecretReference"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_string: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_string: Optional[JSON] = None, pwd: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -58494,19 +62368,26 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :paramtype connection_string: any + :paramtype connection_string: JSON :keyword pwd: The Azure key vault secret reference of password in connection string. :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :paramtype encrypted_credential: any - """ - super(VerticaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Vertica' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Vertica" # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential @@ -58519,199 +62400,218 @@ class VerticaSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'VerticaSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "VerticaSource" # type: str self.query = query -class VerticaTableDataset(Dataset): +class VerticaTableDataset(Dataset): # pylint: disable=too-many-instance-attributes """Vertica dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :vartype table_name: any + :vartype table_name: JSON :ivar table: The table name of the Vertica. Type: string (or Expression with resultType string). - :vartype table: any + :vartype table: JSON :ivar schema_type_properties_schema: The schema name of the Vertica. Type: string (or Expression with resultType string). 
- :vartype schema_type_properties_schema: any + :vartype schema_type_properties_schema: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, + "table": {"key": "typeProperties.table", "type": "object"}, + "schema_type_properties_schema": {"key": "typeProperties.schema", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, - table: Optional[Any] = None, - schema_type_properties_schema: Optional[Any] = None, + table_name: Optional[JSON] = None, + table: Optional[JSON] = None, + schema_type_properties_schema: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. 
:paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: This property will be retired. Please consider using schema + table properties instead. - :paramtype table_name: any + :paramtype table_name: JSON :keyword table: The table name of the Vertica. Type: string (or Expression with resultType string). - :paramtype table: any + :paramtype table: JSON :keyword schema_type_properties_schema: The schema name of the Vertica. Type: string (or Expression with resultType string). - :paramtype schema_type_properties_schema: any - """ - super(VerticaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'VerticaTable' # type: str + :paramtype schema_type_properties_schema: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "VerticaTable" # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -58724,10 +62624,10 @@ class WaitActivity(ControlActivity): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -58735,32 +62635,32 @@ class WaitActivity(ControlActivity): :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :ivar wait_time_in_seconds: Required. Duration in seconds. - :vartype wait_time_in_seconds: any + :ivar wait_time_in_seconds: Duration in seconds. Required. 
+ :vartype wait_time_in_seconds: JSON """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'wait_time_in_seconds': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "wait_time_in_seconds": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'wait_time_in_seconds': {'key': 'typeProperties.waitTimeInSeconds', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "wait_time_in_seconds": {"key": "typeProperties.waitTimeInSeconds", "type": "object"}, } def __init__( self, *, name: str, - wait_time_in_seconds: Any, - additional_properties: Optional[Dict[str, Any]] = None, + wait_time_in_seconds: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, @@ -58769,8 +62669,8 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -58778,25 +62678,32 @@ def __init__( :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :keyword wait_time_in_seconds: Required. Duration in seconds. - :paramtype wait_time_in_seconds: any - """ - super(WaitActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'Wait' # type: str + :keyword wait_time_in_seconds: Duration in seconds. Required. + :paramtype wait_time_in_seconds: JSON + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + **kwargs + ) + self.type = "Wait" # type: str self.wait_time_in_seconds = wait_time_in_seconds -class WebActivity(ExecutionActivity): +class WebActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """Web activity. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. 
:vartype type: str :ivar description: Activity description. :vartype description: str @@ -58808,19 +62715,19 @@ class WebActivity(ExecutionActivity): :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar policy: Activity policy. :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :ivar method: Required. Rest API method for target endpoint. Known values are: "GET", "POST", - "PUT", "DELETE". + :ivar method: Rest API method for target endpoint. Required. Known values are: "GET", "POST", + "PUT", and "DELETE". :vartype method: str or ~azure.mgmt.datafactory.models.WebActivityMethod - :ivar url: Required. Web activity target endpoint and path. Type: string (or Expression with - resultType string). - :vartype url: any + :ivar url: Web activity target endpoint and path. Type: string (or Expression with resultType + string). Required. + :vartype url: JSON :ivar headers: Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :vartype headers: any + :vartype headers: JSON :ivar body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). - :vartype body: any + :vartype body: JSON :ivar authentication: Authentication method used for calling the endpoint. :vartype authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication :ivar disable_cert_validation: When set to true, Certificate validation will be disabled. @@ -58834,30 +62741,30 @@ class WebActivity(ExecutionActivity): """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'method': {'required': True}, - 'url': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "method": {"required": True}, + "url": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'method': {'key': 'typeProperties.method', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, - 'body': {'key': 'typeProperties.body', 'type': 'object'}, - 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, - 'disable_cert_validation': {'key': 'typeProperties.disableCertValidation', 'type': 'bool'}, - 'datasets': {'key': 'typeProperties.datasets', 'type': '[DatasetReference]'}, - 'linked_services': {'key': 'typeProperties.linkedServices', 'type': '[LinkedServiceReference]'}, - 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", 
"type": "[UserProperty]"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "policy": {"key": "policy", "type": "ActivityPolicy"}, + "method": {"key": "typeProperties.method", "type": "str"}, + "url": {"key": "typeProperties.url", "type": "object"}, + "headers": {"key": "typeProperties.headers", "type": "object"}, + "body": {"key": "typeProperties.body", "type": "object"}, + "authentication": {"key": "typeProperties.authentication", "type": "WebActivityAuthentication"}, + "disable_cert_validation": {"key": "typeProperties.disableCertValidation", "type": "bool"}, + "datasets": {"key": "typeProperties.datasets", "type": "[DatasetReference]"}, + "linked_services": {"key": "typeProperties.linkedServices", "type": "[LinkedServiceReference]"}, + "connect_via": {"key": "typeProperties.connectVia", "type": "IntegrationRuntimeReference"}, } def __init__( @@ -58865,15 +62772,15 @@ def __init__( *, name: str, method: Union[str, "_models.WebActivityMethod"], - url: Any, - additional_properties: Optional[Dict[str, Any]] = None, + url: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, - headers: Optional[Any] = None, - body: Optional[Any] = None, + headers: Optional[JSON] = None, + body: Optional[JSON] = None, authentication: Optional["_models.WebActivityAuthentication"] = None, disable_cert_validation: Optional[bool] = None, datasets: Optional[List["_models.DatasetReference"]] = None, @@ -58884,8 +62791,8 @@ def __init__( """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -58897,19 +62804,19 @@ def __init__( :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword policy: Activity policy. :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :keyword method: Required. Rest API method for target endpoint. Known values are: "GET", - "POST", "PUT", "DELETE". + :keyword method: Rest API method for target endpoint. Required. Known values are: "GET", + "POST", "PUT", and "DELETE". :paramtype method: str or ~azure.mgmt.datafactory.models.WebActivityMethod - :keyword url: Required. Web activity target endpoint and path. Type: string (or Expression with - resultType string). - :paramtype url: any + :keyword url: Web activity target endpoint and path. Type: string (or Expression with + resultType string). Required. + :paramtype url: JSON :keyword headers: Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :paramtype headers: any + :paramtype headers: JSON :keyword body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). 
- :paramtype body: any + :paramtype body: JSON :keyword authentication: Authentication method used for calling the endpoint. :paramtype authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication :keyword disable_cert_validation: When set to true, Certificate validation will be disabled. @@ -58921,8 +62828,17 @@ def __init__( :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference """ - super(WebActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'WebActivity' # type: str + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + linked_service_name=linked_service_name, + policy=policy, + **kwargs + ) + self.type = "WebActivity" # type: str self.method = method self.url = url self.headers = headers @@ -58934,7 +62850,7 @@ def __init__( self.connect_via = connect_via -class WebActivityAuthentication(msrest.serialization.Model): +class WebActivityAuthentication(_serialization.Model): """Web activity authentication properties. :ivar type: Web activity authentication (Basic/ClientCertificate/MSI/ServicePrincipal). @@ -58943,28 +62859,28 @@ class WebActivityAuthentication(msrest.serialization.Model): :vartype pfx: ~azure.mgmt.datafactory.models.SecretBase :ivar username: Web activity authentication user name for basic authentication or ClientID when used for ServicePrincipal. Type: string (or Expression with resultType string). - :vartype username: any + :vartype username: JSON :ivar password: Password for the PFX file or basic authentication / Secret when used for ServicePrincipal. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar resource: Resource for which Azure Auth token will be requested when using MSI Authentication. Type: string (or Expression with resultType string). - :vartype resource: any + :vartype resource: JSON :ivar user_tenant: TenantId for which Azure Auth token will be requested when using ServicePrincipal Authentication. Type: string (or Expression with resultType string). - :vartype user_tenant: any + :vartype user_tenant: JSON :ivar credential: The credential reference containing authentication information. 
:vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, - 'username': {'key': 'username', 'type': 'object'}, - 'password': {'key': 'password', 'type': 'SecretBase'}, - 'resource': {'key': 'resource', 'type': 'object'}, - 'user_tenant': {'key': 'userTenant', 'type': 'object'}, - 'credential': {'key': 'credential', 'type': 'CredentialReference'}, + "type": {"key": "type", "type": "str"}, + "pfx": {"key": "pfx", "type": "SecretBase"}, + "username": {"key": "username", "type": "object"}, + "password": {"key": "password", "type": "SecretBase"}, + "resource": {"key": "resource", "type": "object"}, + "user_tenant": {"key": "userTenant", "type": "object"}, + "credential": {"key": "credential", "type": "CredentialReference"}, } def __init__( @@ -58972,10 +62888,10 @@ def __init__( *, type: Optional[str] = None, pfx: Optional["_models.SecretBase"] = None, - username: Optional[Any] = None, + username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - resource: Optional[Any] = None, - user_tenant: Optional[Any] = None, + resource: Optional[JSON] = None, + user_tenant: Optional[JSON] = None, credential: Optional["_models.CredentialReference"] = None, **kwargs ): @@ -58987,20 +62903,20 @@ def __init__( :paramtype pfx: ~azure.mgmt.datafactory.models.SecretBase :keyword username: Web activity authentication user name for basic authentication or ClientID when used for ServicePrincipal. Type: string (or Expression with resultType string). - :paramtype username: any + :paramtype username: JSON :keyword password: Password for the PFX file or basic authentication / Secret when used for ServicePrincipal. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword resource: Resource for which Azure Auth token will be requested when using MSI Authentication. Type: string (or Expression with resultType string). - :paramtype resource: any + :paramtype resource: JSON :keyword user_tenant: TenantId for which Azure Auth token will be requested when using ServicePrincipal Authentication. Type: string (or Expression with resultType string). - :paramtype user_tenant: any + :paramtype user_tenant: JSON :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ - super(WebActivityAuthentication, self).__init__(**kwargs) + super().__init__(**kwargs) self.type = type self.pfx = pfx self.username = username @@ -59010,48 +62926,47 @@ def __init__( self.credential = credential -class WebLinkedServiceTypeProperties(msrest.serialization.Model): +class WebLinkedServiceTypeProperties(_serialization.Model): """Base definition of WebLinkedServiceTypeProperties, this typeProperties is polymorphic based on authenticationType, so not flattened in SDK models. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: WebAnonymousAuthentication, WebBasicAuthentication, WebClientCertificateAuthentication. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + WebAnonymousAuthentication, WebBasicAuthentication, WebClientCertificateAuthentication All required parameters must be populated in order to send to Azure. - :ivar url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: - string (or Expression with resultType string). 
- :vartype url: any - :ivar authentication_type: Required. Type of authentication used to connect to the web table - source.Constant filled by server. Known values are: "Basic", "Anonymous", "ClientCertificate". + :ivar url: The URL of the web service endpoint, e.g. https://www.microsoft.com . Type: string + (or Expression with resultType string). Required. + :vartype url: JSON + :ivar authentication_type: Type of authentication used to connect to the web table source. + Required. Known values are: "Basic", "Anonymous", and "ClientCertificate". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType """ _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, + "url": {"required": True}, + "authentication_type": {"required": True}, } _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + "url": {"key": "url", "type": "object"}, + "authentication_type": {"key": "authenticationType", "type": "str"}, } _subtype_map = { - 'authentication_type': {'Anonymous': 'WebAnonymousAuthentication', 'Basic': 'WebBasicAuthentication', 'ClientCertificate': 'WebClientCertificateAuthentication'} + "authentication_type": { + "Anonymous": "WebAnonymousAuthentication", + "Basic": "WebBasicAuthentication", + "ClientCertificate": "WebClientCertificateAuthentication", + } } - def __init__( - self, - *, - url: Any, - **kwargs - ): + def __init__(self, *, url: JSON, **kwargs): """ - :keyword url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . - Type: string (or Expression with resultType string). - :paramtype url: any + :keyword url: The URL of the web service endpoint, e.g. https://www.microsoft.com . Type: + string (or Expression with resultType string). Required. + :paramtype url: JSON """ - super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.url = url self.authentication_type = None # type: Optional[str] @@ -59061,37 +62976,32 @@ class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): All required parameters must be populated in order to send to Azure. - :ivar url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: - string (or Expression with resultType string). - :vartype url: any - :ivar authentication_type: Required. Type of authentication used to connect to the web table - source.Constant filled by server. Known values are: "Basic", "Anonymous", "ClientCertificate". + :ivar url: The URL of the web service endpoint, e.g. https://www.microsoft.com . Type: string + (or Expression with resultType string). Required. + :vartype url: JSON + :ivar authentication_type: Type of authentication used to connect to the web table source. + Required. Known values are: "Basic", "Anonymous", and "ClientCertificate". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType """ _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, + "url": {"required": True}, + "authentication_type": {"required": True}, } _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + "url": {"key": "url", "type": "object"}, + "authentication_type": {"key": "authenticationType", "type": "str"}, } - def __init__( - self, - *, - url: Any, - **kwargs - ): + def __init__(self, *, url: JSON, **kwargs): """ - :keyword url: Required. 
The URL of the web service endpoint, e.g. http://www.microsoft.com . - Type: string (or Expression with resultType string). - :paramtype url: any + :keyword url: The URL of the web service endpoint, e.g. https://www.microsoft.com . Type: + string (or Expression with resultType string). Required. + :paramtype url: JSON """ - super(WebAnonymousAuthentication, self).__init__(url=url, **kwargs) - self.authentication_type = 'Anonymous' # type: str + super().__init__(url=url, **kwargs) + self.authentication_type = "Anonymous" # type: str class WebBasicAuthentication(WebLinkedServiceTypeProperties): @@ -59099,53 +63009,46 @@ class WebBasicAuthentication(WebLinkedServiceTypeProperties): All required parameters must be populated in order to send to Azure. - :ivar url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: - string (or Expression with resultType string). - :vartype url: any - :ivar authentication_type: Required. Type of authentication used to connect to the web table - source.Constant filled by server. Known values are: "Basic", "Anonymous", "ClientCertificate". + :ivar url: The URL of the web service endpoint, e.g. https://www.microsoft.com . Type: string + (or Expression with resultType string). Required. + :vartype url: JSON + :ivar authentication_type: Type of authentication used to connect to the web table source. + Required. Known values are: "Basic", "Anonymous", and "ClientCertificate". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType - :ivar username: Required. User name for Basic authentication. Type: string (or Expression with - resultType string). - :vartype username: any - :ivar password: Required. The password for Basic authentication. + :ivar username: User name for Basic authentication. Type: string (or Expression with resultType + string). Required. + :vartype username: JSON + :ivar password: The password for Basic authentication. Required. :vartype password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - 'username': {'required': True}, - 'password': {'required': True}, + "url": {"required": True}, + "authentication_type": {"required": True}, + "username": {"required": True}, + "password": {"required": True}, } _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - 'username': {'key': 'username', 'type': 'object'}, - 'password': {'key': 'password', 'type': 'SecretBase'}, + "url": {"key": "url", "type": "object"}, + "authentication_type": {"key": "authenticationType", "type": "str"}, + "username": {"key": "username", "type": "object"}, + "password": {"key": "password", "type": "SecretBase"}, } - def __init__( - self, - *, - url: Any, - username: Any, - password: "_models.SecretBase", - **kwargs - ): + def __init__(self, *, url: JSON, username: JSON, password: "_models.SecretBase", **kwargs): """ - :keyword url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . - Type: string (or Expression with resultType string). - :paramtype url: any - :keyword username: Required. User name for Basic authentication. Type: string (or Expression - with resultType string). - :paramtype username: any - :keyword password: Required. The password for Basic authentication. + :keyword url: The URL of the web service endpoint, e.g. https://www.microsoft.com . Type: + string (or Expression with resultType string). Required. 
+ :paramtype url: JSON + :keyword username: User name for Basic authentication. Type: string (or Expression with + resultType string). Required. + :paramtype username: JSON + :keyword password: The password for Basic authentication. Required. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase """ - super(WebBasicAuthentication, self).__init__(url=url, **kwargs) - self.authentication_type = 'Basic' # type: str + super().__init__(url=url, **kwargs) + self.authentication_type = "Basic" # type: str self.username = username self.password = password @@ -59155,66 +63058,59 @@ class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): All required parameters must be populated in order to send to Azure. - :ivar url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: - string (or Expression with resultType string). - :vartype url: any - :ivar authentication_type: Required. Type of authentication used to connect to the web table - source.Constant filled by server. Known values are: "Basic", "Anonymous", "ClientCertificate". + :ivar url: The URL of the web service endpoint, e.g. https://www.microsoft.com . Type: string + (or Expression with resultType string). Required. + :vartype url: JSON + :ivar authentication_type: Type of authentication used to connect to the web table source. + Required. Known values are: "Basic", "Anonymous", and "ClientCertificate". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType - :ivar pfx: Required. Base64-encoded contents of a PFX file. + :ivar pfx: Base64-encoded contents of a PFX file. Required. :vartype pfx: ~azure.mgmt.datafactory.models.SecretBase - :ivar password: Required. Password for the PFX file. + :ivar password: Password for the PFX file. Required. :vartype password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - 'pfx': {'required': True}, - 'password': {'required': True}, + "url": {"required": True}, + "authentication_type": {"required": True}, + "pfx": {"required": True}, + "password": {"required": True}, } _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, - 'password': {'key': 'password', 'type': 'SecretBase'}, + "url": {"key": "url", "type": "object"}, + "authentication_type": {"key": "authenticationType", "type": "str"}, + "pfx": {"key": "pfx", "type": "SecretBase"}, + "password": {"key": "password", "type": "SecretBase"}, } - def __init__( - self, - *, - url: Any, - pfx: "_models.SecretBase", - password: "_models.SecretBase", - **kwargs - ): + def __init__(self, *, url: JSON, pfx: "_models.SecretBase", password: "_models.SecretBase", **kwargs): """ - :keyword url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . - Type: string (or Expression with resultType string). - :paramtype url: any - :keyword pfx: Required. Base64-encoded contents of a PFX file. + :keyword url: The URL of the web service endpoint, e.g. https://www.microsoft.com . Type: + string (or Expression with resultType string). Required. + :paramtype url: JSON + :keyword pfx: Base64-encoded contents of a PFX file. Required. :paramtype pfx: ~azure.mgmt.datafactory.models.SecretBase - :keyword password: Required. Password for the PFX file. + :keyword password: Password for the PFX file. Required. 
:paramtype password: ~azure.mgmt.datafactory.models.SecretBase """ - super(WebClientCertificateAuthentication, self).__init__(url=url, **kwargs) - self.authentication_type = 'ClientCertificate' # type: str + super().__init__(url=url, **kwargs) + self.authentication_type = "ClientCertificate" # type: str self.pfx = pfx self.password = password -class WebHookActivity(ControlActivity): +class WebHookActivity(ControlActivity): # pylint: disable=too-many-instance-attributes """WebHook activity. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar name: Required. Activity name. + :vartype additional_properties: dict[str, JSON] + :ivar name: Activity name. Required. :vartype name: str - :ivar type: Required. Type of activity.Constant filled by server. + :ivar type: Type of activity. Required. :vartype type: str :ivar description: Activity description. :vartype description: str @@ -59222,11 +63118,11 @@ class WebHookActivity(ControlActivity): :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :ivar method: Required. Rest API method for target endpoint. Known values are: "POST". + :ivar method: Rest API method for target endpoint. Required. "POST" :vartype method: str or ~azure.mgmt.datafactory.models.WebHookActivityMethod - :ivar url: Required. WebHook activity target endpoint and path. Type: string (or Expression - with resultType string). - :vartype url: any + :ivar url: WebHook activity target endpoint and path. Type: string (or Expression with + resultType string). Required. + :vartype url: JSON :ivar timeout: The timeout within which the webhook should be called back. If there is no value specified, it defaults to 10 minutes. Type: string. Pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). @@ -59234,40 +63130,40 @@ class WebHookActivity(ControlActivity): :ivar headers: Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :vartype headers: any + :vartype headers: JSON :ivar body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). - :vartype body: any + :vartype body: JSON :ivar authentication: Authentication method used for calling the endpoint. :vartype authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication :ivar report_status_on_call_back: When set to true, statusCode, output and error in callback request body will be consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with resultType boolean). 
- :vartype report_status_on_call_back: any + :vartype report_status_on_call_back: JSON """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'method': {'required': True}, - 'url': {'required': True}, + "name": {"required": True}, + "type": {"required": True}, + "method": {"required": True}, + "url": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'method': {'key': 'typeProperties.method', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'timeout': {'key': 'typeProperties.timeout', 'type': 'str'}, - 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, - 'body': {'key': 'typeProperties.body', 'type': 'object'}, - 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, - 'report_status_on_call_back': {'key': 'typeProperties.reportStatusOnCallBack', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, + "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "method": {"key": "typeProperties.method", "type": "str"}, + "url": {"key": "typeProperties.url", "type": "object"}, + "timeout": {"key": "typeProperties.timeout", "type": "str"}, + "headers": {"key": "typeProperties.headers", "type": "object"}, + "body": {"key": "typeProperties.body", "type": "object"}, + "authentication": {"key": "typeProperties.authentication", "type": "WebActivityAuthentication"}, + "report_status_on_call_back": {"key": "typeProperties.reportStatusOnCallBack", "type": "object"}, } def __init__( @@ -59275,23 +63171,23 @@ def __init__( *, name: str, method: Union[str, "_models.WebHookActivityMethod"], - url: Any, - additional_properties: Optional[Dict[str, Any]] = None, + url: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, timeout: Optional[str] = None, - headers: Optional[Any] = None, - body: Optional[Any] = None, + headers: Optional[JSON] = None, + body: Optional[JSON] = None, authentication: Optional["_models.WebActivityAuthentication"] = None, - report_status_on_call_back: Optional[Any] = None, + report_status_on_call_back: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Required. Activity name. + :paramtype additional_properties: dict[str, JSON] + :keyword name: Activity name. Required. :paramtype name: str :keyword description: Activity description. :paramtype description: str @@ -59299,11 +63195,11 @@ def __init__( :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :keyword method: Required. Rest API method for target endpoint. 
Known values are: "POST". + :keyword method: Rest API method for target endpoint. Required. "POST" :paramtype method: str or ~azure.mgmt.datafactory.models.WebHookActivityMethod - :keyword url: Required. WebHook activity target endpoint and path. Type: string (or Expression - with resultType string). - :paramtype url: any + :keyword url: WebHook activity target endpoint and path. Type: string (or Expression with + resultType string). Required. + :paramtype url: JSON :keyword timeout: The timeout within which the webhook should be called back. If there is no value specified, it defaults to 10 minutes. Type: string. Pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). @@ -59311,20 +63207,27 @@ def __init__( :keyword headers: Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :paramtype headers: any + :paramtype headers: JSON :keyword body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). - :paramtype body: any + :paramtype body: JSON :keyword authentication: Authentication method used for calling the endpoint. :paramtype authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication :keyword report_status_on_call_back: When set to true, statusCode, output and error in callback request body will be consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype report_status_on_call_back: any - """ - super(WebHookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'WebHook' # type: str + :paramtype report_status_on_call_back: JSON + """ + super().__init__( + additional_properties=additional_properties, + name=name, + description=description, + depends_on=depends_on, + user_properties=user_properties, + **kwargs + ) + self.type = "WebHook" # type: str self.method = method self.url = url self.timeout = timeout @@ -59341,8 +63244,8 @@ class WebLinkedService(LinkedService): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -59351,41 +63254,41 @@ class WebLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar type_properties: Required. Web linked service properties. + :vartype annotations: list[JSON] + :ivar type_properties: Web linked service properties. Required. 
:vartype type_properties: ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties """ _validation = { - 'type': {'required': True}, - 'type_properties': {'required': True}, + "type": {"required": True}, + "type_properties": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type_properties': {'key': 'typeProperties', 'type': 'WebLinkedServiceTypeProperties'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "type_properties": {"key": "typeProperties", "type": "WebLinkedServiceTypeProperties"}, } def __init__( self, *, type_properties: "_models.WebLinkedServiceTypeProperties", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -59393,12 +63296,19 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword type_properties: Required. Web linked service properties. + :paramtype annotations: list[JSON] + :keyword type_properties: Web linked service properties. Required. :paramtype type_properties: ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties """ - super(WebLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Web' # type: str + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Web" # type: str self.type_properties = type_properties @@ -59409,176 +63319,193 @@ class WebSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. 
:vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - additional_columns: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any - """ - super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'WebSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "WebSource" # type: str self.additional_columns = additional_columns -class WebTableDataset(Dataset): +class WebTableDataset(Dataset): # pylint: disable=too-many-instance-attributes """The dataset points to a HTML table in the web page. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :ivar index: Required. The zero-based index of the table in the web page. Type: integer (or - Expression with resultType integer), minimum: 0. - :vartype index: any + :ivar index: The zero-based index of the table in the web page. Type: integer (or Expression + with resultType integer), minimum: 0. Required. 
+ :vartype index: JSON :ivar path: The relative URL to the web page from the linked service URL. Type: string (or Expression with resultType string). - :vartype path: any + :vartype path: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'index': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, + "index": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'index': {'key': 'typeProperties.index', 'type': 'object'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "index": {"key": "typeProperties.index", "type": "object"}, + "path": {"key": "typeProperties.path", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - index: Any, - additional_properties: Optional[Dict[str, Any]] = None, + index: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - path: Optional[Any] = None, + path: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. 
:paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :keyword index: Required. The zero-based index of the table in the web page. Type: integer (or - Expression with resultType integer), minimum: 0. - :paramtype index: any + :keyword index: The zero-based index of the table in the web page. Type: integer (or Expression + with resultType integer), minimum: 0. Required. + :paramtype index: JSON :keyword path: The relative URL to the web page from the linked service URL. Type: string (or Expression with resultType string). - :paramtype path: any - """ - super(WebTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'WebTable' # type: str + :paramtype path: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "WebTable" # type: str self.index = index self.path = path @@ -59588,12 +63515,12 @@ class WranglingDataFlow(DataFlow): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Type of data flow.Constant filled by server. + :ivar type: Type of data flow. Required. :vartype type: str :ivar description: The description of the data flow. :vartype description: str :ivar annotations: List of tags that can be used for describing the data flow. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. 
:vartype folder: ~azure.mgmt.datafactory.models.DataFlowFolder @@ -59606,24 +63533,24 @@ class WranglingDataFlow(DataFlow): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, - 'sources': {'key': 'typeProperties.sources', 'type': '[PowerQuerySource]'}, - 'script': {'key': 'typeProperties.script', 'type': 'str'}, - 'document_locale': {'key': 'typeProperties.documentLocale', 'type': 'str'}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DataFlowFolder"}, + "sources": {"key": "typeProperties.sources", "type": "[PowerQuerySource]"}, + "script": {"key": "typeProperties.script", "type": "str"}, + "document_locale": {"key": "typeProperties.documentLocale", "type": "str"}, } def __init__( self, *, description: Optional[str] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DataFlowFolder"] = None, sources: Optional[List["_models.PowerQuerySource"]] = None, script: Optional[str] = None, @@ -59634,7 +63561,7 @@ def __init__( :keyword description: The description of the data flow. :paramtype description: str :keyword annotations: List of tags that can be used for describing the data flow. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DataFlowFolder @@ -59645,22 +63572,22 @@ def __init__( :keyword document_locale: Locale of the Power query mashup document. :paramtype document_locale: str """ - super(WranglingDataFlow, self).__init__(description=description, annotations=annotations, folder=folder, **kwargs) - self.type = 'WranglingDataFlow' # type: str + super().__init__(description=description, annotations=annotations, folder=folder, **kwargs) + self.type = "WranglingDataFlow" # type: str self.sources = sources self.script = script self.document_locale = document_locale -class XeroLinkedService(LinkedService): +class XeroLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Xero Service linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -59669,12 +63596,12 @@ class XeroLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_properties: Properties used to connect to Xero. 
It is mutually exclusive with any other properties in the linked service. Type: object. - :vartype connection_properties: any + :vartype connection_properties: JSON :ivar host: The endpoint of the Xero server. (i.e. api.xero.com). - :vartype host: any + :vartype host: JSON :ivar consumer_key: The consumer key associated with the Xero application. :vartype consumer_key: ~azure.mgmt.datafactory.models.SecretBase :ivar private_key: The private key from the .pem file that was generated for your Xero private @@ -59683,63 +63610,63 @@ class XeroLinkedService(LinkedService): :vartype private_key: ~azure.mgmt.datafactory.models.SecretBase :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :vartype use_encrypted_endpoints: any + :vartype use_encrypted_endpoints: JSON :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :vartype use_host_verification: any + :vartype use_host_verification: JSON :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :vartype use_peer_verification: any + :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'SecretBase'}, - 'private_key': {'key': 'typeProperties.privateKey', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_properties": {"key": "typeProperties.connectionProperties", "type": "object"}, + "host": {"key": "typeProperties.host", "type": "object"}, + "consumer_key": {"key": "typeProperties.consumerKey", "type": "SecretBase"}, + "private_key": {"key": "typeProperties.privateKey", "type": "SecretBase"}, + "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, + 
"use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, + "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_properties: Optional[Any] = None, - host: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_properties: Optional[JSON] = None, + host: Optional[JSON] = None, consumer_key: Optional["_models.SecretBase"] = None, private_key: Optional["_models.SecretBase"] = None, - use_encrypted_endpoints: Optional[Any] = None, - use_host_verification: Optional[Any] = None, - use_peer_verification: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + use_encrypted_endpoints: Optional[JSON] = None, + use_host_verification: Optional[JSON] = None, + use_peer_verification: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -59747,12 +63674,12 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_properties: Properties used to connect to Xero. It is mutually exclusive with any other properties in the linked service. Type: object. - :paramtype connection_properties: any + :paramtype connection_properties: JSON :keyword host: The endpoint of the Xero server. (i.e. api.xero.com). - :paramtype host: any + :paramtype host: JSON :keyword consumer_key: The consumer key associated with the Xero application. :paramtype consumer_key: ~azure.mgmt.datafactory.models.SecretBase :keyword private_key: The private key from the .pem file that was generated for your Xero @@ -59762,21 +63689,28 @@ def __init__( :paramtype private_key: ~azure.mgmt.datafactory.models.SecretBase :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :paramtype use_encrypted_endpoints: any + :paramtype use_encrypted_endpoints: JSON :keyword use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :paramtype use_host_verification: any + :paramtype use_host_verification: JSON :keyword use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. 
- :paramtype use_peer_verification: any + :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(XeroLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Xero' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Xero" # type: str self.connection_properties = connection_properties self.host = host self.consumer_key = consumer_key @@ -59794,88 +63728,98 @@ class XeroObjectDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. Type: string (or Expression with resultType string). 
- :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(XeroObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'XeroObject' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "XeroObject" # type: str self.table_name = table_name @@ -59886,116 +63830,125 @@ class XeroSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'XeroSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "XeroSource" # type: str self.query = query -class XmlDataset(Dataset): +class XmlDataset(Dataset): # pylint: disable=too-many-instance-attributes """Xml dataset. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -60006,69 +63959,69 @@ class XmlDataset(Dataset): of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :vartype encoding_name: any + :vartype encoding_name: JSON :ivar null_value: The null value string. Type: string (or Expression with resultType string). - :vartype null_value: any + :vartype null_value: JSON :ivar compression: The data compression method used for the json dataset. 
:vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "location": {"key": "typeProperties.location", "type": "DatasetLocation"}, + "encoding_name": {"key": "typeProperties.encodingName", "type": "object"}, + "null_value": {"key": "typeProperties.nullValue", "type": "object"}, + "compression": {"key": "typeProperties.compression", "type": "DatasetCompression"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, location: Optional["_models.DatasetLocation"] = None, - encoding_name: Optional[Any] = None, - null_value: Optional[Any] = None, + encoding_name: Optional[JSON] = None, + null_value: Optional[JSON] = None, compression: Optional["_models.DatasetCompression"] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. 
Linked service reference. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -60079,15 +64032,25 @@ def __init__( of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :paramtype encoding_name: any + :paramtype encoding_name: JSON :keyword null_value: The null value string. Type: string (or Expression with resultType string). - :paramtype null_value: any + :paramtype null_value: JSON :keyword compression: The data compression method used for the json dataset. :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ - super(XmlDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'Xml' # type: str + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "Xml" # type: str self.location = location self.encoding_name = encoding_name self.null_value = null_value @@ -60101,75 +64064,75 @@ class XmlReadSettings(FormatReadSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The read setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. :vartype type: str :ivar compression_properties: Compression settings. :vartype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings :ivar validation_mode: Indicates what validation method is used when reading the xml files. Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). - :vartype validation_mode: any + :vartype validation_mode: JSON :ivar detect_data_type: Indicates whether type detection is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). - :vartype detect_data_type: any + :vartype detect_data_type: JSON :ivar namespaces: Indicates whether namespace is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). - :vartype namespaces: any + :vartype namespaces: JSON :ivar namespace_prefixes: Namespace uri to prefix mappings to override the prefixes in column names when namespace is enabled, if no prefix is defined for a namespace uri, the prefix of xml element/attribute name in the xml data file will be used. Example: "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression with resultType object). 
- :vartype namespace_prefixes: any + :vartype namespace_prefixes: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, - 'validation_mode': {'key': 'validationMode', 'type': 'object'}, - 'detect_data_type': {'key': 'detectDataType', 'type': 'object'}, - 'namespaces': {'key': 'namespaces', 'type': 'object'}, - 'namespace_prefixes': {'key': 'namespacePrefixes', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "compression_properties": {"key": "compressionProperties", "type": "CompressionReadSettings"}, + "validation_mode": {"key": "validationMode", "type": "object"}, + "detect_data_type": {"key": "detectDataType", "type": "object"}, + "namespaces": {"key": "namespaces", "type": "object"}, + "namespace_prefixes": {"key": "namespacePrefixes", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, compression_properties: Optional["_models.CompressionReadSettings"] = None, - validation_mode: Optional[Any] = None, - detect_data_type: Optional[Any] = None, - namespaces: Optional[Any] = None, - namespace_prefixes: Optional[Any] = None, + validation_mode: Optional[JSON] = None, + detect_data_type: Optional[JSON] = None, + namespaces: Optional[JSON] = None, + namespace_prefixes: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword compression_properties: Compression settings. :paramtype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings :keyword validation_mode: Indicates what validation method is used when reading the xml files. Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). - :paramtype validation_mode: any + :paramtype validation_mode: JSON :keyword detect_data_type: Indicates whether type detection is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). - :paramtype detect_data_type: any + :paramtype detect_data_type: JSON :keyword namespaces: Indicates whether namespace is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). - :paramtype namespaces: any + :paramtype namespaces: JSON :keyword namespace_prefixes: Namespace uri to prefix mappings to override the prefixes in column names when namespace is enabled, if no prefix is defined for a namespace uri, the prefix of xml element/attribute name in the xml data file will be used. Example: "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression with resultType object). 
- :paramtype namespace_prefixes: any + :paramtype namespace_prefixes: JSON """ - super(XmlReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'XmlReadSettings' # type: str + super().__init__(additional_properties=additional_properties, **kwargs) + self.type = "XmlReadSettings" # type: str self.compression_properties = compression_properties self.validation_mode = validation_mode self.detect_data_type = detect_data_type @@ -60184,99 +64147,106 @@ class XmlSource(CopySource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar store_settings: Xml store settings. :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :ivar format_settings: Xml format settings. :vartype format_settings: ~azure.mgmt.datafactory.models.XmlReadSettings :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :vartype additional_columns: any + :vartype additional_columns: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'XmlReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "store_settings": {"key": "storeSettings", "type": "StoreReadSettings"}, + "format_settings": {"key": "formatSettings", "type": "XmlReadSettings"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, store_settings: Optional["_models.StoreReadSettings"] = None, format_settings: Optional["_models.XmlReadSettings"] = None, - additional_columns: Optional[Any] = None, + additional_columns: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword store_settings: Xml store settings. :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :keyword format_settings: Xml format settings. 
:paramtype format_settings: ~azure.mgmt.datafactory.models.XmlReadSettings :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any - """ - super(XmlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) - self.type = 'XmlSource' # type: str + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type = "XmlSource" # type: str self.store_settings = store_settings self.format_settings = format_settings self.additional_columns = additional_columns -class ZendeskLinkedService(LinkedService): +class ZendeskLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Linked service for Zendesk. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -60285,16 +64255,16 @@ class ZendeskLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] - :ivar authentication_type: Required. The authentication type to use. Known values are: "Basic", - "Token". + :vartype annotations: list[JSON] + :ivar authentication_type: The authentication type to use. Required. Known values are: "Basic" + and "Token". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.ZendeskAuthenticationType - :ivar url: Required. The url to connect Zendesk source. Type: string (or Expression with - resultType string). - :vartype url: any + :ivar url: The url to connect Zendesk source. Type: string (or Expression with resultType + string). Required. + :vartype url: JSON :ivar user_name: The username of the Zendesk source. Type: string (or Expression with resultType string). - :vartype user_name: any + :vartype user_name: JSON :ivar password: The password of the Zendesk source. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar api_token: The api token for the Zendesk source. @@ -60302,50 +64272,50 @@ class ZendeskLinkedService(LinkedService): :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, - 'authentication_type': {'required': True}, - 'url': {'required': True}, + "type": {"required": True}, + "authentication_type": {"required": True}, + "url": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'api_token': {'key': 'typeProperties.apiToken', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, + "url": {"key": "typeProperties.url", "type": "object"}, + "user_name": {"key": "typeProperties.userName", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "SecretBase"}, + "api_token": {"key": "typeProperties.apiToken", "type": "SecretBase"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, authentication_type: Union[str, "_models.ZendeskAuthenticationType"], - url: Any, - additional_properties: Optional[Dict[str, Any]] = None, + url: JSON, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - user_name: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, api_token: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[Any] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -60353,16 +64323,16 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] - :keyword authentication_type: Required. The authentication type to use. 
Known values are: - "Basic", "Token". + :paramtype annotations: list[JSON] + :keyword authentication_type: The authentication type to use. Required. Known values are: + "Basic" and "Token". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.ZendeskAuthenticationType - :keyword url: Required. The url to connect Zendesk source. Type: string (or Expression with - resultType string). - :paramtype url: any + :keyword url: The url to connect Zendesk source. Type: string (or Expression with resultType + string). Required. + :paramtype url: JSON :keyword user_name: The username of the Zendesk source. Type: string (or Expression with resultType string). - :paramtype user_name: any + :paramtype user_name: JSON :keyword password: The password of the Zendesk source. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword api_token: The api token for the Zendesk source. @@ -60370,10 +64340,17 @@ def __init__( :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(ZendeskLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Zendesk' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Zendesk" # type: str self.authentication_type = authentication_type self.url = url self.user_name = user_name @@ -60389,53 +64366,53 @@ class ZipDeflateReadSettings(CompressionReadSettings): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. The Compression setting type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: The Compression setting type. Required. :vartype type: str :ivar preserve_zip_file_name_as_folder: Preserve the zip file name as folder path. Type: boolean (or Expression with resultType boolean). - :vartype preserve_zip_file_name_as_folder: any + :vartype preserve_zip_file_name_as_folder: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'preserve_zip_file_name_as_folder': {'key': 'preserveZipFileNameAsFolder', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "preserve_zip_file_name_as_folder": {"key": "preserveZipFileNameAsFolder", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - preserve_zip_file_name_as_folder: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + preserve_zip_file_name_as_folder: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword preserve_zip_file_name_as_folder: Preserve the zip file name as folder path. 
Type: boolean (or Expression with resultType boolean). - :paramtype preserve_zip_file_name_as_folder: any + :paramtype preserve_zip_file_name_as_folder: JSON """ - super(ZipDeflateReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'ZipDeflateReadSettings' # type: str + super().__init__(additional_properties=additional_properties, **kwargs) + self.type = "ZipDeflateReadSettings" # type: str self.preserve_zip_file_name_as_folder = preserve_zip_file_name_as_folder -class ZohoLinkedService(LinkedService): +class ZohoLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Zoho server linked service. All required parameters must be populated in order to send to Azure. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of linked service.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. :vartype type: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference @@ -60444,71 +64421,71 @@ class ZohoLinkedService(LinkedService): :ivar parameters: Parameters for linked service. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar connection_properties: Properties used to connect to Zoho. It is mutually exclusive with any other properties in the linked service. Type: object. - :vartype connection_properties: any + :vartype connection_properties: JSON :ivar endpoint: The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private). - :vartype endpoint: any + :vartype endpoint: JSON :ivar access_token: The access token for Zoho authentication. :vartype access_token: ~azure.mgmt.datafactory.models.SecretBase :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :vartype use_encrypted_endpoints: any + :vartype use_encrypted_endpoints: JSON :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :vartype use_host_verification: any + :vartype use_host_verification: JSON :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :vartype use_peer_verification: any + :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :vartype encrypted_credential: any + :vartype encrypted_credential: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "connection_properties": {"key": "typeProperties.connectionProperties", "type": "object"}, + "endpoint": {"key": "typeProperties.endpoint", "type": "object"}, + "access_token": {"key": "typeProperties.accessToken", "type": "SecretBase"}, + "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, + "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, + "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, - connection_properties: Optional[Any] = None, - endpoint: Optional[Any] = None, + annotations: Optional[List[JSON]] = None, + connection_properties: Optional[JSON] = None, + endpoint: Optional[JSON] = None, access_token: Optional["_models.SecretBase"] = None, - use_encrypted_endpoints: Optional[Any] = None, - use_host_verification: Optional[Any] = None, - use_peer_verification: Optional[Any] = None, - encrypted_credential: Optional[Any] = None, + use_encrypted_endpoints: Optional[JSON] = None, + use_host_verification: Optional[JSON] = None, + use_peer_verification: Optional[JSON] = None, + encrypted_credential: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword connect_via: The integration runtime reference. 
:paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -60516,31 +64493,38 @@ def __init__( :keyword parameters: Parameters for linked service. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword connection_properties: Properties used to connect to Zoho. It is mutually exclusive with any other properties in the linked service. Type: object. - :paramtype connection_properties: any + :paramtype connection_properties: JSON :keyword endpoint: The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private). - :paramtype endpoint: any + :paramtype endpoint: JSON :keyword access_token: The access token for Zoho authentication. :paramtype access_token: ~azure.mgmt.datafactory.models.SecretBase :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :paramtype use_encrypted_endpoints: any + :paramtype use_encrypted_endpoints: JSON :keyword use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :paramtype use_host_verification: any + :paramtype use_host_verification: JSON :keyword use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :paramtype use_peer_verification: any + :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: any - """ - super(ZohoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Zoho' # type: str + :paramtype encrypted_credential: JSON + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type = "Zoho" # type: str self.connection_properties = connection_properties self.endpoint = endpoint self.access_token = access_token @@ -60557,88 +64541,98 @@ class ZohoObjectDataset(Dataset): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Type of dataset.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. :vartype type: str :ivar description: Dataset description. :vartype description: str :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :vartype structure: any + :vartype structure: JSON :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :vartype schema: any - :ivar linked_service_name: Required. Linked service reference. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. 
:vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar parameters: Parameters for dataset. :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the Dataset. - :vartype annotations: list[any] + :vartype annotations: list[JSON] :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder :ivar table_name: The table name. Type: string (or Expression with resultType string). - :vartype table_name: any + :vartype table_name: JSON """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + "type": {"required": True}, + "linked_service_name": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table_name": {"key": "typeProperties.tableName", "type": "object"}, } def __init__( self, *, linked_service_name: "_models.LinkedServiceReference", - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, - structure: Optional[Any] = None, - schema: Optional[Any] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, - annotations: Optional[List[Any]] = None, + annotations: Optional[List[JSON]] = None, folder: Optional["_models.DatasetFolder"] = None, - table_name: Optional[Any] = None, + table_name: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword description: Dataset description. :paramtype description: str :keyword structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :paramtype structure: any + :paramtype structure: JSON :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :paramtype schema: any - :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword parameters: Parameters for dataset. :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the Dataset. - :paramtype annotations: list[any] + :paramtype annotations: list[JSON] :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder :keyword table_name: The table name. Type: string (or Expression with resultType string). - :paramtype table_name: any - """ - super(ZohoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'ZohoObject' # type: str + :paramtype table_name: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type = "ZohoObject" # type: str self.table_name = table_name @@ -60649,87 +64643,96 @@ class ZohoSource(TabularSource): :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :vartype additional_properties: dict[str, any] - :ivar type: Required. Copy source type.Constant filled by server. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. :vartype type: str :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :vartype source_retry_count: any + :vartype source_retry_count: JSON :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype source_retry_wait: any + :vartype source_retry_wait: JSON :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :vartype max_concurrent_connections: any + :vartype max_concurrent_connections: JSON :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :vartype disable_metrics_collection: any + :vartype disable_metrics_collection: JSON :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :vartype query_timeout: any + :vartype query_timeout: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :vartype additional_columns: any + :vartype additional_columns: JSON :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :vartype query: any + :vartype query: JSON """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + "query": {"key": "query", "type": "object"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - source_retry_count: Optional[Any] = None, - source_retry_wait: Optional[Any] = None, - max_concurrent_connections: Optional[Any] = None, - disable_metrics_collection: Optional[Any] = None, - query_timeout: Optional[Any] = None, - additional_columns: Optional[Any] = None, - query: Optional[Any] = None, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + query: Optional[JSON] = None, **kwargs ): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. - :paramtype additional_properties: dict[str, any] + :paramtype additional_properties: dict[str, JSON] :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :paramtype source_retry_count: any + :paramtype source_retry_count: JSON :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype source_retry_wait: any + :paramtype source_retry_wait: JSON :keyword max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :paramtype max_concurrent_connections: any + :paramtype max_concurrent_connections: JSON :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :paramtype disable_metrics_collection: any + :paramtype disable_metrics_collection: JSON :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :paramtype query_timeout: any + :paramtype query_timeout: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: any + :paramtype additional_columns: JSON :keyword query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :paramtype query: any - """ - super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.type = 'ZohoSource' # type: str + :paramtype query: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, + **kwargs + ) + self.type = "ZohoSource" # type: str self.query = query diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_patch.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_patch.py index 0ad201a8c58..f7dd3251033 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_patch.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_patch.py @@ -10,6 +10,7 @@ __all__: List[str] = [] # Add all objects you want publicly available to users at this package level + def patch_sdk(): """Do not remove from this file. diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/__init__.py index 61caa433ff9..a20c42cd8bf 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/__init__.py @@ -31,28 +31,29 @@ from ._patch import __all__ as _patch_all from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import from ._patch import patch_sdk as _patch_sdk + __all__ = [ - 'Operations', - 'FactoriesOperations', - 'ExposureControlOperations', - 'IntegrationRuntimesOperations', - 'IntegrationRuntimeObjectMetadataOperations', - 'IntegrationRuntimeNodesOperations', - 'LinkedServicesOperations', - 'DatasetsOperations', - 'PipelinesOperations', - 'PipelineRunsOperations', - 'ActivityRunsOperations', - 'TriggersOperations', - 'TriggerRunsOperations', - 'DataFlowsOperations', - 'DataFlowDebugSessionOperations', - 'ManagedVirtualNetworksOperations', - 'ManagedPrivateEndpointsOperations', - 'PrivateEndPointConnectionsOperations', - 'PrivateEndpointConnectionOperations', - 'PrivateLinkResourcesOperations', - 'GlobalParametersOperations', + "Operations", + "FactoriesOperations", + "ExposureControlOperations", + "IntegrationRuntimesOperations", + "IntegrationRuntimeObjectMetadataOperations", + "IntegrationRuntimeNodesOperations", + "LinkedServicesOperations", + "DatasetsOperations", + "PipelinesOperations", + "PipelineRunsOperations", + "ActivityRunsOperations", + "TriggersOperations", + "TriggerRunsOperations", + "DataFlowsOperations", + "DataFlowDebugSessionOperations", + "ManagedVirtualNetworksOperations", + "ManagedPrivateEndpointsOperations", + "PrivateEndPointConnectionsOperations", + "PrivateEndpointConnectionOperations", + 
"PrivateLinkResourcesOperations", + "GlobalParametersOperations", ] __all__.extend([p for p in _patch_all if p not in __all__]) -_patch_sdk() \ No newline at end of file +_patch_sdk() diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_runs_operations.py index dccae114dcc..0557b8a38d9 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_runs_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_runs_operations.py @@ -6,11 +6,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Optional, TypeVar - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest @@ -19,58 +23,59 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models +from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False + def build_query_by_pipeline_run_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - run_id: str, - *, - json: Optional[_models.RunFilterParameters] = None, - content: Any = None, - **kwargs: Any + resource_group_name: str, factory_name: str, run_id: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", 
resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "runId": _SERIALIZER.url("run_id", run_id, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "runId": _SERIALIZER.url("run_id", run_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + class ActivityRunsOperations: """ @@ -91,54 +96,122 @@ def __init__(self, *args, **kwargs): self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace + @overload def query_by_pipeline_run( self, resource_group_name: str, factory_name: str, run_id: str, filter_parameters: _models.RunFilterParameters, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.ActivityRunsQueryResponse: """Query activity runs based on input filter conditions. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param run_id: The pipeline run identifier. + :param run_id: The pipeline run identifier. Required. :type run_id: str - :param filter_parameters: Parameters to filter the activity runs. + :param filter_parameters: Parameters to filter the activity runs. Required. :type filter_parameters: ~azure.mgmt.datafactory.models.RunFilterParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ActivityRunsQueryResponse, or the result of cls(response) + :return: ActivityRunsQueryResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.ActivityRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + def query_by_pipeline_run( + self, + resource_group_name: str, + factory_name: str, + run_id: str, + filter_parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ActivityRunsQueryResponse: + """Query activity runs based on input filter conditions. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param run_id: The pipeline run identifier. Required. + :type run_id: str + :param filter_parameters: Parameters to filter the activity runs. Required. + :type filter_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ActivityRunsQueryResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ActivityRunsQueryResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def query_by_pipeline_run( + self, + resource_group_name: str, + factory_name: str, + run_id: str, + filter_parameters: Union[_models.RunFilterParameters, IO], + **kwargs: Any + ) -> _models.ActivityRunsQueryResponse: + """Query activity runs based on input filter conditions. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param run_id: The pipeline run identifier. Required. + :type run_id: str + :param filter_parameters: Parameters to filter the activity runs. Is either a model type or a + IO type. Required. + :type filter_parameters: ~azure.mgmt.datafactory.models.RunFilterParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ActivityRunsQueryResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ActivityRunsQueryResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.ActivityRunsQueryResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ActivityRunsQueryResponse] - _json = self._serialize.body(filter_parameters, 'RunFilterParameters') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(filter_parameters, (IO, bytes)): + _content = filter_parameters + else: + _json = self._serialize.body(filter_parameters, "RunFilterParameters") request = build_query_by_pipeline_run_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, run_id=run_id, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.query_by_pipeline_run.metadata['url'], + content=_content, + template_url=self.query_by_pipeline_run.metadata["url"], headers=_headers, params=_params, ) @@ -146,22 +219,20 @@ def query_by_pipeline_run( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('ActivityRunsQueryResponse', pipeline_response) + deserialized = self._deserialize("ActivityRunsQueryResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - query_by_pipeline_run.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns"} # type: ignore - + query_by_pipeline_run.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py index e1bf4990158..90095b53b36 100644 --- 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py @@ -6,11 +6,16 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse @@ -22,228 +27,227 @@ from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models +from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -def build_create_request_initial( - subscription_id: str, - resource_group_name: str, - factory_name: str, - *, - json: Optional[_models.CreateDataFlowDebugSessionRequest] = None, - content: Any = None, - **kwargs: Any + +def build_create_request( + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": 
_SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_query_by_factory_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryDataFlowDebugSessions") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryDataFlowDebugSessions", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return 
HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_add_data_flow_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - *, - json: Optional[_models.DataFlowDebugPackage] = None, - content: Any = None, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/addDataFlowToDebugSession") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/addDataFlowToDebugSession", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - *, - json: Optional[_models.DeleteDataFlowDebugSessionRequest] = None, - content: Any = None, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - 
api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/deleteDataFlowDebugSession") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/deleteDataFlowDebugSession", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) - - -def build_execute_command_request_initial( - subscription_id: str, - resource_group_name: str, - factory_name: str, - *, - json: Optional[_models.DataFlowDebugCommandRequest] = None, - content: Any = None, - **kwargs: Any + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_execute_command_request( + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = 
_headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + class DataFlowDebugSessionOperations: """ @@ -264,36 +268,40 @@ def __init__(self, *args, **kwargs): self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - def _create_initial( self, resource_group_name: str, factory_name: str, - request: _models.CreateDataFlowDebugSessionRequest, + request: Union[_models.CreateDataFlowDebugSessionRequest, IO], **kwargs: Any ) -> Optional[_models.CreateDataFlowDebugSessionResponse]: - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.CreateDataFlowDebugSessionResponse]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = 
kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.CreateDataFlowDebugSessionResponse]] - _json = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(request, (IO, bytes)): + _content = request + else: + _json = self._serialize.body(request, "CreateDataFlowDebugSessionRequest") - request = build_create_request_initial( - subscription_id=self._config.subscription_id, + request = build_create_request( resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_initial.metadata['url'], + content=_content, + template_url=self._create_initial.metadata["url"], headers=_headers, params=_params, ) @@ -301,10 +309,9 @@ def _create_initial( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: @@ -314,36 +321,75 @@ def _create_initial( deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', pipeline_response) + deserialized = self._deserialize("CreateDataFlowDebugSessionResponse", pipeline_response) if response.status_code == 202: - response_headers['location']=self._deserialize('str', response.headers.get('location')) - + response_headers["location"] = self._deserialize("str", response.headers.get("location")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _create_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession"} # type: ignore + _create_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession"} # type: ignore - - @distributed_trace + @overload def begin_create( self, resource_group_name: str, factory_name: str, request: _models.CreateDataFlowDebugSessionRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> LROPoller[_models.CreateDataFlowDebugSessionResponse]: """Creates a data flow debug session. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param request: Data flow debug session definition. + :param request: Data flow debug session definition. Required. :type request: ~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either CreateDataFlowDebugSessionResponse or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create( + self, + resource_group_name: str, + factory_name: str, + request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.CreateDataFlowDebugSessionResponse]: + """Creates a data flow debug session. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param request: Data flow debug session definition. Required. + :type request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this @@ -356,20 +402,52 @@ def begin_create( result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create( + self, + resource_group_name: str, + factory_name: str, + request: Union[_models.CreateDataFlowDebugSessionRequest, IO], + **kwargs: Any + ) -> LROPoller[_models.CreateDataFlowDebugSessionResponse]: + """Creates a data flow debug session. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param request: Data flow debug session definition. Is either a model type or a IO type. + Required. + :type request: ~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either CreateDataFlowDebugSessionResponse or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionResponse] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.CreateDataFlowDebugSessionResponse] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CreateDataFlowDebugSessionResponse] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: raw_result = self._create_initial( # type: ignore resource_group_name=resource_group_name, @@ -377,79 +455,70 @@ def begin_create( request=request, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', pipeline_response) + deserialized = self._deserialize("CreateDataFlowDebugSessionResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: - polling_method = cast(PollingMethod, ARMPolling( - lro_delay, - - - **kwargs - )) # type: PollingMethod - elif polling is False: polling_method = cast(PollingMethod, NoPolling()) - else: polling_method = polling + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_create.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession"} # type: ignore + begin_create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession"} # type: ignore @distributed_trace def query_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any - ) -> Iterable[_models.QueryDataFlowDebugSessionsResponse]: + self, resource_group_name: 
str, factory_name: str, **kwargs: Any + ) -> Iterable["_models.DataFlowDebugSessionInfo"]: """Query all active data flow debug sessions. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either QueryDataFlowDebugSessionsResponse or the result - of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.QueryDataFlowDebugSessionsResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either DataFlowDebugSessionInfo or the result of + cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.DataFlowDebugSessionInfo] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.QueryDataFlowDebugSessionsResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.QueryDataFlowDebugSessionsResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_query_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.query_by_factory.metadata['url'], + template_url=self.query_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -457,16 +526,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_query_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -482,10 +546,8 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = 
pipeline_response.http_response @@ -495,55 +557,116 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - query_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryDataFlowDebugSessions"} # type: ignore + query_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryDataFlowDebugSessions"} # type: ignore - @distributed_trace + @overload def add_data_flow( self, resource_group_name: str, factory_name: str, request: _models.DataFlowDebugPackage, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.AddDataFlowToDebugSessionResponse: """Add a data flow into debug session. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param request: Data flow debug session definition with debug content. + :param request: Data flow debug session definition with debug content. Required. :type request: ~azure.mgmt.datafactory.models.DataFlowDebugPackage + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AddDataFlowToDebugSessionResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.AddDataFlowToDebugSessionResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def add_data_flow( + self, + resource_group_name: str, + factory_name: str, + request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.AddDataFlowToDebugSessionResponse: + """Add a data flow into debug session. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param request: Data flow debug session definition with debug content. Required. + :type request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: AddDataFlowToDebugSessionResponse, or the result of cls(response) + :return: AddDataFlowToDebugSessionResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.AddDataFlowToDebugSessionResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @distributed_trace + def add_data_flow( + self, + resource_group_name: str, + factory_name: str, + request: Union[_models.DataFlowDebugPackage, IO], + **kwargs: Any + ) -> _models.AddDataFlowToDebugSessionResponse: + """Add a data flow into debug session. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. 
Required. + :type factory_name: str + :param request: Data flow debug session definition with debug content. Is either a model type + or a IO type. Required. + :type request: ~azure.mgmt.datafactory.models.DataFlowDebugPackage or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AddDataFlowToDebugSessionResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.AddDataFlowToDebugSessionResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.AddDataFlowToDebugSessionResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AddDataFlowToDebugSessionResponse] - _json = self._serialize.body(request, 'DataFlowDebugPackage') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(request, (IO, bytes)): + _content = request + else: + _json = self._serialize.body(request, "DataFlowDebugPackage") request = build_add_data_flow_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.add_data_flow.metadata['url'], + content=_content, + template_url=self.add_data_flow.metadata["url"], headers=_headers, params=_params, ) @@ -551,69 +674,130 @@ def add_data_flow( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('AddDataFlowToDebugSessionResponse', pipeline_response) + deserialized = self._deserialize("AddDataFlowToDebugSessionResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - add_data_flow.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/addDataFlowToDebugSession"} # type: ignore - + add_data_flow.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/addDataFlowToDebugSession"} # type: ignore - @distributed_trace + @overload def delete( # pylint: disable=inconsistent-return-statements 
self, resource_group_name: str, factory_name: str, request: _models.DeleteDataFlowDebugSessionRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> None: """Deletes a data flow debug session. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param request: Data flow debug session definition for deletion. + :param request: Data flow debug session definition for deletion. Required. :type request: ~azure.mgmt.datafactory.models.DeleteDataFlowDebugSessionRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + factory_name: str, + request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Deletes a data flow debug session. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param request: Data flow debug session definition for deletion. Required. + :type request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + factory_name: str, + request: Union[_models.DeleteDataFlowDebugSessionRequest, IO], + **kwargs: Any + ) -> None: + """Deletes a data flow debug session. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param request: Data flow debug session definition for deletion. Is either a model type or a IO + type. Required. + :type request: ~azure.mgmt.datafactory.models.DeleteDataFlowDebugSessionRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] - _json = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(request, (IO, bytes)): + _content = request + else: + _json = self._serialize.body(request, "DeleteDataFlowDebugSessionRequest") request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.delete.metadata['url'], + content=_content, + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -621,10 +805,9 @@ def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -634,38 +817,42 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/deleteDataFlowDebugSession"} # type: ignore - + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/deleteDataFlowDebugSession"} # type: ignore def _execute_command_initial( self, resource_group_name: str, factory_name: str, - request: _models.DataFlowDebugCommandRequest, + request: Union[_models.DataFlowDebugCommandRequest, IO], **kwargs: Any ) -> Optional[_models.DataFlowDebugCommandResponse]: - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', 
_headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.DataFlowDebugCommandResponse]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.DataFlowDebugCommandResponse]] - _json = self._serialize.body(request, 'DataFlowDebugCommandRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(request, (IO, bytes)): + _content = request + else: + _json = self._serialize.body(request, "DataFlowDebugCommandRequest") - request = build_execute_command_request_initial( - subscription_id=self._config.subscription_id, + request = build_execute_command_request( resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._execute_command_initial.metadata['url'], + content=_content, + template_url=self._execute_command_initial.metadata["url"], headers=_headers, params=_params, ) @@ -673,10 +860,9 @@ def _execute_command_initial( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: @@ -686,36 +872,39 @@ def _execute_command_initial( deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('DataFlowDebugCommandResponse', pipeline_response) + deserialized = self._deserialize("DataFlowDebugCommandResponse", pipeline_response) if response.status_code == 202: - response_headers['location']=self._deserialize('str', response.headers.get('location')) - + response_headers["location"] = self._deserialize("str", response.headers.get("location")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _execute_command_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand"} # type: ignore - + _execute_command_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand"} # type: ignore - @distributed_trace + @overload def begin_execute_command( self, resource_group_name: str, factory_name: str, request: _models.DataFlowDebugCommandRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> LROPoller[_models.DataFlowDebugCommandResponse]: """Execute a data flow debug command. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param request: Data flow debug command definition. + :param request: Data flow debug command definition. Required. :type request: ~azure.mgmt.datafactory.models.DataFlowDebugCommandRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. 
+ Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this @@ -728,20 +917,88 @@ def begin_execute_command( result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.DataFlowDebugCommandResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_execute_command( + self, + resource_group_name: str, + factory_name: str, + request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.DataFlowDebugCommandResponse]: + """Execute a data flow debug command. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param request: Data flow debug command definition. Required. + :type request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either DataFlowDebugCommandResponse or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.DataFlowDebugCommandResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_execute_command( + self, + resource_group_name: str, + factory_name: str, + request: Union[_models.DataFlowDebugCommandRequest, IO], + **kwargs: Any + ) -> LROPoller[_models.DataFlowDebugCommandResponse]: + """Execute a data flow debug command. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param request: Data flow debug command definition. Is either a model type or a IO type. + Required. + :type request: ~azure.mgmt.datafactory.models.DataFlowDebugCommandRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either DataFlowDebugCommandResponse or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.DataFlowDebugCommandResponse] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.DataFlowDebugCommandResponse] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DataFlowDebugCommandResponse] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: raw_result = self._execute_command_initial( # type: ignore resource_group_name=resource_group_name, @@ -749,36 +1006,32 @@ def begin_execute_command( request=request, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize('DataFlowDebugCommandResponse', pipeline_response) + deserialized = self._deserialize("DataFlowDebugCommandResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: - polling_method = cast(PollingMethod, ARMPolling( - lro_delay, - - - **kwargs - )) # type: PollingMethod - elif polling is False: polling_method = cast(PollingMethod, NoPolling()) - else: polling_method = polling + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_execute_command.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand"} # type: ignore + begin_execute_command.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand"} # type: ignore diff 
--git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flows_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flows_operations.py index 75dd987151d..601e0fdbae5 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flows_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flows_operations.py @@ -6,11 +6,16 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse @@ -20,68 +25,80 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models +from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False + def build_create_or_update_request( - subscription_id: str, resource_group_name: str, factory_name: str, data_flow_name: str, + subscription_id: str, *, - json: Optional[_models.DataFlowResource] = None, - content: Any = None, if_match: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, 
pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "dataFlowName": _SERIALIZER.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "dataFlowName": _SERIALIZER.url( + "data_flow_name", + data_flow_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if if_match is not None: - _headers['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PUT", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id: str, resource_group_name: str, factory_name: str, data_flow_name: str, + subscription_id: str, *, if_none_match: Optional[str] = None, **kwargs: Any @@ -89,111 +106,137 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "dataFlowName": _SERIALIZER.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + 
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "dataFlowName": _SERIALIZER.url( + "data_flow_name", + data_flow_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if if_none_match is not None: - _headers['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - data_flow_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, data_flow_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "dataFlowName": _SERIALIZER.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "dataFlowName": _SERIALIZER.url( + "data_flow_name", + data_flow_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, 
**path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_list_by_factory_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) class DataFlowsOperations: """ @@ -214,8 +257,7 @@ def __init__(self, *args, **kwargs): self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace + @overload def create_or_update( self, resource_group_name: str, @@ -223,50 +265,126 @@ def create_or_update( data_flow_name: str, data_flow: _models.DataFlowResource, if_match: Optional[str] = None, + *, + content_type: str = 
"application/json", **kwargs: Any ) -> _models.DataFlowResource: """Creates or updates a data flow. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param data_flow_name: The data flow name. + :param data_flow_name: The data flow name. Required. :type data_flow_name: str - :param data_flow: Data flow resource definition. + :param data_flow: Data flow resource definition. Required. :type data_flow: ~azure.mgmt.datafactory.models.DataFlowResource :param if_match: ETag of the data flow entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. Default value is None. :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataFlowResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.DataFlowResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + data_flow_name: str, + data_flow: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DataFlowResource: + """Creates or updates a data flow. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param data_flow_name: The data flow name. Required. + :type data_flow_name: str + :param data_flow: Data flow resource definition. Required. + :type data_flow: IO + :param if_match: ETag of the data flow entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. Default value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataFlowResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.DataFlowResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + data_flow_name: str, + data_flow: Union[_models.DataFlowResource, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.DataFlowResource: + """Creates or updates a data flow. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param data_flow_name: The data flow name. Required. + :type data_flow_name: str + :param data_flow: Data flow resource definition. Is either a model type or a IO type. Required. + :type data_flow: ~azure.mgmt.datafactory.models.DataFlowResource or IO + :param if_match: ETag of the data flow entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. Default value is None. 
+ :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataFlowResource, or the result of cls(response) + :return: DataFlowResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.DataFlowResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.DataFlowResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DataFlowResource] - _json = self._serialize.body(data_flow, 'DataFlowResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(data_flow, (IO, bytes)): + _content = data_flow + else: + _json = self._serialize.body(data_flow, "DataFlowResource") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, data_flow_name=data_flow_name, + subscription_id=self._config.subscription_id, + if_match=if_match, api_version=api_version, content_type=content_type, json=_json, - if_match=if_match, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ -274,25 +392,23 @@ def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('DataFlowResource', pipeline_response) + deserialized = self._deserialize("DataFlowResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}"} # type: ignore - + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}"} # type: ignore @distributed_trace def get( @@ -305,41 +421,38 @@ def 
get( ) -> _models.DataFlowResource: """Gets a data flow. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param data_flow_name: The data flow name. + :param data_flow_name: The data flow name. Required. :type data_flow_name: str :param if_none_match: ETag of the data flow entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. Default value is None. :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataFlowResource, or the result of cls(response) + :return: DataFlowResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.DataFlowResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.DataFlowResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DataFlowResource] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, data_flow_name=data_flow_name, - api_version=api_version, + subscription_id=self._config.subscription_id, if_none_match=if_none_match, - template_url=self.get.metadata['url'], + api_version=api_version, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -347,66 +460,57 @@ def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('DataFlowResource', pipeline_response) + deserialized = self._deserialize("DataFlowResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}"} # type: ignore @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - data_flow_name: str, - **kwargs: Any + 
self, resource_group_name: str, factory_name: str, data_flow_name: str, **kwargs: Any ) -> None: """Deletes a data flow. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param data_flow_name: The data flow name. + :param data_flow_name: The data flow name. Required. :type data_flow_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, data_flow_name=data_flow_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -414,10 +518,9 @@ def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -427,47 +530,41 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}"} # type: ignore - + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}"} # type: ignore @distributed_trace def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any - ) -> Iterable[_models.DataFlowListResponse]: + self, resource_group_name: str, factory_name: str, **kwargs: Any + ) -> Iterable["_models.DataFlowResource"]: """Lists data flows. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DataFlowListResponse or the result of - cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.DataFlowListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either DataFlowResource or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.DataFlowResource] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.DataFlowListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DataFlowListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_factory.metadata['url'], + template_url=self.list_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -475,16 +572,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -500,10 +592,8 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -513,8 +603,6 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows"} # type: ignore + list_by_factory.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_datasets_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_datasets_operations.py index 7351787f6ca..f7b1689c1af 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_datasets_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_datasets_operations.py @@ -6,11 +6,16 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse @@ -20,105 +25,120 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models +from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False + def build_list_by_factory_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + 
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id: str, resource_group_name: str, factory_name: str, dataset_name: str, + subscription_id: str, *, - json: Optional[_models.DatasetResource] = None, - content: Any = None, if_match: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "datasetName": _SERIALIZER.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "datasetName": _SERIALIZER.url( + "dataset_name", + dataset_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") # Construct headers if if_match is not None: - _headers['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PUT", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id: str, resource_group_name: str, factory_name: str, dataset_name: str, + subscription_id: str, *, if_none_match: Optional[str] = None, **kwargs: Any @@ -126,74 +146,97 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "datasetName": _SERIALIZER.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "datasetName": _SERIALIZER.url( + "dataset_name", + dataset_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if if_none_match is not None: - _headers['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, 
"str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - dataset_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, dataset_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "datasetName": _SERIALIZER.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "datasetName": _SERIALIZER.url( + "dataset_name", + dataset_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) - return HttpRequest( - method="DELETE", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) class DatasetsOperations: """ @@ -214,44 +257,39 @@ def __init__(self, *args, **kwargs): self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any - ) -> Iterable[_models.DatasetListResponse]: + self, resource_group_name: 
str, factory_name: str, **kwargs: Any + ) -> Iterable["_models.DatasetResource"]: """Lists datasets. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DatasetListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.DatasetListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either DatasetResource or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.DatasetResource] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.DatasetListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DatasetListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_factory.metadata['url'], + template_url=self.list_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -259,16 +297,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -284,10 +317,8 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -297,13 +328,11 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, 
extract_data) - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets"} # type: ignore + list_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets"} # type: ignore - @distributed_trace + @overload def create_or_update( self, resource_group_name: str, @@ -311,50 +340,126 @@ def create_or_update( dataset_name: str, dataset: _models.DatasetResource, if_match: Optional[str] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.DatasetResource: """Creates or updates a dataset. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param dataset_name: The dataset name. + :param dataset_name: The dataset name. Required. :type dataset_name: str - :param dataset: Dataset resource definition. + :param dataset: Dataset resource definition. Required. :type dataset: ~azure.mgmt.datafactory.models.DatasetResource :param if_match: ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. Default value is None. :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DatasetResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.DatasetResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + dataset_name: str, + dataset: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DatasetResource: + """Creates or updates a dataset. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param dataset_name: The dataset name. Required. + :type dataset_name: str + :param dataset: Dataset resource definition. Required. + :type dataset: IO + :param if_match: ETag of the dataset entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. Default value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DatasetResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.DatasetResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + dataset_name: str, + dataset: Union[_models.DatasetResource, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.DatasetResource: + """Creates or updates a dataset. 
+ + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param dataset_name: The dataset name. Required. + :type dataset_name: str + :param dataset: Dataset resource definition. Is either a model type or a IO type. Required. + :type dataset: ~azure.mgmt.datafactory.models.DatasetResource or IO + :param if_match: ETag of the dataset entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. Default value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DatasetResource, or the result of cls(response) + :return: DatasetResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.DatasetResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.DatasetResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DatasetResource] - _json = self._serialize.body(dataset, 'DatasetResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(dataset, (IO, bytes)): + _content = dataset + else: + _json = self._serialize.body(dataset, "DatasetResource") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, dataset_name=dataset_name, + subscription_id=self._config.subscription_id, + if_match=if_match, api_version=api_version, content_type=content_type, json=_json, - if_match=if_match, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ -362,25 +467,23 @@ def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('DatasetResource', pipeline_response) + deserialized = 
self._deserialize("DatasetResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}"} # type: ignore - + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}"} # type: ignore @distributed_trace def get( @@ -393,41 +496,38 @@ def get( ) -> Optional[_models.DatasetResource]: """Gets a dataset. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param dataset_name: The dataset name. + :param dataset_name: The dataset name. Required. :type dataset_name: str :param if_none_match: ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. Default value is None. :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DatasetResource, or the result of cls(response) + :return: DatasetResource or None or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.DatasetResource or None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.DatasetResource]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.DatasetResource]] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, dataset_name=dataset_name, - api_version=api_version, + subscription_id=self._config.subscription_id, if_none_match=if_none_match, - template_url=self.get.metadata['url'], + api_version=api_version, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -435,10 +535,9 @@ def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 304]: @@ -447,56 +546,48 @@ def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('DatasetResource', pipeline_response) + deserialized = self._deserialize("DatasetResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = 
{'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}"} # type: ignore @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - dataset_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, dataset_name: str, **kwargs: Any ) -> None: """Deletes a dataset. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param dataset_name: The dataset name. + :param dataset_name: The dataset name. Required. :type dataset_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, dataset_name=dataset_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -504,10 +595,9 @@ def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -517,5 +607,4 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}"} # type: ignore - + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py index c7846220239..db60b09b8ad 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py @@ -6,11 +6,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Optional, TypeVar - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest @@ -19,144 +23,132 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models +from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -def build_get_feature_value_request( - subscription_id: str, - location_id: str, - *, - json: Optional[_models.ExposureControlRequest] = None, - content: Any = None, - **kwargs: Any -) -> HttpRequest: + +def build_get_feature_value_request(location_id: str, subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/getFeatureValue") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/getFeatureValue", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "locationId": _SERIALIZER.url("location_id", location_id, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "locationId": _SERIALIZER.url("location_id", location_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not 
None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_get_feature_value_by_factory_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - *, - json: Optional[_models.ExposureControlRequest] = None, - content: Any = None, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) 
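
These regenerated request builders all follow the same shape: format the URL template with percent-encoded path arguments, attach the api-version query parameter, set the Accept/Content-Type headers, and return a bare HttpRequest, leaving body serialization to the calling operation method. The following is a minimal, dependency-free sketch of that shape only; the function name, sample IDs, and the plain-dict return value are illustrative and are not part of the vendored SDK, which returns azure.core.rest.HttpRequest instead.

from urllib.parse import quote, urlencode

def build_example_feature_value_request(
    subscription_id: str, location_id: str, api_version: str = "2018-06-01"
) -> dict:
    # Format the URL template with percent-encoded path arguments, mirroring what
    # _format_url_section / _SERIALIZER.url do in the vendored builders.
    template = (
        "/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory"
        "/locations/{locationId}/getFeatureValue"
    )
    url = template.format(
        subscriptionId=quote(subscription_id, safe=""),
        locationId=quote(location_id, safe=""),
    )
    # api-version travels as a query parameter; Accept and Content-Type as headers.
    params = {"api-version": api_version}
    headers = {"Accept": "application/json", "Content-Type": "application/json"}
    # The generated builders return HttpRequest(method="POST", url=..., params=..., headers=...);
    # a plain dict keeps this sketch free of SDK imports.
    return {"method": "POST", "url": f"{url}?{urlencode(params)}", "headers": headers}

print(build_example_feature_value_request("00000000-0000-0000-0000-000000000000", "eastus"))

The notable change in this regeneration is visible in the signatures above: the builders no longer take json/content keyword arguments, because the operation methods now serialize either a model or an IO body themselves before constructing the request.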
def build_query_feature_values_by_factory_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - *, - json: Optional[_models.ExposureControlBatchRequest] = None, - content: Any = None, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + class ExposureControlOperations: """ @@ -177,46 +169,94 @@ def __init__(self, *args, **kwargs): self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace + @overload def get_feature_value( self, location_id: str, exposure_control_request: _models.ExposureControlRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.ExposureControlResponse: """Get exposure 
control feature for specific location. - :param location_id: The location identifier. + :param location_id: The location identifier. Required. :type location_id: str - :param exposure_control_request: The exposure control request. + :param exposure_control_request: The exposure control request. Required. :type exposure_control_request: ~azure.mgmt.datafactory.models.ExposureControlRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExposureControlResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def get_feature_value( + self, location_id: str, exposure_control_request: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.ExposureControlResponse: + """Get exposure control feature for specific location. + + :param location_id: The location identifier. Required. + :type location_id: str + :param exposure_control_request: The exposure control request. Required. + :type exposure_control_request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ExposureControlResponse, or the result of cls(response) + :return: ExposureControlResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @distributed_trace + def get_feature_value( + self, location_id: str, exposure_control_request: Union[_models.ExposureControlRequest, IO], **kwargs: Any + ) -> _models.ExposureControlResponse: + """Get exposure control feature for specific location. + + :param location_id: The location identifier. Required. + :type location_id: str + :param exposure_control_request: The exposure control request. Is either a model type or a IO + type. Required. + :type exposure_control_request: ~azure.mgmt.datafactory.models.ExposureControlRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExposureControlResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.ExposureControlResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ExposureControlResponse] - _json = self._serialize.body(exposure_control_request, 'ExposureControlRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(exposure_control_request, (IO, bytes)): + _content = exposure_control_request + else: + _json = self._serialize.body(exposure_control_request, "ExposureControlRequest") request = build_get_feature_value_request( - subscription_id=self._config.subscription_id, location_id=location_id, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.get_feature_value.metadata['url'], + content=_content, + template_url=self.get_feature_value.metadata["url"], headers=_headers, params=_params, ) @@ -224,69 +264,130 @@ def get_feature_value( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('ExposureControlResponse', pipeline_response) + deserialized = self._deserialize("ExposureControlResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_feature_value.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/getFeatureValue"} # type: ignore - + get_feature_value.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/getFeatureValue"} # type: ignore - @distributed_trace + @overload def get_feature_value_by_factory( self, resource_group_name: str, factory_name: str, exposure_control_request: _models.ExposureControlRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.ExposureControlResponse: """Get exposure control feature for specific factory. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. 
+ :param factory_name: The factory name. Required. :type factory_name: str - :param exposure_control_request: The exposure control request. + :param exposure_control_request: The exposure control request. Required. :type exposure_control_request: ~azure.mgmt.datafactory.models.ExposureControlRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExposureControlResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def get_feature_value_by_factory( + self, + resource_group_name: str, + factory_name: str, + exposure_control_request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ExposureControlResponse: + """Get exposure control feature for specific factory. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param exposure_control_request: The exposure control request. Required. + :type exposure_control_request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExposureControlResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def get_feature_value_by_factory( + self, + resource_group_name: str, + factory_name: str, + exposure_control_request: Union[_models.ExposureControlRequest, IO], + **kwargs: Any + ) -> _models.ExposureControlResponse: + """Get exposure control feature for specific factory. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param exposure_control_request: The exposure control request. Is either a model type or a IO + type. Required. + :type exposure_control_request: ~azure.mgmt.datafactory.models.ExposureControlRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ExposureControlResponse, or the result of cls(response) + :return: ExposureControlResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.ExposureControlResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ExposureControlResponse] - _json = self._serialize.body(exposure_control_request, 'ExposureControlRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(exposure_control_request, (IO, bytes)): + _content = exposure_control_request + else: + _json = self._serialize.body(exposure_control_request, "ExposureControlRequest") request = build_get_feature_value_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.get_feature_value_by_factory.metadata['url'], + content=_content, + template_url=self.get_feature_value_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -294,70 +395,134 @@ def get_feature_value_by_factory( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('ExposureControlResponse', pipeline_response) + deserialized = self._deserialize("ExposureControlResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_feature_value_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue"} # type: ignore - + get_feature_value_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue"} # type: ignore - @distributed_trace + @overload def query_feature_values_by_factory( self, 
resource_group_name: str, factory_name: str, exposure_control_batch_request: _models.ExposureControlBatchRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.ExposureControlBatchResponse: """Get list of exposure control features for specific factory. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str :param exposure_control_batch_request: The exposure control request for list of features. + Required. :type exposure_control_batch_request: ~azure.mgmt.datafactory.models.ExposureControlBatchRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ExposureControlBatchResponse, or the result of cls(response) + :return: ExposureControlBatchResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.ExposureControlBatchResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + def query_feature_values_by_factory( + self, + resource_group_name: str, + factory_name: str, + exposure_control_batch_request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ExposureControlBatchResponse: + """Get list of exposure control features for specific factory. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param exposure_control_batch_request: The exposure control request for list of features. + Required. + :type exposure_control_batch_request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExposureControlBatchResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ExposureControlBatchResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def query_feature_values_by_factory( + self, + resource_group_name: str, + factory_name: str, + exposure_control_batch_request: Union[_models.ExposureControlBatchRequest, IO], + **kwargs: Any + ) -> _models.ExposureControlBatchResponse: + """Get list of exposure control features for specific factory. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param exposure_control_batch_request: The exposure control request for list of features. Is + either a model type or a IO type. Required. + :type exposure_control_batch_request: + ~azure.mgmt.datafactory.models.ExposureControlBatchRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExposureControlBatchResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ExposureControlBatchResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.ExposureControlBatchResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ExposureControlBatchResponse] - _json = self._serialize.body(exposure_control_batch_request, 'ExposureControlBatchRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(exposure_control_batch_request, (IO, bytes)): + _content = exposure_control_batch_request + else: + _json = self._serialize.body(exposure_control_batch_request, "ExposureControlBatchRequest") request = build_query_feature_values_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.query_feature_values_by_factory.metadata['url'], + content=_content, + template_url=self.query_feature_values_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -365,22 +530,20 @@ def query_feature_values_by_factory( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('ExposureControlBatchResponse', pipeline_response) + deserialized = self._deserialize("ExposureControlBatchResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - query_feature_values_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue"} # type: ignore - + query_feature_values_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factories_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factories_operations.py index b404303ac75..71960fada3a 100644 --- 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factories_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factories_operations.py @@ -6,11 +6,16 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse @@ -20,221 +25,193 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models +from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -def build_list_request( - subscription_id: str, - **kwargs: Any -) -> HttpRequest: + +def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories") path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_configure_factory_repo_request( - subscription_id: str, - location_id: str, - *, - json: Optional[_models.FactoryRepoUpdate] = None, - content: Any = None, - **kwargs: Any -) -> HttpRequest: +def build_configure_factory_repo_request(location_id: str, subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', 
"2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "locationId": _SERIALIZER.url("location_id", location_id, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "locationId": _SERIALIZER.url("location_id", location_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_list_by_resource_group_request( - subscription_id: str, - resource_group_name: str, - **kwargs: Any -) -> HttpRequest: +def build_list_by_resource_group_request(resource_group_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), } _url = _format_url_section(_url, 
**path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - *, - json: Optional[_models.Factory] = None, - content: Any = None, - if_match: Optional[str] = None, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, *, if_match: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if if_match is not None: - _headers['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - 
method="PUT", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_update_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - *, - json: Optional[_models.FactoryUpdateParameters] = None, - content: Any = None, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PATCH", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id: str, resource_group_name: str, factory_name: str, + subscription_id: str, *, if_none_match: Optional[str] = None, **kwargs: Any @@ -242,161 +219,167 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = 
kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if if_none_match is not None: - _headers['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', 
max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_git_hub_access_token_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - *, - json: Optional[_models.GitHubAccessTokenRequest] = None, - content: Any = None, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 
'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_get_data_plane_access_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - *, - json: Optional[_models.UserAccessPolicy] = None, - content: Any = None, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = 
_SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + class FactoriesOperations: """ @@ -417,36 +400,31 @@ def __init__(self, *args, **kwargs): self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace - def list( - self, - **kwargs: Any - ) -> Iterable[_models.FactoryListResponse]: + def list(self, **kwargs: Any) -> Iterable["_models.Factory"]: """Lists factories under the specified subscription. :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either FactoryListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.FactoryListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Factory or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.Factory] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.FactoryListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.FactoryListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_request( subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata['url'], + template_url=self.list.metadata["url"], headers=_headers, params=_params, ) @@ -454,14 +432,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -477,10 +452,8 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -490,51 +463,98 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, 
extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories"} # type: ignore + list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories"} # type: ignore - @distributed_trace + @overload def configure_factory_repo( self, location_id: str, factory_repo_update: _models.FactoryRepoUpdate, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.Factory: """Updates a factory's repo information. - :param location_id: The location identifier. + :param location_id: The location identifier. Required. :type location_id: str - :param factory_repo_update: Update factory repo request definition. + :param factory_repo_update: Update factory repo request definition. Required. :type factory_repo_update: ~azure.mgmt.datafactory.models.FactoryRepoUpdate + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Factory or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.Factory + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def configure_factory_repo( + self, location_id: str, factory_repo_update: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.Factory: + """Updates a factory's repo information. + + :param location_id: The location identifier. Required. + :type location_id: str + :param factory_repo_update: Update factory repo request definition. Required. + :type factory_repo_update: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) + :return: Factory or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @distributed_trace + def configure_factory_repo( + self, location_id: str, factory_repo_update: Union[_models.FactoryRepoUpdate, IO], **kwargs: Any + ) -> _models.Factory: + """Updates a factory's repo information. + + :param location_id: The location identifier. Required. + :type location_id: str + :param factory_repo_update: Update factory repo request definition. Is either a model type or a + IO type. Required. + :type factory_repo_update: ~azure.mgmt.datafactory.models.FactoryRepoUpdate or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Factory or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.Factory + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.Factory] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Factory] - _json = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(factory_repo_update, (IO, bytes)): + _content = factory_repo_update + else: + _json = self._serialize.body(factory_repo_update, "FactoryRepoUpdate") request = build_configure_factory_repo_request( - subscription_id=self._config.subscription_id, location_id=location_id, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.configure_factory_repo.metadata['url'], + content=_content, + template_url=self.configure_factory_repo.metadata["url"], headers=_headers, params=_params, ) @@ -542,59 +562,52 @@ def configure_factory_repo( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('Factory', pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - configure_factory_repo.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo"} # type: ignore - + configure_factory_repo.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo"} # type: ignore @distributed_trace - def list_by_resource_group( - self, - resource_group_name: str, - **kwargs: Any - ) -> Iterable[_models.FactoryListResponse]: + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.Factory"]: """Lists factories. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. 
:type resource_group_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either FactoryListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.FactoryListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Factory or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.Factory] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.FactoryListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.FactoryListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_resource_group_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_resource_group.metadata['url'], + template_url=self.list_by_resource_group.metadata["url"], headers=_headers, params=_params, ) @@ -602,15 +615,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_resource_group_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -626,10 +635,8 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -639,60 +646,128 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list_by_resource_group.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories"} # type: ignore + list_by_resource_group.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories"} # type: ignore - @distributed_trace + @overload def create_or_update( self, resource_group_name: str, factory_name: str, factory: _models.Factory, if_match: Optional[str] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.Factory: """Creates or updates a factory. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param factory: Factory resource definition. + :param factory: Factory resource definition. Required. :type factory: ~azure.mgmt.datafactory.models.Factory :param if_match: ETag of the factory entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. Default value is None. :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) + :return: Factory or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + factory: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Factory: + """Creates or updates a factory. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param factory: Factory resource definition. Required. + :type factory: IO + :param if_match: ETag of the factory entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. Default value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Factory or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.Factory + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + factory: Union[_models.Factory, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.Factory: + """Creates or updates a factory. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param factory: Factory resource definition. Is either a model type or a IO type. Required. + :type factory: ~azure.mgmt.datafactory.models.Factory or IO + :param if_match: ETag of the factory entity. 
Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. Default value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Factory or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.Factory + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.Factory] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Factory] - _json = self._serialize.body(factory, 'Factory') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(factory, (IO, bytes)): + _content = factory + else: + _json = self._serialize.body(factory, "Factory") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, + if_match=if_match, api_version=api_version, content_type=content_type, json=_json, - if_match=if_match, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ -700,69 +775,130 @@ def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('Factory', pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}"} # type: ignore + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}"} # type: ignore - - @distributed_trace + @overload def update( self, resource_group_name: str, factory_name: str, factory_update_parameters: _models.FactoryUpdateParameters, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.Factory: """Updates a factory. - :param resource_group_name: The resource group name. 
+ :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param factory_update_parameters: The parameters for updating a factory. + :param factory_update_parameters: The parameters for updating a factory. Required. :type factory_update_parameters: ~azure.mgmt.datafactory.models.FactoryUpdateParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) + :return: Factory or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + def update( + self, + resource_group_name: str, + factory_name: str, + factory_update_parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Factory: + """Updates a factory. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param factory_update_parameters: The parameters for updating a factory. Required. + :type factory_update_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Factory or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.Factory + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def update( + self, + resource_group_name: str, + factory_name: str, + factory_update_parameters: Union[_models.FactoryUpdateParameters, IO], + **kwargs: Any + ) -> _models.Factory: + """Updates a factory. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param factory_update_parameters: The parameters for updating a factory. Is either a model type + or a IO type. Required. + :type factory_update_parameters: ~azure.mgmt.datafactory.models.FactoryUpdateParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Factory or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.Factory + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.Factory] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Factory] - _json = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(factory_update_parameters, (IO, bytes)): + _content = factory_update_parameters + else: + _json = self._serialize.body(factory_update_parameters, "FactoryUpdateParameters") request = build_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.update.metadata['url'], + content=_content, + template_url=self.update.metadata["url"], headers=_headers, params=_params, ) @@ -770,68 +906,59 @@ def update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('Factory', pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}"} # type: ignore - + update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}"} # type: ignore @distributed_trace def get( - self, - resource_group_name: str, - factory_name: str, - if_none_match: Optional[str] = None, - **kwargs: Any + self, resource_group_name: str, factory_name: str, if_none_match: Optional[str] = None, **kwargs: Any ) -> Optional[_models.Factory]: """Gets a factory. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str :param if_none_match: ETag of the factory entity. 
Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. Default value is None. :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) + :return: Factory or None or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.Factory or None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.Factory]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.Factory]] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, - api_version=api_version, + subscription_id=self._config.subscription_id, if_none_match=if_none_match, - template_url=self.get.metadata['url'], + api_version=api_version, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -839,10 +966,9 @@ def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 304]: @@ -851,52 +977,45 @@ def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('Factory', pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}"} # type: ignore @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, **kwargs: Any ) -> None: """Deletes a factory. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -904,10 +1023,9 @@ def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -917,52 +1035,115 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}"} # type: ignore - + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}"} # type: ignore - @distributed_trace + @overload def get_git_hub_access_token( self, resource_group_name: str, factory_name: str, git_hub_access_token_request: _models.GitHubAccessTokenRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.GitHubAccessTokenResponse: """Get GitHub Access Token. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param git_hub_access_token_request: Get GitHub access token request definition. + :param git_hub_access_token_request: Get GitHub access token request definition. Required. :type git_hub_access_token_request: ~azure.mgmt.datafactory.models.GitHubAccessTokenRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: GitHubAccessTokenResponse, or the result of cls(response) + :return: GitHubAccessTokenResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.GitHubAccessTokenResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + def get_git_hub_access_token( + self, + resource_group_name: str, + factory_name: str, + git_hub_access_token_request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.GitHubAccessTokenResponse: + """Get GitHub Access Token. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param git_hub_access_token_request: Get GitHub access token request definition. Required. + :type git_hub_access_token_request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GitHubAccessTokenResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.GitHubAccessTokenResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def get_git_hub_access_token( + self, + resource_group_name: str, + factory_name: str, + git_hub_access_token_request: Union[_models.GitHubAccessTokenRequest, IO], + **kwargs: Any + ) -> _models.GitHubAccessTokenResponse: + """Get GitHub Access Token. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param git_hub_access_token_request: Get GitHub access token request definition. Is either a + model type or a IO type. Required. + :type git_hub_access_token_request: ~azure.mgmt.datafactory.models.GitHubAccessTokenRequest or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GitHubAccessTokenResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.GitHubAccessTokenResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.GitHubAccessTokenResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.GitHubAccessTokenResponse] - _json = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(git_hub_access_token_request, (IO, bytes)): + _content = git_hub_access_token_request + else: + _json = self._serialize.body(git_hub_access_token_request, "GitHubAccessTokenRequest") request = build_get_git_hub_access_token_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.get_git_hub_access_token.metadata['url'], + content=_content, + template_url=self.get_git_hub_access_token.metadata["url"], headers=_headers, params=_params, ) @@ -970,69 +1151,126 @@ def get_git_hub_access_token( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('GitHubAccessTokenResponse', pipeline_response) + deserialized = self._deserialize("GitHubAccessTokenResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_git_hub_access_token.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken"} # type: ignore + get_git_hub_access_token.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken"} # type: ignore - - @distributed_trace + @overload def get_data_plane_access( self, resource_group_name: str, factory_name: str, policy: _models.UserAccessPolicy, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.AccessPolicyResponse: """Get Data Plane access. - :param resource_group_name: The resource group name. 
+ :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param policy: Data Plane user access policy definition. + :param policy: Data Plane user access policy definition. Required. :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AccessPolicyResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.AccessPolicyResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def get_data_plane_access( + self, + resource_group_name: str, + factory_name: str, + policy: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.AccessPolicyResponse: + """Get Data Plane access. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param policy: Data Plane user access policy definition. Required. + :type policy: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: AccessPolicyResponse, or the result of cls(response) + :return: AccessPolicyResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.AccessPolicyResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @distributed_trace + def get_data_plane_access( + self, resource_group_name: str, factory_name: str, policy: Union[_models.UserAccessPolicy, IO], **kwargs: Any + ) -> _models.AccessPolicyResponse: + """Get Data Plane access. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param policy: Data Plane user access policy definition. Is either a model type or a IO type. + Required. + :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AccessPolicyResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.AccessPolicyResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.AccessPolicyResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AccessPolicyResponse] - _json = self._serialize.body(policy, 'UserAccessPolicy') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(policy, (IO, bytes)): + _content = policy + else: + _json = self._serialize.body(policy, "UserAccessPolicy") request = build_get_data_plane_access_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.get_data_plane_access.metadata['url'], + content=_content, + template_url=self.get_data_plane_access.metadata["url"], headers=_headers, params=_params, ) @@ -1040,22 +1278,20 @@ def get_data_plane_access( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('AccessPolicyResponse', pipeline_response) + deserialized = self._deserialize("AccessPolicyResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_data_plane_access.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess"} # type: ignore - + get_data_plane_access.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_global_parameters_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_global_parameters_operations.py index 27fd9ca770a..2a48f0f65ea 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_global_parameters_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_global_parameters_operations.py @@ -6,11 +6,16 @@ # 
Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse @@ -20,173 +25,202 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models +from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False + def build_list_by_factory_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + 
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - global_parameter_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, global_parameter_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "globalParameterName": _SERIALIZER.url("global_parameter_name", global_parameter_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "globalParameterName": _SERIALIZER.url( + "global_parameter_name", + global_parameter_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - global_parameter_name: str, - *, - json: Optional[_models.GlobalParameterResource] = None, - content: Any = None, - **kwargs: Any + resource_group_name: str, factory_name: str, global_parameter_name: str, subscription_id: str, **kwargs: Any ) -> 
HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "globalParameterName": _SERIALIZER.url("global_parameter_name", global_parameter_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "globalParameterName": _SERIALIZER.url( + "global_parameter_name", + global_parameter_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PUT", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - global_parameter_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, global_parameter_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = 
kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "globalParameterName": _SERIALIZER.url("global_parameter_name", global_parameter_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "globalParameterName": _SERIALIZER.url( + "global_parameter_name", + global_parameter_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) - return HttpRequest( - method="DELETE", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) class GlobalParametersOperations: """ @@ -207,46 +241,40 @@ def __init__(self, *args, **kwargs): self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any - ) -> Iterable[_models.GlobalParameterListResponse]: + self, resource_group_name: str, factory_name: str, **kwargs: Any + ) -> Iterable["_models.GlobalParameterResource"]: """Lists Global parameters. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either GlobalParameterListResponse or the result of + :return: An iterator like instance of either GlobalParameterResource or the result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.GlobalParameterListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.GlobalParameterResource] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.GlobalParameterListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.GlobalParameterListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_factory.metadata['url'], + template_url=self.list_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -254,16 +282,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -279,10 +302,8 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -292,52 +313,43 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters"} # type: ignore + list_by_factory.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters"} # type: ignore @distributed_trace def get( - self, - resource_group_name: str, - factory_name: str, - global_parameter_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, global_parameter_name: str, **kwargs: Any ) -> _models.GlobalParameterResource: """Gets a Global parameter. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param global_parameter_name: The global parameter name. + :param global_parameter_name: The global parameter name. Required. :type global_parameter_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: GlobalParameterResource, or the result of cls(response) + :return: GlobalParameterResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.GlobalParameterResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.GlobalParameterResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.GlobalParameterResource] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, global_parameter_name=global_parameter_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -345,73 +357,140 @@ def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('GlobalParameterResource', pipeline_response) + deserialized = self._deserialize("GlobalParameterResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}"} # type: ignore - + get.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}"} # type: ignore - @distributed_trace + @overload def create_or_update( self, resource_group_name: str, factory_name: str, global_parameter_name: str, default: _models.GlobalParameterResource, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.GlobalParameterResource: """Creates or updates a Global parameter. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param global_parameter_name: The global parameter name. + :param global_parameter_name: The global parameter name. Required. :type global_parameter_name: str - :param default: Global parameter resource definition. + :param default: Global parameter resource definition. Required. :type default: ~azure.mgmt.datafactory.models.GlobalParameterResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: GlobalParameterResource, or the result of cls(response) + :return: GlobalParameterResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.GlobalParameterResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + global_parameter_name: str, + default: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.GlobalParameterResource: + """Creates or updates a Global parameter. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param global_parameter_name: The global parameter name. Required. + :type global_parameter_name: str + :param default: Global parameter resource definition. Required. + :type default: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GlobalParameterResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.GlobalParameterResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + global_parameter_name: str, + default: Union[_models.GlobalParameterResource, IO], + **kwargs: Any + ) -> _models.GlobalParameterResource: + """Creates or updates a Global parameter. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param global_parameter_name: The global parameter name. Required. 
+ :type global_parameter_name: str + :param default: Global parameter resource definition. Is either a model type or a IO type. + Required. + :type default: ~azure.mgmt.datafactory.models.GlobalParameterResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GlobalParameterResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.GlobalParameterResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.GlobalParameterResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.GlobalParameterResource] - _json = self._serialize.body(default, 'GlobalParameterResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(default, (IO, bytes)): + _content = default + else: + _json = self._serialize.body(default, "GlobalParameterResource") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, global_parameter_name=global_parameter_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ -419,66 +498,57 @@ def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('GlobalParameterResource', pipeline_response) + deserialized = self._deserialize("GlobalParameterResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}"} # type: ignore - + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}"} # type: ignore @distributed_trace def delete( # pylint: 
disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - global_parameter_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, global_parameter_name: str, **kwargs: Any ) -> None: """Deletes a Global parameter. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param global_parameter_name: The global parameter name. + :param global_parameter_name: The global parameter name. Required. :type global_parameter_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, global_parameter_name=global_parameter_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -486,10 +556,9 @@ def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -499,5 +568,4 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}"} # type: ignore - + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_nodes_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_nodes_operations.py index 124a588dc6d..8314064fbee 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_nodes_operations.py +++ 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_nodes_operations.py @@ -6,11 +6,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Optional, TypeVar - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest @@ -19,183 +23,242 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models +from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False + def build_get_request( - subscription_id: str, resource_group_name: str, factory_name: str, integration_runtime_name: str, node_name: str, + subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "nodeName": _SERIALIZER.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + 
factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + integration_runtime_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "nodeName": _SERIALIZER.url( + "node_name", node_name, "str", max_length=150, min_length=1, pattern=r"^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$" + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id: str, resource_group_name: str, factory_name: str, integration_runtime_name: str, node_name: str, + subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "nodeName": _SERIALIZER.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + integration_runtime_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "nodeName": _SERIALIZER.url( + 
"node_name", node_name, "str", max_length=150, min_length=1, pattern=r"^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$" + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_update_request( - subscription_id: str, resource_group_name: str, factory_name: str, integration_runtime_name: str, node_name: str, - *, - json: Optional[_models.UpdateIntegrationRuntimeNodeRequest] = None, - content: Any = None, + subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "nodeName": _SERIALIZER.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + integration_runtime_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "nodeName": _SERIALIZER.url( + "node_name", node_name, "str", 
max_length=150, min_length=1, pattern=r"^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$" + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PATCH", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) def build_get_ip_address_request( - subscription_id: str, resource_group_name: str, factory_name: str, integration_runtime_name: str, node_name: str, + subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "nodeName": _SERIALIZER.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + integration_runtime_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "nodeName": _SERIALIZER.url( + "node_name", node_name, "str", max_length=150, min_length=1, pattern=r"^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$" + ), } 
_url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) class IntegrationRuntimeNodesOperations: """ @@ -216,51 +279,42 @@ def __init__(self, *args, **kwargs): self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace def get( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - node_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, node_name: str, **kwargs: Any ) -> _models.SelfHostedIntegrationRuntimeNode: """Gets a self-hosted integration runtime node. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str - :param node_name: The integration runtime node name. + :param node_name: The integration runtime node name. Required. 
:type node_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) + :return: SelfHostedIntegrationRuntimeNode or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.SelfHostedIntegrationRuntimeNode] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.SelfHostedIntegrationRuntimeNode] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, node_name=node_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -268,70 +322,60 @@ def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response) + deserialized = self._deserialize("SelfHostedIntegrationRuntimeNode", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}"} # type: ignore @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - node_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, node_name: str, **kwargs: Any ) -> None: """Deletes a self-hosted integration runtime node. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
:type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str - :param node_name: The integration runtime node name. + :param node_name: The integration runtime node name. Required. :type node_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, node_name=node_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -339,10 +383,9 @@ def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -352,10 +395,9 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}"} # type: ignore - + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}"} # type: ignore - @distributed_trace + @overload def update( self, resource_group_name: str, @@ -363,51 +405,128 @@ def update( integration_runtime_name: str, node_name: str, update_integration_runtime_node_request: _models.UpdateIntegrationRuntimeNodeRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.SelfHostedIntegrationRuntimeNode: """Updates a self-hosted integration runtime node. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
+ :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str - :param node_name: The integration runtime node name. + :param node_name: The integration runtime node name. Required. :type node_name: str :param update_integration_runtime_node_request: The parameters for updating an integration - runtime node. + runtime node. Required. :type update_integration_runtime_node_request: ~azure.mgmt.datafactory.models.UpdateIntegrationRuntimeNodeRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) + :return: SelfHostedIntegrationRuntimeNode or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + def update( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + node_name: str, + update_integration_runtime_node_request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.SelfHostedIntegrationRuntimeNode: + """Updates a self-hosted integration runtime node. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param node_name: The integration runtime node name. Required. + :type node_name: str + :param update_integration_runtime_node_request: The parameters for updating an integration + runtime node. Required. + :type update_integration_runtime_node_request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SelfHostedIntegrationRuntimeNode or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def update( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + node_name: str, + update_integration_runtime_node_request: Union[_models.UpdateIntegrationRuntimeNodeRequest, IO], + **kwargs: Any + ) -> _models.SelfHostedIntegrationRuntimeNode: + """Updates a self-hosted integration runtime node. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param node_name: The integration runtime node name. Required. + :type node_name: str + :param update_integration_runtime_node_request: The parameters for updating an integration + runtime node. Is either a model type or a IO type. Required. 
+ :type update_integration_runtime_node_request: + ~azure.mgmt.datafactory.models.UpdateIntegrationRuntimeNodeRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SelfHostedIntegrationRuntimeNode or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.SelfHostedIntegrationRuntimeNode] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.SelfHostedIntegrationRuntimeNode] - _json = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(update_integration_runtime_node_request, (IO, bytes)): + _content = update_integration_runtime_node_request + else: + _json = self._serialize.body(update_integration_runtime_node_request, "UpdateIntegrationRuntimeNodeRequest") request = build_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, node_name=node_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.update.metadata['url'], + content=_content, + template_url=self.update.metadata["url"], headers=_headers, params=_params, ) @@ -415,70 +534,60 @@ def update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response) + deserialized = self._deserialize("SelfHostedIntegrationRuntimeNode", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}"} # type: ignore - + update.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}"} # type: ignore @distributed_trace def get_ip_address( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - node_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, node_name: str, **kwargs: Any ) -> _models.IntegrationRuntimeNodeIpAddress: """Get the IP address of self-hosted integration runtime node. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str - :param node_name: The integration runtime node name. + :param node_name: The integration runtime node name. Required. :type node_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeNodeIpAddress, or the result of cls(response) + :return: IntegrationRuntimeNodeIpAddress or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeNodeIpAddress - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeNodeIpAddress] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeNodeIpAddress] - request = build_get_ip_address_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, node_name=node_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_ip_address.metadata['url'], + template_url=self.get_ip_address.metadata["url"], headers=_headers, params=_params, ) @@ -486,22 +595,20 @@ def get_ip_address( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('IntegrationRuntimeNodeIpAddress', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeNodeIpAddress", pipeline_response) if cls: return 
cls(pipeline_response, deserialized, {}) return deserialized - get_ip_address.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress"} # type: ignore - + get_ip_address.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py index b9fb6fff344..f3dd3129583 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py @@ -6,11 +6,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Optional, TypeVar, Union, cast - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod @@ -21,97 +25,114 @@ from azure.mgmt.core.polling.arm_polling import ARMPolling from .. 
import models as _models +from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -def build_refresh_request_initial( - subscription_id: str, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + +def build_refresh_request( + resource_group_name: str, factory_name: str, integration_runtime_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + integration_runtime_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - *, - json: 
Optional[_models.GetSsisObjectMetadataRequest] = None, - content: Any = None, - **kwargs: Any + resource_group_name: str, factory_name: str, integration_runtime_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getObjectMetadata") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getObjectMetadata", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + integration_runtime_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + class IntegrationRuntimeObjectMetadataOperations: """ @@ -132,33 +153,25 @@ def __init__(self, *args, **kwargs): self._serialize = input_args.pop(0) if 
input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - def _refresh_initial( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> Optional[_models.SsisObjectMetadataStatusResponse]: - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.SsisObjectMetadataStatusResponse]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.SsisObjectMetadataStatusResponse]] - - request = build_refresh_request_initial( - subscription_id=self._config.subscription_id, + request = build_refresh_request( resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._refresh_initial.metadata['url'], + template_url=self._refresh_initial.metadata["url"], headers=_headers, params=_params, ) @@ -166,10 +179,9 @@ def _refresh_initial( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: @@ -178,31 +190,26 @@ def _refresh_initial( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('SsisObjectMetadataStatusResponse', pipeline_response) + deserialized = self._deserialize("SsisObjectMetadataStatusResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _refresh_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata"} # type: ignore - + _refresh_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata"} # type: ignore @distributed_trace def begin_refresh( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> LROPoller[_models.SsisObjectMetadataStatusResponse]: """Refresh a SSIS integration runtime object metadata. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
:type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -216,111 +223,176 @@ def begin_refresh( result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.SsisObjectMetadataStatusResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.SsisObjectMetadataStatusResponse] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.SsisObjectMetadataStatusResponse] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: raw_result = self._refresh_initial( # type: ignore resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize('SsisObjectMetadataStatusResponse', pipeline_response) + deserialized = self._deserialize("SsisObjectMetadataStatusResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: - polling_method = cast(PollingMethod, ARMPolling( - lro_delay, - - - **kwargs - )) # type: PollingMethod - elif polling is False: polling_method = cast(PollingMethod, NoPolling()) - else: polling_method = polling + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_refresh.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata"} # type: ignore + begin_refresh.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata"} # type: ignore - 
@distributed_trace + @overload def get( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, get_metadata_request: Optional[_models.GetSsisObjectMetadataRequest] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.SsisObjectMetadataListResponse: """Get a SSIS integration runtime object metadata by specified path. The return is pageable metadata list. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :param get_metadata_request: The parameters for getting a SSIS object metadata. Default value is None. :type get_metadata_request: ~azure.mgmt.datafactory.models.GetSsisObjectMetadataRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: SsisObjectMetadataListResponse, or the result of cls(response) + :return: SsisObjectMetadataListResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.SsisObjectMetadataListResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + def get( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + get_metadata_request: Optional[IO] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.SsisObjectMetadataListResponse: + """Get a SSIS integration runtime object metadata by specified path. The return is pageable + metadata list. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param get_metadata_request: The parameters for getting a SSIS object metadata. Default value + is None. + :type get_metadata_request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SsisObjectMetadataListResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.SsisObjectMetadataListResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def get( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + get_metadata_request: Optional[Union[_models.GetSsisObjectMetadataRequest, IO]] = None, + **kwargs: Any + ) -> _models.SsisObjectMetadataListResponse: + """Get a SSIS integration runtime object metadata by specified path. The return is pageable + metadata list. + + :param resource_group_name: The resource group name. Required. 
+ :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param get_metadata_request: The parameters for getting a SSIS object metadata. Is either a + model type or a IO type. Default value is None. + :type get_metadata_request: ~azure.mgmt.datafactory.models.GetSsisObjectMetadataRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SsisObjectMetadataListResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.SsisObjectMetadataListResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.SsisObjectMetadataListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.SsisObjectMetadataListResponse] - if get_metadata_request is not None: - _json = self._serialize.body(get_metadata_request, 'GetSsisObjectMetadataRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(get_metadata_request, (IO, bytes)): + _content = get_metadata_request else: - _json = None + if get_metadata_request is not None: + _json = self._serialize.body(get_metadata_request, "GetSsisObjectMetadataRequest") + else: + _json = None request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.get.metadata['url'], + content=_content, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -328,22 +400,20 @@ def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('SsisObjectMetadataListResponse', pipeline_response) + deserialized = self._deserialize("SsisObjectMetadataListResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getObjectMetadata"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getObjectMetadata"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py index 881840db2f5..4ea5460d765 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py @@ -6,11 +6,16 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse @@ -22,105 +27,120 @@ from azure.mgmt.core.polling.arm_polling import ARMPolling from .. 
import models as _models +from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False + def build_list_by_factory_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id: str, resource_group_name: str, factory_name: str, integration_runtime_name: str, + subscription_id: str, *, - json: Optional[_models.IntegrationRuntimeResource] = None, - content: Any = None, if_match: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = 
kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + integration_runtime_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if if_match is not None: - _headers['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PUT", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id: str, resource_group_name: str, factory_name: str, integration_runtime_name: str, + subscription_id: str, *, if_none_match: Optional[str] = None, **kwargs: Any @@ -128,613 +148,733 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = 
_headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + integration_runtime_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if if_none_match is not None: - _headers['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_update_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - *, - json: Optional[_models.UpdateIntegrationRuntimeRequest] = None, - content: Any = None, - **kwargs: Any + resource_group_name: str, factory_name: str, integration_runtime_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = 
kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + integration_runtime_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PATCH", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, integration_runtime_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + integration_runtime_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_status_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, integration_runtime_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, 
min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + integration_runtime_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_list_outbound_network_dependencies_endpoints_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, integration_runtime_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/outboundNetworkDependenciesEndpoints") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/outboundNetworkDependenciesEndpoints", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, 
pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + integration_runtime_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_get_connection_info_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, integration_runtime_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + integration_runtime_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = 
_SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_regenerate_auth_key_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - *, - json: Optional[_models.IntegrationRuntimeRegenerateKeyParameters] = None, - content: Any = None, - **kwargs: Any + resource_group_name: str, factory_name: str, integration_runtime_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + integration_runtime_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers['Content-Type'] = 
_SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_list_auth_keys_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, integration_runtime_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + integration_runtime_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_start_request_initial( - 
subscription_id: str, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any +def build_start_request( + resource_group_name: str, factory_name: str, integration_runtime_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + integration_runtime_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_stop_request_initial( - subscription_id: str, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any +def build_stop_request( + resource_group_name: str, factory_name: str, integration_runtime_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = 
_headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + integration_runtime_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_sync_credentials_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, integration_runtime_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + integration_runtime_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_get_monitoring_data_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, integration_runtime_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", 
factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + integration_runtime_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_upgrade_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, integration_runtime_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", 
+ factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + integration_runtime_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_remove_links_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - *, - json: Optional[_models.LinkedIntegrationRuntimeRequest] = None, - content: Any = None, - **kwargs: Any + resource_group_name: str, factory_name: str, integration_runtime_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + integration_runtime_name, + "str", + max_length=63, + 
min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_create_linked_integration_runtime_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - *, - json: Optional[_models.CreateLinkedIntegrationRuntimeRequest] = None, - content: Any = None, - **kwargs: Any + resource_group_name: str, factory_name: str, integration_runtime_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "integrationRuntimeName": _SERIALIZER.url( + "integration_runtime_name", + 
integration_runtime_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + class IntegrationRuntimesOperations: """ @@ -755,46 +895,40 @@ def __init__(self, *args, **kwargs): self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any - ) -> Iterable[_models.IntegrationRuntimeListResponse]: + self, resource_group_name: str, factory_name: str, **kwargs: Any + ) -> Iterable["_models.IntegrationRuntimeResource"]: """Lists integration runtimes. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either IntegrationRuntimeListResponse or the result of + :return: An iterator like instance of either IntegrationRuntimeResource or the result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.IntegrationRuntimeListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.IntegrationRuntimeResource] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_factory.metadata['url'], + template_url=self.list_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -802,16 +936,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -827,10 +956,8 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -840,13 +967,11 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes"} # type: ignore + list_by_factory.metadata = 
{"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes"} # type: ignore - @distributed_trace + @overload def create_or_update( self, resource_group_name: str, @@ -854,51 +979,130 @@ def create_or_update( integration_runtime_name: str, integration_runtime: _models.IntegrationRuntimeResource, if_match: Optional[str] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.IntegrationRuntimeResource: """Creates or updates an integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str - :param integration_runtime: Integration runtime resource definition. + :param integration_runtime: Integration runtime resource definition. Required. :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource :param if_match: ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. Default value is None. :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + integration_runtime: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.IntegrationRuntimeResource: + """Creates or updates an integration runtime. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param integration_runtime: Integration runtime resource definition. Required. + :type integration_runtime: IO + :param if_match: ETag of the integration runtime entity. Should only be specified for update, + for which it should match existing entity or can be * for unconditional update. Default value + is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) + :return: IntegrationRuntimeResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + integration_runtime: Union[_models.IntegrationRuntimeResource, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.IntegrationRuntimeResource: + """Creates or updates an integration runtime. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param integration_runtime: Integration runtime resource definition. Is either a model type or + a IO type. Required. + :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource or IO + :param if_match: ETag of the integration runtime entity. Should only be specified for update, + for which it should match existing entity or can be * for unconditional update. Default value + is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeResource] - _json = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(integration_runtime, (IO, bytes)): + _content = integration_runtime + else: + _json = self._serialize.body(integration_runtime, "IntegrationRuntimeResource") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, + if_match=if_match, api_version=api_version, content_type=content_type, json=_json, - if_match=if_match, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ -906,25 +1110,23 @@ def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}"} # type: ignore - + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}"} # type: ignore @distributed_trace def get( @@ -937,41 +1139,38 @@ def get( ) -> Optional[_models.IntegrationRuntimeResource]: """Gets an integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. 
:type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :param if_none_match: ETag of the integration runtime entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. Default value is None. :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) + :return: IntegrationRuntimeResource or None or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource or None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.IntegrationRuntimeResource]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.IntegrationRuntimeResource]] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, - api_version=api_version, + subscription_id=self._config.subscription_id, if_none_match=if_none_match, - template_url=self.get.metadata['url'], + api_version=api_version, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -979,10 +1178,9 @@ def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 304]: @@ -991,64 +1189,135 @@ def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}"} # type: ignore - @distributed_trace + @overload def update( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, update_integration_runtime_request: 
_models.UpdateIntegrationRuntimeRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.IntegrationRuntimeResource: """Updates an integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :param update_integration_runtime_request: The parameters for updating an integration runtime. + Required. :type update_integration_runtime_request: ~azure.mgmt.datafactory.models.UpdateIntegrationRuntimeRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) + :return: IntegrationRuntimeResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + def update( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + update_integration_runtime_request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.IntegrationRuntimeResource: + """Updates an integration runtime. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param update_integration_runtime_request: The parameters for updating an integration runtime. + Required. + :type update_integration_runtime_request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def update( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + update_integration_runtime_request: Union[_models.UpdateIntegrationRuntimeRequest, IO], + **kwargs: Any + ) -> _models.IntegrationRuntimeResource: + """Updates an integration runtime. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param update_integration_runtime_request: The parameters for updating an integration runtime. + Is either a model type or a IO type. Required. 
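# Hedged sketch of the model-typed `update` overload: disable auto-update on a
# self-hosted integration runtime. Reuses the `client` constructed in the sketch
# above; resource names remain placeholders.
from azure.mgmt.datafactory.models import UpdateIntegrationRuntimeRequest

updated = client.integration_runtimes.update(
    "my-rg", "my-factory", "my-selfhosted-ir",
    UpdateIntegrationRuntimeRequest(auto_update="Off"),
)
print(updated.properties)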
+ :type update_integration_runtime_request: + ~azure.mgmt.datafactory.models.UpdateIntegrationRuntimeRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeResource] - _json = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(update_integration_runtime_request, (IO, bytes)): + _content = update_integration_runtime_request + else: + _json = self._serialize.body(update_integration_runtime_request, "UpdateIntegrationRuntimeRequest") request = build_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.update.metadata['url'], + content=_content, + template_url=self.update.metadata["url"], headers=_headers, params=_params, ) @@ -1056,66 +1325,57 @@ def update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}"} # type: ignore - + update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}"} # type: ignore @distributed_trace def delete( # pylint: 
disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> None: """Deletes an integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -1123,10 +1383,9 @@ def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -1136,49 +1395,41 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}"} # type: ignore - + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}"} # type: ignore @distributed_trace def get_status( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> _models.IntegrationRuntimeStatusResponse: """Gets detailed status information for an integration runtime. - :param resource_group_name: The resource group name. 
+ :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeStatusResponse, or the result of cls(response) + :return: IntegrationRuntimeStatusResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeStatusResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeStatusResponse] - request = build_get_status_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_status.metadata['url'], + template_url=self.get_status.metadata["url"], headers=_headers, params=_params, ) @@ -1186,68 +1437,61 @@ def get_status( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_status.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus"} # type: ignore - + get_status.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus"} # type: ignore @distributed_trace def list_outbound_network_dependencies_endpoints( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> 
_models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse: """Gets the list of outbound network dependencies for a given Azure-SSIS integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, or the result of + :return: IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop( + "cls", None + ) # type: ClsType[_models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse] - request = build_list_outbound_network_dependencies_endpoints_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_outbound_network_dependencies_endpoints.metadata['url'], + template_url=self.list_outbound_network_dependencies_endpoints.metadata["url"], headers=_headers, params=_params, ) @@ -1255,67 +1499,60 @@ def list_outbound_network_dependencies_endpoints( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse', pipeline_response) + deserialized = self._deserialize( + "IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse", pipeline_response + ) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_outbound_network_dependencies_endpoints.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/outboundNetworkDependenciesEndpoints"} # type: ignore - + list_outbound_network_dependencies_endpoints.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/outboundNetworkDependenciesEndpoints"} # type: ignore @distributed_trace def get_connection_info( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> _models.IntegrationRuntimeConnectionInfo: """Gets the on-premises integration runtime connection information for encrypting the on-premises data source credentials. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeConnectionInfo, or the result of cls(response) + :return: IntegrationRuntimeConnectionInfo or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeConnectionInfo - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeConnectionInfo] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeConnectionInfo] - request = build_get_connection_info_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_connection_info.metadata['url'], + template_url=self.get_connection_info.metadata["url"], headers=_headers, params=_params, ) @@ -1323,75 +1560,144 @@ def get_connection_info( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, 
error_format=ARMErrorFormat) - deserialized = self._deserialize('IntegrationRuntimeConnectionInfo', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeConnectionInfo", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_connection_info.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo"} # type: ignore + get_connection_info.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo"} # type: ignore - - @distributed_trace + @overload def regenerate_auth_key( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, regenerate_key_parameters: _models.IntegrationRuntimeRegenerateKeyParameters, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.IntegrationRuntimeAuthKeys: """Regenerates the authentication key for an integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :param regenerate_key_parameters: The parameters for regenerating integration runtime - authentication key. + authentication key. Required. :type regenerate_key_parameters: ~azure.mgmt.datafactory.models.IntegrationRuntimeRegenerateKeyParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeAuthKeys or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def regenerate_auth_key( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + regenerate_key_parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.IntegrationRuntimeAuthKeys: + """Regenerates the authentication key for an integration runtime. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param regenerate_key_parameters: The parameters for regenerating integration runtime + authentication key. Required. + :type regenerate_key_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeAuthKeys or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def regenerate_auth_key( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + regenerate_key_parameters: Union[_models.IntegrationRuntimeRegenerateKeyParameters, IO], + **kwargs: Any + ) -> _models.IntegrationRuntimeAuthKeys: + """Regenerates the authentication key for an integration runtime. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param regenerate_key_parameters: The parameters for regenerating integration runtime + authentication key. Is either a model type or a IO type. Required. + :type regenerate_key_parameters: + ~azure.mgmt.datafactory.models.IntegrationRuntimeRegenerateKeyParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeAuthKeys, or the result of cls(response) + :return: IntegrationRuntimeAuthKeys or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeAuthKeys] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeAuthKeys] - _json = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(regenerate_key_parameters, (IO, bytes)): + _content = regenerate_key_parameters + else: + _json = self._serialize.body(regenerate_key_parameters, "IntegrationRuntimeRegenerateKeyParameters") request = build_regenerate_auth_key_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - 
template_url=self.regenerate_auth_key.metadata['url'], + content=_content, + template_url=self.regenerate_auth_key.metadata["url"], headers=_headers, params=_params, ) @@ -1399,66 +1705,57 @@ def regenerate_auth_key( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeAuthKeys", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - regenerate_auth_key.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey"} # type: ignore - + regenerate_auth_key.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey"} # type: ignore @distributed_trace def list_auth_keys( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> _models.IntegrationRuntimeAuthKeys: """Retrieves the authentication keys for an integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. 
:type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeAuthKeys, or the result of cls(response) + :return: IntegrationRuntimeAuthKeys or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeAuthKeys] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeAuthKeys] - request = build_list_auth_keys_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_auth_keys.metadata['url'], + template_url=self.list_auth_keys.metadata["url"], headers=_headers, params=_params, ) @@ -1466,52 +1763,43 @@ def list_auth_keys( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeAuthKeys", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_auth_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys"} # type: ignore - + list_auth_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys"} # type: ignore def _start_initial( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> Optional[_models.IntegrationRuntimeStatusResponse]: - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.IntegrationRuntimeStatusResponse]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.IntegrationRuntimeStatusResponse]] - - request = build_start_request_initial( - subscription_id=self._config.subscription_id, + request = build_start_request( resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._start_initial.metadata['url'], + template_url=self._start_initial.metadata["url"], headers=_headers, params=_params, ) @@ -1519,10 +1807,9 @@ def _start_initial( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: @@ -1531,31 +1818,26 @@ def _start_initial( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _start_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start"} # type: ignore - + _start_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start"} # type: ignore @distributed_trace def begin_start( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> LROPoller[_models.IntegrationRuntimeStatusResponse]: """Starts a ManagedReserved type integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
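# Hedged sketch of the long-running start/stop pollers for a ManagedReserved-type
# integration runtime. Reuses the `client` from the sketches above; names are placeholders.
poller = client.integration_runtimes.begin_start("my-rg", "my-factory", "my-managed-ir")
status = poller.result()  # IntegrationRuntimeStatusResponse once the LRO completes
print(status.properties.state)

client.integration_runtimes.begin_stop("my-rg", "my-factory", "my-managed-ir").result()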
@@ -1569,85 +1851,71 @@ def begin_start( result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeStatusResponse] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeStatusResponse] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: raw_result = self._start_initial( # type: ignore resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: - polling_method = cast(PollingMethod, ARMPolling( - lro_delay, - - - **kwargs - )) # type: PollingMethod - elif polling is False: polling_method = cast(PollingMethod, NoPolling()) - else: polling_method = polling + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_start.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start"} # type: ignore + begin_start.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start"} # type: ignore def _stop_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> None: - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - 
error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - - request = build_stop_request_initial( - subscription_id=self._config.subscription_id, + request = build_stop_request( resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._stop_initial.metadata['url'], + template_url=self._stop_initial.metadata["url"], headers=_headers, params=_params, ) @@ -1655,10 +1923,9 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: @@ -1668,24 +1935,19 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - _stop_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop"} # type: ignore - + _stop_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop"} # type: ignore @distributed_trace - def begin_stop( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + def begin_stop( + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> LROPoller[None]: """Stops a ManagedReserved type integration runtime. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -1697,100 +1959,86 @@ def begin_stop( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: raw_result = self._stop_initial( # type: ignore resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - if polling is True: - polling_method = cast(PollingMethod, ARMPolling( - lro_delay, - - - **kwargs - )) # type: PollingMethod - elif polling is False: polling_method = cast(PollingMethod, NoPolling()) - else: polling_method = polling + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_stop.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop"} # type: ignore + begin_stop.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop"} # type: ignore @distributed_trace def sync_credentials( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> None: """Force the integration runtime to synchronize credentials across integration runtime nodes, and this will override the credentials across all worker nodes with those available on the dispatcher node. 
If you already have the latest credential backup file, you should manually import it (preferred) on any self-hosted integration runtime node than using this API directly. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_sync_credentials_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.sync_credentials.metadata['url'], + template_url=self.sync_credentials.metadata["url"], headers=_headers, params=_params, ) @@ -1798,10 +2046,9 @@ def sync_credentials( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1811,50 +2058,42 @@ def sync_credentials( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - sync_credentials.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials"} # type: ignore - + sync_credentials.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials"} # type: ignore @distributed_trace def get_monitoring_data( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> _models.IntegrationRuntimeMonitoringData: """Get the integration runtime monitoring data, which includes the monitor data for all the nodes under this integration runtime. 
- :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeMonitoringData, or the result of cls(response) + :return: IntegrationRuntimeMonitoringData or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeMonitoringData - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeMonitoringData] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeMonitoringData] - request = build_get_monitoring_data_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_monitoring_data.metadata['url'], + template_url=self.get_monitoring_data.metadata["url"], headers=_headers, params=_params, ) @@ -1862,66 +2101,57 @@ def get_monitoring_data( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('IntegrationRuntimeMonitoringData', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeMonitoringData", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_monitoring_data.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData"} # type: ignore - + get_monitoring_data.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData"} # type: ignore @distributed_trace def upgrade( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - 
**kwargs: Any + self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any ) -> None: """Upgrade self-hosted integration runtime to latest version if availability. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_upgrade_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.upgrade.metadata['url'], + template_url=self.upgrade.metadata["url"], headers=_headers, params=_params, ) @@ -1929,10 +2159,9 @@ def upgrade( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1942,59 +2171,131 @@ def upgrade( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - upgrade.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade"} # type: ignore + upgrade.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade"} # type: ignore - - @distributed_trace + @overload def remove_links( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, factory_name: str, integration_runtime_name: str, linked_integration_runtime_request: _models.LinkedIntegrationRuntimeRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> None: """Remove all linked integration runtimes under specific data factory in a self-hosted integration runtime. 
- :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :param linked_integration_runtime_request: The data factory name for the linked integration - runtime. + runtime. Required. :type linked_integration_runtime_request: ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + def remove_links( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + linked_integration_runtime_request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Remove all linked integration runtimes under specific data factory in a self-hosted integration + runtime. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param linked_integration_runtime_request: The data factory name for the linked integration + runtime. Required. + :type linked_integration_runtime_request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def remove_links( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + linked_integration_runtime_request: Union[_models.LinkedIntegrationRuntimeRequest, IO], + **kwargs: Any + ) -> None: + """Remove all linked integration runtimes under specific data factory in a self-hosted integration + runtime. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param linked_integration_runtime_request: The data factory name for the linked integration + runtime. Is either a model type or a IO type. Required. + :type linked_integration_runtime_request: + ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeRequest or IO + :keyword content_type: Body Parameter content-type. 
Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] - _json = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(linked_integration_runtime_request, (IO, bytes)): + _content = linked_integration_runtime_request + else: + _json = self._serialize.body(linked_integration_runtime_request, "LinkedIntegrationRuntimeRequest") request = build_remove_links_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.remove_links.metadata['url'], + content=_content, + template_url=self.remove_links.metadata["url"], headers=_headers, params=_params, ) @@ -2002,10 +2303,9 @@ def remove_links( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -2015,57 +2315,130 @@ def remove_links( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - remove_links.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks"} # type: ignore + remove_links.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks"} # type: ignore - - @distributed_trace + @overload def create_linked_integration_runtime( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, create_linked_integration_runtime_request: _models.CreateLinkedIntegrationRuntimeRequest, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.IntegrationRuntimeStatusResponse: """Create a linked integration runtime entry in a shared integration runtime. - :param resource_group_name: The resource group name. 
+ :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param integration_runtime_name: The integration runtime name. + :param integration_runtime_name: The integration runtime name. Required. :type integration_runtime_name: str :param create_linked_integration_runtime_request: The linked integration runtime properties. + Required. :type create_linked_integration_runtime_request: ~azure.mgmt.datafactory.models.CreateLinkedIntegrationRuntimeRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeStatusResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_linked_integration_runtime( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + create_linked_integration_runtime_request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.IntegrationRuntimeStatusResponse: + """Create a linked integration runtime entry in a shared integration runtime. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param create_linked_integration_runtime_request: The linked integration runtime properties. + Required. + :type create_linked_integration_runtime_request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeStatusResponse, or the result of cls(response) + :return: IntegrationRuntimeStatusResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @distributed_trace + def create_linked_integration_runtime( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + create_linked_integration_runtime_request: Union[_models.CreateLinkedIntegrationRuntimeRequest, IO], + **kwargs: Any + ) -> _models.IntegrationRuntimeStatusResponse: + """Create a linked integration runtime entry in a shared integration runtime. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. Required. + :type integration_runtime_name: str + :param create_linked_integration_runtime_request: The linked integration runtime properties. Is + either a model type or a IO type. Required. 
+ :type create_linked_integration_runtime_request: + ~azure.mgmt.datafactory.models.CreateLinkedIntegrationRuntimeRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeStatusResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.IntegrationRuntimeStatusResponse] - - _json = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.IntegrationRuntimeStatusResponse] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(create_linked_integration_runtime_request, (IO, bytes)): + _content = create_linked_integration_runtime_request + else: + _json = self._serialize.body( + create_linked_integration_runtime_request, "CreateLinkedIntegrationRuntimeRequest" + ) request = build_create_linked_integration_runtime_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_linked_integration_runtime.metadata['url'], + content=_content, + template_url=self.create_linked_integration_runtime.metadata["url"], headers=_headers, params=_params, ) @@ -2073,22 +2446,20 @@ def create_linked_integration_runtime( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_linked_integration_runtime.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime"} # type: ignore - + 
create_linked_integration_runtime.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_services_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_services_operations.py index ec1580d954c..16239a5a04e 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_services_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_services_operations.py @@ -6,11 +6,16 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse @@ -20,105 +25,120 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models +from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False + def build_list_by_factory_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, 
pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id: str, resource_group_name: str, factory_name: str, linked_service_name: str, + subscription_id: str, *, - json: Optional[_models.LinkedServiceResource] = None, - content: Any = None, if_match: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "linkedServiceName": _SERIALIZER.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "linkedServiceName": _SERIALIZER.url( + "linked_service_name", + linked_service_name, + "str", + max_length=260, + min_length=1, + 
pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if if_match is not None: - _headers['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PUT", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id: str, resource_group_name: str, factory_name: str, linked_service_name: str, + subscription_id: str, *, if_none_match: Optional[str] = None, **kwargs: Any @@ -126,74 +146,97 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "linkedServiceName": _SERIALIZER.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "linkedServiceName": _SERIALIZER.url( + "linked_service_name", + linked_service_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") # Construct headers if if_none_match is not None: - _headers['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - linked_service_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, linked_service_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "linkedServiceName": _SERIALIZER.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "linkedServiceName": _SERIALIZER.url( + "linked_service_name", + linked_service_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) - return HttpRequest( - method="DELETE", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) class 
LinkedServicesOperations: """ @@ -214,45 +257,40 @@ def __init__(self, *args, **kwargs): self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any - ) -> Iterable[_models.LinkedServiceListResponse]: + self, resource_group_name: str, factory_name: str, **kwargs: Any + ) -> Iterable["_models.LinkedServiceResource"]: """Lists linked services. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either LinkedServiceListResponse or the result of + :return: An iterator like instance of either LinkedServiceResource or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.LinkedServiceListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.LinkedServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.LinkedServiceListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.LinkedServiceListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_factory.metadata['url'], + template_url=self.list_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -260,16 +298,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -285,10 +318,8 @@ def 
extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -298,13 +329,11 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices"} # type: ignore + list_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices"} # type: ignore - @distributed_trace + @overload def create_or_update( self, resource_group_name: str, @@ -312,51 +341,130 @@ def create_or_update( linked_service_name: str, linked_service: _models.LinkedServiceResource, if_match: Optional[str] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.LinkedServiceResource: """Creates or updates a linked service. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param linked_service_name: The linked service name. + :param linked_service_name: The linked service name. Required. :type linked_service_name: str - :param linked_service: Linked service resource definition. + :param linked_service: Linked service resource definition. Required. :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceResource :param if_match: ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. Default value is None. :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: LinkedServiceResource, or the result of cls(response) + :return: LinkedServiceResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.LinkedServiceResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + linked_service_name: str, + linked_service: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.LinkedServiceResource: + """Creates or updates a linked service. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param linked_service_name: The linked service name. Required. + :type linked_service_name: str + :param linked_service: Linked service resource definition. Required. 
+ :type linked_service: IO + :param if_match: ETag of the linkedService entity. Should only be specified for update, for + which it should match existing entity or can be * for unconditional update. Default value is + None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LinkedServiceResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.LinkedServiceResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + linked_service_name: str, + linked_service: Union[_models.LinkedServiceResource, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.LinkedServiceResource: + """Creates or updates a linked service. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param linked_service_name: The linked service name. Required. + :type linked_service_name: str + :param linked_service: Linked service resource definition. Is either a model type or a IO type. + Required. + :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceResource or IO + :param if_match: ETag of the linkedService entity. Should only be specified for update, for + which it should match existing entity or can be * for unconditional update. Default value is + None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LinkedServiceResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.LinkedServiceResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.LinkedServiceResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.LinkedServiceResource] - _json = self._serialize.body(linked_service, 'LinkedServiceResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(linked_service, (IO, bytes)): + _content = linked_service + else: + _json = self._serialize.body(linked_service, "LinkedServiceResource") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, linked_service_name=linked_service_name, + subscription_id=self._config.subscription_id, + if_match=if_match, api_version=api_version, content_type=content_type, json=_json, - if_match=if_match, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ -364,25 +472,23 @@ def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('LinkedServiceResource', pipeline_response) + deserialized = self._deserialize("LinkedServiceResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}"} # type: ignore - + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}"} # type: ignore @distributed_trace def get( @@ -395,41 +501,38 @@ def get( ) -> Optional[_models.LinkedServiceResource]: """Gets a linked service. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. 
Required. :type factory_name: str - :param linked_service_name: The linked service name. + :param linked_service_name: The linked service name. Required. :type linked_service_name: str :param if_none_match: ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. Default value is None. :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: LinkedServiceResource, or the result of cls(response) + :return: LinkedServiceResource or None or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.LinkedServiceResource or None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.LinkedServiceResource]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.LinkedServiceResource]] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, linked_service_name=linked_service_name, - api_version=api_version, + subscription_id=self._config.subscription_id, if_none_match=if_none_match, - template_url=self.get.metadata['url'], + api_version=api_version, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -437,10 +540,9 @@ def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 304]: @@ -449,56 +551,48 @@ def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('LinkedServiceResource', pipeline_response) + deserialized = self._deserialize("LinkedServiceResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}"} # type: ignore @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - linked_service_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, linked_service_name: str, **kwargs: Any ) -> None: """Deletes a linked service. - :param resource_group_name: The resource group name. 
+ :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param linked_service_name: The linked service name. + :param linked_service_name: The linked service name. Required. :type linked_service_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, linked_service_name=linked_service_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -506,10 +600,9 @@ def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -519,5 +612,4 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}"} # type: ignore - + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoints_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoints_operations.py index 2065ae2b049..5b7f0f12262 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoints_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoints_operations.py @@ -6,11 +6,16 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse @@ -20,110 +25,138 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models +from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False + def build_list_by_factory_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - managed_virtual_network_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, managed_virtual_network_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "managedVirtualNetworkName": _SERIALIZER.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "managedVirtualNetworkName": _SERIALIZER.url( + 
"managed_virtual_network_name", + managed_virtual_network_name, + "str", + max_length=127, + min_length=1, + pattern=r"^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id: str, resource_group_name: str, factory_name: str, managed_virtual_network_name: str, managed_private_endpoint_name: str, + subscription_id: str, *, - json: Optional[_models.ManagedPrivateEndpointResource] = None, - content: Any = None, if_match: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "managedVirtualNetworkName": _SERIALIZER.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - "managedPrivateEndpointName": _SERIALIZER.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + 
max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "managedVirtualNetworkName": _SERIALIZER.url( + "managed_virtual_network_name", + managed_virtual_network_name, + "str", + max_length=127, + min_length=1, + pattern=r"^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$", + ), + "managedPrivateEndpointName": _SERIALIZER.url( + "managed_private_endpoint_name", + managed_private_endpoint_name, + "str", + max_length=127, + min_length=1, + pattern=r"^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if if_match is not None: - _headers['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PUT", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id: str, resource_group_name: str, factory_name: str, managed_virtual_network_name: str, managed_private_endpoint_name: str, + subscription_id: str, *, if_none_match: Optional[str] = None, **kwargs: Any @@ -131,77 +164,118 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "managedVirtualNetworkName": _SERIALIZER.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - "managedPrivateEndpointName": 
_SERIALIZER.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "managedVirtualNetworkName": _SERIALIZER.url( + "managed_virtual_network_name", + managed_virtual_network_name, + "str", + max_length=127, + min_length=1, + pattern=r"^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$", + ), + "managedPrivateEndpointName": _SERIALIZER.url( + "managed_private_endpoint_name", + managed_private_endpoint_name, + "str", + max_length=127, + min_length=1, + pattern=r"^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if if_none_match is not None: - _headers['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id: str, resource_group_name: str, factory_name: str, managed_virtual_network_name: str, managed_private_endpoint_name: str, + subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 
"managedVirtualNetworkName": _SERIALIZER.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - "managedPrivateEndpointName": _SERIALIZER.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "managedVirtualNetworkName": _SERIALIZER.url( + "managed_virtual_network_name", + managed_virtual_network_name, + "str", + max_length=127, + min_length=1, + pattern=r"^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$", + ), + "managedPrivateEndpointName": _SERIALIZER.url( + "managed_private_endpoint_name", + managed_private_endpoint_name, + "str", + max_length=127, + min_length=1, + pattern=r"^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) - return HttpRequest( - method="DELETE", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) class ManagedPrivateEndpointsOperations: """ @@ -222,50 +296,44 @@ def __init__(self, *args, **kwargs): self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - managed_virtual_network_name: str, - **kwargs: Any - ) -> Iterable[_models.ManagedPrivateEndpointListResponse]: + self, resource_group_name: str, factory_name: str, managed_virtual_network_name: str, **kwargs: Any + ) -> Iterable["_models.ManagedPrivateEndpointResource"]: """Lists managed private endpoints. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. + :param managed_virtual_network_name: Managed virtual network name. Required. 
:type managed_virtual_network_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ManagedPrivateEndpointListResponse or the result - of cls(response) + :return: An iterator like instance of either ManagedPrivateEndpointResource or the result of + cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.ManagedPrivateEndpointListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.ManagedPrivateEndpointListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ManagedPrivateEndpointListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, managed_virtual_network_name=managed_virtual_network_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_factory.metadata['url'], + template_url=self.list_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -273,17 +341,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - managed_virtual_network_name=managed_virtual_network_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -299,10 +361,8 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -312,13 +372,11 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints"} # type: ignore + list_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints"} # type: ignore - @distributed_trace + @overload def create_or_update( self, resource_group_name: str, @@ -327,54 +385,140 @@ def create_or_update( managed_private_endpoint_name: str, managed_private_endpoint: _models.ManagedPrivateEndpointResource, if_match: Optional[str] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.ManagedPrivateEndpointResource: """Creates or updates a managed private endpoint. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. + :param managed_virtual_network_name: Managed virtual network name. Required. :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. + :param managed_private_endpoint_name: Managed private endpoint name. Required. :type managed_private_endpoint_name: str - :param managed_private_endpoint: Managed private endpoint resource definition. + :param managed_private_endpoint: Managed private endpoint resource definition. Required. :type managed_private_endpoint: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource :param if_match: ETag of the managed private endpoint entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. Default value is None. :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedPrivateEndpointResource, or the result of cls(response) + :return: ManagedPrivateEndpointResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + managed_virtual_network_name: str, + managed_private_endpoint_name: str, + managed_private_endpoint: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ManagedPrivateEndpointResource: + """Creates or updates a managed private endpoint. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. Required. + :type managed_virtual_network_name: str + :param managed_private_endpoint_name: Managed private endpoint name. Required. 
+ :type managed_private_endpoint_name: str + :param managed_private_endpoint: Managed private endpoint resource definition. Required. + :type managed_private_endpoint: IO + :param if_match: ETag of the managed private endpoint entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. Default + value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedPrivateEndpointResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + managed_virtual_network_name: str, + managed_private_endpoint_name: str, + managed_private_endpoint: Union[_models.ManagedPrivateEndpointResource, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.ManagedPrivateEndpointResource: + """Creates or updates a managed private endpoint. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. Required. + :type managed_virtual_network_name: str + :param managed_private_endpoint_name: Managed private endpoint name. Required. + :type managed_private_endpoint_name: str + :param managed_private_endpoint: Managed private endpoint resource definition. Is either a + model type or a IO type. Required. + :type managed_private_endpoint: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource + or IO + :param if_match: ETag of the managed private endpoint entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. Default + value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedPrivateEndpointResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.ManagedPrivateEndpointResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ManagedPrivateEndpointResource] - _json = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(managed_private_endpoint, (IO, bytes)): + _content = managed_private_endpoint + else: + _json = self._serialize.body(managed_private_endpoint, "ManagedPrivateEndpointResource") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, managed_virtual_network_name=managed_virtual_network_name, managed_private_endpoint_name=managed_private_endpoint_name, + subscription_id=self._config.subscription_id, + if_match=if_match, api_version=api_version, content_type=content_type, json=_json, - if_match=if_match, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ -382,25 +526,23 @@ def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) + deserialized = self._deserialize("ManagedPrivateEndpointResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}"} # type: ignore - + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}"} # type: ignore @distributed_trace def get( @@ -414,44 
+556,41 @@ def get( ) -> _models.ManagedPrivateEndpointResource: """Gets a managed private endpoint. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. + :param managed_virtual_network_name: Managed virtual network name. Required. :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. + :param managed_private_endpoint_name: Managed private endpoint name. Required. :type managed_private_endpoint_name: str :param if_none_match: ETag of the managed private endpoint entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. Default value is None. :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedPrivateEndpointResource, or the result of cls(response) + :return: ManagedPrivateEndpointResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.ManagedPrivateEndpointResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ManagedPrivateEndpointResource] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, managed_virtual_network_name=managed_virtual_network_name, managed_private_endpoint_name=managed_private_endpoint_name, - api_version=api_version, + subscription_id=self._config.subscription_id, if_none_match=if_none_match, - template_url=self.get.metadata['url'], + api_version=api_version, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -459,25 +598,23 @@ def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) + deserialized = self._deserialize("ManagedPrivateEndpointResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}"} # type: ignore @distributed_trace def delete( # pylint: disable=inconsistent-return-statements @@ -490,39 +627,36 @@ def delete( # pylint: disable=inconsistent-return-statements ) -> None: """Deletes a managed private endpoint. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. + :param managed_virtual_network_name: Managed virtual network name. Required. :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. + :param managed_private_endpoint_name: Managed private endpoint name. Required. :type managed_private_endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, managed_virtual_network_name=managed_virtual_network_name, managed_private_endpoint_name=managed_private_endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -530,10 +664,9 @@ def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -543,5 +676,4 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}"} # type: ignore - + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_networks_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_networks_operations.py index 271499f835e..129bc8d672f 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_networks_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_networks_operations.py @@ -6,11 +6,16 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse @@ -20,105 +25,120 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models +from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False + def build_list_by_factory_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id: str, resource_group_name: str, factory_name: str, managed_virtual_network_name: str, + subscription_id: str, *, - json: Optional[_models.ManagedVirtualNetworkResource] = None, - content: Any = None, if_match: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = 
kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "managedVirtualNetworkName": _SERIALIZER.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "managedVirtualNetworkName": _SERIALIZER.url( + "managed_virtual_network_name", + managed_virtual_network_name, + "str", + max_length=127, + min_length=1, + pattern=r"^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if if_match is not None: - _headers['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PUT", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id: str, resource_group_name: str, factory_name: str, managed_virtual_network_name: str, + subscription_id: str, *, if_none_match: Optional[str] = None, **kwargs: Any @@ -126,35 +146,49 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = 
kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "managedVirtualNetworkName": _SERIALIZER.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "managedVirtualNetworkName": _SERIALIZER.url( + "managed_virtual_network_name", + managed_virtual_network_name, + "str", + max_length=127, + min_length=1, + pattern=r"^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if if_none_match is not None: - _headers['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) class ManagedVirtualNetworksOperations: """ @@ -175,46 +209,41 @@ def __init__(self, *args, **kwargs): self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any - ) -> Iterable[_models.ManagedVirtualNetworkListResponse]: + self, resource_group_name: str, factory_name: str, **kwargs: Any + ) -> Iterable["_models.ManagedVirtualNetworkResource"]: """Lists managed Virtual Networks. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ManagedVirtualNetworkListResponse or the result of + :return: An iterator like instance of either ManagedVirtualNetworkResource or the result of cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.ManagedVirtualNetworkListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.ManagedVirtualNetworkListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ManagedVirtualNetworkListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_factory.metadata['url'], + template_url=self.list_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -222,16 +251,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -247,10 +271,8 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -260,13 +282,11 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks"} # type: ignore + 
list_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks"} # type: ignore - @distributed_trace + @overload def create_or_update( self, resource_group_name: str, @@ -274,51 +294,131 @@ def create_or_update( managed_virtual_network_name: str, managed_virtual_network: _models.ManagedVirtualNetworkResource, if_match: Optional[str] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.ManagedVirtualNetworkResource: """Creates or updates a managed Virtual Network. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. + :param managed_virtual_network_name: Managed virtual network name. Required. :type managed_virtual_network_name: str - :param managed_virtual_network: Managed Virtual Network resource definition. + :param managed_virtual_network: Managed Virtual Network resource definition. Required. :type managed_virtual_network: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource :param if_match: ETag of the managed Virtual Network entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. Default value is None. :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedVirtualNetworkResource, or the result of cls(response) + :return: ManagedVirtualNetworkResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + managed_virtual_network_name: str, + managed_virtual_network: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ManagedVirtualNetworkResource: + """Creates or updates a managed Virtual Network. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. Required. + :type managed_virtual_network_name: str + :param managed_virtual_network: Managed Virtual Network resource definition. Required. + :type managed_virtual_network: IO + :param if_match: ETag of the managed Virtual Network entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. Default + value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedVirtualNetworkResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + managed_virtual_network_name: str, + managed_virtual_network: Union[_models.ManagedVirtualNetworkResource, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.ManagedVirtualNetworkResource: + """Creates or updates a managed Virtual Network. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. Required. + :type managed_virtual_network_name: str + :param managed_virtual_network: Managed Virtual Network resource definition. Is either a model + type or a IO type. Required. + :type managed_virtual_network: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource or + IO + :param if_match: ETag of the managed Virtual Network entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. Default + value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedVirtualNetworkResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.ManagedVirtualNetworkResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ManagedVirtualNetworkResource] - _json = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(managed_virtual_network, (IO, bytes)): + _content = managed_virtual_network + else: + _json = self._serialize.body(managed_virtual_network, "ManagedVirtualNetworkResource") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, managed_virtual_network_name=managed_virtual_network_name, + subscription_id=self._config.subscription_id, + if_match=if_match, api_version=api_version, content_type=content_type, json=_json, - if_match=if_match, - template_url=self.create_or_update.metadata['url'], 
+ content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ -326,25 +426,23 @@ def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) + deserialized = self._deserialize("ManagedVirtualNetworkResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}"} # type: ignore - + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}"} # type: ignore @distributed_trace def get( @@ -357,41 +455,38 @@ def get( ) -> _models.ManagedVirtualNetworkResource: """Gets a managed Virtual Network. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. + :param managed_virtual_network_name: Managed virtual network name. Required. :type managed_virtual_network_name: str :param if_none_match: ETag of the managed Virtual Network entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. Default value is None. 
:type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedVirtualNetworkResource, or the result of cls(response) + :return: ManagedVirtualNetworkResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.ManagedVirtualNetworkResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ManagedVirtualNetworkResource] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, managed_virtual_network_name=managed_virtual_network_name, - api_version=api_version, + subscription_id=self._config.subscription_id, if_none_match=if_none_match, - template_url=self.get.metadata['url'], + api_version=api_version, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -399,22 +494,20 @@ def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) + deserialized = self._deserialize("ManagedVirtualNetworkResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operations.py index ba08846fa0e..2ad46a56ba4 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operations.py @@ -7,10 +7,15 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse @@ -20,38 +25,34 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models +from .._serialization import Serializer from .._vendor import _convert_request -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -def build_list_request( - **kwargs: Any -) -> HttpRequest: + +def build_list_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop("template_url", "/providers/Microsoft.DataFactory/operations") # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) class Operations: """ @@ -72,36 +73,30 @@ def __init__(self, *args, **kwargs): self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace - def list( - self, - **kwargs: Any - ) -> Iterable[_models.OperationListResponse]: + def list(self, **kwargs: Any) -> Iterable["_models.Operation"]: """Lists the available Azure Data Factory API operations. 
:keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OperationListResponse or the result of - cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.OperationListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Operation or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.Operation] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.OperationListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.OperationListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_request( api_version=api_version, - template_url=self.list.metadata['url'], + template_url=self.list.metadata["url"], headers=_headers, params=_params, ) @@ -109,13 +104,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_request( - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -131,10 +124,8 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -144,8 +135,6 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/providers/Microsoft.DataFactory/operations"} # type: ignore + list.metadata = {"url": "/providers/Microsoft.DataFactory/operations"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_patch.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_patch.py index 0ad201a8c58..f7dd3251033 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_patch.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_patch.py @@ -10,6 +10,7 @@ __all__: List[str] = [] # Add all objects you want publicly 
available to users at this package level + def patch_sdk(): """Do not remove from this file. diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_runs_operations.py index 23009b417c8..ba52685ae58 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_runs_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_runs_operations.py @@ -6,11 +6,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Optional, TypeVar - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest @@ -19,102 +23,105 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models +from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False + def build_query_by_factory_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - *, - json: Optional[_models.RunFilterParameters] = None, - content: Any = None, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, 
min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - run_id: str, - **kwargs: Any + resource_group_name: str, factory_name: str, run_id: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "runId": _SERIALIZER.url("run_id", run_id, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "runId": _SERIALIZER.url("run_id", run_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # 
Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_cancel_request( - subscription_id: str, resource_group_name: str, factory_name: str, run_id: str, + subscription_id: str, *, is_recursive: Optional[bool] = None, **kwargs: Any @@ -122,35 +129,42 @@ def build_cancel_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "runId": _SERIALIZER.url("run_id", run_id, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "runId": _SERIALIZER.url("run_id", run_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters if is_recursive is not None: - _params['isRecursive'] = _SERIALIZER.query("is_recursive", is_recursive, 'bool') - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["isRecursive"] = _SERIALIZER.query("is_recursive", is_recursive, "bool") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) class PipelineRunsOperations: """ @@ -171,50 +185,112 @@ def __init__(self, *args, **kwargs): self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace + @overload def query_by_factory( self, resource_group_name: str, factory_name: str, filter_parameters: 
_models.RunFilterParameters, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.PipelineRunsQueryResponse: """Query pipeline runs in the factory based on input filter conditions. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param filter_parameters: Parameters to filter the pipeline run. + :param filter_parameters: Parameters to filter the pipeline run. Required. :type filter_parameters: ~azure.mgmt.datafactory.models.RunFilterParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineRunsQueryResponse, or the result of cls(response) + :return: PipelineRunsQueryResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.PipelineRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + def query_by_factory( + self, + resource_group_name: str, + factory_name: str, + filter_parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PipelineRunsQueryResponse: + """Query pipeline runs in the factory based on input filter conditions. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param filter_parameters: Parameters to filter the pipeline run. Required. + :type filter_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PipelineRunsQueryResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.PipelineRunsQueryResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def query_by_factory( + self, + resource_group_name: str, + factory_name: str, + filter_parameters: Union[_models.RunFilterParameters, IO], + **kwargs: Any + ) -> _models.PipelineRunsQueryResponse: + """Query pipeline runs in the factory based on input filter conditions. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param filter_parameters: Parameters to filter the pipeline run. Is either a model type or a IO + type. Required. + :type filter_parameters: ~azure.mgmt.datafactory.models.RunFilterParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PipelineRunsQueryResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.PipelineRunsQueryResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.PipelineRunsQueryResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.PipelineRunsQueryResponse] - _json = self._serialize.body(filter_parameters, 'RunFilterParameters') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(filter_parameters, (IO, bytes)): + _content = filter_parameters + else: + _json = self._serialize.body(filter_parameters, "RunFilterParameters") request = build_query_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.query_by_factory.metadata['url'], + content=_content, + template_url=self.query_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -222,66 +298,55 @@ def query_by_factory( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('PipelineRunsQueryResponse', pipeline_response) + deserialized = self._deserialize("PipelineRunsQueryResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - query_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns"} # type: ignore - + query_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns"} # type: ignore @distributed_trace - def get( - self, - resource_group_name: str, - factory_name: str, - run_id: str, - **kwargs: Any - ) -> _models.PipelineRun: + def get(self, resource_group_name: str, factory_name: str, run_id: str, **kwargs: Any) -> _models.PipelineRun: """Get a pipeline run by its run ID. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. 
:type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param run_id: The pipeline run identifier. + :param run_id: The pipeline run identifier. Required. :type run_id: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineRun, or the result of cls(response) + :return: PipelineRun or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.PipelineRun - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.PipelineRun] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PipelineRun] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, run_id=run_id, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -289,25 +354,23 @@ def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('PipelineRun', pipeline_response) + deserialized = self._deserialize("PipelineRun", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}"} # type: ignore @distributed_trace def cancel( # pylint: disable=inconsistent-return-statements @@ -320,40 +383,37 @@ def cancel( # pylint: disable=inconsistent-return-statements ) -> None: """Cancel a pipeline run by its run ID. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param run_id: The pipeline run identifier. + :param run_id: The pipeline run identifier. Required. :type run_id: str :param is_recursive: If true, cancel all the Child pipelines that are triggered by the current pipeline. 
Default value is None. :type is_recursive: bool :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_cancel_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, run_id=run_id, - api_version=api_version, + subscription_id=self._config.subscription_id, is_recursive=is_recursive, - template_url=self.cancel.metadata['url'], + api_version=api_version, + template_url=self.cancel.metadata["url"], headers=_headers, params=_params, ) @@ -361,10 +421,9 @@ def cancel( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -374,5 +433,4 @@ def cancel( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - cancel.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel"} # type: ignore - + cancel.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipelines_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipelines_operations.py index 37d9fe1eb0f..5fb7573b0dc 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipelines_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipelines_operations.py @@ -6,11 +6,17 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse @@ -20,105 +26,125 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models +from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -T = TypeVar('T') + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False + def build_list_by_factory_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, 
"str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id: str, resource_group_name: str, factory_name: str, pipeline_name: str, + subscription_id: str, *, - json: Optional[_models.PipelineResource] = None, - content: Any = None, if_match: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "pipelineName": _SERIALIZER.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "pipelineName": _SERIALIZER.url( + "pipeline_name", + pipeline_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if if_match is not None: - _headers['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + 
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PUT", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id: str, resource_group_name: str, factory_name: str, pipeline_name: str, + subscription_id: str, *, if_none_match: Optional[str] = None, **kwargs: Any @@ -126,84 +152,104 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "pipelineName": _SERIALIZER.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "pipelineName": _SERIALIZER.url( + "pipeline_name", + pipeline_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if if_none_match is not None: - _headers['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - pipeline_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, pipeline_name: str, subscription_id: str, 
**kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "pipelineName": _SERIALIZER.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "pipelineName": _SERIALIZER.url( + "pipeline_name", + pipeline_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_create_run_request( - subscription_id: str, resource_group_name: str, factory_name: str, pipeline_name: str, + subscription_id: str, *, - json: Optional[Dict[str, Any]] = None, - content: Any = None, reference_pipeline_run_id: Optional[str] = None, is_recovery: Optional[bool] = None, start_activity_name: Optional[str] = None, @@ -213,46 +259,60 @@ def build_create_run_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", 
_headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "pipelineName": _SERIALIZER.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "pipelineName": _SERIALIZER.url( + "pipeline_name", + pipeline_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if reference_pipeline_run_id is not None: - _params['referencePipelineRunId'] = _SERIALIZER.query("reference_pipeline_run_id", reference_pipeline_run_id, 'str') + _params["referencePipelineRunId"] = _SERIALIZER.query( + "reference_pipeline_run_id", reference_pipeline_run_id, "str" + ) if is_recovery is not None: - _params['isRecovery'] = _SERIALIZER.query("is_recovery", is_recovery, 'bool') + _params["isRecovery"] = _SERIALIZER.query("is_recovery", is_recovery, "bool") if start_activity_name is not None: - _params['startActivityName'] = _SERIALIZER.query("start_activity_name", start_activity_name, 'str') + _params["startActivityName"] = _SERIALIZER.query("start_activity_name", start_activity_name, "str") if start_from_failure is not None: - _params['startFromFailure'] = _SERIALIZER.query("start_from_failure", start_from_failure, 'bool') + _params["startFromFailure"] = _SERIALIZER.query("start_from_failure", start_from_failure, "bool") # Construct headers if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + class PipelinesOperations: """ @@ -273,45 +333,39 @@ def __init__(self, 
*args, **kwargs): self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any - ) -> Iterable[_models.PipelineListResponse]: + self, resource_group_name: str, factory_name: str, **kwargs: Any + ) -> Iterable["_models.PipelineResource"]: """Lists pipelines. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PipelineListResponse or the result of - cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.PipelineListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either PipelineResource or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.PipelineResource] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.PipelineListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PipelineListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_factory.metadata['url'], + template_url=self.list_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -319,16 +373,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -344,10 +393,8 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = 
self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -357,13 +404,11 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines"} # type: ignore + list_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines"} # type: ignore - @distributed_trace + @overload def create_or_update( self, resource_group_name: str, @@ -371,50 +416,126 @@ def create_or_update( pipeline_name: str, pipeline: _models.PipelineResource, if_match: Optional[str] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.PipelineResource: """Creates or updates a pipeline. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param pipeline_name: The pipeline name. + :param pipeline_name: The pipeline name. Required. :type pipeline_name: str - :param pipeline: Pipeline resource definition. + :param pipeline: Pipeline resource definition. Required. :type pipeline: ~azure.mgmt.datafactory.models.PipelineResource :param if_match: ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. Default value is None. :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PipelineResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.PipelineResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + pipeline_name: str, + pipeline: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PipelineResource: + """Creates or updates a pipeline. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param pipeline_name: The pipeline name. Required. + :type pipeline_name: str + :param pipeline: Pipeline resource definition. Required. + :type pipeline: IO + :param if_match: ETag of the pipeline entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. Default value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineResource, or the result of cls(response) + :return: PipelineResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.PipelineResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + pipeline_name: str, + pipeline: Union[_models.PipelineResource, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.PipelineResource: + """Creates or updates a pipeline. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param pipeline_name: The pipeline name. Required. + :type pipeline_name: str + :param pipeline: Pipeline resource definition. Is either a model type or a IO type. Required. + :type pipeline: ~azure.mgmt.datafactory.models.PipelineResource or IO + :param if_match: ETag of the pipeline entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. Default value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PipelineResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.PipelineResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.PipelineResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.PipelineResource] - _json = self._serialize.body(pipeline, 'PipelineResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(pipeline, (IO, bytes)): + _content = pipeline + else: + _json = self._serialize.body(pipeline, "PipelineResource") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, pipeline_name=pipeline_name, + subscription_id=self._config.subscription_id, + if_match=if_match, api_version=api_version, content_type=content_type, json=_json, - if_match=if_match, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ 
-422,25 +543,23 @@ def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('PipelineResource', pipeline_response) + deserialized = self._deserialize("PipelineResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}"} # type: ignore - + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}"} # type: ignore @distributed_trace def get( @@ -453,41 +572,38 @@ def get( ) -> Optional[_models.PipelineResource]: """Gets a pipeline. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param pipeline_name: The pipeline name. + :param pipeline_name: The pipeline name. Required. :type pipeline_name: str :param if_none_match: ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. Default value is None. 
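# --- Illustrative usage sketch (not part of the generated diff) ---------------
# A minimal example of how the regenerated PipelinesOperations.create_or_update
# might be called once this vendored SDK is rebuilt; it assumes the public
# azure-mgmt-datafactory models, and the subscription, resource group, factory
# and pipeline names are hypothetical placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
from azure.mgmt.datafactory.models import PipelineResource, WaitActivity

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")

# Model-typed body: serialized by the SDK as "PipelineResource".
pipeline = PipelineResource(activities=[WaitActivity(name="wait", wait_time_in_seconds=5)])
client.pipelines.create_or_update("my-rg", "my-factory", "my-pipeline", pipeline)

# IO-typed body (new overload): the raw JSON stream is passed through as request content.
with open("pipeline.json", "rb") as payload:
    client.pipelines.create_or_update("my-rg", "my-factory", "my-pipeline", payload)
# -------------------------------------------------------------------------------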
:type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineResource, or the result of cls(response) + :return: PipelineResource or None or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.PipelineResource or None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.PipelineResource]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.PipelineResource]] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, pipeline_name=pipeline_name, - api_version=api_version, + subscription_id=self._config.subscription_id, if_none_match=if_none_match, - template_url=self.get.metadata['url'], + api_version=api_version, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -495,10 +611,9 @@ def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 304]: @@ -507,56 +622,48 @@ def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('PipelineResource', pipeline_response) + deserialized = self._deserialize("PipelineResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}"} # type: ignore @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - pipeline_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, pipeline_name: str, **kwargs: Any ) -> None: """Deletes a pipeline. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param pipeline_name: The pipeline name. + :param pipeline_name: The pipeline name. Required. 
:type pipeline_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, pipeline_name=pipeline_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -564,10 +671,9 @@ def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -577,10 +683,58 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}"} # type: ignore + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}"} # type: ignore + @overload + def create_run( + self, + resource_group_name: str, + factory_name: str, + pipeline_name: str, + reference_pipeline_run_id: Optional[str] = None, + is_recovery: Optional[bool] = None, + start_activity_name: Optional[str] = None, + start_from_failure: Optional[bool] = None, + parameters: Optional[Dict[str, JSON]] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CreateRunResponse: + """Creates a run of a pipeline. - @distributed_trace + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param pipeline_name: The pipeline name. Required. + :type pipeline_name: str + :param reference_pipeline_run_id: The pipeline run identifier. If run ID is specified the + parameters of the specified run will be used to create a new run. Default value is None. + :type reference_pipeline_run_id: str + :param is_recovery: Recovery mode flag. If recovery mode is set to true, the specified + referenced pipeline run and the new run will be grouped under the same groupId. Default value + is None. 
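# --- Illustrative usage sketch (not part of the generated diff) ---------------
# How a pipeline run might be started through the new create_run overloads,
# passing run parameters as a plain dict; all resource names below are
# hypothetical placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")
run = client.pipelines.create_run(
    "my-rg",
    "my-factory",
    "my-pipeline",
    parameters={"inputPath": "raw/2022/10"},
)
print(run.run_id)  # CreateRunResponse carries the identifier of the new run
# -------------------------------------------------------------------------------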
+ :type is_recovery: bool + :param start_activity_name: In recovery mode, the rerun will start from this activity. If not + specified, all activities will run. Default value is None. + :type start_activity_name: str + :param start_from_failure: In recovery mode, if set to true, the rerun will start from failed + activities. The property will be used only if startActivityName is not specified. Default value + is None. + :type start_from_failure: bool + :param parameters: Parameters of the pipeline run. These parameters will be used only if the + runId is not specified. Default value is None. + :type parameters: dict[str, JSON] + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CreateRunResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.CreateRunResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload def create_run( self, resource_group_name: str, @@ -590,16 +744,18 @@ def create_run( is_recovery: Optional[bool] = None, start_activity_name: Optional[str] = None, start_from_failure: Optional[bool] = None, - parameters: Optional[Dict[str, Any]] = None, + parameters: Optional[IO] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.CreateRunResponse: """Creates a run of a pipeline. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param pipeline_name: The pipeline name. + :param pipeline_name: The pipeline name. Required. :type pipeline_name: str :param reference_pipeline_run_id: The pipeline run identifier. If run ID is specified the parameters of the specified run will be used to create a new run. Default value is None. @@ -617,42 +773,97 @@ def create_run( :type start_from_failure: bool :param parameters: Parameters of the pipeline run. These parameters will be used only if the runId is not specified. Default value is None. - :type parameters: dict[str, any] + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CreateRunResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.CreateRunResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_run( + self, + resource_group_name: str, + factory_name: str, + pipeline_name: str, + reference_pipeline_run_id: Optional[str] = None, + is_recovery: Optional[bool] = None, + start_activity_name: Optional[str] = None, + start_from_failure: Optional[bool] = None, + parameters: Optional[Union[Dict[str, JSON], IO]] = None, + **kwargs: Any + ) -> _models.CreateRunResponse: + """Creates a run of a pipeline. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param pipeline_name: The pipeline name. Required. + :type pipeline_name: str + :param reference_pipeline_run_id: The pipeline run identifier. 
If run ID is specified the + parameters of the specified run will be used to create a new run. Default value is None. + :type reference_pipeline_run_id: str + :param is_recovery: Recovery mode flag. If recovery mode is set to true, the specified + referenced pipeline run and the new run will be grouped under the same groupId. Default value + is None. + :type is_recovery: bool + :param start_activity_name: In recovery mode, the rerun will start from this activity. If not + specified, all activities will run. Default value is None. + :type start_activity_name: str + :param start_from_failure: In recovery mode, if set to true, the rerun will start from failed + activities. The property will be used only if startActivityName is not specified. Default value + is None. + :type start_from_failure: bool + :param parameters: Parameters of the pipeline run. These parameters will be used only if the + runId is not specified. Is either a dict type or a IO type. Default value is None. + :type parameters: dict[str, JSON] or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: CreateRunResponse, or the result of cls(response) + :return: CreateRunResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.CreateRunResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.CreateRunResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CreateRunResponse] - if parameters is not None: - _json = self._serialize.body(parameters, '{object}') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters else: - _json = None + if parameters is not None: + _json = self._serialize.body(parameters, "{object}") + else: + _json = None request = build_create_run_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, pipeline_name=pipeline_name, - api_version=api_version, - content_type=content_type, - json=_json, + subscription_id=self._config.subscription_id, reference_pipeline_run_id=reference_pipeline_run_id, is_recovery=is_recovery, start_activity_name=start_activity_name, start_from_failure=start_from_failure, - template_url=self.create_run.metadata['url'], + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + 
template_url=self.create_run.metadata["url"], headers=_headers, params=_params, ) @@ -660,22 +871,20 @@ def create_run( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('CreateRunResponse', pipeline_response) + deserialized = self._deserialize("CreateRunResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_run.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun"} # type: ignore - + create_run.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_end_point_connections_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_end_point_connections_operations.py index bdd7978b624..d352ef5a6e0 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_end_point_connections_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_end_point_connections_operations.py @@ -7,10 +7,15 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse @@ -20,48 +25,55 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models +from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False + def build_list_by_factory_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndPointConnections") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndPointConnections", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) class PrivateEndPointConnectionsOperations: """ @@ -82,46 +94,41 @@ def __init__(self, *args, **kwargs): self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any - ) -> Iterable[_models.PrivateEndpointConnectionListResponse]: + self, resource_group_name: str, factory_name: str, **kwargs: Any + ) -> Iterable["_models.PrivateEndpointConnectionResource"]: """Lists Private endpoint connections. - :param resource_group_name: The resource group name. 
+ :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PrivateEndpointConnectionListResponse or the - result of cls(response) + :return: An iterator like instance of either PrivateEndpointConnectionResource or the result of + cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.PrivateEndpointConnectionListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.PrivateEndpointConnectionListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnectionListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_factory.metadata['url'], + template_url=self.list_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -129,16 +136,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -154,10 +156,8 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -167,8 +167,6 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - 
) - list_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndPointConnections"} # type: ignore + list_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndPointConnections"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_endpoint_connection_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_endpoint_connection_operations.py index a567986ddc3..d5a0e2c2815 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_endpoint_connection_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_endpoint_connection_operations.py @@ -6,11 +6,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Optional, TypeVar - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest @@ -19,68 +23,75 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models +from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False + def build_create_or_update_request( - subscription_id: str, resource_group_name: str, factory_name: str, private_endpoint_connection_name: str, + subscription_id: str, *, - json: Optional[_models.PrivateLinkConnectionApprovalRequestResource] = None, - content: Any = None, if_match: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "privateEndpointConnectionName": _SERIALIZER.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "privateEndpointConnectionName": _SERIALIZER.url( + "private_endpoint_connection_name", private_endpoint_connection_name, "str" + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if if_match is not None: - _headers['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = 
_SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PUT", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id: str, resource_group_name: str, factory_name: str, private_endpoint_connection_name: str, + subscription_id: str, *, if_none_match: Optional[str] = None, **kwargs: Any @@ -88,74 +99,91 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "privateEndpointConnectionName": _SERIALIZER.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "privateEndpointConnectionName": _SERIALIZER.url( + "private_endpoint_connection_name", private_endpoint_connection_name, "str" + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if if_none_match is not None: - _headers['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id: str, resource_group_name: str, factory_name: str, private_endpoint_connection_name: str, + 
subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "privateEndpointConnectionName": _SERIALIZER.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "privateEndpointConnectionName": _SERIALIZER.url( + "private_endpoint_connection_name", private_endpoint_connection_name, "str" + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) - return HttpRequest( - method="DELETE", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) class PrivateEndpointConnectionOperations: """ @@ -176,8 +204,7 @@ def __init__(self, *args, **kwargs): self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace + @overload def create_or_update( self, resource_group_name: str, @@ -185,52 +212,131 @@ def create_or_update( private_endpoint_connection_name: str, private_endpoint_wrapper: _models.PrivateLinkConnectionApprovalRequestResource, if_match: Optional[str] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.PrivateEndpointConnectionResource: """Approves or rejects a private endpoint connection. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. 
:type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param private_endpoint_connection_name: The private endpoint connection name. + :param private_endpoint_connection_name: The private endpoint connection name. Required. :type private_endpoint_connection_name: str - :param private_endpoint_wrapper: + :param private_endpoint_wrapper: Required. :type private_endpoint_wrapper: ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequestResource :param if_match: ETag of the private endpoint connection entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. Default value is None. :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnectionResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + private_endpoint_connection_name: str, + private_endpoint_wrapper: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PrivateEndpointConnectionResource: + """Approves or rejects a private endpoint connection. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param private_endpoint_connection_name: The private endpoint connection name. Required. + :type private_endpoint_connection_name: str + :param private_endpoint_wrapper: Required. + :type private_endpoint_wrapper: IO + :param if_match: ETag of the private endpoint connection entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. Default + value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnectionResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + private_endpoint_connection_name: str, + private_endpoint_wrapper: Union[_models.PrivateLinkConnectionApprovalRequestResource, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.PrivateEndpointConnectionResource: + """Approves or rejects a private endpoint connection. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param private_endpoint_connection_name: The private endpoint connection name. Required. + :type private_endpoint_connection_name: str + :param private_endpoint_wrapper: Is either a model type or a IO type. Required. 
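# --- Illustrative usage sketch (not part of the generated diff) ---------------
# Approving a private endpoint connection with the model-typed overload; the
# approval models are assumed to match the public azure-mgmt-datafactory package,
# and all resource names are hypothetical placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
from azure.mgmt.datafactory.models import (
    PrivateLinkConnectionApprovalRequest,
    PrivateLinkConnectionApprovalRequestResource,
    PrivateLinkConnectionState,
)

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")
approval = PrivateLinkConnectionApprovalRequestResource(
    properties=PrivateLinkConnectionApprovalRequest(
        private_link_service_connection_state=PrivateLinkConnectionState(
            status="Approved", description="Approved by admin", actions_required="None"
        )
    )
)
client.private_endpoint_connection.create_or_update(
    "my-rg", "my-factory", "my-connection", approval
)
# -------------------------------------------------------------------------------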
+ :type private_endpoint_wrapper: + ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequestResource or IO + :param if_match: ETag of the private endpoint connection entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. Default + value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PrivateEndpointConnectionResource, or the result of cls(response) + :return: PrivateEndpointConnectionResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.PrivateEndpointConnectionResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnectionResource] - _json = self._serialize.body(private_endpoint_wrapper, 'PrivateLinkConnectionApprovalRequestResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(private_endpoint_wrapper, (IO, bytes)): + _content = private_endpoint_wrapper + else: + _json = self._serialize.body(private_endpoint_wrapper, "PrivateLinkConnectionApprovalRequestResource") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + if_match=if_match, api_version=api_version, content_type=content_type, json=_json, - if_match=if_match, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ -238,25 +344,23 @@ def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('PrivateEndpointConnectionResource', pipeline_response) + deserialized = 
self._deserialize("PrivateEndpointConnectionResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore - + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore @distributed_trace def get( @@ -269,41 +373,38 @@ def get( ) -> _models.PrivateEndpointConnectionResource: """Gets a private endpoint connection. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param private_endpoint_connection_name: The private endpoint connection name. + :param private_endpoint_connection_name: The private endpoint connection name. Required. :type private_endpoint_connection_name: str :param if_none_match: ETag of the private endpoint connection entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. Default value is None. :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PrivateEndpointConnectionResource, or the result of cls(response) + :return: PrivateEndpointConnectionResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.PrivateEndpointConnectionResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnectionResource] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, private_endpoint_connection_name=private_endpoint_connection_name, - api_version=api_version, + subscription_id=self._config.subscription_id, if_none_match=if_none_match, - template_url=self.get.metadata['url'], + api_version=api_version, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -311,66 +412,57 @@ def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in 
[200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('PrivateEndpointConnectionResource', pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnectionResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - private_endpoint_connection_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, private_endpoint_connection_name: str, **kwargs: Any ) -> None: """Deletes a private endpoint connection. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param private_endpoint_connection_name: The private endpoint connection name. + :param private_endpoint_connection_name: The private endpoint connection name. Required. :type private_endpoint_connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -378,10 +470,9 @@ def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code 
not in [200, 204]: @@ -391,5 +482,4 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore - + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_link_resources_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_link_resources_operations.py index 01f774cb793..9f0798a7157 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_link_resources_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_link_resources_operations.py @@ -8,9 +8,13 @@ # -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Optional, TypeVar -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest @@ -19,48 +23,53 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models +from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -def build_get_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - **kwargs: Any -) -> HttpRequest: + +def build_get_request(resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateLinkResources") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateLinkResources", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) class PrivateLinkResourcesOperations: """ @@ -81,43 +90,34 @@ def __init__(self, *args, **kwargs): self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace - def get( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any - ) -> _models.PrivateLinkResourcesWrapper: + def get(self, resource_group_name: str, factory_name: str, **kwargs: Any) -> _models.PrivateLinkResourcesWrapper: """Gets the private link resources. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. 
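# For reference, a minimal caller-side sketch of the regenerated
# PrivateLinkResourcesOperations.get shown above. It assumes the public
# azure-mgmt-datafactory package exposes the same surface as this vendored
# copy; "my-rg", "my-factory" and the subscription id are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")

# get() takes only the resource group and factory name and returns a
# PrivateLinkResourcesWrapper whose .value lists the private link resources.
wrapper = client.private_link_resources.get("my-rg", "my-factory")
for resource in wrapper.value:
    print(resource.name)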
:type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PrivateLinkResourcesWrapper, or the result of cls(response) + :return: PrivateLinkResourcesWrapper or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.PrivateLinkResourcesWrapper - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.PrivateLinkResourcesWrapper] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateLinkResourcesWrapper] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -125,22 +125,20 @@ def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('PrivateLinkResourcesWrapper', pipeline_response) + deserialized = self._deserialize("PrivateLinkResourcesWrapper", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateLinkResources"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateLinkResources"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_runs_operations.py index 7046d48e62e..1c97049a860 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_runs_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_runs_operations.py @@ -6,11 +6,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
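# Note on the pattern visible in the hunks above: the regenerated operations
# import Serializer from the vendored .._serialization module instead of
# msrest, and default the api-version to self._config.api_version rather than
# the hard-coded "2018-06-01". The api-version can still be pinned per call
# through kwargs; a hedged sketch with placeholder names:
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")

# Optional per-call override of the api-version popped from kwargs above.
wrapper = client.private_link_resources.get(
    "my-rg", "my-factory", api_version="2018-06-01"
)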
# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Optional, TypeVar - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest @@ -19,138 +23,156 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models +from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False + def build_rerun_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - trigger_name: str, - run_id: str, - **kwargs: Any + resource_group_name: str, factory_name: str, trigger_name: str, run_id: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "triggerName": _SERIALIZER.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - "runId": _SERIALIZER.url("run_id", run_id, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "triggerName": _SERIALIZER.url( + "trigger_name", + trigger_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), + "runId": _SERIALIZER.url("run_id", run_id, "str"), } _url = 
_format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_cancel_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - trigger_name: str, - run_id: str, - **kwargs: Any + resource_group_name: str, factory_name: str, trigger_name: str, run_id: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "triggerName": _SERIALIZER.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - "runId": _SERIALIZER.url("run_id", run_id, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "triggerName": _SERIALIZER.url( + "trigger_name", + trigger_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), + "runId": _SERIALIZER.url("run_id", run_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="POST", 
url=_url, params=_params, headers=_headers, **kwargs) def build_query_by_factory_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - *, - json: Optional[_models.RunFilterParameters] = None, - content: Any = None, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + class TriggerRunsOperations: """ @@ -171,51 +193,42 @@ def __init__(self, *args, **kwargs): self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace def rerun( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - run_id: str, - **kwargs: Any + self, resource_group_name: str, 
factory_name: str, trigger_name: str, run_id: str, **kwargs: Any ) -> None: """Rerun single trigger instance by runId. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param trigger_name: The trigger name. + :param trigger_name: The trigger name. Required. :type trigger_name: str - :param run_id: The pipeline run identifier. + :param run_id: The pipeline run identifier. Required. :type run_id: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_rerun_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, run_id=run_id, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.rerun.metadata['url'], + template_url=self.rerun.metadata["url"], headers=_headers, params=_params, ) @@ -223,10 +236,9 @@ def rerun( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -236,53 +248,44 @@ def rerun( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - rerun.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun"} # type: ignore - + rerun.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun"} # type: ignore @distributed_trace def cancel( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - run_id: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, trigger_name: str, run_id: str, **kwargs: Any ) -> None: """Cancel a single trigger instance by runId. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. 
+ :param factory_name: The factory name. Required. :type factory_name: str - :param trigger_name: The trigger name. + :param trigger_name: The trigger name. Required. :type trigger_name: str - :param run_id: The pipeline run identifier. + :param run_id: The pipeline run identifier. Required. :type run_id: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_cancel_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, run_id=run_id, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.cancel.metadata['url'], + template_url=self.cancel.metadata["url"], headers=_headers, params=_params, ) @@ -290,10 +293,9 @@ def cancel( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -303,52 +305,114 @@ def cancel( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - cancel.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel"} # type: ignore + cancel.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel"} # type: ignore - - @distributed_trace + @overload def query_by_factory( self, resource_group_name: str, factory_name: str, filter_parameters: _models.RunFilterParameters, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.TriggerRunsQueryResponse: """Query trigger runs. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param filter_parameters: Parameters to filter the pipeline run. + :param filter_parameters: Parameters to filter the pipeline run. Required. :type filter_parameters: ~azure.mgmt.datafactory.models.RunFilterParameters + :keyword content_type: Body Parameter content-type. 
Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerRunsQueryResponse, or the result of cls(response) + :return: TriggerRunsQueryResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.TriggerRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + def query_by_factory( + self, + resource_group_name: str, + factory_name: str, + filter_parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.TriggerRunsQueryResponse: + """Query trigger runs. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param filter_parameters: Parameters to filter the pipeline run. Required. + :type filter_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerRunsQueryResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.TriggerRunsQueryResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def query_by_factory( + self, + resource_group_name: str, + factory_name: str, + filter_parameters: Union[_models.RunFilterParameters, IO], + **kwargs: Any + ) -> _models.TriggerRunsQueryResponse: + """Query trigger runs. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param filter_parameters: Parameters to filter the pipeline run. Is either a model type or a IO + type. Required. + :type filter_parameters: ~azure.mgmt.datafactory.models.RunFilterParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerRunsQueryResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.TriggerRunsQueryResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.TriggerRunsQueryResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.TriggerRunsQueryResponse] - _json = self._serialize.body(filter_parameters, 'RunFilterParameters') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(filter_parameters, (IO, bytes)): + _content = filter_parameters + else: + _json = self._serialize.body(filter_parameters, "RunFilterParameters") request = build_query_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.query_by_factory.metadata['url'], + content=_content, + template_url=self.query_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -356,22 +420,20 @@ def query_by_factory( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('TriggerRunsQueryResponse', pipeline_response) + deserialized = self._deserialize("TriggerRunsQueryResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - query_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns"} # type: ignore - + query_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns"} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_triggers_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_triggers_operations.py index d384d40e174..9efea710085 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_triggers_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_triggers_operations.py @@ -6,11 +6,16 @@ 
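# The regenerated TriggerRunsOperations.query_by_factory above is typed with
# @overload and accepts either a RunFilterParameters model or a raw IO/bytes
# JSON body, dispatched by the isinstance(filter_parameters, (IO, bytes))
# check. A hedged sketch of both call styles plus rerun, assuming the public
# azure-mgmt-datafactory client and placeholder resource names:
import io
import json
from datetime import datetime, timedelta

from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
from azure.mgmt.datafactory.models import RunFilterParameters

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")

# 1) Model-typed body: serialized to JSON by the SDK.
filters = RunFilterParameters(
    last_updated_after=datetime.utcnow() - timedelta(days=1),
    last_updated_before=datetime.utcnow(),
)
result = client.trigger_runs.query_by_factory("my-rg", "my-factory", filters)

# Rerun any failed trigger runs returned by the query.
for run in result.value:
    if run.status == "Failed":
        client.trigger_runs.rerun("my-rg", "my-factory", run.trigger_name, run.trigger_run_id)

# 2) IO-typed body: passed through as-is; the caller supplies the JSON.
raw_body = io.BytesIO(json.dumps({
    "lastUpdatedAfter": "2022-10-17T00:00:00Z",
    "lastUpdatedBefore": "2022-10-18T00:00:00Z",
}).encode("utf-8"))
result = client.trigger_runs.query_by_factory(
    "my-rg", "my-factory", raw_body, content_type="application/json"
)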
# Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse @@ -22,150 +27,163 @@ from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models +from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False + def build_list_by_factory_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') 
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_query_by_factory_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - *, - json: Optional[_models.TriggerFilterParameters] = None, - content: Any = None, - **kwargs: Any + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id: str, resource_group_name: str, factory_name: str, trigger_name: str, + subscription_id: str, *, - json: Optional[_models.TriggerResource] = None, - content: Any = None, if_match: Optional[str] = None, **kwargs: Any ) -> HttpRequest: 
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "triggerName": _SERIALIZER.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "triggerName": _SERIALIZER.url( + "trigger_name", + trigger_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if if_match is not None: - _headers['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") if content_type is not None: - _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PUT", - url=_url, - params=_params, - headers=_headers, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id: str, resource_group_name: str, factory_name: str, trigger_name: str, + subscription_id: str, *, if_none_match: Optional[str] = None, **kwargs: Any @@ -173,269 +191,337 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - 
api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "triggerName": _SERIALIZER.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "triggerName": _SERIALIZER.url( + "trigger_name", + trigger_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if if_none_match is not None: - _headers['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, trigger_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}") # pylint: 
disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "triggerName": _SERIALIZER.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "triggerName": _SERIALIZER.url( + "trigger_name", + trigger_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) -def build_subscribe_to_events_request_initial( - subscription_id: str, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any +def build_subscribe_to_events_request( + resource_group_name: str, factory_name: str, trigger_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, 
pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "triggerName": _SERIALIZER.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "triggerName": _SERIALIZER.url( + "trigger_name", + trigger_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_get_event_subscription_status_request( - subscription_id: str, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + resource_group_name: str, factory_name: str, trigger_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "triggerName": _SERIALIZER.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "triggerName": _SERIALIZER.url( + "trigger_name", 
+ trigger_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_unsubscribe_from_events_request_initial( - subscription_id: str, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any +def build_unsubscribe_from_events_request( + resource_group_name: str, factory_name: str, trigger_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "triggerName": _SERIALIZER.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "triggerName": _SERIALIZER.url( + "trigger_name", + trigger_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - 
**kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_start_request_initial( - subscription_id: str, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any +def build_start_request( + resource_group_name: str, factory_name: str, trigger_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") + api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "triggerName": _SERIALIZER.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "triggerName": _SERIALIZER.url( + "trigger_name", + trigger_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_stop_request_initial( - subscription_id: str, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any +def build_stop_request( + resource_group_name: str, factory_name: str, trigger_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - accept = _headers.pop('Accept', "application/json") 
+ api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) # type: str + accept = _headers.pop("Accept", "application/json") # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - "triggerName": _SERIALIZER.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "triggerName": _SERIALIZER.url( + "trigger_name", + trigger_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters - _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs - ) class TriggersOperations: """ @@ -456,44 +542,39 @@ def __init__(self, *args, **kwargs): self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs: Any - ) -> Iterable[_models.TriggerListResponse]: + self, resource_group_name: str, factory_name: str, **kwargs: Any + ) -> Iterable["_models.TriggerResource"]: """Lists triggers. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either TriggerListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.TriggerListResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either TriggerResource or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.TriggerResource] + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.TriggerListResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.TriggerListResponse] + + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): if not next_link: - + request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_factory.metadata['url'], + template_url=self.list_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -501,16 +582,11 @@ def prepare_request(next_link=None): request.url = self._client.format_url(request.url) # type: ignore else: - - request = build_list_by_factory_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - factory_name=factory_name, - api_version=api_version, - template_url=next_link, - headers=_headers, - params=_params, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -526,10 +602,8 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -539,55 +613,116 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers"} # type: ignore + list_by_factory.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers"} # type: ignore - @distributed_trace + @overload def query_by_factory( self, resource_group_name: str, factory_name: str, filter_parameters: _models.TriggerFilterParameters, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.TriggerQueryResponse: """Query triggers. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param filter_parameters: Parameters to filter the triggers. + :param filter_parameters: Parameters to filter the triggers. Required. :type filter_parameters: ~azure.mgmt.datafactory.models.TriggerFilterParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerQueryResponse, or the result of cls(response) + :return: TriggerQueryResponse or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.TriggerQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @overload + def query_by_factory( + self, + resource_group_name: str, + factory_name: str, + filter_parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.TriggerQueryResponse: + """Query triggers. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param filter_parameters: Parameters to filter the triggers. Required. + :type filter_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerQueryResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.TriggerQueryResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def query_by_factory( + self, + resource_group_name: str, + factory_name: str, + filter_parameters: Union[_models.TriggerFilterParameters, IO], + **kwargs: Any + ) -> _models.TriggerQueryResponse: + """Query triggers. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param filter_parameters: Parameters to filter the triggers. Is either a model type or a IO + type. Required. + :type filter_parameters: ~azure.mgmt.datafactory.models.TriggerFilterParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerQueryResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.TriggerQueryResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.TriggerQueryResponse] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.TriggerQueryResponse] - _json = self._serialize.body(filter_parameters, 'TriggerFilterParameters') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(filter_parameters, (IO, bytes)): + _content = filter_parameters + else: + _json = self._serialize.body(filter_parameters, "TriggerFilterParameters") request = build_query_by_factory_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.query_by_factory.metadata['url'], + content=_content, + template_url=self.query_by_factory.metadata["url"], headers=_headers, params=_params, ) @@ -595,27 +730,25 @@ def query_by_factory( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('TriggerQueryResponse', pipeline_response) + deserialized = self._deserialize("TriggerQueryResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - query_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers"} # type: ignore + query_by_factory.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers"} # type: ignore - - @distributed_trace + @overload def create_or_update( self, resource_group_name: str, @@ -623,50 +756,126 @@ def create_or_update( trigger_name: str, trigger: _models.TriggerResource, if_match: Optional[str] = None, + *, + content_type: str = "application/json", **kwargs: Any ) -> _models.TriggerResource: """Creates or updates a trigger. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. 
:type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param trigger_name: The trigger name. + :param trigger_name: The trigger name. Required. :type trigger_name: str - :param trigger: Trigger resource definition. + :param trigger: Trigger resource definition. Required. :type trigger: ~azure.mgmt.datafactory.models.TriggerResource :param if_match: ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. Default value is None. :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.TriggerResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + trigger: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.TriggerResource: + """Creates or updates a trigger. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param trigger_name: The trigger name. Required. + :type trigger_name: str + :param trigger: Trigger resource definition. Required. + :type trigger: IO + :param if_match: ETag of the trigger entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. Default value is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerResource, or the result of cls(response) + :return: TriggerResource or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.TriggerResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + trigger: Union[_models.TriggerResource, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.TriggerResource: + """Creates or updates a trigger. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param trigger_name: The trigger name. Required. + :type trigger_name: str + :param trigger: Trigger resource definition. Is either a model type or a IO type. Required. + :type trigger: ~azure.mgmt.datafactory.models.TriggerResource or IO + :param if_match: ETag of the trigger entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. Default value is None. 
+ :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.TriggerResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] - cls = kwargs.pop('cls', None) # type: ClsType[_models.TriggerResource] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.TriggerResource] - _json = self._serialize.body(trigger, 'TriggerResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(trigger, (IO, bytes)): + _content = trigger + else: + _json = self._serialize.body(trigger, "TriggerResource") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, + subscription_id=self._config.subscription_id, + if_match=if_match, api_version=api_version, content_type=content_type, json=_json, - if_match=if_match, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) @@ -674,25 +883,23 @@ def create_or_update( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('TriggerResource', pipeline_response) + deserialized = self._deserialize("TriggerResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}"} # type: ignore - + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}"} # type: ignore @distributed_trace def get( @@ -705,41 +912,38 @@ def get( ) -> Optional[_models.TriggerResource]: """Gets a trigger. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. 
+ :param factory_name: The factory name. Required. :type factory_name: str - :param trigger_name: The trigger name. + :param trigger_name: The trigger name. Required. :type trigger_name: str :param if_none_match: ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. Default value is None. :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerResource, or the result of cls(response) + :return: TriggerResource or None or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.TriggerResource or None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.TriggerResource]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.TriggerResource]] - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, - api_version=api_version, + subscription_id=self._config.subscription_id, if_none_match=if_none_match, - template_url=self.get.metadata['url'], + api_version=api_version, + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -747,10 +951,9 @@ def get( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 304]: @@ -759,56 +962,48 @@ def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('TriggerResource', pipeline_response) + deserialized = self._deserialize("TriggerResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}"} # type: ignore - + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}"} # type: ignore @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any ) -> None: """Deletes a trigger. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. 
+ :param factory_name: The factory name. Required. :type factory_name: str - :param trigger_name: The trigger name. + :param trigger_name: The trigger name. Required. :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) @@ -816,10 +1011,9 @@ def delete( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -829,35 +1023,27 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}"} # type: ignore - + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}"} # type: ignore def _subscribe_to_events_initial( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any ) -> Optional[_models.TriggerSubscriptionOperationStatus]: - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.TriggerSubscriptionOperationStatus]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = 
kwargs.pop("cls", None) # type: ClsType[Optional[_models.TriggerSubscriptionOperationStatus]] - - request = build_subscribe_to_events_request_initial( - subscription_id=self._config.subscription_id, + request = build_subscribe_to_events_request( resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._subscribe_to_events_initial.metadata['url'], + template_url=self._subscribe_to_events_initial.metadata["url"], headers=_headers, params=_params, ) @@ -865,10 +1051,9 @@ def _subscribe_to_events_initial( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: @@ -877,31 +1062,26 @@ def _subscribe_to_events_initial( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _subscribe_to_events_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents"} # type: ignore - + _subscribe_to_events_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents"} # type: ignore @distributed_trace def begin_subscribe_to_events( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any ) -> LROPoller[_models.TriggerSubscriptionOperationStatus]: """Subscribe event trigger to events. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param trigger_name: The trigger name. + :param trigger_name: The trigger name. Required. :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
@@ -915,99 +1095,85 @@ def begin_subscribe_to_events( result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.TriggerSubscriptionOperationStatus] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.TriggerSubscriptionOperationStatus] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: raw_result = self._subscribe_to_events_initial( # type: ignore resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: - polling_method = cast(PollingMethod, ARMPolling( - lro_delay, - - - **kwargs - )) # type: PollingMethod - elif polling is False: polling_method = cast(PollingMethod, NoPolling()) - else: polling_method = polling + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_subscribe_to_events.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents"} # type: ignore + begin_subscribe_to_events.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents"} # type: ignore @distributed_trace def get_event_subscription_status( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any ) -> _models.TriggerSubscriptionOperationStatus: """Get a trigger's event subscription status. - :param resource_group_name: The resource group name. 
+ :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param trigger_name: The trigger name. + :param trigger_name: The trigger name. Required. :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerSubscriptionOperationStatus, or the result of cls(response) + :return: TriggerSubscriptionOperationStatus or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.TriggerSubscriptionOperationStatus] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.TriggerSubscriptionOperationStatus] - request = build_get_event_subscription_status_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_event_subscription_status.metadata['url'], + template_url=self.get_event_subscription_status.metadata["url"], headers=_headers, params=_params, ) @@ -1015,52 +1181,43 @@ def get_event_subscription_status( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_event_subscription_status.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus"} # type: ignore - + get_event_subscription_status.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus"} # type: ignore def _unsubscribe_from_events_initial( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any ) -> 
Optional[_models.TriggerSubscriptionOperationStatus]: - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.TriggerSubscriptionOperationStatus]] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.TriggerSubscriptionOperationStatus]] - - request = build_unsubscribe_from_events_request_initial( - subscription_id=self._config.subscription_id, + request = build_unsubscribe_from_events_request( resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._unsubscribe_from_events_initial.metadata['url'], + template_url=self._unsubscribe_from_events_initial.metadata["url"], headers=_headers, params=_params, ) @@ -1068,10 +1225,9 @@ def _unsubscribe_from_events_initial( request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: @@ -1080,31 +1236,26 @@ def _unsubscribe_from_events_initial( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _unsubscribe_from_events_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents"} # type: ignore - + _unsubscribe_from_events_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents"} # type: ignore @distributed_trace def begin_unsubscribe_from_events( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any ) -> LROPoller[_models.TriggerSubscriptionOperationStatus]: """Unsubscribe event trigger from events. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param trigger_name: The trigger name. + :param trigger_name: The trigger name. Required. :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
@@ -1118,85 +1269,71 @@ def begin_unsubscribe_from_events( result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[_models.TriggerSubscriptionOperationStatus] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.TriggerSubscriptionOperationStatus] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: raw_result = self._unsubscribe_from_events_initial( # type: ignore resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: - polling_method = cast(PollingMethod, ARMPolling( - lro_delay, - - - **kwargs - )) # type: PollingMethod - elif polling is False: polling_method = cast(PollingMethod, NoPolling()) - else: polling_method = polling + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_unsubscribe_from_events.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents"} # type: ignore + begin_unsubscribe_from_events.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents"} # type: ignore def _start_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any ) -> None: - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - 
error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - - request = build_start_request_initial( - subscription_id=self._config.subscription_id, + request = build_start_request( resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._start_initial.metadata['url'], + template_url=self._start_initial.metadata["url"], headers=_headers, params=_params, ) @@ -1204,10 +1341,9 @@ def _start_initial( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1217,24 +1353,19 @@ def _start_initial( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - _start_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start"} # type: ignore - + _start_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start"} # type: ignore @distributed_trace - def begin_start( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + def begin_start( + self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any ) -> LROPoller[None]: """Starts a trigger. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param trigger_name: The trigger name. + :param trigger_name: The trigger name. Required. :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -1246,83 +1377,69 @@ def begin_start( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: raw_result = self._start_initial( # type: ignore resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - if polling is True: - polling_method = cast(PollingMethod, ARMPolling( - lro_delay, - - - **kwargs - )) # type: PollingMethod - elif polling is False: polling_method = cast(PollingMethod, NoPolling()) - else: polling_method = polling + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_start.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start"} # type: ignore + begin_start.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start"} # type: ignore def _stop_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any ) -> None: - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {}) or {}) + error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = 
kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] - - request = build_stop_request_initial( - subscription_id=self._config.subscription_id, + request = build_stop_request( resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._stop_initial.metadata['url'], + template_url=self._stop_initial.metadata["url"], headers=_headers, params=_params, ) @@ -1330,10 +1447,9 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, - stream=False, - **kwargs + request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1343,24 +1459,19 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - _stop_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop"} # type: ignore - + _stop_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop"} # type: ignore @distributed_trace - def begin_stop( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs: Any + def begin_stop( + self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any ) -> LROPoller[None]: """Stops a trigger. - :param resource_group_name: The resource group name. + :param resource_group_name: The resource group name. Required. :type resource_group_name: str - :param factory_name: The factory name. + :param factory_name: The factory name. Required. :type factory_name: str - :param trigger_name: The trigger name. + :param trigger_name: The trigger name. Required. :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -1372,53 +1483,46 @@ def begin_stop( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', _params.pop('api-version', "2018-06-01")) # type: str - cls = kwargs.pop('cls', None) # type: ClsType[None] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: raw_result = self._stop_initial( # type: ignore resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - if polling is True: - polling_method = cast(PollingMethod, ARMPolling( - lro_delay, - - - **kwargs - )) # type: PollingMethod - elif polling is False: polling_method = cast(PollingMethod, NoPolling()) - else: polling_method = polling + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_stop.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop"} # type: ignore + begin_stop.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop"} # type: ignore diff --git a/src/datafactory/report.md b/src/datafactory/report.md index f7cac3328fb..4c5a9e8e7e3 100644 --- a/src/datafactory/report.md +++ b/src/datafactory/report.md @@ -1,12 +1,15 @@ # Azure CLI Module Creation Report ## EXTENSION + |CLI Extension|Command Groups| |---------|------------| |az datafactory|[groups](#CommandGroups) ## GROUPS + ### Command groups in `az datafactory` extension + |CLI Command Group|Group Swagger name|Commands| |---------|------------|--------| |az datafactory|Factories|[commands](#CommandsInFactories)| @@ -23,7 +26,9 @@ |az datafactory trigger-run|TriggerRuns|[commands](#CommandsInTriggerRuns)| ## COMMANDS + ### Commands in `az datafactory` group + |CLI Command|Operation 
Swagger name|Parameters|Examples| |---------|------------|--------|-----------| |[az datafactory list](#FactoriesListByResourceGroup)|ListByResourceGroup|[Parameters](#ParametersFactoriesListByResourceGroup)|[Example](#ExamplesFactoriesListByResourceGroup)| @@ -37,11 +42,13 @@ |[az datafactory get-git-hub-access-token](#FactoriesGetGitHubAccessToken)|GetGitHubAccessToken|[Parameters](#ParametersFactoriesGetGitHubAccessToken)|[Example](#ExamplesFactoriesGetGitHubAccessToken)| ### Commands in `az datafactory activity-run` group + |CLI Command|Operation Swagger name|Parameters|Examples| |---------|------------|--------|-----------| |[az datafactory activity-run query-by-pipeline-run](#ActivityRunsQueryByPipelineRun)|QueryByPipelineRun|[Parameters](#ParametersActivityRunsQueryByPipelineRun)|[Example](#ExamplesActivityRunsQueryByPipelineRun)| ### Commands in `az datafactory dataset` group + |CLI Command|Operation Swagger name|Parameters|Examples| |---------|------------|--------|-----------| |[az datafactory dataset list](#DatasetsListByFactory)|ListByFactory|[Parameters](#ParametersDatasetsListByFactory)|[Example](#ExamplesDatasetsListByFactory)| @@ -51,6 +58,7 @@ |[az datafactory dataset delete](#DatasetsDelete)|Delete|[Parameters](#ParametersDatasetsDelete)|[Example](#ExamplesDatasetsDelete)| ### Commands in `az datafactory integration-runtime` group + |CLI Command|Operation Swagger name|Parameters|Examples| |---------|------------|--------|-----------| |[az datafactory integration-runtime list](#IntegrationRuntimesListByFactory)|ListByFactory|[Parameters](#ParametersIntegrationRuntimesListByFactory)|[Example](#ExamplesIntegrationRuntimesListByFactory)| @@ -72,6 +80,7 @@ |[az datafactory integration-runtime upgrade](#IntegrationRuntimesUpgrade)|Upgrade|[Parameters](#ParametersIntegrationRuntimesUpgrade)|[Example](#ExamplesIntegrationRuntimesUpgrade)| ### Commands in `az datafactory integration-runtime-node` group + |CLI Command|Operation Swagger name|Parameters|Examples| |---------|------------|--------|-----------| |[az datafactory integration-runtime-node show](#IntegrationRuntimeNodesGet)|Get|[Parameters](#ParametersIntegrationRuntimeNodesGet)|[Example](#ExamplesIntegrationRuntimeNodesGet)| @@ -80,6 +89,7 @@ |[az datafactory integration-runtime-node get-ip-address](#IntegrationRuntimeNodesGetIpAddress)|GetIpAddress|[Parameters](#ParametersIntegrationRuntimeNodesGetIpAddress)|[Example](#ExamplesIntegrationRuntimeNodesGetIpAddress)| ### Commands in `az datafactory linked-service` group + |CLI Command|Operation Swagger name|Parameters|Examples| |---------|------------|--------|-----------| |[az datafactory linked-service list](#LinkedServicesListByFactory)|ListByFactory|[Parameters](#ParametersLinkedServicesListByFactory)|[Example](#ExamplesLinkedServicesListByFactory)| @@ -89,6 +99,7 @@ |[az datafactory linked-service delete](#LinkedServicesDelete)|Delete|[Parameters](#ParametersLinkedServicesDelete)|[Example](#ExamplesLinkedServicesDelete)| ### Commands in `az datafactory managed-private-endpoint` group + |CLI Command|Operation Swagger name|Parameters|Examples| |---------|------------|--------|-----------| |[az datafactory managed-private-endpoint list](#ManagedPrivateEndpointsListByFactory)|ListByFactory|[Parameters](#ParametersManagedPrivateEndpointsListByFactory)|[Example](#ExamplesManagedPrivateEndpointsListByFactory)| @@ -98,6 +109,7 @@ |[az datafactory managed-private-endpoint 
delete](#ManagedPrivateEndpointsDelete)|Delete|[Parameters](#ParametersManagedPrivateEndpointsDelete)|[Example](#ExamplesManagedPrivateEndpointsDelete)| ### Commands in `az datafactory managed-virtual-network` group + |CLI Command|Operation Swagger name|Parameters|Examples| |---------|------------|--------|-----------| |[az datafactory managed-virtual-network list](#ManagedVirtualNetworksListByFactory)|ListByFactory|[Parameters](#ParametersManagedVirtualNetworksListByFactory)|[Example](#ExamplesManagedVirtualNetworksListByFactory)| @@ -106,6 +118,7 @@ |[az datafactory managed-virtual-network update](#ManagedVirtualNetworksCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersManagedVirtualNetworksCreateOrUpdate#Update)|Not Found| ### Commands in `az datafactory pipeline` group + |CLI Command|Operation Swagger name|Parameters|Examples| |---------|------------|--------|-----------| |[az datafactory pipeline list](#PipelinesListByFactory)|ListByFactory|[Parameters](#ParametersPipelinesListByFactory)|[Example](#ExamplesPipelinesListByFactory)| @@ -116,6 +129,7 @@ |[az datafactory pipeline create-run](#PipelinesCreateRun)|CreateRun|[Parameters](#ParametersPipelinesCreateRun)|[Example](#ExamplesPipelinesCreateRun)| ### Commands in `az datafactory pipeline-run` group + |CLI Command|Operation Swagger name|Parameters|Examples| |---------|------------|--------|-----------| |[az datafactory pipeline-run show](#PipelineRunsGet)|Get|[Parameters](#ParametersPipelineRunsGet)|[Example](#ExamplesPipelineRunsGet)| @@ -123,6 +137,7 @@ |[az datafactory pipeline-run query-by-factory](#PipelineRunsQueryByFactory)|QueryByFactory|[Parameters](#ParametersPipelineRunsQueryByFactory)|[Example](#ExamplesPipelineRunsQueryByFactory)| ### Commands in `az datafactory trigger` group + |CLI Command|Operation Swagger name|Parameters|Examples| |---------|------------|--------|-----------| |[az datafactory trigger list](#TriggersListByFactory)|ListByFactory|[Parameters](#ParametersTriggersListByFactory)|[Example](#ExamplesTriggersListByFactory)| @@ -138,22 +153,27 @@ |[az datafactory trigger unsubscribe-from-event](#TriggersUnsubscribeFromEvents)|UnsubscribeFromEvents|[Parameters](#ParametersTriggersUnsubscribeFromEvents)|[Example](#ExamplesTriggersUnsubscribeFromEvents)| ### Commands in `az datafactory trigger-run` group + |CLI Command|Operation Swagger name|Parameters|Examples| |---------|------------|--------|-----------| |[az datafactory trigger-run cancel](#TriggerRunsCancel)|Cancel|[Parameters](#ParametersTriggerRunsCancel)|[Example](#ExamplesTriggerRunsCancel)| |[az datafactory trigger-run query-by-factory](#TriggerRunsQueryByFactory)|QueryByFactory|[Parameters](#ParametersTriggerRunsQueryByFactory)|[Example](#ExamplesTriggerRunsQueryByFactory)| |[az datafactory trigger-run rerun](#TriggerRunsRerun)|Rerun|[Parameters](#ParametersTriggerRunsRerun)|[Example](#ExamplesTriggerRunsRerun)| - ## COMMAND DETAILS + ### group `az datafactory` + #### Command `az datafactory list` ##### Example + ``` az datafactory list --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -161,20 +181,26 @@ az datafactory list --resource-group "exampleResourceGroup" #### Command `az datafactory list` ##### Example + ``` az datafactory list ``` -##### Parameters + +##### Parameters + 
|Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| #### Command `az datafactory show` ##### Example + ``` az datafactory show --name "exampleFactoryName" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -184,10 +210,13 @@ az datafactory show --name "exampleFactoryName" --resource-group "exampleResourc #### Command `az datafactory create` ##### Example + ``` az datafactory create --location "East US" --name "exampleFactoryName" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -202,11 +231,14 @@ az datafactory create --location "East US" --name "exampleFactoryName" --resourc #### Command `az datafactory update` ##### Example + ``` az datafactory update --name "exampleFactoryName" --tags exampleTag="exampleValue" --resource-group \ "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -216,10 +248,13 @@ az datafactory update --name "exampleFactoryName" --tags exampleTag="exampleValu #### Command `az datafactory delete` ##### Example + ``` az datafactory delete --name "exampleFactoryName" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -228,13 +263,16 @@ az datafactory delete --name "exampleFactoryName" --resource-group "exampleResou #### Command `az datafactory configure-factory-repo` ##### Example + ``` az datafactory configure-factory-repo --factory-resource-id "/subscriptions/12345678-1234-1234-1234-12345678abc/resourc\ eGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName" \ --factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" project-name="project" \ repository-name="repo" root-folder="/" tenant-id="" --location "East US" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--location**|string|The location identifier.|location|locationId| @@ -245,12 +283,15 @@ repository-name="repo" root-folder="/" tenant-id="" --location "East US" #### Command `az datafactory get-data-plane-access` ##### Example + ``` az datafactory get-data-plane-access --name "exampleFactoryName" --access-resource-path "" --expire-time \ "2018-11-10T09:46:20.2659347Z" --permissions "r" --profile-name "DefaultProfile" --start-time \ "2018-11-10T02:46:20.2659347Z" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -264,11 +305,14 @@ az datafactory 
get-data-plane-access --name "exampleFactoryName" --access-resour #### Command `az datafactory get-git-hub-access-token` ##### Example + ``` az datafactory get-git-hub-access-token --name "exampleFactoryName" --git-hub-access-code "some" \ --git-hub-access-token-base-url "some" --git-hub-client-id "some" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -278,15 +322,19 @@ az datafactory get-git-hub-access-token --name "exampleFactoryName" --git-hub-ac |**--git-hub-client-id**|string|GitHub application client ID.|git_hub_client_id|gitHubClientId| ### group `az datafactory activity-run` + #### Command `az datafactory activity-run query-by-pipeline-run` ##### Example + ``` az datafactory activity-run query-by-pipeline-run --factory-name "exampleFactoryName" --last-updated-after \ "2018-06-16T00:36:44.3345758Z" --last-updated-before "2018-06-16T00:49:48.3686473Z" --resource-group \ "exampleResourceGroup" --run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -299,13 +347,17 @@ az datafactory activity-run query-by-pipeline-run --factory-name "exampleFactory |**--order-by**|array|List of OrderBy option.|order_by|orderBy| ### group `az datafactory dataset` + #### Command `az datafactory dataset list` ##### Example + ``` az datafactory dataset list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -314,11 +366,14 @@ az datafactory dataset list --factory-name "exampleFactoryName" --resource-group #### Command `az datafactory dataset show` ##### Example + ``` az datafactory dataset show --name "exampleDataset" --factory-name "exampleFactoryName" --resource-group \ "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -329,6 +384,7 @@ az datafactory dataset show --name "exampleDataset" --factory-name "exampleFacto #### Command `az datafactory dataset create` ##### Example + ``` az datafactory dataset create --properties "{\\"type\\":\\"AzureBlob\\",\\"linkedServiceName\\":{\\"type\\":\\"LinkedSe\ rviceReference\\",\\"referenceName\\":\\"exampleLinkedService\\"},\\"parameters\\":{\\"MyFileName\\":{\\"type\\":\\"Str\ @@ -337,7 +393,9 @@ ing\\"},\\"MyFolderPath\\":{\\"type\\":\\"String\\"}},\\"typeProperties\\":{\\"f pression\\",\\"value\\":\\"@dataset().MyFolderPath\\"}}}" --name "exampleDataset" --factory-name "exampleFactoryName" \ --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -348,8 +406,8 @@ 
pression\\",\\"value\\":\\"@dataset().MyFolderPath\\"}}}" --name "exampleDataset #### Command `az datafactory dataset update` +##### Parameters -##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -367,11 +425,14 @@ pression\\",\\"value\\":\\"@dataset().MyFolderPath\\"}}}" --name "exampleDataset #### Command `az datafactory dataset delete` ##### Example + ``` az datafactory dataset delete --name "exampleDataset" --factory-name "exampleFactoryName" --resource-group \ "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -379,13 +440,17 @@ az datafactory dataset delete --name "exampleDataset" --factory-name "exampleFac |**--dataset-name**|string|The dataset name.|dataset_name|datasetName| ### group `az datafactory integration-runtime` + #### Command `az datafactory integration-runtime list` ##### Example + ``` az datafactory integration-runtime list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -394,11 +459,14 @@ az datafactory integration-runtime list --factory-name "exampleFactoryName" --re #### Command `az datafactory integration-runtime show` ##### Example + ``` az datafactory integration-runtime show --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -409,13 +477,16 @@ az datafactory integration-runtime show --factory-name "exampleFactoryName" --na #### Command `az datafactory integration-runtime linked-integration-runtime create` ##### Example + ``` az datafactory integration-runtime linked-integration-runtime create --name "bfa92911-9fb6-4fbe-8f23-beae87bc1c83" \ --location "West US" --data-factory-name "e9955d6d-56ea-4be3-841c-52a12c1a9981" --subscription-id \ "061774c7-4b5a-4159-a55b-365581830283" --factory-name "exampleFactoryName" --integration-runtime-name \ "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -428,8 +499,8 @@ az datafactory integration-runtime linked-integration-runtime create --name "bfa #### Command `az datafactory integration-runtime managed create` +##### Parameters -##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -443,11 +514,14 @@ az datafactory integration-runtime linked-integration-runtime create --name "bfa #### Command `az datafactory integration-runtime self-hosted create` ##### Example 
+ ``` az datafactory integration-runtime self-hosted create --factory-name "exampleFactoryName" --description "A selfhosted \ integration runtime" --name "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -460,27 +534,33 @@ integration runtime" --name "exampleIntegrationRuntime" --resource-group "exampl #### Command `az datafactory integration-runtime update` ##### Example + ``` az datafactory integration-runtime update --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ --resource-group "exampleResourceGroup" --auto-update "Off" --update-delay-offset "\\"PT3H\\"" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--auto-update**|choice|Enables or disables the auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189.|auto_update|autoUpdate| +|**--auto-update**|choice|Enables or disables the auto-update feature of the self-hosted integration runtime. See .|auto_update|autoUpdate| |**--update-delay-offset**|string|The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on that time.|update_delay_offset|updateDelayOffset| #### Command `az datafactory integration-runtime delete` ##### Example + ``` az datafactory integration-runtime delete --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -490,11 +570,14 @@ az datafactory integration-runtime delete --factory-name "exampleFactoryName" -- #### Command `az datafactory integration-runtime get-connection-info` ##### Example + ``` az datafactory integration-runtime get-connection-info --factory-name "exampleFactoryName" --name \ "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -504,11 +587,14 @@ az datafactory integration-runtime get-connection-info --factory-name "exampleFa #### Command `az datafactory integration-runtime get-monitoring-data` ##### Example + ``` az datafactory integration-runtime get-monitoring-data --factory-name "exampleFactoryName" --name \ "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -518,11 +604,14 @@ az datafactory integration-runtime 
get-monitoring-data --factory-name "exampleFa #### Command `az datafactory integration-runtime get-status` ##### Example + ``` az datafactory integration-runtime get-status --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -532,11 +621,14 @@ az datafactory integration-runtime get-status --factory-name "exampleFactoryName #### Command `az datafactory integration-runtime list-auth-key` ##### Example + ``` az datafactory integration-runtime list-auth-key --factory-name "exampleFactoryName" --name \ "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -546,11 +638,14 @@ az datafactory integration-runtime list-auth-key --factory-name "exampleFactoryN #### Command `az datafactory integration-runtime regenerate-auth-key` ##### Example + ``` az datafactory integration-runtime regenerate-auth-key --factory-name "exampleFactoryName" --name \ "exampleIntegrationRuntime" --key-name "authKey2" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -561,11 +656,14 @@ az datafactory integration-runtime regenerate-auth-key --factory-name "exampleFa #### Command `az datafactory integration-runtime remove-link` ##### Example + ``` az datafactory integration-runtime remove-link --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ --linked-factory-name "exampleFactoryName-linked" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -576,11 +674,14 @@ az datafactory integration-runtime remove-link --factory-name "exampleFactoryNam #### Command `az datafactory integration-runtime start` ##### Example + ``` az datafactory integration-runtime start --factory-name "exampleFactoryName" --name "exampleManagedIntegrationRuntime" \ --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -590,11 +691,14 @@ az datafactory integration-runtime start --factory-name "exampleFactoryName" --n #### Command `az datafactory integration-runtime stop` ##### Example + ``` az datafactory integration-runtime stop --factory-name "exampleFactoryName" --name "exampleManagedIntegrationRuntime" \ --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -604,11 
+708,14 @@ az datafactory integration-runtime stop --factory-name "exampleFactoryName" --na #### Command `az datafactory integration-runtime sync-credentials` ##### Example + ``` az datafactory integration-runtime sync-credentials --factory-name "exampleFactoryName" --name \ "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -618,11 +725,14 @@ az datafactory integration-runtime sync-credentials --factory-name "exampleFacto #### Command `az datafactory integration-runtime upgrade` ##### Example + ``` az datafactory integration-runtime upgrade --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -630,14 +740,18 @@ az datafactory integration-runtime upgrade --factory-name "exampleFactoryName" - |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| ### group `az datafactory integration-runtime-node` + #### Command `az datafactory integration-runtime-node show` ##### Example + ``` az datafactory integration-runtime-node show --factory-name "exampleFactoryName" --integration-runtime-name \ "exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -648,11 +762,14 @@ az datafactory integration-runtime-node show --factory-name "exampleFactoryName" #### Command `az datafactory integration-runtime-node update` ##### Example + ``` az datafactory integration-runtime-node update --factory-name "exampleFactoryName" --integration-runtime-name \ "exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" --concurrent-jobs-limit 2 ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -664,11 +781,14 @@ az datafactory integration-runtime-node update --factory-name "exampleFactoryNam #### Command `az datafactory integration-runtime-node delete` ##### Example + ``` az datafactory integration-runtime-node delete --factory-name "exampleFactoryName" --integration-runtime-name \ "exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -679,11 +799,14 @@ az datafactory integration-runtime-node delete --factory-name "exampleFactoryNam #### Command `az datafactory integration-runtime-node get-ip-address` ##### Example + ``` az datafactory integration-runtime-node get-ip-address --factory-name "exampleFactoryName" --integration-runtime-name 
\ "exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -692,13 +815,17 @@ az datafactory integration-runtime-node get-ip-address --factory-name "exampleFa |**--node-name**|string|The integration runtime node name.|node_name|nodeName| ### group `az datafactory linked-service` + #### Command `az datafactory linked-service list` ##### Example + ``` az datafactory linked-service list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -707,11 +834,14 @@ az datafactory linked-service list --factory-name "exampleFactoryName" --resourc #### Command `az datafactory linked-service show` ##### Example + ``` az datafactory linked-service show --factory-name "exampleFactoryName" --name "exampleLinkedService" --resource-group \ "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -722,13 +852,16 @@ az datafactory linked-service show --factory-name "exampleFactoryName" --name "e #### Command `az datafactory linked-service create` ##### Example + ``` az datafactory linked-service create --factory-name "exampleFactoryName" --properties "{\\"type\\":\\"AzureStorage\\",\ \\"typeProperties\\":{\\"connectionString\\":{\\"type\\":\\"SecureString\\",\\"value\\":\\"DefaultEndpointsProtocol=htt\ ps;AccountName=examplestorageaccount;AccountKey=\\"}}}" --name "exampleLinkedService" --resource-group \ "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -739,8 +872,8 @@ ps;AccountName=examplestorageaccount;AccountKey=\\"}}}" --name "exa #### Command `az datafactory linked-service update` +##### Parameters -##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -755,11 +888,14 @@ ps;AccountName=examplestorageaccount;AccountKey=\\"}}}" --name "exa #### Command `az datafactory linked-service delete` ##### Example + ``` az datafactory linked-service delete --factory-name "exampleFactoryName" --name "exampleLinkedService" \ --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -767,14 +903,18 @@ az datafactory linked-service delete --factory-name "exampleFactoryName" --name |**--linked-service-name**|string|The linked service name.|linked_service_name|linkedServiceName| ### group `az datafactory managed-private-endpoint` + #### Command `az datafactory 
managed-private-endpoint list` ##### Example + ``` az datafactory managed-private-endpoint list --factory-name "exampleFactoryName" --managed-virtual-network-name \ "exampleManagedVirtualNetworkName" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -784,11 +924,14 @@ az datafactory managed-private-endpoint list --factory-name "exampleFactoryName" #### Command `az datafactory managed-private-endpoint show` ##### Example + ``` az datafactory managed-private-endpoint show --factory-name "exampleFactoryName" --name "exampleManagedPrivateEndpointN\ ame" --managed-virtual-network-name "exampleManagedVirtualNetworkName" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -800,13 +943,16 @@ ame" --managed-virtual-network-name "exampleManagedVirtualNetworkName" --resourc #### Command `az datafactory managed-private-endpoint create` ##### Example + ``` az datafactory managed-private-endpoint create --factory-name "exampleFactoryName" --group-id "blob" \ --private-link-resource-id "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/prov\ iders/Microsoft.Storage/storageAccounts/exampleBlobStorage" --name "exampleManagedPrivateEndpointName" \ --managed-virtual-network-name "exampleManagedVirtualNetworkName" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -820,8 +966,8 @@ iders/Microsoft.Storage/storageAccounts/exampleBlobStorage" --name "exampleManag #### Command `az datafactory managed-private-endpoint update` +##### Parameters -##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -836,12 +982,15 @@ iders/Microsoft.Storage/storageAccounts/exampleBlobStorage" --name "exampleManag #### Command `az datafactory managed-private-endpoint delete` ##### Example + ``` az datafactory managed-private-endpoint delete --factory-name "exampleFactoryName" --name \ "exampleManagedPrivateEndpointName" --managed-virtual-network-name "exampleManagedVirtualNetworkName" --resource-group \ "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -850,14 +999,18 @@ az datafactory managed-private-endpoint delete --factory-name "exampleFactoryNam |**--managed-private-endpoint-name**|string|Managed private endpoint name|managed_private_endpoint_name|managedPrivateEndpointName| ### group `az datafactory managed-virtual-network` + #### Command `az datafactory managed-virtual-network list` ##### Example + ``` az datafactory managed-virtual-network list --factory-name "exampleFactoryName" --resource-group \ "exampleResourceGroup" ``` 
-##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -866,11 +1019,14 @@ az datafactory managed-virtual-network list --factory-name "exampleFactoryName" #### Command `az datafactory managed-virtual-network show` ##### Example + ``` az datafactory managed-virtual-network show --factory-name "exampleFactoryName" --name "exampleManagedVirtualNetworkNam\ e" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -881,11 +1037,14 @@ e" --resource-group "exampleResourceGroup" #### Command `az datafactory managed-virtual-network create` ##### Example + ``` az datafactory managed-virtual-network create --factory-name "exampleFactoryName" --name \ "exampleManagedVirtualNetworkName" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -895,8 +1054,8 @@ az datafactory managed-virtual-network create --factory-name "exampleFactoryName #### Command `az datafactory managed-virtual-network update` +##### Parameters -##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -905,13 +1064,17 @@ az datafactory managed-virtual-network create --factory-name "exampleFactoryName |**--if-match**|string|ETag of the managed Virtual Network entity. 
Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| ### group `az datafactory pipeline` + #### Command `az datafactory pipeline list` ##### Example + ``` az datafactory pipeline list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -920,11 +1083,14 @@ az datafactory pipeline list --factory-name "exampleFactoryName" --resource-grou #### Command `az datafactory pipeline show` ##### Example + ``` az datafactory pipeline show --factory-name "exampleFactoryName" --name "examplePipeline" --resource-group \ "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -935,6 +1101,7 @@ az datafactory pipeline show --factory-name "exampleFactoryName" --name "example #### Command `az datafactory pipeline create` ##### Example + ``` az datafactory pipeline create --factory-name "exampleFactoryName" --pipeline "{\\"activities\\":[{\\"name\\":\\"Exampl\ eForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typeProperties\\":{\\"activities\\":[{\\"name\\":\\"ExampleCopyActivity\ @@ -949,7 +1116,9 @@ es\\":{\\"TestVariableArray\\":{\\"type\\":\\"Array\\"}},\\"runDimensions\\":{\\ \\"value\\":\\"@pipeline().parameters.JobId\\"}},\\"duration\\":\\"0.00:10:00\\"}" --name "examplePipeline" \ --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -961,6 +1130,7 @@ es\\":{\\"TestVariableArray\\":{\\"type\\":\\"Array\\"}},\\"runDimensions\\":{\\ #### Command `az datafactory pipeline update` ##### Example + ``` az datafactory pipeline update --factory-name "exampleFactoryName" --description "Example description" --activities \ "[{\\"name\\":\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typeProperties\\":{\\"activities\\":[{\\"name\\"\ @@ -973,7 +1143,9 @@ obSource\\"}}}],\\"isSequential\\":true,\\"items\\":{\\"type\\":\\"Expression\\" OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}}" --duration "0.00:10:00" \ --name "examplePipeline" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -993,11 +1165,14 @@ OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\ #### Command `az datafactory pipeline delete` ##### Example + ``` az datafactory pipeline delete --factory-name "exampleFactoryName" --name "examplePipeline" --resource-group \ "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1007,11 +1182,14 @@ az datafactory 
pipeline delete --factory-name "exampleFactoryName" --name "examp #### Command `az datafactory pipeline create-run` ##### Example + ``` az datafactory pipeline create-run --factory-name "exampleFactoryName" --parameters "{\\"OutputBlobNameList\\":[\\"exam\ pleoutput.csv\\"]}" --name "examplePipeline" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1024,14 +1202,18 @@ pleoutput.csv\\"]}" --name "examplePipeline" --resource-group "exampleResourceGr |**--parameters**|dictionary|Parameters of the pipeline run. These parameters will be used only if the runId is not specified.|parameters|parameters| ### group `az datafactory pipeline-run` + #### Command `az datafactory pipeline-run show` ##### Example + ``` az datafactory pipeline-run show --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --run-id \ "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1041,11 +1223,14 @@ az datafactory pipeline-run show --factory-name "exampleFactoryName" --resource- #### Command `az datafactory pipeline-run cancel` ##### Example + ``` az datafactory pipeline-run cancel --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \ --run-id "16ac5348-ff82-4f95-a80d-638c1d47b721" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1056,12 +1241,15 @@ az datafactory pipeline-run cancel --factory-name "exampleFactoryName" --resourc #### Command `az datafactory pipeline-run query-by-factory` ##### Example + ``` az datafactory pipeline-run query-by-factory --factory-name "exampleFactoryName" --filters operand="PipelineName" \ operator="Equals" values="examplePipeline" --last-updated-after "2018-06-16T00:36:44.3345758Z" --last-updated-before \ "2018-06-16T00:49:48.3686473Z" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1073,13 +1261,17 @@ operator="Equals" values="examplePipeline" --last-updated-after "2018-06-16T00:3 |**--order-by**|array|List of OrderBy option.|order_by|orderBy| ### group `az datafactory trigger` + #### Command `az datafactory trigger list` ##### Example + ``` az datafactory trigger list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1088,11 +1280,14 @@ az datafactory trigger list --factory-name "exampleFactoryName" --resource-group #### Command `az datafactory trigger show` ##### Example + ``` az datafactory trigger show --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name 
\ "exampleTrigger" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1103,6 +1298,7 @@ az datafactory trigger show --factory-name "exampleFactoryName" --resource-group #### Command `az datafactory trigger create` ##### Example + ``` az datafactory trigger create --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --properties \ "{\\"type\\":\\"ScheduleTrigger\\",\\"pipelines\\":[{\\"parameters\\":{\\"OutputBlobNameList\\":[\\"exampleoutput.csv\\\ @@ -1110,7 +1306,9 @@ az datafactory trigger create --factory-name "exampleFactoryName" --resource-gro perties\\":{\\"recurrence\\":{\\"endTime\\":\\"2018-06-16T00:55:13.8441801Z\\",\\"frequency\\":\\"Minute\\",\\"interval\ \\":4,\\"startTime\\":\\"2018-06-16T00:39:13.8441801Z\\",\\"timeZone\\":\\"UTC\\"}}}" --name "exampleTrigger" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1121,8 +1319,8 @@ perties\\":{\\"recurrence\\":{\\"endTime\\":\\"2018-06-16T00:55:13.8441801Z\\",\ #### Command `az datafactory trigger update` +##### Parameters -##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1135,11 +1333,14 @@ perties\\":{\\"recurrence\\":{\\"endTime\\":\\"2018-06-16T00:55:13.8441801Z\\",\ #### Command `az datafactory trigger delete` ##### Example + ``` az datafactory trigger delete --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name \ "exampleTrigger" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1149,11 +1350,14 @@ az datafactory trigger delete --factory-name "exampleFactoryName" --resource-gro #### Command `az datafactory trigger get-event-subscription-status` ##### Example + ``` az datafactory trigger get-event-subscription-status --factory-name "exampleFactoryName" --resource-group \ "exampleResourceGroup" --name "exampleTrigger" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1163,11 +1367,14 @@ az datafactory trigger get-event-subscription-status --factory-name "exampleFact #### Command `az datafactory trigger query-by-factory` ##### Example + ``` az datafactory trigger query-by-factory --factory-name "exampleFactoryName" --parent-trigger-name "exampleTrigger" \ --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1178,11 +1385,14 @@ az datafactory trigger query-by-factory --factory-name "exampleFactoryName" --pa #### Command `az datafactory trigger start` ##### Example + ``` az datafactory trigger start --factory-name 
"exampleFactoryName" --resource-group "exampleResourceGroup" --name \ "exampleTrigger" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1192,11 +1402,14 @@ az datafactory trigger start --factory-name "exampleFactoryName" --resource-grou #### Command `az datafactory trigger stop` ##### Example + ``` az datafactory trigger stop --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name \ "exampleTrigger" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1206,11 +1419,14 @@ az datafactory trigger stop --factory-name "exampleFactoryName" --resource-group #### Command `az datafactory trigger subscribe-to-event` ##### Example + ``` az datafactory trigger subscribe-to-event --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \ --name "exampleTrigger" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1220,11 +1436,14 @@ az datafactory trigger subscribe-to-event --factory-name "exampleFactoryName" -- #### Command `az datafactory trigger unsubscribe-from-event` ##### Example + ``` az datafactory trigger unsubscribe-from-event --factory-name "exampleFactoryName" --resource-group \ "exampleResourceGroup" --name "exampleTrigger" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1232,14 +1451,18 @@ az datafactory trigger unsubscribe-from-event --factory-name "exampleFactoryName |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| ### group `az datafactory trigger-run` + #### Command `az datafactory trigger-run cancel` ##### Example + ``` az datafactory trigger-run cancel --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --run-id \ "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" --trigger-name "exampleTrigger" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1250,12 +1473,15 @@ az datafactory trigger-run cancel --factory-name "exampleFactoryName" --resource #### Command `az datafactory trigger-run query-by-factory` ##### Example + ``` az datafactory trigger-run query-by-factory --factory-name "exampleFactoryName" --filters operand="TriggerName" \ operator="Equals" values="exampleTrigger" --last-updated-after "2018-06-16T00:36:44.3345758Z" --last-updated-before \ "2018-06-16T00:49:48.3686473Z" --resource-group "exampleResourceGroup" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1269,11 +1495,14 @@ operator="Equals" values="exampleTrigger" --last-updated-after 
"2018-06-16T00:36 #### Command `az datafactory trigger-run rerun` ##### Example + ``` az datafactory trigger-run rerun --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --run-id \ "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" --trigger-name "exampleTrigger" ``` -##### Parameters + +##### Parameters + |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| diff --git a/src/datafactory/setup.py b/src/datafactory/setup.py index bb56e22a214..7b123bda715 100644 --- a/src/datafactory/setup.py +++ b/src/datafactory/setup.py @@ -10,7 +10,7 @@ from setuptools import setup, find_packages # HISTORY.rst entry. -VERSION = '0.1.0' +VERSION = "0.1.0" try: from azext_datafactory.manual.version import VERSION except ImportError: @@ -19,15 +19,15 @@ # The full list of classifiers is available at # https://pypi.python.org/pypi?%3Aaction=list_classifiers CLASSIFIERS = [ - 'Development Status :: 4 - Beta', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'License :: OSI Approved :: MIT License', + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License", ] DEPENDENCIES = [] @@ -37,22 +37,22 @@ except ImportError: pass -with open('README.md', 'r', encoding='utf-8') as f: +with open("README.md", "r", encoding="utf-8") as f: README = f.read() -with open('HISTORY.rst', 'r', encoding='utf-8') as f: +with open("HISTORY.rst", "r", encoding="utf-8") as f: HISTORY = f.read() setup( - name='datafactory', + name="datafactory", version=VERSION, - description='Microsoft Azure Command-Line Tools DataFactoryManagementClient Extension', - author='Microsoft Corporation', - author_email='azpycli@microsoft.com', - url='https://github.com/Azure/azure-cli-extensions/tree/main/src/datafactory', - long_description=README + '\n\n' + HISTORY, - license='MIT', + description="Microsoft Azure Command-Line Tools DataFactoryManagementClient Extension", + author="Microsoft Corporation", + author_email="azpycli@microsoft.com", + url="https://github.com/Azure/azure-cli-extensions/tree/main/src/datafactory", + long_description=README + "\n\n" + HISTORY, + license="MIT", classifiers=CLASSIFIERS, packages=find_packages(), install_requires=DEPENDENCIES, - package_data={'azext_datafactory': ['azext_metadata.json']}, + package_data={"azext_datafactory": ["azext_metadata.json"]}, ) From c452f94d1d4723c34de2d4d8ec42123908bc7b5b Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Fri, 28 Oct 2022 05:18:45 +0000 Subject: [PATCH 42/85] [Release] Update index.json for extension [ datafactory ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=12040&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/3442a090a06b91a3ac117aa8eb77e36afb974f68 --- src/index.json | 42 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 
42 insertions(+) diff --git a/src/index.json b/src/index.json index c66f32e99de..c27a17e264b 100644 --- a/src/index.json +++ b/src/index.json @@ -21019,6 +21019,48 @@ "version": "0.6.0" }, "sha256Digest": "c34245db473f35d029c6a794527304aa110f230821386c8687e4a0ce5d03dc61" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/datafactory-0.7.0-py3-none-any.whl", + "filename": "datafactory-0.7.0-py3-none-any.whl", + "metadata": { + "azext.minCliCoreVersion": "2.15.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/datafactory" + } + } + }, + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "datafactory", + "summary": "Microsoft Azure Command-Line Tools DataFactoryManagementClient Extension", + "version": "0.7.0" + }, + "sha256Digest": "4820123af622fa8961940a33343b4538851fd506b35e100da7154f71e0089a15" } ], "datamigration": [ From 9db57acab3d3d07a34a4909567e488c504dc28dc Mon Sep 17 00:00:00 2001 From: FumingZhang <81607949+FumingZhang@users.noreply.github.com> Date: Fri, 28 Oct 2022 14:10:50 +0800 Subject: [PATCH 43/85] {AKS} Fix workload identity update error after oidc issure GA in azure-cli (#5496) --- src/aks-preview/HISTORY.rst | 2 ++ .../managed_cluster_decorator.py | 31 +++++++------------ .../latest/test_managed_cluster_decorator.py | 2 +- 3 files changed, 15 insertions(+), 20 deletions(-) diff --git a/src/aks-preview/HISTORY.rst b/src/aks-preview/HISTORY.rst index e5f4b9584e4..d58938800c5 100644 --- a/src/aks-preview/HISTORY.rst +++ b/src/aks-preview/HISTORY.rst @@ -12,6 +12,8 @@ To release a new version, please select a new version number (usually plus 1 to Pending +++++++ +* Fix workload identity update error after oidc issure GA in azure-cli. + 0.5.112 +++++++ diff --git a/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py b/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py index 46a6618110b..8f026164d1d 100644 --- a/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py +++ b/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py @@ -2319,7 +2319,9 @@ def set_up_oidc_issuer_profile(self, mc: ManagedCluster) -> ManagedCluster: """ self._ensure_mc(mc) - mc.oidc_issuer_profile = self.context.get_oidc_issuer_profile() + oidc_issuer_profile = self.context.get_oidc_issuer_profile() + if oidc_issuer_profile is not None: + mc.oidc_issuer_profile = oidc_issuer_profile return mc @@ -2533,15 +2535,10 @@ def construct_mc_profile_preview(self, bypass_restore_defaults: bool = False) -> mc = self.set_up_pod_security_policy(mc) # set up pod identity profile mc = self.set_up_pod_identity_profile(mc) - - # update workload identity & OIDC issuer settings - # NOTE: in current implementation, workload identity settings setup requires checking - # previous OIDC issuer profile. 
However, the OIDC issuer settings setup will - # overrides the previous OIDC issuer profile based on user input. Therefore, we have - # to make sure the workload identity settings setup is done after OIDC issuer settings. - mc = self.set_up_workload_identity_profile(mc) + # set up oidc issuer profile, GA in 2.42.0 mc = self.set_up_oidc_issuer_profile(mc) - + # set up workload identity profile + mc = self.set_up_workload_identity_profile(mc) # set up azure keyvalut kms mc = self.set_up_azure_keyvault_kms(mc) # set up node restriction @@ -2824,8 +2821,9 @@ def update_oidc_issuer_profile(self, mc: ManagedCluster) -> ManagedCluster: :return: the ManagedCluster object """ self._ensure_mc(mc) - - mc.oidc_issuer_profile = self.context.get_oidc_issuer_profile() + oidc_issuer_profile = self.context.get_oidc_issuer_profile() + if oidc_issuer_profile is not None: + mc.oidc_issuer_profile = oidc_issuer_profile return mc @@ -3129,15 +3127,10 @@ def update_mc_profile_preview(self) -> ManagedCluster: mc = self.update_pod_security_policy(mc) # update pod identity profile mc = self.update_pod_identity_profile(mc) - - # update workload identity & OIDC issuer settings - # NOTE: in current implementation, workload identity settings setup requires checking - # previous OIDC issuer profile. However, the OIDC issuer settings setup will - # overrides the previous OIDC issuer profile based on user input. Therefore, we have - # to make sure the workload identity settings setup is done after OIDC issuer settings. - mc = self.update_workload_identity_profile(mc) + # update oidc issure profile, GA in 2.42.0 mc = self.update_oidc_issuer_profile(mc) - + # update workload identity profile + mc = self.update_workload_identity_profile(mc) # update azure keyvalut kms mc = self.update_azure_keyvault_kms(mc) # update node restriction diff --git a/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py b/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py index 31ded0aef56..c4292e0c5d4 100644 --- a/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py +++ b/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py @@ -4986,7 +4986,7 @@ def test_update_oidc_issuer_profile__default_value_mc_enabled(self): mc.oidc_issuer_profile = self.models.ManagedClusterOIDCIssuerProfile(enabled=True) dec.context.attach_mc(mc) updated_mc = dec.update_oidc_issuer_profile(mc) - self.assertIsNone(updated_mc.oidc_issuer_profile) + self.assertTrue(updated_mc.oidc_issuer_profile.enabled) def test_update_oidc_issuer_profile__enabled(self): dec = AKSPreviewManagedClusterUpdateDecorator( From 64a01e801f20befc58a60c16ad676db40d452d0a Mon Sep 17 00:00:00 2001 From: FumingZhang <81607949+FumingZhang@users.noreply.github.com> Date: Fri, 28 Oct 2022 14:38:40 +0800 Subject: [PATCH 44/85] {AKS} `az aks update`: Fix command failure in validation of AzureMonitorMetrics Addon for SP-based cluster (#5490) * fix sp-based cluster blocked by monitor metrics * update history & version * fix test case * revert history change --- src/aks-preview/HISTORY.rst | 3 ++- .../managed_cluster_decorator.py | 4 ++-- .../tests/latest/test_aks_commands.py | 2 +- .../latest/test_managed_cluster_decorator.py | 17 +++++++++++++++++ 4 files changed, 22 insertions(+), 4 deletions(-) diff --git a/src/aks-preview/HISTORY.rst b/src/aks-preview/HISTORY.rst index d58938800c5..ad806c049c5 100644 --- a/src/aks-preview/HISTORY.rst +++ b/src/aks-preview/HISTORY.rst @@ -13,11 +13,12 @@ Pending 
+++++++ * Fix workload identity update error after oidc issure GA in azure-cli. +* Fix `az aks update` command failing on SP-based cluster blocked by validation in AzureMonitorMetrics Addon, see issue `\#5336 `_. 0.5.112 +++++++ -* Add --outbound-type to update managed cluster command. +* Add `--outbound-type` to update managed cluster command. 0.5.111 +++++++ diff --git a/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py b/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py index 8f026164d1d..79465d1b8a7 100644 --- a/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py +++ b/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py @@ -1979,9 +1979,9 @@ def _get_enable_azure_monitor_metrics(self, enable_validation: bool = False) -> if enable_validation: if enable_azure_monitor_metrics and self._get_disable_azure_monitor_metrics(False): raise MutuallyExclusiveArgumentError( - "Cannot specify --enable-azuremonitormetrics and --enable-azuremonitormetrics at the same time." + "Cannot specify --enable-azuremonitormetrics and --disable-azuremonitormetrics at the same time." ) - if not check_is_msi_cluster(self.mc): + if enable_azure_monitor_metrics and not check_is_msi_cluster(self.mc): raise RequiredArgumentMissingError( "--enable-azuremonitormetrics can only be specified for clusters with managed identity enabled" ) diff --git a/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py b/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py index 5896cc39017..b5a716dbb1d 100644 --- a/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py +++ b/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py @@ -1552,7 +1552,7 @@ def test_aks_nodepool_add_with_ossku_windows2022(self, resource_group, resource_ 'aks delete -g {resource_group} -n {name} --yes --no-wait', checks=[self.is_empty()]) @AllowLargeResponse() - @AKSCustomResourceGroupPreparer(random_name_length=17, name_prefix='clitest', location='eastus') + @AKSCustomResourceGroupPreparer(random_name_length=17, name_prefix='clitest', location='eastus', preserve_default_location=True) def test_aks_nodepool_add_with_disable_windows_outbound_nat(self, resource_group, resource_group_location): # reset the count so in replay mode the random names will start with 0 self.test_resources_count = 0 diff --git a/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py b/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py index c4292e0c5d4..479ce0cc7c7 100644 --- a/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py +++ b/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py @@ -5717,6 +5717,23 @@ def test_update_vpa(self): ) self.assertEqual(dec_mc_3, ground_truth_mc_3) + def test_update_azure_monitor_profile(self): + dec_1 = AKSPreviewManagedClusterUpdateDecorator( + self.cmd, + self.client, + {}, + CUSTOM_MGMT_AKS_PREVIEW, + ) + mc_1 = self.models.ManagedCluster( + location="test_location", + ) + dec_1.context.attach_mc(mc_1) + dec_mc_1 = dec_1.update_azure_monitor_profile(mc_1) + ground_truth_mc_1 = self.models.ManagedCluster( + location="test_location", + ) + self.assertEqual(dec_mc_1, ground_truth_mc_1) + def test_update_mc_profile_preview(self): import inspect From bade5bf7c33cb8623d9ecb830b592a2a982aa93c Mon Sep 17 00:00:00 2001 From: Yu Chen <16348853+jsntcy@users.noreply.github.com> Date: Fri, 28 Oct 2022 17:16:48 +0800 Subject: [PATCH 45/85] [qbs] 
Revert pr-5402 (#5497) --- src/index.json | 47 ------------------------------------------- src/service_name.json | 5 ----- 2 files changed, 52 deletions(-) diff --git a/src/index.json b/src/index.json index c27a17e264b..6cf25cce7e0 100644 --- a/src/index.json +++ b/src/index.json @@ -32767,53 +32767,6 @@ "sha256Digest": "706cc2550fbd07b8b676345c2f26c5ba66550905bc8ec224c6c4e5637c497266" } ], - "qbs": [ - { - "downloadUrl": "https://qbsazcliextension.blob.core.windows.net/qbs/qbs-0.1.0-py3-none-any.whl", - "filename": "qbs-0.1.0-py3-none-any.whl", - "metadata": { - "azext.isPreview": true, - "azext.minCliCoreVersion": "2.39.0", - "classifiers": [ - "Development Status :: 3 - Alpha", - "Intended Audience :: Developers", - "Intended Audience :: System Administrators", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "License :: OSI Approved :: MIT License" - ], - "extensions": { - "python.details": { - "contacts": [ - { - "email": "qbs-team@consensys.net", - "name": "ConsenSys", - "role": "author" - } - ], - "document_names": { - "description": "DESCRIPTION.rst" - }, - "project_urls": { - "Home": "https://consensys.net/quorum/qbs/" - } - } - }, - "generator": "bdist_wheel (0.30.0)", - "license": "MIT", - "metadata_version": "2.0", - "name": "qbs", - "summary": "Support for Quorum Blockchain Service.", - "version": "0.1.0" - }, - "sha256Digest": "f8ef34fb62837e16bafe4ce4e141f1d35e7a494516614d8a54a050b4527da3f6" - } - ], "quantum": [ { "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/quantum-0.1.0-py3-none-any.whl", diff --git a/src/service_name.json b/src/service_name.json index 137294c522d..0e80eb1edfc 100644 --- a/src/service_name.json +++ b/src/service_name.json @@ -629,11 +629,6 @@ "AzureServiceName": "Azure Orbital", "URL": "https://docs.microsoft.com/en-us/azure/orbital/" }, - { - "Command": "az qbs", - "AzureServiceName": "ConsenSys Quorum Blockchain Service", - "URL": "https://consensys.net/quorum/qbs/" - }, { "Command": "az nginx", "AzureServiceName": "Nginx for Azure", From d829ec7af66879a050c3582fc2ffddf614a776d7 Mon Sep 17 00:00:00 2001 From: FumingZhang <81607949+FumingZhang@users.noreply.github.com> Date: Fri, 28 Oct 2022 17:53:20 +0800 Subject: [PATCH 46/85] {AKS} `az aks update`: Fix command failure on changes not related to outbound type conversion (#5492) --- src/aks-preview/HISTORY.rst | 4 + .../managed_cluster_decorator.py | 132 ++++-- ...pdate_outbound_from_slb_to_natgateway.yaml | 432 ++++++++---------- .../tests/latest/test_aks_commands.py | 9 +- .../latest/test_managed_cluster_decorator.py | 84 +++- src/aks-preview/setup.py | 2 +- 6 files changed, 390 insertions(+), 273 deletions(-) diff --git a/src/aks-preview/HISTORY.rst b/src/aks-preview/HISTORY.rst index ad806c049c5..99f8dedb247 100644 --- a/src/aks-preview/HISTORY.rst +++ b/src/aks-preview/HISTORY.rst @@ -12,8 +12,12 @@ To release a new version, please select a new version number (usually plus 1 to Pending +++++++ +0.5.113 ++++++++ + * Fix workload identity update error after oidc issure GA in azure-cli. * Fix `az aks update` command failing on SP-based cluster blocked by validation in AzureMonitorMetrics Addon, see issue `\#5336 `_. 
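The workload-identity and AzureMonitorMetrics fixes noted above share the same defensive pattern in the decorator: a managed cluster profile is only validated or overwritten when the caller actually supplied the corresponding argument, so an unrelated `az aks update` no longer clobbers state configured elsewhere (for example an OIDC issuer profile already enabled through core azure-cli). Below is a minimal sketch of that guard, using simplified stand-in types rather than the real aks-preview decorator and SDK models.

```python
# Simplified stand-ins for the ManagedCluster model and the raw-parameter guard;
# the real code lives in the AKSPreview managed cluster decorator shown in these diffs.
from dataclasses import dataclass
from typing import Optional


@dataclass
class OIDCIssuerProfile:
    enabled: bool


@dataclass
class Cluster:
    oidc_issuer_profile: Optional[OIDCIssuerProfile] = None


def update_oidc_issuer_profile(cluster: Cluster, enable_oidc_issuer: Optional[bool]) -> Cluster:
    # Only touch the profile when a value was explicitly requested; otherwise
    # keep whatever is already stored on the managed cluster.
    if enable_oidc_issuer is not None:
        cluster.oidc_issuer_profile = OIDCIssuerProfile(enabled=enable_oidc_issuer)
    return cluster


# An update that does not mention the OIDC issuer leaves the existing profile intact,
# which is what the updated test_update_oidc_issuer_profile__default_value_mc_enabled asserts.
mc = Cluster(oidc_issuer_profile=OIDCIssuerProfile(enabled=True))
assert update_oidc_issuer_profile(mc, None).oidc_issuer_profile.enabled is True
```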
+* Fix `az aks update` command failing on changes not related to outbound type conversion, see issue `\#24430 https://github.com/Azure/azure-cli/issues/24430>`_. 0.5.112 +++++++ diff --git a/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py b/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py index 79465d1b8a7..b3a126b3c74 100644 --- a/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py +++ b/src/aks-preview/azext_aks_preview/managed_cluster_decorator.py @@ -54,6 +54,8 @@ CONST_PRIVATE_DNS_ZONE_NONE, CONST_PRIVATE_DNS_ZONE_SYSTEM, CONST_EBPF_DATAPLANE_CILIUM, + CONST_OUTBOUND_TYPE_USER_ASSIGNED_NAT_GATEWAY, + CONST_OUTBOUND_TYPE_USER_DEFINED_ROUTING, ) from azext_aks_preview._helpers import ( get_cluster_snapshot_by_snapshot_id, @@ -276,7 +278,12 @@ def get_ip_families(self) -> Union[List[str], None]: # this parameter does not need validation return ip_families - def get_outbound_type(self, load_balancer_profile: ManagedClusterLoadBalancerProfile = None) -> Union[str, None]: + def _get_outbound_type( + self, + enable_validation: bool = False, + read_only: bool = False, + load_balancer_profile: ManagedClusterLoadBalancerProfile = None, + ) -> Union[str, None]: """Internal function to dynamically obtain the value of outbound_type according to the context. Note: All the external parameters involved in the validation are not verified in their own getters. @@ -297,10 +304,91 @@ def get_outbound_type(self, load_balancer_profile: ManagedClusterLoadBalancerPro :return: string or None """ - outbound_type = super().get_outbound_type(load_balancer_profile) - user_assigned_outbound_type = self.raw_param.get("outbound_type") - if user_assigned_outbound_type: - return user_assigned_outbound_type + # read the original value passed by the command + outbound_type = self.raw_param.get("outbound_type") + # In create mode, try to read the property value corresponding to the parameter from the `mc` object. + read_from_mc = False + if self.decorator_mode == DecoratorMode.CREATE: + if ( + self.mc and + self.mc.network_profile and + self.mc.network_profile.outbound_type is not None + ): + outbound_type = self.mc.network_profile.outbound_type + read_from_mc = True + + # skip dynamic completion & validation if option read_only is specified + if read_only: + return outbound_type + + # dynamic completion + if ( + self.decorator_mode == DecoratorMode.CREATE and + not read_from_mc and + outbound_type != CONST_OUTBOUND_TYPE_MANAGED_NAT_GATEWAY and + outbound_type != CONST_OUTBOUND_TYPE_USER_ASSIGNED_NAT_GATEWAY and + outbound_type != CONST_OUTBOUND_TYPE_USER_DEFINED_ROUTING + ): + outbound_type = CONST_OUTBOUND_TYPE_LOAD_BALANCER + + # validation + # Note: The parameters involved in the validation are not verified in their own getters. 
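For reference, a hedged, self-contained illustration of the create-mode completion implemented just above: anything other than the three NAT-gateway/UDR outbound types falls back to the load balancer default. Constant names are inlined here; the real code uses the CONST_OUTBOUND_TYPE_* values from _consts.py.

```python
# Standalone illustration only; the actual logic lives in _get_outbound_type above.
SPECIAL_OUTBOUND_TYPES = {
    "managedNATGateway",
    "userAssignedNATGateway",
    "userDefinedRouting",
}


def complete_outbound_type(raw_value):
    """Create-mode completion: default to the standard load balancer."""
    return raw_value if raw_value in SPECIAL_OUTBOUND_TYPES else "loadBalancer"


assert complete_outbound_type(None) == "loadBalancer"
assert complete_outbound_type("managedNATGateway") == "managedNATGateway"
```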
+ if enable_validation: + if outbound_type in [ + CONST_OUTBOUND_TYPE_USER_DEFINED_ROUTING, + CONST_OUTBOUND_TYPE_MANAGED_NAT_GATEWAY, + CONST_OUTBOUND_TYPE_USER_ASSIGNED_NAT_GATEWAY, + ]: + if safe_lower(self._get_load_balancer_sku(enable_validation=False)) == CONST_LOAD_BALANCER_SKU_BASIC: + raise InvalidArgumentValueError( + f"{outbound_type} doesn't support basic load balancer sku" + ) + + if outbound_type in [ + CONST_OUTBOUND_TYPE_USER_DEFINED_ROUTING, + CONST_OUTBOUND_TYPE_USER_ASSIGNED_NAT_GATEWAY, + ]: + if self.get_vnet_subnet_id() in ["", None]: + raise RequiredArgumentMissingError( + "--vnet-subnet-id must be specified for userDefinedRouting and it must " + "be pre-configured with a route table with egress rules" + ) + + if outbound_type == CONST_OUTBOUND_TYPE_USER_DEFINED_ROUTING: + if load_balancer_profile: + if ( + load_balancer_profile.managed_outbound_i_ps or + load_balancer_profile.outbound_i_ps or + load_balancer_profile.outbound_ip_prefixes + ): + raise MutuallyExclusiveArgumentError( + "userDefinedRouting doesn't support customizing \ + a standard load balancer with IP addresses" + ) + else: + if ( + self.get_load_balancer_managed_outbound_ip_count() or + self.get_load_balancer_outbound_ips() or + self.get_load_balancer_outbound_ip_prefixes() + ): + raise MutuallyExclusiveArgumentError( + "userDefinedRouting doesn't support customizing \ + a standard load balancer with IP addresses" + ) + if self.decorator_mode == DecoratorMode.UPDATE: + if outbound_type == CONST_OUTBOUND_TYPE_MANAGED_NAT_GATEWAY: + if self.mc.agent_pool_profiles is not None and len(self.mc.agent_pool_profiles) > 1: + multizoned = False + for ap in self.mc.agent_pool_profiles: + if ap.availability_zones: + multizoned = True + msg = ( + "\nWarning: this AKS cluster has multi-zonal nodepools, but NAT Gateway is not " + "currently zone redundant. Migrating outbound connectivity to NAT Gateway could lead to " + "a reduction in zone redundancy for this cluster. Continue?" + ) + if multizoned and not self.get_yes() and not prompt_y_n(msg, default="n"): + raise DecoratorEarlyExitException() return outbound_type def get_load_balancer_managed_outbound_ip_count(self) -> Union[int, None]: @@ -2639,26 +2727,12 @@ def check_raw_parameters(self): raise RequiredArgumentMissingError(error_msg) def update_outbound_type_in_network_profile(self, mc: ManagedCluster) -> ManagedCluster: - """Set up network profile for the ManagedCluster object. - Build load balancer profile, verify outbound type and load balancer sku first, then set up network profile. + """Update outbound type of network profile for the ManagedCluster object. + :return: the ManagedCluster object """ self._ensure_mc(mc) - # verify outbound type - # Note: Validation internally depends on load_balancer_sku, which is a temporary value that is - # dynamically completed. - if not mc.network_profile: - raise UnknownError( - "Unexpectedly get an empty network profile in the process of updating outbound type." 
- ) - if mc.agent_pool_profiles is not None and len(mc.agent_pool_profiles) > 1: - multizoned = False - for ap in mc.agent_pool_profiles: - if ap.availability_zones: - multizoned = True - if multizoned and not self.get_yes() and not prompt_y_n("\n" + CONST_OUTBOUND_MIGRATION_MULTIZONE_TO_NATGATEWAY_MSG, default="y"): - raise DecoratorEarlyExitException() outboundType = self.context.get_outbound_type() if outboundType: mc.network_profile.outbound_type = outboundType @@ -2671,10 +2745,11 @@ def update_nat_gateway_profile(self, mc: ManagedCluster) -> ManagedCluster: self._ensure_mc(mc) if not mc.network_profile: - raise UnknownError( - "Unexpectedly get an empty network profile in the process of updating nat gateway profile." - ) - if mc.network_profile.outbound_type != CONST_OUTBOUND_TYPE_MANAGED_NAT_GATEWAY: + raise UnknownError( + "Unexpectedly get an empty network profile in the process of updating nat gateway profile." + ) + outbound_type = self.context.get_outbound_type() + if outbound_type and outbound_type != CONST_OUTBOUND_TYPE_MANAGED_NAT_GATEWAY: mc.network_profile.nat_gateway_profile = None else: mc.network_profile.nat_gateway_profile = _update_nat_gateway_profile( @@ -2698,7 +2773,8 @@ def update_load_balancer_profile(self, mc: ManagedCluster) -> ManagedCluster: raise UnknownError( "Unexpectedly get an empty network profile in the process of updating load balancer profile." ) - if mc.network_profile.outbound_type != CONST_OUTBOUND_TYPE_LOAD_BALANCER: + outbound_type = self.context.get_outbound_type() + if outbound_type and outbound_type != CONST_OUTBOUND_TYPE_LOAD_BALANCER: mc.network_profile.load_balancer_profile = None else: # In the internal function "_update_load_balancer_profile", it will check whether the provided parameters @@ -3151,9 +3227,5 @@ def update_mc_profile_preview(self) -> ManagedCluster: mc = self.update_linux_profile(mc) # update outbound type mc = self.update_outbound_type_in_network_profile(mc) - # update nat gateway profile - mc = self.update_nat_gateway_profile(mc) - # update load balancer profile - mc = self.update_load_balancer_profile(mc) return mc diff --git a/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_update_outbound_from_slb_to_natgateway.yaml b/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_update_outbound_from_slb_to_natgateway.yaml index 85f228fb336..572b9d68f72 100644 --- a/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_update_outbound_from_slb_to_natgateway.yaml +++ b/src/aks-preview/azext_aks_preview/tests/latest/recordings/test_aks_update_outbound_from_slb_to_natgateway.yaml @@ -11,14 +11,15 @@ interactions: Connection: - keep-alive ParameterSetName: - - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + - --resource-group --name --vm-set-type -c --outbound-type --load-balancer-managed-outbound-ip-count + --ssh-key-value User-Agent: - - AZURECLI/2.41.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest000001?api-version=2021-04-01 response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001","name":"clitest000001","type":"Microsoft.Resources/resourceGroups","location":"westus2","tags":{"product":"azurecli","cause":"automation","date":"2022-10-26T07:27:53Z"},"properties":{"provisioningState":"Succeeded"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001","name":"clitest000001","type":"Microsoft.Resources/resourceGroups","location":"westus2","tags":{"product":"azurecli","cause":"automation","date":"2022-10-28T08:47:38Z"},"properties":{"provisioningState":"Succeeded"}}' headers: cache-control: - no-cache @@ -27,7 +28,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Wed, 26 Oct 2022 07:27:54 GMT + - Fri, 28 Oct 2022 08:47:38 GMT expires: - '-1' pragma: @@ -43,7 +44,7 @@ interactions: message: OK - request: body: '{"location": "westus2", "identity": {"type": "SystemAssigned"}, "properties": - {"kubernetesVersion": "", "dnsPrefix": "cliakstest-clitesttj4rejxwd-79a739", + {"kubernetesVersion": "", "dnsPrefix": "cliakstest-clitestf6ggfhgqn-79a739", "agentPoolProfiles": [{"count": 1, "vmSize": "Standard_DS2_v2", "osDiskSizeGB": 0, "workloadRuntime": "OCIContainer", "osType": "Linux", "enableAutoScaling": false, "type": "VirtualMachineScaleSets", "mode": "System", "orchestratorVersion": @@ -51,12 +52,14 @@ interactions: false, "scaleSetPriority": "Regular", "scaleSetEvictionPolicy": "Delete", "spotMaxPrice": -1.0, "nodeTaints": [], "enableEncryptionAtHost": false, "enableUltraSSD": false, "enableFIPS": false, "name": "nodepool1"}], "linuxProfile": {"adminUsername": - "azureuser", "ssh": {"publicKeys": [{"keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDFZag72JUdUPjufTC+9bGJ6+03D4WKC5qcYu2xbnzAEjW0CGYdezDhyqogngxvXgEzfhg7yUDUMYKjfg70oTN7w5t4YFiouN50Zlwra73QZ71Y9uNWM7qd5Uolaw1drHPLEbTcFloZTQM8gwWainBxkd6xRJK7A/otKFpN+TdV4T1dpNH826z+HC/ZpTavYZI+hqc7twWIj7hDQtUs2k4NDWJzPwMTAuIj4aNrmBYv4z8rK/ZtNIK07UbFlWDHFz1eZrDjOEBoSaakTY5j3vZXwe/dj5SmQypxg3AY+Xxboe4YxU2Iv1r0mcr7ST0wDEgUsdCUXO/lvNO3vGGV8qzf + "azureuser", "ssh": {"publicKeys": [{"keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDp4FLc0tKQN/qhlFwE2bUCrBZ5bw3/rTff8QNPJpwTGzadlfRuLYLfNk/Sv5qEeyFwwDDZN9JgXg2Yom/n9a0ilgAlU5ACaQQQE13tu6KCxufVYa2EvofI5Vu4L0fg8hVA7/hCpJpwr+VlNpTuSToPhJNMxRY8kKP8ni5xOACT7+ONyZlVrVsoKl3/FzH1KjzrtS7mHKjBqhxYiX43I2JaKEb69Kxp4iz2vRzKA0A4kXV0Ey17y9jylZZ2NuBSe1lWjo+fxwsL8zriunuTr3Ks7bq+82p6miJvDF2U1bGi7V6VXQQM3/XhH+wJt/cHTI6g2ZQVURQHnE5bLZ5P4kyh azcli_aks_live_test@example.com\n"}]}}, "addonProfiles": {}, "enableRBAC": true, "enablePodSecurityPolicy": false, "networkProfile": {"networkPlugin": "kubenet", "podCidr": "10.244.0.0/16", "serviceCidr": "10.0.0.0/16", "dnsServiceIP": "10.0.0.10", "dockerBridgeCidr": "172.17.0.1/16", "outboundType": "loadBalancer", "loadBalancerSku": - "standard"}, "disableLocalAccounts": false, "storageProfile": {}}}' + "standard", "loadBalancerProfile": {"managedOutboundIPs": {"count": 2, "countIPv6": + 0}, "allocatedOutboundPorts": 0, "idleTimeoutInMinutes": 30, "backendPoolType": + "NodeIPConfiguration"}}, "disableLocalAccounts": false, "storageProfile": {}}}' headers: Accept: - application/json @@ -67,13 +70,14 @@ interactions: Connection: - keep-alive Content-Length: - - '1558' + - '1734' Content-Type: - application/json ParameterSetName: - - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + - --resource-group --name --vm-set-type -c --outbound-type --load-balancer-managed-outbound-ip-count + 
--ssh-key-value User-Agent: - - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview @@ -84,8 +88,8 @@ interactions: \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \"provisioningState\": \"Creating\",\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"kubernetesVersion\": \"1.23.12\",\n \"currentKubernetesVersion\": \"1.23.12\",\n \"dnsPrefix\": - \"cliakstest-clitesttj4rejxwd-79a739\",\n \"fqdn\": \"cliakstest-clitesttj4rejxwd-79a739-ab6f17da.hcp.westus2.azmk8s.io\",\n - \ \"azurePortalFQDN\": \"cliakstest-clitesttj4rejxwd-79a739-ab6f17da.portal.hcp.westus2.azmk8s.io\",\n + \"cliakstest-clitestf6ggfhgqn-79a739\",\n \"fqdn\": \"cliakstest-clitestf6ggfhgqn-79a739-cd0e0432.hcp.westus2.azmk8s.io\",\n + \ \"azurePortalFQDN\": \"cliakstest-clitestf6ggfhgqn-79a739-cd0e0432.portal.hcp.westus2.azmk8s.io\",\n \ \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \"count\": 1,\n \"vmSize\": \"Standard_DS2_v2\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": \"Managed\",\n \"kubeletDiskType\": \"OS\",\n \"workloadRuntime\": @@ -99,37 +103,38 @@ interactions: \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\": {},\n \ \"enableFIPS\": false\n }\n ],\n \"linuxProfile\": {\n \"adminUsername\": \"azureuser\",\n \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDFZag72JUdUPjufTC+9bGJ6+03D4WKC5qcYu2xbnzAEjW0CGYdezDhyqogngxvXgEzfhg7yUDUMYKjfg70oTN7w5t4YFiouN50Zlwra73QZ71Y9uNWM7qd5Uolaw1drHPLEbTcFloZTQM8gwWainBxkd6xRJK7A/otKFpN+TdV4T1dpNH826z+HC/ZpTavYZI+hqc7twWIj7hDQtUs2k4NDWJzPwMTAuIj4aNrmBYv4z8rK/ZtNIK07UbFlWDHFz1eZrDjOEBoSaakTY5j3vZXwe/dj5SmQypxg3AY+Xxboe4YxU2Iv1r0mcr7ST0wDEgUsdCUXO/lvNO3vGGV8qzf + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDp4FLc0tKQN/qhlFwE2bUCrBZ5bw3/rTff8QNPJpwTGzadlfRuLYLfNk/Sv5qEeyFwwDDZN9JgXg2Yom/n9a0ilgAlU5ACaQQQE13tu6KCxufVYa2EvofI5Vu4L0fg8hVA7/hCpJpwr+VlNpTuSToPhJNMxRY8kKP8ni5xOACT7+ONyZlVrVsoKl3/FzH1KjzrtS7mHKjBqhxYiX43I2JaKEb69Kxp4iz2vRzKA0A4kXV0Ey17y9jylZZ2NuBSe1lWjo+fxwsL8zriunuTr3Ks7bq+82p6miJvDF2U1bGi7V6VXQQM3/XhH+wJt/cHTI6g2ZQVURQHnE5bLZ5P4kyh azcli_aks_live_test@example.com\\n\"\n }\n ]\n }\n },\n \"servicePrincipalProfile\": {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n },\n \"nodeResourceGroup\": \"MC_clitest000001_cliakstest000002_westus2\",\n \"enableRBAC\": true,\n \ \"enablePodSecurityPolicy\": false,\n \"networkProfile\": {\n \"networkPlugin\": \"kubenet\",\n \"loadBalancerSku\": \"standard\",\n \"loadBalancerProfile\": - {\n \"managedOutboundIPs\": {\n \"count\": 1\n },\n \"backendPoolType\": - \"nodeIPConfiguration\"\n },\n \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": - \"10.0.0.0/16\",\n \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": - \"172.17.0.1/16\",\n \"outboundType\": \"loadBalancer\",\n \"podCidrs\": - [\n \"10.244.0.0/16\"\n ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n - \ ],\n \"ipFamilies\": [\n \"IPv4\"\n ]\n },\n \"maxAgentPools\": - 100,\n \"disableLocalAccounts\": false,\n \"securityProfile\": {},\n \"storageProfile\": - {\n \"diskCSIDriver\": {\n \"enabled\": true,\n \"version\": \"v1\"\n - \ },\n \"fileCSIDriver\": {\n \"enabled\": true\n },\n 
\"snapshotController\": - {\n \"enabled\": true\n }\n },\n \"oidcIssuerProfile\": {\n \"enabled\": - false\n },\n \"workloadAutoScalerProfile\": {}\n },\n \"identity\": - {\n \"type\": \"SystemAssigned\",\n \"principalId\":\"00000000-0000-0000-0000-000000000001\",\n + {\n \"managedOutboundIPs\": {\n \"count\": 2\n },\n \"allocatedOutboundPorts\": + 0,\n \"idleTimeoutInMinutes\": 30,\n \"backendPoolType\": \"nodeIPConfiguration\"\n + \ },\n \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\",\n + \ \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\",\n + \ \"outboundType\": \"loadBalancer\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\n + \ ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\": + [\n \"IPv4\"\n ]\n },\n \"maxAgentPools\": 100,\n \"disableLocalAccounts\": + false,\n \"securityProfile\": {},\n \"storageProfile\": {\n \"diskCSIDriver\": + {\n \"enabled\": true,\n \"version\": \"v1\"\n },\n \"fileCSIDriver\": + {\n \"enabled\": true\n },\n \"snapshotController\": {\n \"enabled\": + true\n }\n },\n \"oidcIssuerProfile\": {\n \"enabled\": false\n + \ },\n \"workloadAutoScalerProfile\": {}\n },\n \"identity\": {\n \"type\": + \"SystemAssigned\",\n \"principalId\":\"00000000-0000-0000-0000-000000000001\",\n \ \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\"\n },\n \"sku\": {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" headers: azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b5298de9-0b91-4169-a645-4301a56cf834?api-version=2016-03-30 cache-control: - no-cache content-length: - - '3453' + - '3520' content-type: - application/json date: - - Wed, 26 Oct 2022 07:27:58 GMT + - Fri, 28 Oct 2022 08:47:43 GMT expires: - '-1' pragma: @@ -157,16 +162,17 @@ interactions: Connection: - keep-alive ParameterSetName: - - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + - --resource-group --name --vm-set-type -c --outbound-type --load-balancer-managed-outbound-ip-count + --ssh-key-value User-Agent: - - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b5298de9-0b91-4169-a645-4301a56cf834?api-version=2016-03-30 response: body: - string: "{\n \"name\": \"1b8fd278-feb7-af40-bf21-303dd1dab078\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-10-26T07:27:59.3884289Z\"\n }" + string: "{\n \"name\": \"e98d29b5-910b-6941-a645-4301a56cf834\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-28T08:47:43.1198417Z\"\n }" headers: cache-control: - no-cache @@ -175,7 +181,7 @@ interactions: content-type: - application/json date: - - Wed, 26 Oct 2022 07:28:28 GMT + - Fri, 28 Oct 2022 08:48:13 GMT expires: - '-1' pragma: @@ -205,16 +211,17 @@ interactions: 
Connection: - keep-alive ParameterSetName: - - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + - --resource-group --name --vm-set-type -c --outbound-type --load-balancer-managed-outbound-ip-count + --ssh-key-value User-Agent: - - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b5298de9-0b91-4169-a645-4301a56cf834?api-version=2016-03-30 response: body: - string: "{\n \"name\": \"1b8fd278-feb7-af40-bf21-303dd1dab078\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-10-26T07:27:59.3884289Z\"\n }" + string: "{\n \"name\": \"e98d29b5-910b-6941-a645-4301a56cf834\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-28T08:47:43.1198417Z\"\n }" headers: cache-control: - no-cache @@ -223,7 +230,7 @@ interactions: content-type: - application/json date: - - Wed, 26 Oct 2022 07:28:59 GMT + - Fri, 28 Oct 2022 08:48:43 GMT expires: - '-1' pragma: @@ -253,16 +260,17 @@ interactions: Connection: - keep-alive ParameterSetName: - - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + - --resource-group --name --vm-set-type -c --outbound-type --load-balancer-managed-outbound-ip-count + --ssh-key-value User-Agent: - - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b5298de9-0b91-4169-a645-4301a56cf834?api-version=2016-03-30 response: body: - string: "{\n \"name\": \"1b8fd278-feb7-af40-bf21-303dd1dab078\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-10-26T07:27:59.3884289Z\"\n }" + string: "{\n \"name\": \"e98d29b5-910b-6941-a645-4301a56cf834\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-28T08:47:43.1198417Z\"\n }" headers: cache-control: - no-cache @@ -271,7 +279,7 @@ interactions: content-type: - application/json date: - - Wed, 26 Oct 2022 07:29:29 GMT + - Fri, 28 Oct 2022 08:49:13 GMT expires: - '-1' pragma: @@ -301,16 +309,17 @@ interactions: Connection: - keep-alive ParameterSetName: - - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + - --resource-group --name --vm-set-type -c --outbound-type --load-balancer-managed-outbound-ip-count + --ssh-key-value User-Agent: - - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b5298de9-0b91-4169-a645-4301a56cf834?api-version=2016-03-30 response: body: - string: "{\n \"name\": \"1b8fd278-feb7-af40-bf21-303dd1dab078\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-10-26T07:27:59.3884289Z\"\n }" + string: "{\n \"name\": \"e98d29b5-910b-6941-a645-4301a56cf834\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-28T08:47:43.1198417Z\"\n }" headers: cache-control: - no-cache @@ -319,7 +328,7 @@ interactions: content-type: - application/json date: - - Wed, 26 Oct 2022 07:29:59 GMT + - Fri, 28 Oct 2022 08:49:43 GMT expires: - '-1' pragma: @@ -349,16 +358,17 @@ interactions: Connection: - keep-alive ParameterSetName: - - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + - --resource-group --name --vm-set-type -c --outbound-type --load-balancer-managed-outbound-ip-count + --ssh-key-value User-Agent: - - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b5298de9-0b91-4169-a645-4301a56cf834?api-version=2016-03-30 response: body: - string: "{\n \"name\": \"1b8fd278-feb7-af40-bf21-303dd1dab078\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-10-26T07:27:59.3884289Z\"\n }" + string: "{\n \"name\": \"e98d29b5-910b-6941-a645-4301a56cf834\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-28T08:47:43.1198417Z\"\n }" headers: cache-control: - no-cache @@ -367,7 +377,7 @@ interactions: content-type: - application/json date: - - Wed, 26 Oct 2022 07:30:29 GMT + - Fri, 28 Oct 2022 08:50:13 GMT expires: - '-1' pragma: @@ -397,16 +407,17 @@ interactions: Connection: - keep-alive ParameterSetName: - - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + - --resource-group --name --vm-set-type -c --outbound-type --load-balancer-managed-outbound-ip-count + --ssh-key-value User-Agent: - - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b5298de9-0b91-4169-a645-4301a56cf834?api-version=2016-03-30 response: body: - string: "{\n \"name\": \"1b8fd278-feb7-af40-bf21-303dd1dab078\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-10-26T07:27:59.3884289Z\"\n }" + string: "{\n \"name\": 
\"e98d29b5-910b-6941-a645-4301a56cf834\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-28T08:47:43.1198417Z\"\n }" headers: cache-control: - no-cache @@ -415,7 +426,7 @@ interactions: content-type: - application/json date: - - Wed, 26 Oct 2022 07:30:59 GMT + - Fri, 28 Oct 2022 08:50:44 GMT expires: - '-1' pragma: @@ -445,16 +456,17 @@ interactions: Connection: - keep-alive ParameterSetName: - - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + - --resource-group --name --vm-set-type -c --outbound-type --load-balancer-managed-outbound-ip-count + --ssh-key-value User-Agent: - - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b5298de9-0b91-4169-a645-4301a56cf834?api-version=2016-03-30 response: body: - string: "{\n \"name\": \"1b8fd278-feb7-af40-bf21-303dd1dab078\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-10-26T07:27:59.3884289Z\"\n }" + string: "{\n \"name\": \"e98d29b5-910b-6941-a645-4301a56cf834\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-28T08:47:43.1198417Z\"\n }" headers: cache-control: - no-cache @@ -463,7 +475,7 @@ interactions: content-type: - application/json date: - - Wed, 26 Oct 2022 07:31:29 GMT + - Fri, 28 Oct 2022 08:51:14 GMT expires: - '-1' pragma: @@ -493,16 +505,17 @@ interactions: Connection: - keep-alive ParameterSetName: - - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + - --resource-group --name --vm-set-type -c --outbound-type --load-balancer-managed-outbound-ip-count + --ssh-key-value User-Agent: - - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b5298de9-0b91-4169-a645-4301a56cf834?api-version=2016-03-30 response: body: - string: "{\n \"name\": \"1b8fd278-feb7-af40-bf21-303dd1dab078\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-10-26T07:27:59.3884289Z\"\n }" + string: "{\n \"name\": \"e98d29b5-910b-6941-a645-4301a56cf834\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-28T08:47:43.1198417Z\"\n }" headers: cache-control: - no-cache @@ -511,7 +524,7 @@ interactions: content-type: - application/json date: - - Wed, 26 Oct 2022 07:31:59 GMT + - Fri, 28 Oct 2022 08:51:43 GMT expires: - '-1' pragma: @@ -541,65 +554,18 @@ interactions: Connection: - keep-alive ParameterSetName: - - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + - --resource-group --name --vm-set-type -c --outbound-type --load-balancer-managed-outbound-ip-count + --ssh-key-value User-Agent: - - 
AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/b5298de9-0b91-4169-a645-4301a56cf834?api-version=2016-03-30 response: body: - string: "{\n \"name\": \"1b8fd278-feb7-af40-bf21-303dd1dab078\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-10-26T07:27:59.3884289Z\"\n }" - headers: - cache-control: - - no-cache - content-length: - - '126' - content-type: - - application/json - date: - - Wed, 26 Oct 2022 07:32:29 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - nginx - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - aks create - Connection: - - keep-alive - ParameterSetName: - - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value - User-Agent: - - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 - (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/78d28f1b-b7fe-40af-bf21-303dd1dab078?api-version=2016-03-30 - response: - body: - string: "{\n \"name\": \"1b8fd278-feb7-af40-bf21-303dd1dab078\",\n \"status\": - \"Succeeded\",\n \"startTime\": \"2022-10-26T07:27:59.3884289Z\",\n \"endTime\": - \"2022-10-26T07:32:39.0357785Z\"\n }" + string: "{\n \"name\": \"e98d29b5-910b-6941-a645-4301a56cf834\",\n \"status\": + \"Succeeded\",\n \"startTime\": \"2022-10-28T08:47:43.1198417Z\",\n \"endTime\": + \"2022-10-28T08:51:49.5411775Z\"\n }" headers: cache-control: - no-cache @@ -608,7 +574,7 @@ interactions: content-type: - application/json date: - - Wed, 26 Oct 2022 07:32:59 GMT + - Fri, 28 Oct 2022 08:52:13 GMT expires: - '-1' pragma: @@ -638,9 +604,10 @@ interactions: Connection: - keep-alive ParameterSetName: - - --resource-group --name --vm-set-type -c --outbound-type --ssh-key-value + - --resource-group --name --vm-set-type -c --outbound-type --load-balancer-managed-outbound-ip-count + --ssh-key-value User-Agent: - - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview @@ -651,8 +618,8 @@ interactions: \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \"provisioningState\": \"Succeeded\",\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"kubernetesVersion\": \"1.23.12\",\n \"currentKubernetesVersion\": \"1.23.12\",\n \"dnsPrefix\": - \"cliakstest-clitesttj4rejxwd-79a739\",\n 
\"fqdn\": \"cliakstest-clitesttj4rejxwd-79a739-ab6f17da.hcp.westus2.azmk8s.io\",\n - \ \"azurePortalFQDN\": \"cliakstest-clitesttj4rejxwd-79a739-ab6f17da.portal.hcp.westus2.azmk8s.io\",\n + \"cliakstest-clitestf6ggfhgqn-79a739\",\n \"fqdn\": \"cliakstest-clitestf6ggfhgqn-79a739-cd0e0432.hcp.westus2.azmk8s.io\",\n + \ \"azurePortalFQDN\": \"cliakstest-clitestf6ggfhgqn-79a739-cd0e0432.portal.hcp.westus2.azmk8s.io\",\n \ \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \"count\": 1,\n \"vmSize\": \"Standard_DS2_v2\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": \"Managed\",\n \"kubeletDiskType\": \"OS\",\n \"workloadRuntime\": @@ -666,21 +633,23 @@ interactions: \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\": {},\n \ \"enableFIPS\": false\n }\n ],\n \"linuxProfile\": {\n \"adminUsername\": \"azureuser\",\n \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDFZag72JUdUPjufTC+9bGJ6+03D4WKC5qcYu2xbnzAEjW0CGYdezDhyqogngxvXgEzfhg7yUDUMYKjfg70oTN7w5t4YFiouN50Zlwra73QZ71Y9uNWM7qd5Uolaw1drHPLEbTcFloZTQM8gwWainBxkd6xRJK7A/otKFpN+TdV4T1dpNH826z+HC/ZpTavYZI+hqc7twWIj7hDQtUs2k4NDWJzPwMTAuIj4aNrmBYv4z8rK/ZtNIK07UbFlWDHFz1eZrDjOEBoSaakTY5j3vZXwe/dj5SmQypxg3AY+Xxboe4YxU2Iv1r0mcr7ST0wDEgUsdCUXO/lvNO3vGGV8qzf + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDp4FLc0tKQN/qhlFwE2bUCrBZ5bw3/rTff8QNPJpwTGzadlfRuLYLfNk/Sv5qEeyFwwDDZN9JgXg2Yom/n9a0ilgAlU5ACaQQQE13tu6KCxufVYa2EvofI5Vu4L0fg8hVA7/hCpJpwr+VlNpTuSToPhJNMxRY8kKP8ni5xOACT7+ONyZlVrVsoKl3/FzH1KjzrtS7mHKjBqhxYiX43I2JaKEb69Kxp4iz2vRzKA0A4kXV0Ey17y9jylZZ2NuBSe1lWjo+fxwsL8zriunuTr3Ks7bq+82p6miJvDF2U1bGi7V6VXQQM3/XhH+wJt/cHTI6g2ZQVURQHnE5bLZ5P4kyh azcli_aks_live_test@example.com\\n\"\n }\n ]\n }\n },\n \"servicePrincipalProfile\": {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n },\n \"nodeResourceGroup\": \"MC_clitest000001_cliakstest000002_westus2\",\n \"enableRBAC\": true,\n \ \"enablePodSecurityPolicy\": false,\n \"networkProfile\": {\n \"networkPlugin\": \"kubenet\",\n \"loadBalancerSku\": \"Standard\",\n \"loadBalancerProfile\": - {\n \"managedOutboundIPs\": {\n \"count\": 1\n },\n \"effectiveOutboundIPs\": - [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/66497f43-1896-40bb-8814-3cd8d65f6abe\"\n - \ }\n ],\n \"backendPoolType\": \"nodeIPConfiguration\"\n },\n - \ \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\",\n - \ \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\",\n - \ \"outboundType\": \"loadBalancer\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\n - \ ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\": - [\n \"IPv4\"\n ]\n },\n \"maxAgentPools\": 100,\n \"identityProfile\": - {\n \"kubeletidentity\": {\n \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/cliakstest000002-agentpool\",\n + {\n \"managedOutboundIPs\": {\n \"count\": 2\n },\n \"effectiveOutboundIPs\": + [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/f5c95599-b68a-40ec-bb72-f6e76c053c33\"\n + \ },\n {\n \"id\": 
\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/30d21eb0-8965-4cea-927d-f04d0bb87f0b\"\n + \ }\n ],\n \"allocatedOutboundPorts\": 0,\n \"idleTimeoutInMinutes\": + 30,\n \"backendPoolType\": \"nodeIPConfiguration\"\n },\n \"podCidr\": + \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\",\n \"dnsServiceIP\": + \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\",\n \"outboundType\": + \"loadBalancer\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\n ],\n \"serviceCidrs\": + [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\": [\n \"IPv4\"\n ]\n + \ },\n \"maxAgentPools\": 100,\n \"identityProfile\": {\n \"kubeletidentity\": + {\n \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/cliakstest000002-agentpool\",\n \ \"clientId\":\"00000000-0000-0000-0000-000000000001\",\n \"objectId\":\"00000000-0000-0000-0000-000000000001\"\n \ }\n },\n \"disableLocalAccounts\": false,\n \"securityProfile\": {},\n \"storageProfile\": {\n \"diskCSIDriver\": {\n \"enabled\": @@ -694,11 +663,11 @@ interactions: cache-control: - no-cache content-length: - - '4106' + - '4397' content-type: - application/json date: - - Wed, 26 Oct 2022 07:33:00 GMT + - Fri, 28 Oct 2022 08:52:14 GMT expires: - '-1' pragma: @@ -731,7 +700,7 @@ interactions: - --resource-group --name --nat-gateway-managed-outbound-ip-count --nat-gateway-idle-timeout --outbound-type --aks-custom-header User-Agent: - - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview @@ -742,8 +711,8 @@ interactions: \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \"provisioningState\": \"Succeeded\",\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"kubernetesVersion\": \"1.23.12\",\n \"currentKubernetesVersion\": \"1.23.12\",\n \"dnsPrefix\": - \"cliakstest-clitesttj4rejxwd-79a739\",\n \"fqdn\": \"cliakstest-clitesttj4rejxwd-79a739-ab6f17da.hcp.westus2.azmk8s.io\",\n - \ \"azurePortalFQDN\": \"cliakstest-clitesttj4rejxwd-79a739-ab6f17da.portal.hcp.westus2.azmk8s.io\",\n + \"cliakstest-clitestf6ggfhgqn-79a739\",\n \"fqdn\": \"cliakstest-clitestf6ggfhgqn-79a739-cd0e0432.hcp.westus2.azmk8s.io\",\n + \ \"azurePortalFQDN\": \"cliakstest-clitestf6ggfhgqn-79a739-cd0e0432.portal.hcp.westus2.azmk8s.io\",\n \ \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \"count\": 1,\n \"vmSize\": \"Standard_DS2_v2\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": \"Managed\",\n \"kubeletDiskType\": \"OS\",\n \"workloadRuntime\": @@ -757,21 +726,23 @@ interactions: \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\": {},\n \ \"enableFIPS\": false\n }\n ],\n \"linuxProfile\": {\n \"adminUsername\": \"azureuser\",\n \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": - \"ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABAQDFZag72JUdUPjufTC+9bGJ6+03D4WKC5qcYu2xbnzAEjW0CGYdezDhyqogngxvXgEzfhg7yUDUMYKjfg70oTN7w5t4YFiouN50Zlwra73QZ71Y9uNWM7qd5Uolaw1drHPLEbTcFloZTQM8gwWainBxkd6xRJK7A/otKFpN+TdV4T1dpNH826z+HC/ZpTavYZI+hqc7twWIj7hDQtUs2k4NDWJzPwMTAuIj4aNrmBYv4z8rK/ZtNIK07UbFlWDHFz1eZrDjOEBoSaakTY5j3vZXwe/dj5SmQypxg3AY+Xxboe4YxU2Iv1r0mcr7ST0wDEgUsdCUXO/lvNO3vGGV8qzf + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDp4FLc0tKQN/qhlFwE2bUCrBZ5bw3/rTff8QNPJpwTGzadlfRuLYLfNk/Sv5qEeyFwwDDZN9JgXg2Yom/n9a0ilgAlU5ACaQQQE13tu6KCxufVYa2EvofI5Vu4L0fg8hVA7/hCpJpwr+VlNpTuSToPhJNMxRY8kKP8ni5xOACT7+ONyZlVrVsoKl3/FzH1KjzrtS7mHKjBqhxYiX43I2JaKEb69Kxp4iz2vRzKA0A4kXV0Ey17y9jylZZ2NuBSe1lWjo+fxwsL8zriunuTr3Ks7bq+82p6miJvDF2U1bGi7V6VXQQM3/XhH+wJt/cHTI6g2ZQVURQHnE5bLZ5P4kyh azcli_aks_live_test@example.com\\n\"\n }\n ]\n }\n },\n \"servicePrincipalProfile\": {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n },\n \"nodeResourceGroup\": \"MC_clitest000001_cliakstest000002_westus2\",\n \"enableRBAC\": true,\n \ \"enablePodSecurityPolicy\": false,\n \"networkProfile\": {\n \"networkPlugin\": \"kubenet\",\n \"loadBalancerSku\": \"Standard\",\n \"loadBalancerProfile\": - {\n \"managedOutboundIPs\": {\n \"count\": 1\n },\n \"effectiveOutboundIPs\": - [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/66497f43-1896-40bb-8814-3cd8d65f6abe\"\n - \ }\n ],\n \"backendPoolType\": \"nodeIPConfiguration\"\n },\n - \ \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\",\n - \ \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\",\n - \ \"outboundType\": \"loadBalancer\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\n - \ ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\": - [\n \"IPv4\"\n ]\n },\n \"maxAgentPools\": 100,\n \"identityProfile\": - {\n \"kubeletidentity\": {\n \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/cliakstest000002-agentpool\",\n + {\n \"managedOutboundIPs\": {\n \"count\": 2\n },\n \"effectiveOutboundIPs\": + [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/f5c95599-b68a-40ec-bb72-f6e76c053c33\"\n + \ },\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/30d21eb0-8965-4cea-927d-f04d0bb87f0b\"\n + \ }\n ],\n \"allocatedOutboundPorts\": 0,\n \"idleTimeoutInMinutes\": + 30,\n \"backendPoolType\": \"nodeIPConfiguration\"\n },\n \"podCidr\": + \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\",\n \"dnsServiceIP\": + \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\",\n \"outboundType\": + \"loadBalancer\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\n ],\n \"serviceCidrs\": + [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\": [\n \"IPv4\"\n ]\n + \ },\n \"maxAgentPools\": 100,\n \"identityProfile\": {\n \"kubeletidentity\": + {\n \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/cliakstest000002-agentpool\",\n \ \"clientId\":\"00000000-0000-0000-0000-000000000001\",\n \"objectId\":\"00000000-0000-0000-0000-000000000001\"\n \ }\n },\n 
\"disableLocalAccounts\": false,\n \"securityProfile\": {},\n \"storageProfile\": {\n \"diskCSIDriver\": {\n \"enabled\": @@ -785,11 +756,11 @@ interactions: cache-control: - no-cache content-length: - - '4106' + - '4397' content-type: - application/json date: - - Wed, 26 Oct 2022 07:33:01 GMT + - Fri, 28 Oct 2022 08:52:14 GMT expires: - '-1' pragma: @@ -810,7 +781,7 @@ interactions: - request: body: '{"location": "westus2", "sku": {"name": "Basic", "tier": "Free"}, "identity": {"type": "SystemAssigned"}, "properties": {"kubernetesVersion": "1.23.12", "dnsPrefix": - "cliakstest-clitesttj4rejxwd-79a739", "agentPoolProfiles": [{"count": 1, "vmSize": + "cliakstest-clitestf6ggfhgqn-79a739", "agentPoolProfiles": [{"count": 1, "vmSize": "Standard_DS2_v2", "osDiskSizeGB": 128, "osDiskType": "Managed", "kubeletDiskType": "OS", "workloadRuntime": "OCIContainer", "maxPods": 110, "osType": "Linux", "osSKU": "Ubuntu", "enableAutoScaling": false, "type": "VirtualMachineScaleSets", @@ -818,10 +789,10 @@ interactions: {"code": "Running"}, "enableNodePublicIP": false, "enableCustomCATrust": false, "enableEncryptionAtHost": false, "enableUltraSSD": false, "enableFIPS": false, "name": "nodepool1"}], "linuxProfile": {"adminUsername": "azureuser", "ssh": - {"publicKeys": [{"keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDFZag72JUdUPjufTC+9bGJ6+03D4WKC5qcYu2xbnzAEjW0CGYdezDhyqogngxvXgEzfhg7yUDUMYKjfg70oTN7w5t4YFiouN50Zlwra73QZ71Y9uNWM7qd5Uolaw1drHPLEbTcFloZTQM8gwWainBxkd6xRJK7A/otKFpN+TdV4T1dpNH826z+HC/ZpTavYZI+hqc7twWIj7hDQtUs2k4NDWJzPwMTAuIj4aNrmBYv4z8rK/ZtNIK07UbFlWDHFz1eZrDjOEBoSaakTY5j3vZXwe/dj5SmQypxg3AY+Xxboe4YxU2Iv1r0mcr7ST0wDEgUsdCUXO/lvNO3vGGV8qzf + {"publicKeys": [{"keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDp4FLc0tKQN/qhlFwE2bUCrBZ5bw3/rTff8QNPJpwTGzadlfRuLYLfNk/Sv5qEeyFwwDDZN9JgXg2Yom/n9a0ilgAlU5ACaQQQE13tu6KCxufVYa2EvofI5Vu4L0fg8hVA7/hCpJpwr+VlNpTuSToPhJNMxRY8kKP8ni5xOACT7+ONyZlVrVsoKl3/FzH1KjzrtS7mHKjBqhxYiX43I2JaKEb69Kxp4iz2vRzKA0A4kXV0Ey17y9jylZZ2NuBSe1lWjo+fxwsL8zriunuTr3Ks7bq+82p6miJvDF2U1bGi7V6VXQQM3/XhH+wJt/cHTI6g2ZQVURQHnE5bLZ5P4kyh azcli_aks_live_test@example.com\n"}]}}, "servicePrincipalProfile": {"clientId":"00000000-0000-0000-0000-000000000001"}, - "nodeResourceGroup": "MC_clitest000001_cliakstest000002_westus2", "enableRBAC": - true, "enablePodSecurityPolicy": false, "networkProfile": {"networkPlugin": + "oidcIssuerProfile": {"enabled": false}, "nodeResourceGroup": "MC_clitest000001_cliakstest000002_westus2", + "enableRBAC": true, "enablePodSecurityPolicy": false, "networkProfile": {"networkPlugin": "kubenet", "podCidr": "10.244.0.0/16", "serviceCidr": "10.0.0.0/16", "dnsServiceIP": "10.0.0.10", "dockerBridgeCidr": "172.17.0.1/16", "outboundType": "managedNATGateway", "loadBalancerSku": "Standard", "natGatewayProfile": {"managedOutboundIPProfile": @@ -843,14 +814,14 @@ interactions: Connection: - keep-alive Content-Length: - - '2352' + - '2393' Content-Type: - application/json ParameterSetName: - --resource-group --name --nat-gateway-managed-outbound-ip-count --nat-gateway-idle-timeout --outbound-type --aks-custom-header User-Agent: - - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview @@ -861,8 
+832,8 @@ interactions: \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \"provisioningState\": \"Updating\",\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"kubernetesVersion\": \"1.23.12\",\n \"currentKubernetesVersion\": \"1.23.12\",\n \"dnsPrefix\": - \"cliakstest-clitesttj4rejxwd-79a739\",\n \"fqdn\": \"cliakstest-clitesttj4rejxwd-79a739-ab6f17da.hcp.westus2.azmk8s.io\",\n - \ \"azurePortalFQDN\": \"cliakstest-clitesttj4rejxwd-79a739-ab6f17da.portal.hcp.westus2.azmk8s.io\",\n + \"cliakstest-clitestf6ggfhgqn-79a739\",\n \"fqdn\": \"cliakstest-clitestf6ggfhgqn-79a739-cd0e0432.hcp.westus2.azmk8s.io\",\n + \ \"azurePortalFQDN\": \"cliakstest-clitestf6ggfhgqn-79a739-cd0e0432.portal.hcp.westus2.azmk8s.io\",\n \ \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \"count\": 1,\n \"vmSize\": \"Standard_DS2_v2\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": \"Managed\",\n \"kubeletDiskType\": \"OS\",\n \"workloadRuntime\": @@ -876,23 +847,24 @@ interactions: \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\": {},\n \ \"enableFIPS\": false\n }\n ],\n \"linuxProfile\": {\n \"adminUsername\": \"azureuser\",\n \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDFZag72JUdUPjufTC+9bGJ6+03D4WKC5qcYu2xbnzAEjW0CGYdezDhyqogngxvXgEzfhg7yUDUMYKjfg70oTN7w5t4YFiouN50Zlwra73QZ71Y9uNWM7qd5Uolaw1drHPLEbTcFloZTQM8gwWainBxkd6xRJK7A/otKFpN+TdV4T1dpNH826z+HC/ZpTavYZI+hqc7twWIj7hDQtUs2k4NDWJzPwMTAuIj4aNrmBYv4z8rK/ZtNIK07UbFlWDHFz1eZrDjOEBoSaakTY5j3vZXwe/dj5SmQypxg3AY+Xxboe4YxU2Iv1r0mcr7ST0wDEgUsdCUXO/lvNO3vGGV8qzf + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDp4FLc0tKQN/qhlFwE2bUCrBZ5bw3/rTff8QNPJpwTGzadlfRuLYLfNk/Sv5qEeyFwwDDZN9JgXg2Yom/n9a0ilgAlU5ACaQQQE13tu6KCxufVYa2EvofI5Vu4L0fg8hVA7/hCpJpwr+VlNpTuSToPhJNMxRY8kKP8ni5xOACT7+ONyZlVrVsoKl3/FzH1KjzrtS7mHKjBqhxYiX43I2JaKEb69Kxp4iz2vRzKA0A4kXV0Ey17y9jylZZ2NuBSe1lWjo+fxwsL8zriunuTr3Ks7bq+82p6miJvDF2U1bGi7V6VXQQM3/XhH+wJt/cHTI6g2ZQVURQHnE5bLZ5P4kyh azcli_aks_live_test@example.com\\n\"\n }\n ]\n }\n },\n \"servicePrincipalProfile\": {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n },\n \"nodeResourceGroup\": \"MC_clitest000001_cliakstest000002_westus2\",\n \"enableRBAC\": true,\n \ \"enablePodSecurityPolicy\": false,\n \"networkProfile\": {\n \"networkPlugin\": \"kubenet\",\n \"loadBalancerSku\": \"Standard\",\n \"loadBalancerProfile\": - {\n \"managedOutboundIPs\": {\n \"count\": 1\n },\n \"effectiveOutboundIPs\": - [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/66497f43-1896-40bb-8814-3cd8d65f6abe\"\n - \ }\n ],\n \"backendPoolType\": \"nodeIPConfiguration\"\n },\n - \ \"natGatewayProfile\": {\n \"managedOutboundIPProfile\": {\n \"count\": - 2\n },\n \"idleTimeoutInMinutes\": 30\n },\n \"podCidr\": \"10.244.0.0/16\",\n - \ \"serviceCidr\": \"10.0.0.0/16\",\n \"dnsServiceIP\": \"10.0.0.10\",\n - \ \"dockerBridgeCidr\": \"172.17.0.1/16\",\n \"outboundType\": \"managedNATGateway\",\n - \ \"podCidrs\": [\n \"10.244.0.0/16\"\n ],\n \"serviceCidrs\": - [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\": [\n \"IPv4\"\n ]\n - \ },\n \"maxAgentPools\": 100,\n \"identityProfile\": {\n \"kubeletidentity\": - {\n \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/cliakstest000002-agentpool\",\n + {\n \"managedOutboundIPs\": {\n \"count\": 
2\n },\n \"effectiveOutboundIPs\": + [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/f5c95599-b68a-40ec-bb72-f6e76c053c33\"\n + \ },\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/30d21eb0-8965-4cea-927d-f04d0bb87f0b\"\n + \ }\n ],\n \"allocatedOutboundPorts\": 0,\n \"idleTimeoutInMinutes\": + 30,\n \"backendPoolType\": \"nodeIPConfiguration\"\n },\n \"natGatewayProfile\": + {\n \"managedOutboundIPProfile\": {\n \"count\": 2\n },\n \"idleTimeoutInMinutes\": + 30\n },\n \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\",\n + \ \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\",\n + \ \"outboundType\": \"managedNATGateway\",\n \"podCidrs\": [\n \"10.244.0.0/16\"\n + \ ],\n \"serviceCidrs\": [\n \"10.0.0.0/16\"\n ],\n \"ipFamilies\": + [\n \"IPv4\"\n ]\n },\n \"maxAgentPools\": 100,\n \"identityProfile\": + {\n \"kubeletidentity\": {\n \"resourceId\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/cliakstest000002-agentpool\",\n \ \"clientId\":\"00000000-0000-0000-0000-000000000001\",\n \"objectId\":\"00000000-0000-0000-0000-000000000001\"\n \ }\n },\n \"disableLocalAccounts\": false,\n \"securityProfile\": {},\n \"storageProfile\": {\n \"diskCSIDriver\": {\n \"enabled\": @@ -904,15 +876,15 @@ interactions: {\n \"name\": \"Basic\",\n \"tier\": \"Free\"\n }\n }" headers: azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/86d1700a-f14c-4068-879b-fefe3d8bc7ba?api-version=2016-03-30 + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/3e4077d6-440f-4c4a-82f0-01d6c5417ae9?api-version=2016-03-30 cache-control: - no-cache content-length: - - '4235' + - '4526' content-type: - application/json date: - - Wed, 26 Oct 2022 07:33:04 GMT + - Fri, 28 Oct 2022 08:52:17 GMT expires: - '-1' pragma: @@ -928,7 +900,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - - '1199' + - '1198' status: code: 200 message: OK @@ -947,14 +919,14 @@ interactions: - --resource-group --name --nat-gateway-managed-outbound-ip-count --nat-gateway-idle-timeout --outbound-type --aks-custom-header User-Agent: - - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/86d1700a-f14c-4068-879b-fefe3d8bc7ba?api-version=2016-03-30 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/3e4077d6-440f-4c4a-82f0-01d6c5417ae9?api-version=2016-03-30 response: body: - string: "{\n \"name\": \"0a70d186-4cf1-6840-879b-fefe3d8bc7ba\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-10-26T07:33:04.1423202Z\"\n }" + string: "{\n \"name\": 
\"d677403e-0f44-4a4c-82f0-01d6c5417ae9\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-28T08:52:17.8094112Z\"\n }" headers: cache-control: - no-cache @@ -963,7 +935,7 @@ interactions: content-type: - application/json date: - - Wed, 26 Oct 2022 07:33:34 GMT + - Fri, 28 Oct 2022 08:52:47 GMT expires: - '-1' pragma: @@ -996,14 +968,14 @@ interactions: - --resource-group --name --nat-gateway-managed-outbound-ip-count --nat-gateway-idle-timeout --outbound-type --aks-custom-header User-Agent: - - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/86d1700a-f14c-4068-879b-fefe3d8bc7ba?api-version=2016-03-30 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/3e4077d6-440f-4c4a-82f0-01d6c5417ae9?api-version=2016-03-30 response: body: - string: "{\n \"name\": \"0a70d186-4cf1-6840-879b-fefe3d8bc7ba\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-10-26T07:33:04.1423202Z\"\n }" + string: "{\n \"name\": \"d677403e-0f44-4a4c-82f0-01d6c5417ae9\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-28T08:52:17.8094112Z\"\n }" headers: cache-control: - no-cache @@ -1012,7 +984,7 @@ interactions: content-type: - application/json date: - - Wed, 26 Oct 2022 07:34:04 GMT + - Fri, 28 Oct 2022 08:53:17 GMT expires: - '-1' pragma: @@ -1045,14 +1017,14 @@ interactions: - --resource-group --name --nat-gateway-managed-outbound-ip-count --nat-gateway-idle-timeout --outbound-type --aks-custom-header User-Agent: - - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/86d1700a-f14c-4068-879b-fefe3d8bc7ba?api-version=2016-03-30 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/3e4077d6-440f-4c4a-82f0-01d6c5417ae9?api-version=2016-03-30 response: body: - string: "{\n \"name\": \"0a70d186-4cf1-6840-879b-fefe3d8bc7ba\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-10-26T07:33:04.1423202Z\"\n }" + string: "{\n \"name\": \"d677403e-0f44-4a4c-82f0-01d6c5417ae9\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-28T08:52:17.8094112Z\"\n }" headers: cache-control: - no-cache @@ -1061,7 +1033,7 @@ interactions: content-type: - application/json date: - - Wed, 26 Oct 2022 07:34:34 GMT + - Fri, 28 Oct 2022 08:53:47 GMT expires: - '-1' pragma: @@ -1094,14 +1066,14 @@ interactions: - --resource-group --name --nat-gateway-managed-outbound-ip-count --nat-gateway-idle-timeout --outbound-type --aks-custom-header User-Agent: - - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/86d1700a-f14c-4068-879b-fefe3d8bc7ba?api-version=2016-03-30 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/3e4077d6-440f-4c4a-82f0-01d6c5417ae9?api-version=2016-03-30 response: body: - string: "{\n \"name\": \"0a70d186-4cf1-6840-879b-fefe3d8bc7ba\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-10-26T07:33:04.1423202Z\"\n }" + string: "{\n \"name\": \"d677403e-0f44-4a4c-82f0-01d6c5417ae9\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-28T08:52:17.8094112Z\"\n }" headers: cache-control: - no-cache @@ -1110,7 +1082,7 @@ interactions: content-type: - application/json date: - - Wed, 26 Oct 2022 07:35:04 GMT + - Fri, 28 Oct 2022 08:54:18 GMT expires: - '-1' pragma: @@ -1143,14 +1115,14 @@ interactions: - --resource-group --name --nat-gateway-managed-outbound-ip-count --nat-gateway-idle-timeout --outbound-type --aks-custom-header User-Agent: - - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/86d1700a-f14c-4068-879b-fefe3d8bc7ba?api-version=2016-03-30 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/3e4077d6-440f-4c4a-82f0-01d6c5417ae9?api-version=2016-03-30 response: body: - string: "{\n \"name\": \"0a70d186-4cf1-6840-879b-fefe3d8bc7ba\",\n \"status\": - \"InProgress\",\n \"startTime\": \"2022-10-26T07:33:04.1423202Z\"\n }" + string: "{\n \"name\": \"d677403e-0f44-4a4c-82f0-01d6c5417ae9\",\n \"status\": + \"InProgress\",\n \"startTime\": \"2022-10-28T08:52:17.8094112Z\"\n }" headers: cache-control: - no-cache @@ -1159,7 +1131,7 @@ interactions: content-type: - application/json date: - - Wed, 26 Oct 2022 07:35:33 GMT + - Fri, 28 Oct 2022 08:54:48 GMT expires: - '-1' pragma: @@ -1192,15 +1164,15 @@ interactions: - --resource-group --name --nat-gateway-managed-outbound-ip-count --nat-gateway-idle-timeout --outbound-type --aks-custom-header User-Agent: - - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/86d1700a-f14c-4068-879b-fefe3d8bc7ba?api-version=2016-03-30 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.ContainerService/locations/westus2/operations/3e4077d6-440f-4c4a-82f0-01d6c5417ae9?api-version=2016-03-30 response: body: - string: "{\n \"name\": \"0a70d186-4cf1-6840-879b-fefe3d8bc7ba\",\n \"status\": - \"Succeeded\",\n \"startTime\": \"2022-10-26T07:33:04.1423202Z\",\n \"endTime\": - \"2022-10-26T07:35:48.1753038Z\"\n }" + string: "{\n \"name\": \"d677403e-0f44-4a4c-82f0-01d6c5417ae9\",\n \"status\": + \"Succeeded\",\n \"startTime\": \"2022-10-28T08:52:17.8094112Z\",\n \"endTime\": + \"2022-10-28T08:54:57.9286232Z\"\n 
}" headers: cache-control: - no-cache @@ -1209,7 +1181,7 @@ interactions: content-type: - application/json date: - - Wed, 26 Oct 2022 07:36:04 GMT + - Fri, 28 Oct 2022 08:55:18 GMT expires: - '-1' pragma: @@ -1242,7 +1214,7 @@ interactions: - --resource-group --name --nat-gateway-managed-outbound-ip-count --nat-gateway-idle-timeout --outbound-type --aks-custom-header User-Agent: - - AZURECLI/2.41.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-containerservice/20.4.0b Python/3.8.10 (Linux-5.15.0-1022-azure-x86_64-with-glibc2.29) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.ContainerService/managedClusters/cliakstest000002?api-version=2022-09-02-preview @@ -1253,8 +1225,8 @@ interactions: \"Microsoft.ContainerService/ManagedClusters\",\n \"properties\": {\n \"provisioningState\": \"Succeeded\",\n \"powerState\": {\n \"code\": \"Running\"\n },\n \"kubernetesVersion\": \"1.23.12\",\n \"currentKubernetesVersion\": \"1.23.12\",\n \"dnsPrefix\": - \"cliakstest-clitesttj4rejxwd-79a739\",\n \"fqdn\": \"cliakstest-clitesttj4rejxwd-79a739-ab6f17da.hcp.westus2.azmk8s.io\",\n - \ \"azurePortalFQDN\": \"cliakstest-clitesttj4rejxwd-79a739-ab6f17da.portal.hcp.westus2.azmk8s.io\",\n + \"cliakstest-clitestf6ggfhgqn-79a739\",\n \"fqdn\": \"cliakstest-clitestf6ggfhgqn-79a739-cd0e0432.hcp.westus2.azmk8s.io\",\n + \ \"azurePortalFQDN\": \"cliakstest-clitestf6ggfhgqn-79a739-cd0e0432.portal.hcp.westus2.azmk8s.io\",\n \ \"agentPoolProfiles\": [\n {\n \"name\": \"nodepool1\",\n \"count\": 1,\n \"vmSize\": \"Standard_DS2_v2\",\n \"osDiskSizeGB\": 128,\n \"osDiskType\": \"Managed\",\n \"kubeletDiskType\": \"OS\",\n \"workloadRuntime\": @@ -1268,18 +1240,20 @@ interactions: \"AKSUbuntu-1804gen2containerd-2022.10.03\",\n \"upgradeSettings\": {},\n \ \"enableFIPS\": false\n }\n ],\n \"linuxProfile\": {\n \"adminUsername\": \"azureuser\",\n \"ssh\": {\n \"publicKeys\": [\n {\n \"keyData\": - \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDFZag72JUdUPjufTC+9bGJ6+03D4WKC5qcYu2xbnzAEjW0CGYdezDhyqogngxvXgEzfhg7yUDUMYKjfg70oTN7w5t4YFiouN50Zlwra73QZ71Y9uNWM7qd5Uolaw1drHPLEbTcFloZTQM8gwWainBxkd6xRJK7A/otKFpN+TdV4T1dpNH826z+HC/ZpTavYZI+hqc7twWIj7hDQtUs2k4NDWJzPwMTAuIj4aNrmBYv4z8rK/ZtNIK07UbFlWDHFz1eZrDjOEBoSaakTY5j3vZXwe/dj5SmQypxg3AY+Xxboe4YxU2Iv1r0mcr7ST0wDEgUsdCUXO/lvNO3vGGV8qzf + \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDp4FLc0tKQN/qhlFwE2bUCrBZ5bw3/rTff8QNPJpwTGzadlfRuLYLfNk/Sv5qEeyFwwDDZN9JgXg2Yom/n9a0ilgAlU5ACaQQQE13tu6KCxufVYa2EvofI5Vu4L0fg8hVA7/hCpJpwr+VlNpTuSToPhJNMxRY8kKP8ni5xOACT7+ONyZlVrVsoKl3/FzH1KjzrtS7mHKjBqhxYiX43I2JaKEb69Kxp4iz2vRzKA0A4kXV0Ey17y9jylZZ2NuBSe1lWjo+fxwsL8zriunuTr3Ks7bq+82p6miJvDF2U1bGi7V6VXQQM3/XhH+wJt/cHTI6g2ZQVURQHnE5bLZ5P4kyh azcli_aks_live_test@example.com\\n\"\n }\n ]\n }\n },\n \"servicePrincipalProfile\": {\n \"clientId\":\"00000000-0000-0000-0000-000000000001\"\n },\n \"nodeResourceGroup\": \"MC_clitest000001_cliakstest000002_westus2\",\n \"enableRBAC\": true,\n \ \"enablePodSecurityPolicy\": false,\n \"networkProfile\": {\n \"networkPlugin\": \"kubenet\",\n \"loadBalancerSku\": \"Standard\",\n \"loadBalancerProfile\": - {\n \"managedOutboundIPs\": {\n \"count\": 1\n },\n \"effectiveOutboundIPs\": - [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/66497f43-1896-40bb-8814-3cd8d65f6abe\"\n - \ }\n ],\n 
\"backendPoolType\": \"nodeIPConfiguration\"\n },\n - \ \"natGatewayProfile\": {\n \"managedOutboundIPProfile\": {\n \"count\": - 2\n },\n \"effectiveOutboundIPs\": [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/901ca738-9f18-4ca0-9bbd-e3d3943b481a\"\n - \ },\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/0e91132d-cfa8-4b09-ba34-0bed69292769\"\n + {\n \"managedOutboundIPs\": {\n \"count\": 2\n },\n \"effectiveOutboundIPs\": + [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/f5c95599-b68a-40ec-bb72-f6e76c053c33\"\n + \ },\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/30d21eb0-8965-4cea-927d-f04d0bb87f0b\"\n + \ }\n ],\n \"allocatedOutboundPorts\": 0,\n \"idleTimeoutInMinutes\": + 30,\n \"backendPoolType\": \"nodeIPConfiguration\"\n },\n \"natGatewayProfile\": + {\n \"managedOutboundIPProfile\": {\n \"count\": 2\n },\n \"effectiveOutboundIPs\": + [\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/a13e9c3a-40fa-43ae-98cb-28684fb5ccec\"\n + \ },\n {\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/MC_clitest000001_cliakstest000002_westus2/providers/Microsoft.Network/publicIPAddresses/a173e4ab-5e86-4a25-8c39-12899005998d\"\n \ }\n ],\n \"idleTimeoutInMinutes\": 30\n },\n \"podCidr\": \"10.244.0.0/16\",\n \"serviceCidr\": \"10.0.0.0/16\",\n \"dnsServiceIP\": \"10.0.0.10\",\n \"dockerBridgeCidr\": \"172.17.0.1/16\",\n \"outboundType\": @@ -1300,11 +1274,11 @@ interactions: cache-control: - no-cache content-length: - - '4723' + - '5014' content-type: - application/json date: - - Wed, 26 Oct 2022 07:36:05 GMT + - Fri, 28 Oct 2022 08:55:18 GMT expires: - '-1' pragma: diff --git a/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py b/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py index b5a716dbb1d..abd7c10ceb3 100644 --- a/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py +++ b/src/aks-preview/azext_aks_preview/tests/latest/test_aks_commands.py @@ -131,11 +131,12 @@ def test_aks_update_outbound_from_slb_to_natgateway(self, resource_group, resour create_cmd = 'aks create --resource-group={resource_group} --name={name} ' \ '--vm-set-type VirtualMachineScaleSets -c 1 ' \ - '--outbound-type=loadbalancer ' \ + '--outbound-type=loadbalancer --load-balancer-managed-outbound-ip-count 2 ' \ '--ssh-key-value={ssh_key_value}' self.cmd(create_cmd, checks=[ self.check('provisioningState', 'Succeeded'), self.check('networkProfile.outboundType', 'loadBalancer'), + self.check('networkProfile.loadBalancerProfile.managedOutboundIPs.count', 2), ]) update_cmd = 'aks update --resource-group={resource_group} --name={name} ' \ @@ -146,10 +147,8 @@ def test_aks_update_outbound_from_slb_to_natgateway(self, resource_group, resour self.cmd(update_cmd, checks=[ self.check('provisioningState', 'Succeeded'), self.check('networkProfile.outboundType', 'managedNATGateway'), - self.check( - 
'networkProfile.natGatewayProfile.idleTimeoutInMinutes', 30), - self.check( - 'networkProfile.natGatewayProfile.managedOutboundIpProfile.count', 2), + self.check('networkProfile.natGatewayProfile.idleTimeoutInMinutes', 30), + self.check('networkProfile.natGatewayProfile.managedOutboundIpProfile.count', 2), ]) @AllowLargeResponse() diff --git a/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py b/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py index 479ce0cc7c7..bbdeb256e5d 100644 --- a/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py +++ b/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py @@ -4639,6 +4639,74 @@ def test_update_load_balancer_profile(self): # fail on incomplete mc object (no network profile) with self.assertRaises(UnknownError): dec_9.update_load_balancer_profile(mc_9) + + # custom value + dec_10 = AKSPreviewManagedClusterUpdateDecorator( + self.cmd, + self.client, + {"outbound_type": "managedNATGateway"}, + CUSTOM_MGMT_AKS_PREVIEW, + ) + mc_10 = self.models.ManagedCluster( + location="test_location", + network_profile=self.models.ContainerServiceNetworkProfile( + load_balancer_sku="standard", + outbound_type="loadBalancer", + load_balancer_profile=self.models.load_balancer_models.ManagedClusterLoadBalancerProfile( + outbound_ip_prefixes=self.models.load_balancer_models.ManagedClusterLoadBalancerProfileOutboundIPPrefixes( + public_ip_prefixes=[ + self.models.load_balancer_models.ResourceReference( + id="id1" + ), + self.models.load_balancer_models.ResourceReference( + id="id2" + ), + ] + ) + ), + ), + ) + dec_10.context.attach_mc(mc_10) + dec_mc_10 = dec_10.update_load_balancer_profile(mc_10) + + ground_truth_mc_10 = self.models.ManagedCluster( + location="test_location", + network_profile=self.models.ContainerServiceNetworkProfile( + load_balancer_sku="standard", + outbound_type="loadBalancer", + ), + ) + self.assertEqual(dec_mc_10, ground_truth_mc_10) + + def test_update_nat_gateway_profile(self): + # default value in `aks_update` + dec_1 = AKSPreviewManagedClusterUpdateDecorator( + self.cmd, + self.client, + {"outbound_type": "loadBalancer"}, + CUSTOM_MGMT_AKS_PREVIEW, + ) + # fail on passing the wrong mc object + with self.assertRaises(CLIInternalError): + dec_1.update_nat_gateway_profile(None) + + mc_1 = self.models.ManagedCluster( + location="test_location", + network_profile=self.models.ContainerServiceNetworkProfile( + nat_gateway_profile=self.models.nat_gateway_models.ManagedClusterNATGatewayProfile(), + ), + ) + dec_1.context.attach_mc(mc_1) + dec_mc_1 = dec_1.update_nat_gateway_profile(mc_1) + + ground_truth_mc_1 = self.models.ManagedCluster( + location="test_location", + network_profile=self.models.ContainerServiceNetworkProfile( + outbound_type="loadBalancer", + ), + ) + self.assertEqual(dec_mc_1, ground_truth_mc_1) + def test_update_outbound_type(self): # default value in `aks_update` dec_1 = AKSPreviewManagedClusterUpdateDecorator( @@ -4646,13 +4714,15 @@ def test_update_outbound_type(self): self.client, { "outbound_type": "managedNATGateway", - "nat_gateway_managed_outbound_ip_count": 2, }, CUSTOM_MGMT_AKS_PREVIEW, ) mc_1 = self.models.ManagedCluster( location="test_location", - network_profile=self.models.ContainerServiceNetworkProfile(), + network_profile=self.models.ContainerServiceNetworkProfile( + load_balancer_sku="standard", + outbound_type="loadBalancer", + ), ) dec_1.context.attach_mc(mc_1) # fail on passing the wrong mc 
object @@ -4662,9 +4732,11 @@ def test_update_outbound_type(self): ground_truth_mc_1 = self.models.ManagedCluster( location="test_location", - network_profile=self.models.ContainerServiceNetworkProfile(), + network_profile=self.models.ContainerServiceNetworkProfile( + load_balancer_sku="standard", + outbound_type="managedNATGateway", + ), ) - ground_truth_mc_1.network_profile.outbound_type = "managedNATGateway" self.assertEqual(dec_mc_1, ground_truth_mc_1) @@ -5628,8 +5700,6 @@ def test_update_node_restriction(self): ) ), ) - print(dec_mc_2.security_profile) - print(ground_truth_mc_2.security_profile) self.assertEqual(dec_mc_2, ground_truth_mc_2) dec_3 = AKSPreviewManagedClusterUpdateDecorator( @@ -5691,8 +5761,6 @@ def test_update_vpa(self): ) ), ) - print(dec_mc_2.workload_auto_scaler_profile) - print(ground_truth_mc_2.workload_auto_scaler_profile) self.assertEqual(dec_mc_2, ground_truth_mc_2) dec_3 = AKSPreviewManagedClusterUpdateDecorator( diff --git a/src/aks-preview/setup.py b/src/aks-preview/setup.py index 0134a88c419..9898c42a8fb 100644 --- a/src/aks-preview/setup.py +++ b/src/aks-preview/setup.py @@ -9,7 +9,7 @@ from setuptools import setup, find_packages -VERSION = "0.5.112" +VERSION = "0.5.113" CLASSIFIERS = [ "Development Status :: 4 - Beta", From f1d8f34dbac8e5f0940acbfb8fe94b4b31dfe14e Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Fri, 28 Oct 2022 09:59:33 +0000 Subject: [PATCH 47/85] [Release] Update index.json for extension [ aks-preview ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=12151&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/d829ec7af66879a050c3582fc2ffddf614a776d7 --- src/index.json | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/src/index.json b/src/index.json index 6cf25cce7e0..2cd4868007d 100644 --- a/src/index.json +++ b/src/index.json @@ -6997,6 +6997,49 @@ "version": "0.5.112" }, "sha256Digest": "b33d01bc5ba13d5932951e577ede76d97aa6d3fad88dcabe1b786b3f12476eff" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/aks_preview-0.5.113-py2.py3-none-any.whl", + "filename": "aks_preview-0.5.113-py2.py3-none-any.whl", + "metadata": { + "azext.isPreview": true, + "azext.minCliCoreVersion": "2.38.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/aks-preview" + } + } + }, + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "aks-preview", + "summary": "Provides a preview for upcoming AKS features", + "version": "0.5.113" + }, + "sha256Digest": "033c6189f4ce704653e4f9ef880ccbedecd478d652922cdb16a604f0fcdd6cc2" } ], "alertsmanagement": [ From a45fa4179bb2b551ece96175aace9766051f3499 Mon Sep 17 00:00:00 2001 From: Saisankar Gochhayat Date: Sun, 30 Oct 2022 
23:22:03 -0700 Subject: [PATCH 48/85] Remove old arcappliance versions (#5500) Co-authored-by: Sai Sankar Gochhayat --- src/index.json | 156 ------------------------------------------------- 1 file changed, 156 deletions(-) diff --git a/src/index.json b/src/index.json index 2cd4868007d..c72ef30342f 100644 --- a/src/index.json +++ b/src/index.json @@ -8735,162 +8735,6 @@ }, "sha256Digest": "f65ea31e60c8576137f8abef556c365bea8cbf50f1650b9e4375fdc8ba7a0b1e" }, - { - "downloadUrl": "https://arcplatformcliextprod.blob.core.windows.net/arcappliance/arcappliance-0.2.24-py2.py3-none-any.whl", - "filename": "arcappliance-0.2.24-py2.py3-none-any.whl", - "metadata": { - "azext.isPreview": true, - "azext.minCliCoreVersion": "2.0.67", - "classifiers": [ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "Intended Audience :: System Administrators", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "License :: OSI Approved :: MIT License" - ], - "extensions": { - "python.details": { - "contacts": [ - { - "email": "appliance@microsoft.com", - "name": "Microsoft Corporation", - "role": "author" - } - ], - "document_names": { - "description": "DESCRIPTION.rst" - }, - "project_urls": { - "Home": "https://msazure.visualstudio.com/AzureArcPlatform/_git/arcappliance-cli-extensions" - } - } - }, - "extras": [], - "generator": "bdist_wheel (0.30.0)", - "license": "MIT", - "metadata_version": "2.0", - "name": "arcappliance", - "run_requires": [ - { - "requires": [ - "jsonschema (==3.2.0)", - "kubernetes (==11.0.0)" - ] - } - ], - "summary": "Microsoft Azure Command-Line Tools Arcappliance Extension", - "version": "0.2.24" - }, - "sha256Digest": "6fad685e2ddfdc345542d990760aff7838b83d978b31fca23440dc9e11bf94d8" - }, - { - "downloadUrl": "https://arcplatformcliextprod.blob.core.windows.net/arcappliance/arcappliance-0.2.25-py2.py3-none-any.whl", - "filename": "arcappliance-0.2.25-py2.py3-none-any.whl", - "metadata": { - "azext.isPreview": true, - "azext.minCliCoreVersion": "2.0.67", - "classifiers": [ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "Intended Audience :: System Administrators", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "License :: OSI Approved :: MIT License" - ], - "extensions": { - "python.details": { - "contacts": [ - { - "email": "appliance@microsoft.com", - "name": "Microsoft Corporation", - "role": "author" - } - ], - "document_names": { - "description": "DESCRIPTION.rst" - }, - "project_urls": { - "Home": "https://msazure.visualstudio.com/AzureArcPlatform/_git/arcappliance-cli-extensions" - } - } - }, - "extras": [], - "generator": "bdist_wheel (0.30.0)", - "license": "MIT", - "metadata_version": "2.0", - "name": "arcappliance", - "run_requires": [ - { - "requires": [ - "jsonschema (==3.2.0)", - "kubernetes (==11.0.0)" - ] - } - ], - "summary": "Microsoft Azure Command-Line Tools Arcappliance Extension", - "version": "0.2.25" - }, - "sha256Digest": "e7e040dea494625c9ecde8202c810ed43fcf8ccf3f4637549a2c27e862b56a89" - }, - { - "downloadUrl": "https://arcplatformcliextprod.blob.core.windows.net/arcappliance/arcappliance-0.2.26-py2.py3-none-any.whl", - "filename": "arcappliance-0.2.26-py2.py3-none-any.whl", - "metadata": { - 
"azext.isPreview": true, - "azext.minCliCoreVersion": "2.0.67", - "classifiers": [ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "Intended Audience :: System Administrators", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "License :: OSI Approved :: MIT License" - ], - "extensions": { - "python.details": { - "contacts": [ - { - "email": "appliance@microsoft.com", - "name": "Microsoft Corporation", - "role": "author" - } - ], - "document_names": { - "description": "DESCRIPTION.rst" - }, - "project_urls": { - "Home": "https://msazure.visualstudio.com/AzureArcPlatform/_git/arcappliance-cli-extensions" - } - } - }, - "extras": [], - "generator": "bdist_wheel (0.30.0)", - "license": "MIT", - "metadata_version": "2.0", - "name": "arcappliance", - "run_requires": [ - { - "requires": [ - "jsonschema (==3.2.0)", - "kubernetes (==11.0.0)" - ] - } - ], - "summary": "Microsoft Azure Command-Line Tools Arcappliance Extension", - "version": "0.2.26" - }, - "sha256Digest": "74a950dc428e1d2b6a85a16567f3a8c14b24ea8c51ede59d67a89c3cca5f6e59" - }, { "downloadUrl": "https://arcplatformcliextprod.blob.core.windows.net/arcappliance/arcappliance-0.2.27-py2.py3-none-any.whl", "filename": "arcappliance-0.2.27-py2.py3-none-any.whl", From b3762a9b3ca4dd29adddcaba833950be818cf4a5 Mon Sep 17 00:00:00 2001 From: Mason Chen Date: Mon, 31 Oct 2022 15:46:43 +0800 Subject: [PATCH 49/85] Add warn when update Config Server or Application Configuration Service with SSH auth. (#5474) --- src/spring/HISTORY.md | 4 ++++ src/spring/azext_spring/_params.py | 11 ++++++----- src/spring/azext_spring/_validators.py | 10 ++++++++++ src/spring/azext_spring/_validators_enterprise.py | 10 ++++++++++ src/spring/setup.py | 2 +- 5 files changed, 31 insertions(+), 6 deletions(-) diff --git a/src/spring/HISTORY.md b/src/spring/HISTORY.md index c89cec36b59..fcad95f7527 100644 --- a/src/spring/HISTORY.md +++ b/src/spring/HISTORY.md @@ -1,5 +1,9 @@ Release History =============== +1.1.14 +--- +* Add warn when update Config Server or Application Configuration Service with SSH auth. + 1.1.13 --- * Stop execution and throw exception when operation status is `Failed`. 
diff --git a/src/spring/azext_spring/_params.py b/src/spring/azext_spring/_params.py index adb97a22cf2..49a8e685855 100644 --- a/src/spring/azext_spring/_params.py +++ b/src/spring/azext_spring/_params.py @@ -13,8 +13,8 @@ validate_vnet, validate_vnet_required_parameters, validate_node_resource_group, validate_tracing_parameters_asc_create, validate_tracing_parameters_asc_update, validate_app_insights_parameters, validate_instance_count, validate_java_agent_parameters, - validate_ingress_timeout, validate_remote_debugging_port, validate_jar, validate_ingress_send_timeout, - validate_ingress_session_max_age) + validate_ingress_timeout, validate_jar, validate_ingress_send_timeout, + validate_ingress_session_max_age, validate_config_server_ssh_or_warn, validate_remote_debugging_port) from ._validators_enterprise import (only_support_enterprise, validate_builder_resource, validate_builder_create, validate_builder_update, validate_build_pool_size, validate_git_uri, validate_acs_patterns, validate_config_file_patterns, @@ -22,7 +22,8 @@ validate_api_portal_instance_count, validate_buildpack_binding_exist, validate_buildpack_binding_not_exist, validate_buildpack_binding_properties, validate_buildpack_binding_secrets, - validate_build_env, validate_target_module, validate_runtime_version) + validate_build_env, validate_target_module, validate_runtime_version, + validate_acs_ssh_or_warn) from ._app_validator import (fulfill_deployment_param, active_deployment_exist, ensure_not_active_deployment, validate_deloy_path, validate_deloyment_create_path, validate_cpu, validate_build_cpu, validate_memory, validate_build_memory, @@ -536,7 +537,7 @@ def prepare_logs_argument(c): c.argument('host_key', help='Host key of the added config.') c.argument('host_key_algorithm', help='Host key algorithm of the added config.') - c.argument('private_key', help='Private_key of the added config.') + c.argument('private_key', help='Private_key of the added config.', validator=validate_config_server_ssh_or_warn) c.argument('strict_host_key_checking', options_list=['--strict-host-key-checking', '--host-key-check'], help='Strict_host_key_checking of the added config.') @@ -666,7 +667,7 @@ def prepare_logs_argument(c): c.argument('password', help='Password of the added config.') c.argument('host_key', help='Host key of the added config.') c.argument('host_key_algorithm', help='Host key algorithm of the added config.') - c.argument('private_key', help='Private_key of the added config.') + c.argument('private_key', help='Private_key of the added config.', validator=validate_acs_ssh_or_warn) c.argument('host_key_check', help='Strict host key checking of the added config which is used in SSH authentication. 
If false, ignore errors with host key.') for scope in ['add', 'update', 'remove']: diff --git a/src/spring/azext_spring/_validators.py b/src/spring/azext_spring/_validators.py index 7de18dce789..2fefbea712c 100644 --- a/src/spring/azext_spring/_validators.py +++ b/src/spring/azext_spring/_validators.py @@ -671,3 +671,13 @@ def _parse_jar_file(artifact_path): except Exception as err: # pylint: disable=broad-except telemetry.set_exception("parse user jar file failed, " + str(err)) return None + + +def validate_config_server_ssh_or_warn(namespace): + private_key = namespace.private_key + host_key = namespace.host_key + host_key_algorithm = namespace.host_key_algorithm + strict_host_key_checking = namespace.strict_host_key_checking + if private_key or host_key or host_key_algorithm or strict_host_key_checking: + logger.warning("SSH authentication only supports SHA-1 signature under Config Server restriction. " + "Please refer to https://aka.ms/asa-configserver-ssh to understand how to use SSH under this restriction.") diff --git a/src/spring/azext_spring/_validators_enterprise.py b/src/spring/azext_spring/_validators_enterprise.py index 2800267d3a0..926f47038c3 100644 --- a/src/spring/azext_spring/_validators_enterprise.py +++ b/src/spring/azext_spring/_validators_enterprise.py @@ -111,6 +111,16 @@ def validate_git_uri(namespace): raise InvalidArgumentValueError("Git URI should start with \"https://\" or \"git@\"") +def validate_acs_ssh_or_warn(namespace): + private_key = namespace.private_key + host_key = namespace.host_key + host_key_algorithm = namespace.host_key_algorithm + host_key_check = namespace.host_key_check + if private_key or host_key or host_key_algorithm or host_key_check: + logger.warning("SSH authentication only supports SHA-1 signature under ACS restriction. " + "Please refer to https://aka.ms/asa-acs-ssh to understand how to use SSH under this restriction.") + + def validate_config_file_patterns(namespace): if namespace.config_file_patterns: _validate_patterns(namespace.config_file_patterns) diff --git a/src/spring/setup.py b/src/spring/setup.py index 111c3806299..99029c1a704 100644 --- a/src/spring/setup.py +++ b/src/spring/setup.py @@ -16,7 +16,7 @@ # TODO: Confirm this is the right version number you want and it matches your # HISTORY.rst entry. 
-VERSION = '1.1.13' +VERSION = '1.1.14' # The full list of classifiers is available at # https://pypi.python.org/pypi?%3Aaction=list_classifiers From bebd9af446b0b9fbe4a4283386166832808d54f3 Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Mon, 31 Oct 2022 07:53:25 +0000 Subject: [PATCH 50/85] [Release] Update index.json for extension [ spring ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=12306&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/b3762a9b3ca4dd29adddcaba833950be818cf4a5 --- src/index.json | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/src/index.json b/src/index.json index c72ef30342f..e196629fb3a 100644 --- a/src/index.json +++ b/src/index.json @@ -36054,6 +36054,49 @@ "version": "1.1.13" }, "sha256Digest": "c3d968114b8e68b522962dccba958bc43b0235af55c417ad5cf6b6a25f1a9444" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/spring-1.1.14-py3-none-any.whl", + "filename": "spring-1.1.14-py3-none-any.whl", + "metadata": { + "azext.isPreview": false, + "azext.minCliCoreVersion": "2.38.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/spring" + } + } + }, + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "spring", + "summary": "Microsoft Azure Command-Line Tools spring Extension", + "version": "1.1.14" + }, + "sha256Digest": "e7d57cac5569ee8b530ddc51cf44ff45eedbdf5f94fadbb058e79eea705758a8" } ], "spring-cloud": [ From 8a7a4dabb2ddb56d64bcfed8b00646c48d5a622a Mon Sep 17 00:00:00 2001 From: Yuwei Zhou Date: Tue, 1 Nov 2022 11:34:17 +0800 Subject: [PATCH 51/85] =?UTF-8?q?Revert=20"Revert=20"[Spring]=20Support=20?= =?UTF-8?q?setting=20marketplace=20plan=20when=20creati=E2=80=A6=20(#5486)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/spring/HISTORY.md | 5 + src/spring/azext_spring/_help.py | 8 + src/spring/azext_spring/_marketplace.py | 41 ++ src/spring/azext_spring/_params.py | 4 + src/spring/azext_spring/_validators.py | 11 +- src/spring/azext_spring/commands.py | 4 + src/spring/azext_spring/spring_instance.py | 16 + .../tests/latest/test_asa_create.py | 11 + .../tests/latest/test_asa_validator.py | 6 +- .../vendored_sdks/marketplace/__init__.py | 16 + .../marketplace/_configuration.py | 73 ++++ .../marketplace/_marketplace_rp_service.py | 120 ++++++ .../vendored_sdks/marketplace/_version.py | 8 + .../vendored_sdks/marketplace/aio/__init__.py | 10 + .../marketplace/aio/_configuration.py | 69 ++++ .../aio/_marketplace_rp_service.py | 119 ++++++ .../vendored_sdks/marketplace/models.py | 7 + .../vendored_sdks/marketplace/py.typed | 1 + .../marketplace/v2018_08_01_beta/__init__.py | 18 + .../v2018_08_01_beta/_configuration.py | 
78 ++++ .../_marketplace_rp_service.py | 102 +++++ .../v2018_08_01_beta/_metadata.json | 114 ++++++ .../marketplace/v2018_08_01_beta/_patch.py | 31 ++ .../marketplace/v2018_08_01_beta/_vendor.py | 27 ++ .../marketplace/v2018_08_01_beta/_version.py | 9 + .../v2018_08_01_beta/aio/__init__.py | 15 + .../v2018_08_01_beta/aio/_configuration.py | 74 ++++ .../aio/_marketplace_rp_service.py | 95 +++++ .../v2018_08_01_beta/aio/_patch.py | 31 ++ .../aio/operations/__init__.py | 13 + .../aio/operations/_offer_operations.py | 100 +++++ .../v2018_08_01_beta/models/__init__.py | 40 ++ .../v2018_08_01_beta/models/_models.py | 330 ++++++++++++++++ .../v2018_08_01_beta/models/_models_py3.py | 357 ++++++++++++++++++ .../v2018_08_01_beta/operations/__init__.py | 13 + .../operations/_offer_operations.py | 149 ++++++++ .../marketplace/v2018_08_01_beta/py.typed | 1 + src/spring/setup.py | 2 +- 38 files changed, 2122 insertions(+), 6 deletions(-) create mode 100644 src/spring/azext_spring/_marketplace.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/__init__.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/_configuration.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/_marketplace_rp_service.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/_version.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/aio/__init__.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/aio/_configuration.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/aio/_marketplace_rp_service.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/models.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/py.typed create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/__init__.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_configuration.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_marketplace_rp_service.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_metadata.json create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_patch.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_vendor.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_version.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/__init__.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/_configuration.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/_marketplace_rp_service.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/_patch.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/operations/__init__.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/operations/_offer_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/models/__init__.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/models/_models.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/models/_models_py3.py create mode 100644 
src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/operations/__init__.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/operations/_offer_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/py.typed diff --git a/src/spring/HISTORY.md b/src/spring/HISTORY.md index fcad95f7527..83c4c840330 100644 --- a/src/spring/HISTORY.md +++ b/src/spring/HISTORY.md @@ -1,5 +1,10 @@ Release History =============== +1.2.0 +--- +* Add command `az spring list-marketplace-plan` to list all supported VMware product. For more detail, see https://learn.microsoft.com/en-us/azure/spring-apps/how-to-enterprise-marketplace-offer. +* Add argument `--marketplace-plan-id` in command `az spring create` to support purchasing different VMware product plan when creating Enterprise sku Spring resource. + 1.1.14 --- * Add warn when update Config Server or Application Configuration Service with SSH auth. diff --git a/src/spring/azext_spring/_help.py b/src/spring/azext_spring/_help.py index fe0c8314bee..749f916b055 100644 --- a/src/spring/azext_spring/_help.py +++ b/src/spring/azext_spring/_help.py @@ -39,6 +39,14 @@ az spring create -n MyService -g MyResourceGroup --sku Enterprise --enable-application-configuration-service --enable-service-registry --enable-gateway --enable-api-portal """ +helps['spring list-marketplace-plan'] = """ + type: command + short-summary: (Enterprise Tier Only) List Marketplace plan to be purchased. + examples: + - name: List all plans. + text: az spring list-marketplace-plan -o table +""" + helps['spring update'] = """ type: command short-summary: Update an Azure Spring Apps. diff --git a/src/spring/azext_spring/_marketplace.py b/src/spring/azext_spring/_marketplace.py new file mode 100644 index 00000000000..ac79caf3392 --- /dev/null +++ b/src/spring/azext_spring/_marketplace.py @@ -0,0 +1,41 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# --------------------------------------------------------------------------------------------
+
+# pylint: disable=wrong-import-order
+# pylint: disable=unused-argument, logging-format-interpolation, protected-access, wrong-import-order, too-many-lines
+from knack.log import get_logger
+from ._constant import (MARKETPLACE_OFFER_ID, MARKETPLACE_PUBLISHER_ID)
+
+logger = get_logger(__name__)
+
+
+def _spring_list_marketplace_plan(cmd, client):
+    # return get_mgmt_service_client(cli_ctx, AppPlatformManagementClient_20220501preview)
+    from azure.cli.core.commands.client_factory import get_mgmt_service_client
+    from .vendored_sdks.marketplace.v2018_08_01_beta import MarketplaceRPService
+    from .vendored_sdks.marketplace.v2018_08_01_beta.models import Offer
+
+    logger.warning('To view the Azure Spring Apps Enterprise tier offering and read a detailed description, see https://aka.ms/ascmpoffer')
+    client = get_mgmt_service_client(cmd.cli_ctx, MarketplaceRPService)
+    offer = client.offer.get('{}.{}'.format(MARKETPLACE_PUBLISHER_ID, MARKETPLACE_OFFER_ID))
+    offer.plans = [x for x in offer.plans if _is_valid_plan(x)]
+    return Offer.deserialize(offer).serialize(offer)
+
+
+def _is_valid_plan(plan):
+    return plan.availabilities
+
+
+def transform_marketplace_plan_output(result):
+    def _table_item_view(plan):
+        return {
+            'publisher id': result['properties']['publisherId'],
+            'product id': result['properties']['offerId'],
+            'plan id': plan['planId'],
+            'plan display name': plan['displayName']
+        }
+
+    plans = result['properties']['plans']
+    return [_table_item_view(plan) for plan in plans]
diff --git a/src/spring/azext_spring/_params.py b/src/spring/azext_spring/_params.py
index 49a8e685855..66f77617a54 100644
--- a/src/spring/azext_spring/_params.py
+++ b/src/spring/azext_spring/_params.py
@@ -155,6 +155,10 @@ def load_arguments(self, _):
                    validator=validate_api_portal_instance_count,
                    options_list=['--api-portal-instance-count', '--ap-instance'],
                    help='(Enterprise Tier Only) Number of API portal instances.')
+        c.argument('marketplace_plan_id',
+                   is_preview=True,
+                   help='(Enterprise Tier Only) Specify a different Marketplace plan to purchase with Spring instance. '
+                        'List all plans by running `az spring list-marketplace-plan -o table`.')
 
     with self.argument_context('spring update') as c:
         c.argument('sku', arg_type=sku_type, validator=normalize_sku)
diff --git a/src/spring/azext_spring/_validators.py b/src/spring/azext_spring/_validators.py
index 2fefbea712c..18d00e3ac07 100644
--- a/src/spring/azext_spring/_validators.py
+++ b/src/spring/azext_spring/_validators.py
@@ -50,6 +50,7 @@ def validate_sku(cmd, namespace):
         _validate_saas_provider(cmd, namespace)
         _validate_terms(cmd, namespace)
     else:
+        _check_saas_not_set(cmd, namespace)
         _check_tanzu_components_not_enable(cmd, namespace)
     normalize_sku(cmd, namespace)
 
@@ -59,6 +60,11 @@ def normalize_sku(cmd, namespace):
     namespace.sku = models.Sku(name=_get_sku_name(namespace.sku), tier=namespace.sku)
 
 
+def _check_saas_not_set(cmd, namespace):
+    if namespace.marketplace_plan_id:
+        raise InvalidArgumentValueError('--marketplace-plan-id is supported only when --sku=Enterprise')
+
+
 def _validate_saas_provider(cmd, namespace):
     from azure.cli.core.commands.client_factory import get_mgmt_service_client
     from azure.cli.core.profiles import ResourceType
@@ -72,17 +78,18 @@ def _validate_terms(cmd, namespace):
     from azure.mgmt.marketplaceordering import MarketplaceOrderingAgreements
     from azure.cli.core.commands.client_factory import get_mgmt_service_client
     client = get_mgmt_service_client(cmd.cli_ctx, MarketplaceOrderingAgreements).marketplace_agreements
+    plan_id = namespace.marketplace_plan_id or MARKETPLACE_PLAN_ID
     term = client.get(offer_type="virtualmachine",
                       publisher_id=MARKETPLACE_PUBLISHER_ID,
                       offer_id=MARKETPLACE_OFFER_ID,
-                      plan_id=MARKETPLACE_PLAN_ID)
+                      plan_id=plan_id)
     if not term.accepted:
         raise InvalidArgumentValueError('Terms for Azure Spring Apps Enterprise is not accepted.\n'
                                         'Run "az term accept --publisher {} '
                                         '--product {} '
                                         '--plan {}" to accept the term.'.format(MARKETPLACE_PUBLISHER_ID,
                                                                                 MARKETPLACE_OFFER_ID,
-                                                                                MARKETPLACE_PLAN_ID))
+                                                                                plan_id))
 
 
 def _check_tanzu_components_not_enable(cmd, namespace):
diff --git a/src/spring/azext_spring/commands.py b/src/spring/azext_spring/commands.py
index 1b96b49de38..78017be9546 100644
--- a/src/spring/azext_spring/commands.py
+++ b/src/spring/azext_spring/commands.py
@@ -23,6 +23,7 @@
                                  transform_spring_cloud_gateway_output,
                                  transform_api_portal_output)
 from ._validators import validate_app_insights_command_not_supported_tier
+from ._marketplace import (transform_marketplace_plan_output)
 from ._validators_enterprise import (validate_gateway_update, validate_api_portal_update)
 from ._app_managed_identity_validator import (validate_app_identity_remove_or_warning,
                                               validate_app_identity_assign_or_warning)
@@ -93,6 +94,9 @@ def load_command_table(self, _):
     with self.command_group('spring', custom_command_type=spring_routing_util,
                             exception_handler=handle_asc_exception) as g:
         g.custom_command('create', 'spring_create', supports_no_wait=True)
+        g.custom_command('list-marketplace-plan', 'spring_list_marketplace_plan',
+                         is_preview=True,
+                         table_transformer=transform_marketplace_plan_output)
 
     with self.command_group('spring', client_factory=cf_spring_20220501preview,
                             exception_handler=handle_asc_exception) as g:
diff --git a/src/spring/azext_spring/spring_instance.py b/src/spring/azext_spring/spring_instance.py
index 447ff226474..a54331f0283 100644
--- a/src/spring/azext_spring/spring_instance.py
+++ b/src/spring/azext_spring/spring_instance.py
@@ -18,6 +18,8 @@
 from ._validators import (_parse_sku_name, validate_instance_not_existed)
 from azure.cli.core.commands import LongRunningOperation
 from knack.log import get_logger
+from ._marketplace import _spring_list_marketplace_plan
+from ._constant import (MARKETPLACE_OFFER_ID, MARKETPLACE_PUBLISHER_ID)
 
 logger = get_logger(__name__)
 
@@ -62,6 +64,7 @@ def create_service(self,
                        sku=None,
                        tags=None,
                        ingress_read_timeout=None,
+                       marketplace_plan_id=None,
                        **_):
         properties = models.ClusterResourceProperties(
             zone_redundant=zone_redundant
@@ -74,6 +77,13 @@ def create_service(self,
         else:
             properties.vnet_addons = None
 
+        if marketplace_plan_id:
+            properties.marketplace_resource = models.MarketplaceResource(
+                plan=marketplace_plan_id,
+                product=MARKETPLACE_OFFER_ID,
+                publisher=MARKETPLACE_PUBLISHER_ID
+            )
+
         if service_runtime_subnet or app_subnet or reserved_cidr_range:
             properties.network_profile = models.NetworkProfile(
                 service_runtime_subnet_id=service_runtime_subnet,
@@ -153,6 +163,7 @@ def spring_create(cmd, client, resource_group, name,
                   api_portal_instance_count=None,
                   enable_log_stream_public_endpoint=None,
                   ingress_read_timeout=None,
+                  marketplace_plan_id=None,
                   no_wait=False):
     """
     Because Standard/Basic tier vs. Enterprise tier creation are very different. Here routes the command to different
@@ -183,6 +194,7 @@ def spring_create(cmd, client, resource_group, name,
         'enable_api_portal': enable_api_portal,
         'api_portal_instance_count': api_portal_instance_count,
         'enable_log_stream_public_endpoint': enable_log_stream_public_endpoint,
+        'marketplace_plan_id': marketplace_plan_id,
         'no_wait': no_wait
     }
 
@@ -198,3 +210,7 @@ def _enable_app_insights(cmd, client, resource_group, name, location, app_insigh
     return create_default_buildpack_binding_for_application_insights(cmd, client, resource_group, name,
                                                                      location, app_insights_key, app_insights,
                                                                      sampling_rate)
+
+
+def spring_list_marketplace_plan(cmd, client):
+    return _spring_list_marketplace_plan(cmd, client)
diff --git a/src/spring/azext_spring/tests/latest/test_asa_create.py b/src/spring/azext_spring/tests/latest/test_asa_create.py
index a7e6422bc63..c40ae5af7c3 100644
--- a/src/spring/azext_spring/tests/latest/test_asa_create.py
+++ b/src/spring/azext_spring/tests/latest/test_asa_create.py
@@ -79,6 +79,17 @@ def test_asc_create_enterprise(self):
         self.assertEqual('E0', resource.sku.name)
         self.assertEqual('Enterprise', resource.sku.tier)
         self.assertEqual(False, resource.properties.zone_redundant)
+        self.assertIsNone(resource.properties.marketplace_resource)
+
+    def test_asc_create_enterprise_with_plan(self):
+        self._execute('rg', 'asc', sku=self._get_sku('Enterprise'), disable_app_insights=True, marketplace_plan_id='my-plan')
+        resource = self.created_resource
+        self.assertEqual('E0', resource.sku.name)
+        self.assertEqual('Enterprise', resource.sku.tier)
+        self.assertEqual(False, resource.properties.zone_redundant)
+        self.assertEqual('my-plan', resource.properties.marketplace_resource.plan)
+        self.assertEqual('azure-spring-cloud-vmware-tanzu-2', resource.properties.marketplace_resource.product)
+        self.assertEqual('vmware-inc', resource.properties.marketplace_resource.publisher)
 
 
 class TestSpringCloudCreateWithAI(BasicTest):
diff --git a/src/spring/azext_spring/tests/latest/test_asa_validator.py b/src/spring/azext_spring/tests/latest/test_asa_validator.py
index 6bce1b17e32..1b80700c3a5 100644
--- a/src/spring/azext_spring/tests/latest/test_asa_validator.py
+++ b/src/spring/azext_spring/tests/latest/test_asa_validator.py
@@ -308,20 +308,20 @@ def _mock_not_registered_client(cli_ctx, client_type, **kwargs):
 class TestSkuValidator(unittest.TestCase):
     @mock.patch('azure.cli.core.commands.client_factory.get_mgmt_service_client', _mock_happy_client)
     def test_happy_path(self):
-        ns = Namespace(sku='Enterprise')
+        ns = Namespace(sku='Enterprise', marketplace_plan_id=None)
         validate_sku(_get_test_cmd(), ns)
         self.assertEqual('Enterprise', ns.sku.tier)
 
     @mock.patch('azure.cli.core.commands.client_factory.get_mgmt_service_client', _mock_not_accepted_term_client)
     def test_term_not_accept(self):
-        ns = Namespace(sku='Enterprise')
+        ns = Namespace(sku='Enterprise', marketplace_plan_id=None)
         with self.assertRaises(InvalidArgumentValueError) as context:
             validate_sku(_get_test_cmd(), ns)
         self.assertTrue('Terms for Azure Spring Apps Enterprise is not accepted.' in str(context.exception))
 
     @mock.patch('azure.cli.core.commands.client_factory.get_mgmt_service_client', _mock_not_registered_client)
     def test_provider_not_registered(self):
-        ns = Namespace(sku='Enterprise')
+        ns = Namespace(sku='Enterprise', marketplace_plan_id=None)
         with self.assertRaises(InvalidArgumentValueError) as context:
             validate_sku(_get_test_cmd(), ns)
         self.assertTrue('Microsoft.SaaS resource provider is not registered.' in str(context.exception))
diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/__init__.py b/src/spring/azext_spring/vendored_sdks/marketplace/__init__.py
new file mode 100644
index 00000000000..027c248725a
--- /dev/null
+++ b/src/spring/azext_spring/vendored_sdks/marketplace/__init__.py
@@ -0,0 +1,16 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._marketplace_rp_service import MarketplaceRPService
+__all__ = ['MarketplaceRPService']
+
+try:
+    from ._patch import patch_sdk  # type: ignore
+    patch_sdk()
+except ImportError:
+    pass
diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/_configuration.py b/src/spring/azext_spring/vendored_sdks/marketplace/_configuration.py
new file mode 100644
index 00000000000..1ee87a11353
--- /dev/null
+++ b/src/spring/azext_spring/vendored_sdks/marketplace/_configuration.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy
+
+from ._version import VERSION
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Optional
+
+    from azure.core.credentials import TokenCredential
+
+class MarketplaceRPServiceConfiguration(Configuration):
+    """Configuration for MarketplaceRPService.
+
+    Note that all parameters used to create this instance are saved as instance
+    attributes.
+ + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param market: The Market to use for the request. Default value is "US". + :type market: str + :param include_stop_sold_plans: The Market to use for the request. Default value is "true". + :type include_stop_sold_plans: str + """ + + def __init__( + self, + credential, # type: "TokenCredential" + market="US", # type: Optional[str] + include_stop_sold_plans="true", # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> None + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + super(MarketplaceRPServiceConfiguration, self).__init__(**kwargs) + + self.credential = credential + self.market = market + self.include_stop_sold_plans = include_stop_sold_plans + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'azure-mgmt-marketplace/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs # type: Any + ): + # type: (...) -> None + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/_marketplace_rp_service.py b/src/spring/azext_spring/vendored_sdks/marketplace/_marketplace_rp_service.py new file mode 100644 index 00000000000..910eedcdc10 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/_marketplace_rp_service.py @@ -0,0 +1,120 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from msrest import Deserializer, Serializer + +from azure.mgmt.core import ARMPipelineClient +from azure.profiles import KnownProfiles, ProfileDefinition +from azure.profiles.multiapiclient import MultiApiClientMixin + +from ._configuration import MarketplaceRPServiceConfiguration + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Optional + + from azure.core.credentials import TokenCredential + +class _SDKClient(object): + def __init__(self, *args, **kwargs): + """This is a fake class to support current implemetation of MultiApiClientMixin." + Will be removed in final version of multiapi azure-core based client + """ + pass + +class MarketplaceRPService(MultiApiClientMixin, _SDKClient): + """REST APIs for Azure Marketplace. + + This ready contains multiple API versions, to help you deal with all of the Azure clouds + (Azure Stack, Azure Government, Azure China, etc.). + By default, it uses the latest API version available on public Azure. + For production, you should stick to a particular api-version and/or profile. + The profile sets a mapping between an operation group and its API version. + The api-version parameter sets the default API version if the operation + group is not described in the profile. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param market: The Market to use for the request. Default value is "US". + :type market: str + :param include_stop_sold_plans: The Market to use for the request. Default value is "true". + :type include_stop_sold_plans: str + :param api_version: API version to use if no profile is provided, or if missing in profile. + :type api_version: str + :param base_url: Service URL + :type base_url: str + :param profile: A profile definition, from KnownProfiles to dict. 
+ :type profile: azure.profiles.KnownProfiles + """ + + DEFAULT_API_VERSION = '2018-08-01-beta' + _PROFILE_TAG = "azure.mgmt.marketplace.MarketplaceRPService" + LATEST_PROFILE = ProfileDefinition({ + _PROFILE_TAG: { + None: DEFAULT_API_VERSION, + }}, + _PROFILE_TAG + " latest" + ) + + def __init__( + self, + credential, # type: "TokenCredential" + market="US", # type: Optional[str] + include_stop_sold_plans="true", # type: Optional[str] + api_version=None, # type: Optional[str] + base_url="https://management.azure.com", # type: str + profile=KnownProfiles.default, # type: KnownProfiles + **kwargs # type: Any + ): + self._config = MarketplaceRPServiceConfiguration(credential, "US", "true", **kwargs) + self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + super(MarketplaceRPService, self).__init__( + api_version=api_version, + profile=profile + ) + + @classmethod + def _models_dict(cls, api_version): + return {k: v for k, v in cls.models(api_version).__dict__.items() if isinstance(v, type)} + + @classmethod + def models(cls, api_version=DEFAULT_API_VERSION): + """Module depends on the API version: + + * 2018-08-01-beta: :mod:`v2018_08_01_beta.models` + """ + if api_version == '2018-08-01-beta': + from .v2018_08_01_beta import models + return models + raise ValueError("API version {} is not available".format(api_version)) + + @property + def offer(self): + """Instance depends on the API version: + + * 2018-08-01-beta: :class:`OfferOperations` + """ + api_version = self._get_api_version('offer') + if api_version == '2018-08-01-beta': + from .v2018_08_01_beta.operations import OfferOperations as OperationClass + else: + raise ValueError("API version {} does not have operation group 'offer'".format(api_version)) + return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) + + def close(self): + self._client.close() + def __enter__(self): + self._client.__enter__() + return self + def __exit__(self, *exc_details): + self._client.__exit__(*exc_details) diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/_version.py b/src/spring/azext_spring/vendored_sdks/marketplace/_version.py new file mode 100644 index 00000000000..a30a458f8b5 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/_version.py @@ -0,0 +1,8 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +VERSION = "0.1.0" \ No newline at end of file diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/aio/__init__.py b/src/spring/azext_spring/vendored_sdks/marketplace/aio/__init__.py new file mode 100644 index 00000000000..d294a30c210 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/aio/__init__.py @@ -0,0 +1,10 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from ._marketplace_rp_service import MarketplaceRPService +__all__ = ['MarketplaceRPService'] diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/aio/_configuration.py b/src/spring/azext_spring/vendored_sdks/marketplace/aio/_configuration.py new file mode 100644 index 00000000000..d114aa2f027 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/aio/_configuration.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Optional, TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy + +from .._version import VERSION + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + +class MarketplaceRPServiceConfiguration(Configuration): + """Configuration for MarketplaceRPService. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param market: The Market to use for the request. Default value is "US". + :type market: str + :param include_stop_sold_plans: The Market to use for the request. Default value is "true". 
+ :type include_stop_sold_plans: str + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + market: Optional[str] = "US", + include_stop_sold_plans: Optional[str] = "true", + **kwargs # type: Any + ) -> None: + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + super(MarketplaceRPServiceConfiguration, self).__init__(**kwargs) + + self.credential = credential + self.market = market + self.include_stop_sold_plans = include_stop_sold_plans + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'azure-mgmt-marketplace/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs: Any + ) -> None: + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/aio/_marketplace_rp_service.py b/src/spring/azext_spring/vendored_sdks/marketplace/aio/_marketplace_rp_service.py new file mode 100644 index 00000000000..556e3442d4a --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/aio/_marketplace_rp_service.py @@ -0,0 +1,119 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, Optional, TYPE_CHECKING + +from msrest import Deserializer, Serializer + +from azure.mgmt.core import AsyncARMPipelineClient +from azure.profiles import KnownProfiles, ProfileDefinition +from azure.profiles.multiapiclient import MultiApiClientMixin + +from ._configuration import MarketplaceRPServiceConfiguration + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials import TokenCredential + from azure.core.credentials_async import AsyncTokenCredential + +class _SDKClient(object): + def __init__(self, *args, **kwargs): + """This is a fake class to support current implemetation of MultiApiClientMixin." + Will be removed in final version of multiapi azure-core based client + """ + pass + +class MarketplaceRPService(MultiApiClientMixin, _SDKClient): + """REST APIs for Azure Marketplace. 
+ + This ready contains multiple API versions, to help you deal with all of the Azure clouds + (Azure Stack, Azure Government, Azure China, etc.). + By default, it uses the latest API version available on public Azure. + For production, you should stick to a particular api-version and/or profile. + The profile sets a mapping between an operation group and its API version. + The api-version parameter sets the default API version if the operation + group is not described in the profile. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param market: The Market to use for the request. Default value is "US". + :type market: str + :param include_stop_sold_plans: The Market to use for the request. Default value is "true". + :type include_stop_sold_plans: str + :param api_version: API version to use if no profile is provided, or if missing in profile. + :type api_version: str + :param base_url: Service URL + :type base_url: str + :param profile: A profile definition, from KnownProfiles to dict. + :type profile: azure.profiles.KnownProfiles + """ + + DEFAULT_API_VERSION = '2018-08-01-beta' + _PROFILE_TAG = "azure.mgmt.marketplace.MarketplaceRPService" + LATEST_PROFILE = ProfileDefinition({ + _PROFILE_TAG: { + None: DEFAULT_API_VERSION, + }}, + _PROFILE_TAG + " latest" + ) + + def __init__( + self, + credential: "AsyncTokenCredential", + market: Optional[str] = "US", + include_stop_sold_plans: Optional[str] = "true", + api_version: Optional[str] = None, + base_url: str = "https://management.azure.com", + profile: KnownProfiles = KnownProfiles.default, + **kwargs # type: Any + ) -> None: + self._config = MarketplaceRPServiceConfiguration(credential, market, include_stop_sold_plans, **kwargs) + self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + super(MarketplaceRPService, self).__init__( + api_version=api_version, + profile=profile + ) + + @classmethod + def _models_dict(cls, api_version): + return {k: v for k, v in cls.models(api_version).__dict__.items() if isinstance(v, type)} + + @classmethod + def models(cls, api_version=DEFAULT_API_VERSION): + """Module depends on the API version: + + * 2018-08-01-beta: :mod:`v2018_08_01_beta.models` + """ + if api_version == '2018-08-01-beta': + from ..v2018_08_01_beta import models + return models + raise ValueError("API version {} is not available".format(api_version)) + + @property + def offer(self): + """Instance depends on the API version: + + * 2018-08-01-beta: :class:`OfferOperations` + """ + api_version = self._get_api_version('offer') + if api_version == '2018-08-01-beta': + from ..v2018_08_01_beta.aio.operations import OfferOperations as OperationClass + else: + raise ValueError("API version {} does not have operation group 'offer'".format(api_version)) + return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) + + async def close(self): + await self._client.close() + async def __aenter__(self): + await self._client.__aenter__() + return self + async def __aexit__(self, *exc_details): + await self._client.__aexit__(*exc_details) diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/models.py b/src/spring/azext_spring/vendored_sdks/marketplace/models.py new file mode 100644 index 00000000000..9c020db9776 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/models.py @@ -0,0 +1,7 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from .v2018_08_01_beta.models import * diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/py.typed b/src/spring/azext_spring/vendored_sdks/marketplace/py.typed new file mode 100644 index 00000000000..e5aff4f83af --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. \ No newline at end of file diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/__init__.py b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/__init__.py new file mode 100644 index 00000000000..d495e51aad2 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/__init__.py @@ -0,0 +1,18 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._marketplace_rp_service import MarketplaceRPService +from ._version import VERSION + +__version__ = VERSION +__all__ = ['MarketplaceRPService'] + +# `._patch.py` is used for handwritten extensions to the generated code +# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md +from ._patch import patch_sdk +patch_sdk() diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_configuration.py b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_configuration.py new file mode 100644 index 00000000000..f9b0855dbd8 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_configuration.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy + +from ._version import VERSION + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Optional + + from azure.core.credentials import TokenCredential + + +class MarketplaceRPServiceConfiguration(Configuration): # pylint: disable=too-many-instance-attributes + """Configuration for MarketplaceRPService. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. 
+ :type credential: ~azure.core.credentials.TokenCredential + :param market: The Market to use for the request. Default value is "US". + :type market: str + :param include_stop_sold_plans: The Market to use for the request. Default value is "true". + :type include_stop_sold_plans: str + :keyword api_version: Api Version. Default value is "2018-08-01-beta". Note that overriding + this default value may result in unsupported behavior. + :paramtype api_version: str + """ + + def __init__( + self, + credential, # type: "TokenCredential" + market="US", # type: Optional[str] + include_stop_sold_plans="true", # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> None + super(MarketplaceRPServiceConfiguration, self).__init__(**kwargs) + api_version = kwargs.pop('api_version', "2018-08-01-beta") # type: str + + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.credential = credential + self.market = market + self.include_stop_sold_plans = include_stop_sold_plans + self.api_version = api_version + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'mgmt-marketplace/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs # type: Any + ): + # type: (...) -> None + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_marketplace_rp_service.py b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_marketplace_rp_service.py new file mode 100644 index 00000000000..6d9a20cf977 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_marketplace_rp_service.py @@ -0,0 +1,102 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import TYPE_CHECKING + +from msrest import Deserializer, Serializer + +from azure.mgmt.core import ARMPipelineClient + +from . 
import models +from ._configuration import MarketplaceRPServiceConfiguration +from .operations import OfferOperations + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Optional + + from azure.core.credentials import TokenCredential + from azure.core.rest import HttpRequest, HttpResponse + +class MarketplaceRPService(object): + """REST APIs for Azure Marketplace. + + :ivar offer: OfferOperations operations + :vartype offer: azure.mgmt.marketplace.v2018_08_01_beta.operations.OfferOperations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param market: The Market to use for the request. Default value is "US". + :type market: str + :param include_stop_sold_plans: The Market to use for the request. Default value is "false". + :type include_stop_sold_plans: str + :param base_url: Service URL. Default value is "https://management.azure.com". + :type base_url: str + :keyword api_version: Api Version. Default value is "2018-08-01-beta". Note that overriding + this default value may result in unsupported behavior. + :paramtype api_version: str + """ + + def __init__( + self, + credential, # type: "TokenCredential" + market="US", # type: Optional[str] + include_stop_sold_plans="false", # type: Optional[str] + base_url="https://management.azure.com", # type: str + **kwargs # type: Any + ): + # type: (...) -> None + self._config = MarketplaceRPServiceConfiguration(credential=credential, market="US", include_stop_sold_plans="false", **kwargs) + self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + self._serialize.client_side_validation = False + self.offer = OfferOperations(self._client, self._config, self._serialize, self._deserialize) + + + def _send_request( + self, + request, # type: HttpRequest + **kwargs # type: Any + ): + # type: (...) -> HttpResponse + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.HttpResponse + """ + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, **kwargs) + + def close(self): + # type: () -> None + self._client.close() + + def __enter__(self): + # type: () -> MarketplaceRPService + self._client.__enter__() + return self + + def __exit__(self, *exc_details): + # type: (Any) -> None + self._client.__exit__(*exc_details) diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_metadata.json b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_metadata.json new file mode 100644 index 00000000000..3f64ec7d6af --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_metadata.json @@ -0,0 +1,114 @@ +{ + "chosen_version": "2018-08-01-beta", + "total_api_version_list": ["2018-08-01-beta"], + "client": { + "name": "MarketplaceRPService", + "filename": "_marketplace_rp_service", + "description": "REST APIs for Azure Marketplace.", + "host_value": "\"https://management.azure.com\"", + "parameterized_host_template": null, + "azure_arm": true, + "has_lro_operations": false, + "client_side_validation": false, + "sync_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"azure.mgmt.core\": [\"ARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"MarketplaceRPServiceConfiguration\"]}, \"thirdparty\": {\"msrest\": [\"Deserializer\", \"Serializer\"]}}}", + "async_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"], \"azure.core.credentials\": [\"TokenCredential\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"azure.mgmt.core\": [\"AsyncARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"MarketplaceRPServiceConfiguration\"]}, \"thirdparty\": {\"msrest\": [\"Deserializer\", \"Serializer\"]}}}" + }, + "global_parameters": { + "sync": { + "credential": { + "signature": "credential, # type: \"TokenCredential\"", + "description": "Credential needed for the client to connect to Azure.", + "docstring_type": "~azure.core.credentials.TokenCredential", + "required": true + }, + "market": { + "signature": "market=\"US\", # type: Optional[str]", + "description": "The Market to use for the request. Default value is \"US\".", + "docstring_type": "str", + "required": false + }, + "include_stop_sold_plans": { + "signature": "include_stop_sold_plans=\"true\", # type: Optional[str]", + "description": "The Market to use for the request. Default value is \"true\".", + "docstring_type": "str", + "required": false + } + }, + "async": { + "credential": { + "signature": "credential: \"AsyncTokenCredential\",", + "description": "Credential needed for the client to connect to Azure.", + "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", + "required": true + }, + "market": { + "signature": "market: Optional[str] = \"US\",", + "description": "The Market to use for the request. 
Default value is \"US\".", + "docstring_type": "str", + "required": false + }, + "include_stop_sold_plans": { + "signature": "include_stop_sold_plans: Optional[str] = \"true\",", + "description": "The Market to use for the request. Default value is \"true\".", + "docstring_type": "str", + "required": false + } + }, + "constant": { + }, + "call": "credential, market, include_stop_sold_plans", + "service_client_specific": { + "sync": { + "api_version": { + "signature": "api_version=None, # type: Optional[str]", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url=\"https://management.azure.com\", # type: str", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile=KnownProfiles.default, # type: KnownProfiles", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false + } + }, + "async": { + "api_version": { + "signature": "api_version: Optional[str] = None,", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url: str = \"https://management.azure.com\",", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile: KnownProfiles = KnownProfiles.default,", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false + } + } + } + }, + "config": { + "credential": true, + "credential_scopes": ["https://management.azure.com/.default"], + "credential_call_sync": "ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs)", + "credential_call_async": "AsyncARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs)", + "sync_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMChallengeAuthenticationPolicy\", \"ARMHttpLoggingPolicy\"]}, \"local\": {\"._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}}", + "async_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\", \"AsyncARMChallengeAuthenticationPolicy\"]}, \"local\": {\".._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}}" + }, + "operation_groups": { + "offer": "OfferOperations" + } +} \ No newline at end of file diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_patch.py b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_patch.py new file mode 100644 index 00000000000..74e48ecd07c --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_patch.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +# This file is used for handwritten extensions to the generated code. Example: +# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md +def patch_sdk(): + pass \ No newline at end of file diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_vendor.py b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_vendor.py new file mode 100644 index 00000000000..138f663c53a --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_vendor.py @@ -0,0 +1,27 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.core.pipeline.transport import HttpRequest + +def _convert_request(request, files=None): + data = request.content if not files else None + request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) + if files: + request.set_formdata_body(files) + return request + +def _format_url_section(template, **kwargs): + components = template.split("/") + while components: + try: + return template.format(**kwargs) + except KeyError as key: + formatted_components = template.split("/") + components = [ + c for c in formatted_components if "{}".format(key.args[0]) not in c + ] + template = "/".join(components) diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_version.py b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_version.py new file mode 100644 index 00000000000..e5754a47ce6 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +VERSION = "1.0.0b1" diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/__init__.py b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/__init__.py new file mode 100644 index 00000000000..b616b046b07 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/__init__.py @@ -0,0 +1,15 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._marketplace_rp_service import MarketplaceRPService +__all__ = ['MarketplaceRPService'] + +# `._patch.py` is used for handwritten extensions to the generated code +# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md +from ._patch import patch_sdk +patch_sdk() diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/_configuration.py b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/_configuration.py new file mode 100644 index 00000000000..f6f73f02673 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/_configuration.py @@ -0,0 +1,74 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, Optional, TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy + +from .._version import VERSION + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + + +class MarketplaceRPServiceConfiguration(Configuration): # pylint: disable=too-many-instance-attributes + """Configuration for MarketplaceRPService. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param market: The Market to use for the request. Default value is "US". + :type market: str + :param include_stop_sold_plans: The Market to use for the request. Default value is "true". + :type include_stop_sold_plans: str + :keyword api_version: Api Version. Default value is "2018-08-01-beta". Note that overriding + this default value may result in unsupported behavior. 
+ :paramtype api_version: str + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + market: Optional[str] = "US", + include_stop_sold_plans: Optional[str] = "true", + **kwargs: Any + ) -> None: + super(MarketplaceRPServiceConfiguration, self).__init__(**kwargs) + api_version = kwargs.pop('api_version', "2018-08-01-beta") # type: str + + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.credential = credential + self.market = market + self.include_stop_sold_plans = include_stop_sold_plans + self.api_version = api_version + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'mgmt-marketplace/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs: Any + ) -> None: + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/_marketplace_rp_service.py b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/_marketplace_rp_service.py new file mode 100644 index 00000000000..5dc70c4de80 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/_marketplace_rp_service.py @@ -0,0 +1,95 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, Awaitable, Optional, TYPE_CHECKING + +from msrest import Deserializer, Serializer + +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.mgmt.core import AsyncARMPipelineClient + +from .. import models +from ._configuration import MarketplaceRPServiceConfiguration +from .operations import OfferOperations + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + +class MarketplaceRPService: + """REST APIs for Azure Marketplace. + + :ivar offer: OfferOperations operations + :vartype offer: azure.mgmt.marketplace.v2018_08_01_beta.aio.operations.OfferOperations + :param credential: Credential needed for the client to connect to Azure. 
+ :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param market: The Market to use for the request. Default value is "US". + :type market: str + :param include_stop_sold_plans: The Market to use for the request. Default value is "true". + :type include_stop_sold_plans: str + :param base_url: Service URL. Default value is "https://management.azure.com". + :type base_url: str + :keyword api_version: Api Version. Default value is "2018-08-01-beta". Note that overriding + this default value may result in unsupported behavior. + :paramtype api_version: str + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + market: Optional[str] = "US", + include_stop_sold_plans: Optional[str] = "true", + base_url: str = "https://management.azure.com", + **kwargs: Any + ) -> None: + self._config = MarketplaceRPServiceConfiguration(credential=credential, market=market, include_stop_sold_plans=include_stop_sold_plans, **kwargs) + self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + self._serialize.client_side_validation = False + self.offer = OfferOperations(self._client, self._config, self._serialize, self._deserialize) + + + def _send_request( + self, + request: HttpRequest, + **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, **kwargs) + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> "MarketplaceRPService": + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details) -> None: + await self._client.__aexit__(*exc_details) diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/_patch.py b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/_patch.py new file mode 100644 index 00000000000..74e48ecd07c --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/_patch.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +# This file is used for handwritten extensions to the generated code. Example: +# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md +def patch_sdk(): + pass \ No newline at end of file diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/operations/__init__.py b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/operations/__init__.py new file mode 100644 index 00000000000..d57e3328b94 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/operations/__init__.py @@ -0,0 +1,13 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._offer_operations import OfferOperations + +__all__ = [ + 'OfferOperations', +] diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/operations/_offer_operations.py b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/operations/_offer_operations.py new file mode 100644 index 00000000000..8b18df2433e --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/aio/operations/_offer_operations.py @@ -0,0 +1,100 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Optional, TypeVar + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._offer_operations import build_get_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class OfferOperations: + """OfferOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.marketplace.v2018_08_01_beta.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + @distributed_trace_async + async def get( + self, + offer_id: str, + **kwargs: Any + ) -> "_models.Offer": + """Gets information about a specific offer. + + :param offer_id: The offer ID to update or delete. 
+ :type offer_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Offer, or the result of cls(response) + :rtype: ~azure.mgmt.marketplace.v2018_08_01_beta.models.Offer + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.Offer"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + api_version = kwargs.pop('api_version', "2018-08-01-beta") # type: str + + + request = build_get_request( + offer_id=offer_id, + api_version=api_version, + market=self._config.market, + include_stop_sold_plans=self._config.include_stop_sold_plans, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Offer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/providers/Microsoft.Marketplace/offers/{offerId}"} # type: ignore + diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/models/__init__.py b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/models/__init__.py new file mode 100644 index 00000000000..1a81dcdfa6e --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/models/__init__.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
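As the error map in the `get` operation above shows, 401/404/409 responses are translated to specific azure-core exception types before the generic `HttpResponseError` path. A hedged sketch of handling those on the caller side, assuming a client built as in the earlier example:

```
# Hedged sketch: handling errors surfaced by OfferOperations.get (async).
from azure.core.exceptions import HttpResponseError, ResourceNotFoundError


async def try_get_offer(client, offer_id: str):
    try:
        return await client.offer.get(offer_id=offer_id)
    except ResourceNotFoundError:
        # 404 is mapped via error_map before the generic handler runs.
        return None
    except HttpResponseError as exc:
        # Other non-200 responses carry the deserialized ErrorResponse model on exc.model.
        code = exc.model.error.code if exc.model and exc.model.error else "unknown"
        raise RuntimeError(f"Marketplace call failed with error code {code}") from exc
```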
+# -------------------------------------------------------------------------- + +try: + from ._models_py3 import AvailabilityEntity + from ._models_py3 import ErrorResponse + from ._models_py3 import ErrorResponseError + from ._models_py3 import IncludedQuantityProperty + from ._models_py3 import Meter + from ._models_py3 import Offer + from ._models_py3 import OfferListResponse + from ._models_py3 import Plan + from ._models_py3 import Price +except (SyntaxError, ImportError): + from ._models import AvailabilityEntity # type: ignore + from ._models import ErrorResponse # type: ignore + from ._models import ErrorResponseError # type: ignore + from ._models import IncludedQuantityProperty # type: ignore + from ._models import Meter # type: ignore + from ._models import Offer # type: ignore + from ._models import OfferListResponse # type: ignore + from ._models import Plan # type: ignore + from ._models import Price # type: ignore + +__all__ = [ + 'AvailabilityEntity', + 'ErrorResponse', + 'ErrorResponseError', + 'IncludedQuantityProperty', + 'Meter', + 'Offer', + 'OfferListResponse', + 'Plan', + 'Price', +] diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/models/_models.py b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/models/_models.py new file mode 100644 index 00000000000..de05963102e --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/models/_models.py @@ -0,0 +1,330 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + + +class AvailabilityEntity(msrest.serialization.Model): + """AvailabilityEntity. + + :ivar id: + :vartype id: str + :ivar actions: List of allowed actions. + :vartype actions: list[str] + :ivar market: Market for the availability collection. + :vartype market: str + :ivar meter: Applicable billing meter information. + :vartype meter: ~azure.mgmt.marketplace.v2018_08_01_beta.models.Meter + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'actions': {'key': 'actions', 'type': '[str]'}, + 'market': {'key': 'market', 'type': 'str'}, + 'meter': {'key': 'meter', 'type': 'Meter'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword id: + :paramtype id: str + :keyword actions: List of allowed actions. + :paramtype actions: list[str] + :keyword market: Market for the availability collection. + :paramtype market: str + :keyword meter: Applicable billing meter information. + :paramtype meter: ~azure.mgmt.marketplace.v2018_08_01_beta.models.Meter + """ + super(AvailabilityEntity, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.actions = kwargs.get('actions', None) + self.market = kwargs.get('market', None) + self.meter = kwargs.get('meter', None) + + +class ErrorResponse(msrest.serialization.Model): + """Error response indicates Microsoft.Marketplace service is not able to process the incoming request. The reason is provided in the error message. + + :ivar error: The details of the error. 
+ :vartype error: ~azure.mgmt.marketplace.v2018_08_01_beta.models.ErrorResponseError + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorResponseError'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword error: The details of the error. + :paramtype error: ~azure.mgmt.marketplace.v2018_08_01_beta.models.ErrorResponseError + """ + super(ErrorResponse, self).__init__(**kwargs) + self.error = kwargs.get('error', None) + + +class ErrorResponseError(msrest.serialization.Model): + """The details of the error. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: Error code. + :vartype code: str + :ivar message: Error message indicating why the operation failed. + :vartype message: str + """ + + _validation = { + 'code': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ErrorResponseError, self).__init__(**kwargs) + self.code = None + self.message = None + + +class IncludedQuantityProperty(msrest.serialization.Model): + """IncludedQuantityProperty. + + :ivar term_id: Term id. + :vartype term_id: str + :ivar quantity: Quantity. + :vartype quantity: str + """ + + _attribute_map = { + 'term_id': {'key': 'termId', 'type': 'str'}, + 'quantity': {'key': 'quantity', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword term_id: Term id. + :paramtype term_id: str + :keyword quantity: Quantity. + :paramtype quantity: str + """ + super(IncludedQuantityProperty, self).__init__(**kwargs) + self.term_id = kwargs.get('term_id', None) + self.quantity = kwargs.get('quantity', None) + + +class Meter(msrest.serialization.Model): + """Meter. + + :ivar meter_id: Metetr id. + :vartype meter_id: str + :ivar price: Price for this meter. + :vartype price: ~azure.mgmt.marketplace.v2018_08_01_beta.models.Price + :ivar included_quantity_properties: Included quantity properties. + :vartype included_quantity_properties: + list[~azure.mgmt.marketplace.v2018_08_01_beta.models.IncludedQuantityProperty] + """ + + _attribute_map = { + 'meter_id': {'key': 'meterId', 'type': 'str'}, + 'price': {'key': 'price', 'type': 'Price'}, + 'included_quantity_properties': {'key': 'includedQuantityProperties', 'type': '[IncludedQuantityProperty]'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword meter_id: Metetr id. + :paramtype meter_id: str + :keyword price: Price for this meter. + :paramtype price: ~azure.mgmt.marketplace.v2018_08_01_beta.models.Price + :keyword included_quantity_properties: Included quantity properties. + :paramtype included_quantity_properties: + list[~azure.mgmt.marketplace.v2018_08_01_beta.models.IncludedQuantityProperty] + """ + super(Meter, self).__init__(**kwargs) + self.meter_id = kwargs.get('meter_id', None) + self.price = kwargs.get('price', None) + self.included_quantity_properties = kwargs.get('included_quantity_properties', None) + + +class Offer(msrest.serialization.Model): + """The offer data structure. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar offer_id: Offers unique id. + :vartype offer_id: str + :ivar display_name: It will be displayed prominently in the marketplace. + :vartype display_name: str + :ivar publisher_id: Publisher unique id. 
+ :vartype publisher_id: str + :ivar publisher_display_name: Publisher name that will be displayed prominently in the + marketplace. + :vartype publisher_display_name: str + :ivar e_tag: Identifier for purposes of race condition. + :vartype e_tag: str + :ivar plans: Offer plans. + :vartype plans: list[~azure.mgmt.marketplace.v2018_08_01_beta.models.Plan] + """ + + _validation = { + 'offer_id': {'readonly': True}, + 'display_name': {'readonly': True}, + 'publisher_id': {'readonly': True}, + 'publisher_display_name': {'readonly': True}, + } + + _attribute_map = { + 'offer_id': {'key': 'properties.offerId', 'type': 'str'}, + 'display_name': {'key': 'properties.displayName', 'type': 'str'}, + 'publisher_id': {'key': 'properties.publisherId', 'type': 'str'}, + 'publisher_display_name': {'key': 'properties.publisherDisplayName', 'type': 'str'}, + 'e_tag': {'key': 'properties.eTag', 'type': 'str'}, + 'plans': {'key': 'properties.plans', 'type': '[Plan]'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword e_tag: Identifier for purposes of race condition. + :paramtype e_tag: str + :keyword plans: Offer plans. + :paramtype plans: list[~azure.mgmt.marketplace.v2018_08_01_beta.models.Plan] + """ + super(Offer, self).__init__(**kwargs) + self.offer_id = None + self.display_name = None + self.publisher_id = None + self.publisher_display_name = None + self.e_tag = kwargs.get('e_tag', None) + self.plans = kwargs.get('plans', None) + + +class OfferListResponse(msrest.serialization.Model): + """OfferListResponse. + + :ivar value: + :vartype value: list[~azure.mgmt.marketplace.v2018_08_01_beta.models.Offer] + :ivar next_link: URL to get the next set of offer list results if there are any. + :vartype next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Offer]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword value: + :paramtype value: list[~azure.mgmt.marketplace.v2018_08_01_beta.models.Offer] + :keyword next_link: URL to get the next set of offer list results if there are any. + :paramtype next_link: str + """ + super(OfferListResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) + + +class Plan(msrest.serialization.Model): + """Plan. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar description: Description for this plan. + :vartype description: str + :ivar plan_id: Text identifier for this plan. + :vartype plan_id: str + :ivar display_name: Friendly name for the plan for display in the marketplace. 
+ :vartype display_name: str + :ivar availabilities: + :vartype availabilities: + list[~azure.mgmt.marketplace.v2018_08_01_beta.models.AvailabilityEntity] + """ + + _validation = { + 'description': {'readonly': True}, + 'plan_id': {'readonly': True}, + 'display_name': {'readonly': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'plan_id': {'key': 'planId', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'availabilities': {'key': 'availabilities', 'type': '[AvailabilityEntity]'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword availabilities: + :paramtype availabilities: + list[~azure.mgmt.marketplace.v2018_08_01_beta.models.AvailabilityEntity] + """ + super(Plan, self).__init__(**kwargs) + self.description = None + self.plan_id = None + self.display_name = None + self.availabilities = kwargs.get('availabilities', None) + + +class Price(msrest.serialization.Model): + """Price. + + :ivar currency_code: Currency code i.e. 'USD'. + :vartype currency_code: str + :ivar list_price: Retail price for the item. + :vartype list_price: float + """ + + _attribute_map = { + 'currency_code': {'key': 'currencyCode', 'type': 'str'}, + 'list_price': {'key': 'listPrice', 'type': 'float'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword currency_code: Currency code i.e. 'USD'. + :paramtype currency_code: str + :keyword list_price: Retail price for the item. + :paramtype list_price: float + """ + super(Price, self).__init__(**kwargs) + self.currency_code = kwargs.get('currency_code', None) + self.list_price = kwargs.get('list_price', None) diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/models/_models_py3.py b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/models/_models_py3.py new file mode 100644 index 00000000000..7c88e11c7d1 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/models/_models_py3.py @@ -0,0 +1,357 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import List, Optional + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + + +class AvailabilityEntity(msrest.serialization.Model): + """AvailabilityEntity. + + :ivar id: + :vartype id: str + :ivar actions: List of allowed actions. + :vartype actions: list[str] + :ivar market: Market for the availability collection. + :vartype market: str + :ivar meter: Applicable billing meter information. + :vartype meter: ~azure.mgmt.marketplace.v2018_08_01_beta.models.Meter + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'actions': {'key': 'actions', 'type': '[str]'}, + 'market': {'key': 'market', 'type': 'str'}, + 'meter': {'key': 'meter', 'type': 'Meter'}, + } + + def __init__( + self, + *, + id: Optional[str] = None, + actions: Optional[List[str]] = None, + market: Optional[str] = None, + meter: Optional["Meter"] = None, + **kwargs + ): + """ + :keyword id: + :paramtype id: str + :keyword actions: List of allowed actions. 
+ :paramtype actions: list[str] + :keyword market: Market for the availability collection. + :paramtype market: str + :keyword meter: Applicable billing meter information. + :paramtype meter: ~azure.mgmt.marketplace.v2018_08_01_beta.models.Meter + """ + super(AvailabilityEntity, self).__init__(**kwargs) + self.id = id + self.actions = actions + self.market = market + self.meter = meter + + +class ErrorResponse(msrest.serialization.Model): + """Error response indicates Microsoft.Marketplace service is not able to process the incoming request. The reason is provided in the error message. + + :ivar error: The details of the error. + :vartype error: ~azure.mgmt.marketplace.v2018_08_01_beta.models.ErrorResponseError + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorResponseError'}, + } + + def __init__( + self, + *, + error: Optional["ErrorResponseError"] = None, + **kwargs + ): + """ + :keyword error: The details of the error. + :paramtype error: ~azure.mgmt.marketplace.v2018_08_01_beta.models.ErrorResponseError + """ + super(ErrorResponse, self).__init__(**kwargs) + self.error = error + + +class ErrorResponseError(msrest.serialization.Model): + """The details of the error. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: Error code. + :vartype code: str + :ivar message: Error message indicating why the operation failed. + :vartype message: str + """ + + _validation = { + 'code': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ErrorResponseError, self).__init__(**kwargs) + self.code = None + self.message = None + + +class IncludedQuantityProperty(msrest.serialization.Model): + """IncludedQuantityProperty. + + :ivar term_id: Term id. + :vartype term_id: str + :ivar quantity: Quantity. + :vartype quantity: str + """ + + _attribute_map = { + 'term_id': {'key': 'termId', 'type': 'str'}, + 'quantity': {'key': 'quantity', 'type': 'str'}, + } + + def __init__( + self, + *, + term_id: Optional[str] = None, + quantity: Optional[str] = None, + **kwargs + ): + """ + :keyword term_id: Term id. + :paramtype term_id: str + :keyword quantity: Quantity. + :paramtype quantity: str + """ + super(IncludedQuantityProperty, self).__init__(**kwargs) + self.term_id = term_id + self.quantity = quantity + + +class Meter(msrest.serialization.Model): + """Meter. + + :ivar meter_id: Metetr id. + :vartype meter_id: str + :ivar price: Price for this meter. + :vartype price: ~azure.mgmt.marketplace.v2018_08_01_beta.models.Price + :ivar included_quantity_properties: Included quantity properties. + :vartype included_quantity_properties: + list[~azure.mgmt.marketplace.v2018_08_01_beta.models.IncludedQuantityProperty] + """ + + _attribute_map = { + 'meter_id': {'key': 'meterId', 'type': 'str'}, + 'price': {'key': 'price', 'type': 'Price'}, + 'included_quantity_properties': {'key': 'includedQuantityProperties', 'type': '[IncludedQuantityProperty]'}, + } + + def __init__( + self, + *, + meter_id: Optional[str] = None, + price: Optional["Price"] = None, + included_quantity_properties: Optional[List["IncludedQuantityProperty"]] = None, + **kwargs + ): + """ + :keyword meter_id: Metetr id. + :paramtype meter_id: str + :keyword price: Price for this meter. 
+ :paramtype price: ~azure.mgmt.marketplace.v2018_08_01_beta.models.Price + :keyword included_quantity_properties: Included quantity properties. + :paramtype included_quantity_properties: + list[~azure.mgmt.marketplace.v2018_08_01_beta.models.IncludedQuantityProperty] + """ + super(Meter, self).__init__(**kwargs) + self.meter_id = meter_id + self.price = price + self.included_quantity_properties = included_quantity_properties + + +class Offer(msrest.serialization.Model): + """The offer data structure. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar offer_id: Offers unique id. + :vartype offer_id: str + :ivar display_name: It will be displayed prominently in the marketplace. + :vartype display_name: str + :ivar publisher_id: Publisher unique id. + :vartype publisher_id: str + :ivar publisher_display_name: Publisher name that will be displayed prominently in the + marketplace. + :vartype publisher_display_name: str + :ivar e_tag: Identifier for purposes of race condition. + :vartype e_tag: str + :ivar plans: Offer plans. + :vartype plans: list[~azure.mgmt.marketplace.v2018_08_01_beta.models.Plan] + """ + + _validation = { + 'offer_id': {'readonly': True}, + 'display_name': {'readonly': True}, + 'publisher_id': {'readonly': True}, + 'publisher_display_name': {'readonly': True}, + } + + _attribute_map = { + 'offer_id': {'key': 'properties.offerId', 'type': 'str'}, + 'display_name': {'key': 'properties.displayName', 'type': 'str'}, + 'publisher_id': {'key': 'properties.publisherId', 'type': 'str'}, + 'publisher_display_name': {'key': 'properties.publisherDisplayName', 'type': 'str'}, + 'e_tag': {'key': 'properties.eTag', 'type': 'str'}, + 'plans': {'key': 'properties.plans', 'type': '[Plan]'}, + } + + def __init__( + self, + *, + e_tag: Optional[str] = None, + plans: Optional[List["Plan"]] = None, + **kwargs + ): + """ + :keyword e_tag: Identifier for purposes of race condition. + :paramtype e_tag: str + :keyword plans: Offer plans. + :paramtype plans: list[~azure.mgmt.marketplace.v2018_08_01_beta.models.Plan] + """ + super(Offer, self).__init__(**kwargs) + self.offer_id = None + self.display_name = None + self.publisher_id = None + self.publisher_display_name = None + self.e_tag = e_tag + self.plans = plans + + +class OfferListResponse(msrest.serialization.Model): + """OfferListResponse. + + :ivar value: + :vartype value: list[~azure.mgmt.marketplace.v2018_08_01_beta.models.Offer] + :ivar next_link: URL to get the next set of offer list results if there are any. + :vartype next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Offer]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["Offer"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword value: + :paramtype value: list[~azure.mgmt.marketplace.v2018_08_01_beta.models.Offer] + :keyword next_link: URL to get the next set of offer list results if there are any. + :paramtype next_link: str + """ + super(OfferListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class Plan(msrest.serialization.Model): + """Plan. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar description: Description for this plan. + :vartype description: str + :ivar plan_id: Text identifier for this plan. + :vartype plan_id: str + :ivar display_name: Friendly name for the plan for display in the marketplace. 
+ :vartype display_name: str + :ivar availabilities: + :vartype availabilities: + list[~azure.mgmt.marketplace.v2018_08_01_beta.models.AvailabilityEntity] + """ + + _validation = { + 'description': {'readonly': True}, + 'plan_id': {'readonly': True}, + 'display_name': {'readonly': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'plan_id': {'key': 'planId', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'availabilities': {'key': 'availabilities', 'type': '[AvailabilityEntity]'}, + } + + def __init__( + self, + *, + availabilities: Optional[List["AvailabilityEntity"]] = None, + **kwargs + ): + """ + :keyword availabilities: + :paramtype availabilities: + list[~azure.mgmt.marketplace.v2018_08_01_beta.models.AvailabilityEntity] + """ + super(Plan, self).__init__(**kwargs) + self.description = None + self.plan_id = None + self.display_name = None + self.availabilities = availabilities + + +class Price(msrest.serialization.Model): + """Price. + + :ivar currency_code: Currency code i.e. 'USD'. + :vartype currency_code: str + :ivar list_price: Retail price for the item. + :vartype list_price: float + """ + + _attribute_map = { + 'currency_code': {'key': 'currencyCode', 'type': 'str'}, + 'list_price': {'key': 'listPrice', 'type': 'float'}, + } + + def __init__( + self, + *, + currency_code: Optional[str] = None, + list_price: Optional[float] = None, + **kwargs + ): + """ + :keyword currency_code: Currency code i.e. 'USD'. + :paramtype currency_code: str + :keyword list_price: Retail price for the item. + :paramtype list_price: float + """ + super(Price, self).__init__(**kwargs) + self.currency_code = currency_code + self.list_price = list_price diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/operations/__init__.py b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/operations/__init__.py new file mode 100644 index 00000000000..d57e3328b94 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/operations/__init__.py @@ -0,0 +1,13 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._offer_operations import OfferOperations + +__all__ = [ + 'OfferOperations', +] diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/operations/_offer_operations.py b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/operations/_offer_operations.py new file mode 100644 index 00000000000..a4311e1e6b4 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/operations/_offer_operations.py @@ -0,0 +1,149 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
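The msrest-based models above use keyword-only constructors in their Python 3 variants, and read-only fields (for example `Offer.offer_id` or `Plan.plan_id`) are populated only from service responses. A small sketch of constructing and serializing the writable parts locally; the values are placeholders.

```
# Hedged sketch: building and serializing the msrest models locally.
from azext_spring.vendored_sdks.marketplace.v2018_08_01_beta.models import (
    IncludedQuantityProperty,
    Meter,
    Price,
)

meter = Meter(
    meter_id="example-meter-id",  # placeholder value
    price=Price(currency_code="USD", list_price=1.23),
    included_quantity_properties=[
        IncludedQuantityProperty(term_id="example-term-id", quantity="10"),
    ],
)

# as_dict() comes from msrest.serialization.Model and uses Python attribute
# names by default; the wire-format keys (meterId, currencyCode, ...) live in
# each class's _attribute_map and are applied during pipeline serialization.
print(meter.as_dict())
```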
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Optional, TypeVar + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_get_request( + offer_id, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + api_version = kwargs.pop('api_version', "2018-08-01-beta") # type: str + market = kwargs.pop('market', "US") # type: Optional[str] + include_stop_sold_plans = kwargs.pop('include_stop_sold_plans', "true") # type: Optional[str] + + accept = "application/json" + # Construct URL + _url = kwargs.pop("template_url", "/providers/Microsoft.Marketplace/offers/{offerId}") + path_format_arguments = { + "offerId": _SERIALIZER.url("offer_id", offer_id, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + if market is not None: + _query_parameters['market'] = _SERIALIZER.query("market", market, 'str') + if include_stop_sold_plans is not None: + _query_parameters['includeStopSoldPlans'] = _SERIALIZER.query("include_stop_sold_plans", include_stop_sold_plans, 'str') + _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + +# fmt: on +class OfferOperations(object): + """OfferOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.marketplace.v2018_08_01_beta.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + @distributed_trace + def get( + self, + offer_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.Offer" + """Gets information about a specific offer. + + :param offer_id: The offer ID to update or delete. 
+ :type offer_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Offer, or the result of cls(response) + :rtype: ~azure.mgmt.marketplace.v2018_08_01_beta.models.Offer + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.Offer"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + api_version = kwargs.pop('api_version', "2018-08-01-beta") # type: str + + + request = build_get_request( + offer_id=offer_id, + api_version=api_version, + market=self._config.market, + include_stop_sold_plans=self._config.include_stop_sold_plans, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Offer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/providers/Microsoft.Marketplace/offers/{offerId}"} # type: ignore + diff --git a/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/py.typed b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/py.typed new file mode 100644 index 00000000000..e5aff4f83af --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/marketplace/v2018_08_01_beta/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. \ No newline at end of file diff --git a/src/spring/setup.py b/src/spring/setup.py index 99029c1a704..f2b10a03b66 100644 --- a/src/spring/setup.py +++ b/src/spring/setup.py @@ -16,7 +16,7 @@ # TODO: Confirm this is the right version number you want and it matches your # HISTORY.rst entry. 
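For completeness, a hedged sketch of using the synchronous operations defined just above. The import path and the existence of a sync `MarketplaceRPService` client outside this excerpt are assumptions, and the per-call `api_version` override mirrors the kwarg popped inside `get`.

```
# Hedged sketch: synchronous usage of the vendored Marketplace client.
from azure.identity import DefaultAzureCredential  # assumed dependency
from azext_spring.vendored_sdks.marketplace.v2018_08_01_beta import MarketplaceRPService  # assumed export

client = MarketplaceRPService(
    credential=DefaultAzureCredential(),
    market="US",                     # sent as the ?market= query parameter
    include_stop_sold_plans="true",  # sent as ?includeStopSoldPlans=
)

# api_version may be overridden per call, though the generated docstrings warn
# that non-default values can lead to unsupported behavior.
offer = client.offer.get(offer_id="example-offer-id", api_version="2018-08-01-beta")
print(offer.offer_id, offer.e_tag)
```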
-VERSION = '1.1.14' +VERSION = '1.2.0' # The full list of classifiers is available at # https://pypi.python.org/pypi?%3Aaction=list_classifiers From e7140f21e9956b09179f038362db9d94e545d56a Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Tue, 1 Nov 2022 03:41:40 +0000 Subject: [PATCH 52/85] [Release] Update index.json for extension [ spring ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=12456&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/8a7a4dabb2ddb56d64bcfed8b00646c48d5a622a --- src/index.json | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/src/index.json b/src/index.json index e196629fb3a..d7bf3364691 100644 --- a/src/index.json +++ b/src/index.json @@ -36097,6 +36097,49 @@ "version": "1.1.14" }, "sha256Digest": "e7d57cac5569ee8b530ddc51cf44ff45eedbdf5f94fadbb058e79eea705758a8" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/spring-1.2.0-py3-none-any.whl", + "filename": "spring-1.2.0-py3-none-any.whl", + "metadata": { + "azext.isPreview": false, + "azext.minCliCoreVersion": "2.38.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/spring" + } + } + }, + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "spring", + "summary": "Microsoft Azure Command-Line Tools spring Extension", + "version": "1.2.0" + }, + "sha256Digest": "36ff3ca0812a809dcfda55b4510194f3a53891b7f5a7a2bb0bc5385c072e9fae" } ], "spring-cloud": [ From 0509f8518b4edc6bf24eadf1d670020d409ed60b Mon Sep 17 00:00:00 2001 From: songlu <37168047+PARADISSEEKR@users.noreply.github.com> Date: Tue, 1 Nov 2022 14:22:01 +0800 Subject: [PATCH 53/85] [Dynatrace] Dynatrace.Observability GA Release (#5165) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * dynatrace * update * 09-01-preview * EOL * test(draft) * Update _create.py * test * Update service_name.json * test * Update _update.py * test * test * example and preview tag * api version * Update service_name.json * update * update * rename sso * update * Update README.md * delete confirmation="” --- .github/CODEOWNERS | 2 + src/dynatrace/HISTORY.rst | 8 + src/dynatrace/README.md | 115 ++ src/dynatrace/azext_dynatrace/__init__.py | 42 + src/dynatrace/azext_dynatrace/_help.py | 11 + src/dynatrace/azext_dynatrace/_params.py | 13 + src/dynatrace/azext_dynatrace/aaz/__init__.py | 6 + .../azext_dynatrace/aaz/latest/__init__.py | 6 + .../aaz/latest/dynatrace/__cmd_group.py | 23 + .../aaz/latest/dynatrace/__init__.py | 11 + .../latest/dynatrace/monitor/__cmd_group.py | 23 + .../aaz/latest/dynatrace/monitor/__init__.py | 23 + .../aaz/latest/dynatrace/monitor/_create.py | 647 +++++++++++ .../aaz/latest/dynatrace/monitor/_delete.py | 159 +++ 
.../dynatrace/monitor/_get_sso_detail.py | 203 ++++ .../dynatrace/monitor/_get_vm_host_payload.py | 165 +++ .../aaz/latest/dynatrace/monitor/_list.py | 346 ++++++ .../dynatrace/monitor/_list_app_service.py | 193 ++++ .../latest/dynatrace/monitor/_list_host.py | 193 ++++ .../monitor/_list_linkable_environment.py | 226 ++++ .../monitor/_list_monitored_resource.py | 181 +++ .../aaz/latest/dynatrace/monitor/_show.py | 345 ++++++ .../aaz/latest/dynatrace/monitor/_update.py | 375 ++++++ .../aaz/latest/dynatrace/monitor/_wait.py | 341 ++++++ .../monitor/sso_config/__cmd_group.py | 23 + .../dynatrace/monitor/sso_config/__init__.py | 15 + .../dynatrace/monitor/sso_config/_create.py | 296 +++++ .../dynatrace/monitor/sso_config/_list.py | 226 ++++ .../dynatrace/monitor/sso_config/_show.py | 226 ++++ .../dynatrace/monitor/sso_config/_wait.py | 222 ++++ .../dynatrace/monitor/tag_rule/__cmd_group.py | 23 + .../dynatrace/monitor/tag_rule/__init__.py | 17 + .../dynatrace/monitor/tag_rule/_create.py | 410 +++++++ .../dynatrace/monitor/tag_rule/_delete.py | 169 +++ .../dynatrace/monitor/tag_rule/_list.py | 267 +++++ .../dynatrace/monitor/tag_rule/_show.py | 267 +++++ .../dynatrace/monitor/tag_rule/_update.py | 389 +++++++ .../dynatrace/monitor/tag_rule/_wait.py | 263 +++++ .../azext_dynatrace/azext_metadata.json | 4 + src/dynatrace/azext_dynatrace/commands.py | 15 + src/dynatrace/azext_dynatrace/custom.py | 14 + .../azext_dynatrace/tests/__init__.py | 6 + .../azext_dynatrace/tests/latest/__init__.py | 6 + .../tests/latest/credential_replacer.py | 24 + .../recordings/test_dynatrace_monitor.yaml | 1002 +++++++++++++++++ ...monitor_single_sign_on_configurations.yaml | 564 ++++++++++ .../test_dynatrace_monitor_tag_rule.yaml | 500 ++++++++ .../tests/latest/test_dynatrace.py | 169 +++ src/dynatrace/setup.cfg | 1 + src/dynatrace/setup.py | 49 + src/service_name.json | 5 + 51 files changed, 8829 insertions(+) create mode 100644 src/dynatrace/HISTORY.rst create mode 100644 src/dynatrace/README.md create mode 100644 src/dynatrace/azext_dynatrace/__init__.py create mode 100644 src/dynatrace/azext_dynatrace/_help.py create mode 100644 src/dynatrace/azext_dynatrace/_params.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/__init__.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/__init__.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/__cmd_group.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/__init__.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/__cmd_group.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/__init__.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_create.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_delete.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_get_sso_detail.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_get_vm_host_payload.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_list.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_list_app_service.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_list_host.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_list_linkable_environment.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_list_monitored_resource.py create mode 
100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_show.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_update.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_wait.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/__cmd_group.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/__init__.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/_create.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/_list.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/_show.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/_wait.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/__cmd_group.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/__init__.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_create.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_delete.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_list.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_show.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_update.py create mode 100644 src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_wait.py create mode 100644 src/dynatrace/azext_dynatrace/azext_metadata.json create mode 100644 src/dynatrace/azext_dynatrace/commands.py create mode 100644 src/dynatrace/azext_dynatrace/custom.py create mode 100644 src/dynatrace/azext_dynatrace/tests/__init__.py create mode 100644 src/dynatrace/azext_dynatrace/tests/latest/__init__.py create mode 100644 src/dynatrace/azext_dynatrace/tests/latest/credential_replacer.py create mode 100644 src/dynatrace/azext_dynatrace/tests/latest/recordings/test_dynatrace_monitor.yaml create mode 100644 src/dynatrace/azext_dynatrace/tests/latest/recordings/test_dynatrace_monitor_single_sign_on_configurations.yaml create mode 100644 src/dynatrace/azext_dynatrace/tests/latest/recordings/test_dynatrace_monitor_tag_rule.yaml create mode 100644 src/dynatrace/azext_dynatrace/tests/latest/test_dynatrace.py create mode 100644 src/dynatrace/setup.cfg create mode 100644 src/dynatrace/setup.py diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 8c8e419cd18..6fd29c2e006 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -251,3 +251,5 @@ /src/reservation/ @gaoyp830 @rkapso @msft-adrianma @sornaks @juhee0202 /src/scenario-guide/ @zhoxing-ms @ReaNAiveD + +/src/dynatrace/ @jsntcy diff --git a/src/dynatrace/HISTORY.rst b/src/dynatrace/HISTORY.rst new file mode 100644 index 00000000000..8c34bccfff8 --- /dev/null +++ b/src/dynatrace/HISTORY.rst @@ -0,0 +1,8 @@ +.. :changelog: + +Release History +=============== + +0.1.0 +++++++ +* Initial release. \ No newline at end of file diff --git a/src/dynatrace/README.md b/src/dynatrace/README.md new file mode 100644 index 00000000000..8ed997f5a1f --- /dev/null +++ b/src/dynatrace/README.md @@ -0,0 +1,115 @@ +# Azure CLI Dynatrace Extension # +This is an extension to Azure CLI to manage Dynatrace resources. 
+ +## How to use ## +Install this extension using the below CLI command +``` +az extension add --name dynatrace +``` + +### Included Features ### +#### dynatrace monitor #### +##### Create ##### +``` +az dynatrace monitor create -g rg -n monitor --user-info "{first-name:Alice,last-name:Bobab,email-address:Alice@microsoft.com,phone-number:1234567890,country:US}" + --plan-data "{usage-type:committed,billing-cycle:Monthly,plan-details:azureportalintegration_privatepreview@TIDhjdtn7tfnxcy,effective-date:2022-08-20}" + --environment "{single-sign-on:{aad-domains:['abc']}}" + +``` +##### Show ##### +``` +az dynatrace monitor show -g rg -n monitor +``` +##### List ##### +``` +az dynatrace monitor list -g rg +``` +##### Update ##### +``` +az dynatrace monitor update -g {rg} -n {monitor} --tags {{env:dev}} +``` + +##### Delete ##### +``` +az dynatrace monitor delete -n monitor -g rg -y + +``` + +##### Get-sso-detail ##### +``` +az dynatrace monitor get-sso-detail -g rg --monitor-name monitor --user-principal Alice@microsoft.com + +``` + +##### Get-vm-host-payload ##### +``` +az dynatrace monitor get-vm-host-payload -g rg --monitor-name monitor + +``` + +##### List-app-service ##### +``` +az dynatrace monitor list-app-service -g rg --monitor-name monitor + +``` + +##### List-host ##### +``` +az dynatrace monitor list-host -g rg --monitor-name monitor + +``` + +##### List-linkable-environment ##### +``` +az dynatrace monitor list-linkable-environment -g rg --monitor-name monitor --user-principal Alice@microsoft.com --region eastus2euap + +``` + +##### List-monitored-resource ##### +``` +az dynatrace monitor list-monitored-resource -g rg --monitor-name monitor + +``` + +#### dynatrace monitor tag-rule #### +##### Create ##### +``` +az dynatrace monitor tag-rule create -g rg --monitor-name monitor -n default +--log-rules "{send-aad-logs:enabled,send-subscription-logs:enabled,send-activity-logs:enabled,filtering-tags:[{name:env,value:prod,action:include},{name:env,value:dev,action:exclude}]}" +--metric-rules "{filtering-tags:[{name:env,value:prod,action:include}]}" + +``` +##### Show ##### +``` +az dynatrace monitor tag-rule show -g rg --monitor-name monitor -n default +``` +##### List ##### +``` +az dynatrace monitor tag-rule list -g rg --monitor-name monitor +``` +##### Update ##### +``` +az dynatrace monitor tag-rule update -g rg --monitor-name monitor -n default +``` + +##### Delete ##### +``` +az dynatrace monitor tag-rule delete -g rg --monitor-name monitor -n default -y + +``` + +#### dynatrace monitor sso-config #### +##### Create ##### +``` +az dynatrace monitor sso-config create -g rg --monitor-name monitor -n default +--aad-domains "['mpliftrdt20210811outlook.onmicrosoft.com']" --single-sign-on-url "https://www.dynatrace.io" + +``` +##### Show ##### +``` +az dynatrace monitor sso-config show -g rg --monitor-name monitor -n default +``` +##### List ##### +``` +az dynatrace monitor sso-config list -g rg --monitor-name monitor +``` diff --git a/src/dynatrace/azext_dynatrace/__init__.py b/src/dynatrace/azext_dynatrace/__init__.py new file mode 100644 index 00000000000..3011bc43ac5 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/__init__.py @@ -0,0 +1,42 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +from azure.cli.core import AzCommandsLoader +from azext_dynatrace._help import helps # pylint: disable=unused-import + + +class DynatraceCommandsLoader(AzCommandsLoader): + + def __init__(self, cli_ctx=None): + from azure.cli.core.commands import CliCommandType + custom_command_type = CliCommandType( + operations_tmpl='azext_dynatrace.custom#{}') + super().__init__(cli_ctx=cli_ctx, + custom_command_type=custom_command_type) + + def load_command_table(self, args): + from azext_dynatrace.commands import load_command_table + from azure.cli.core.aaz import load_aaz_command_table + try: + from . import aaz + except ImportError: + aaz = None + if aaz: + load_aaz_command_table( + loader=self, + aaz_pkg_name=aaz.__name__, + args=args + ) + load_command_table(self, args) + return self.command_table + + def load_arguments(self, command): + from azext_dynatrace._params import load_arguments + load_arguments(self, command) + + +COMMAND_LOADER_CLS = DynatraceCommandsLoader diff --git a/src/dynatrace/azext_dynatrace/_help.py b/src/dynatrace/azext_dynatrace/_help.py new file mode 100644 index 00000000000..126d5d00714 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/_help.py @@ -0,0 +1,11 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: disable=line-too-long +# pylint: disable=too-many-lines + +from knack.help_files import helps # pylint: disable=unused-import diff --git a/src/dynatrace/azext_dynatrace/_params.py b/src/dynatrace/azext_dynatrace/_params.py new file mode 100644 index 00000000000..cfcec717c9c --- /dev/null +++ b/src/dynatrace/azext_dynatrace/_params.py @@ -0,0 +1,13 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: disable=too-many-lines +# pylint: disable=too-many-statements + + +def load_arguments(self, _): # pylint: disable=unused-argument + pass diff --git a/src/dynatrace/azext_dynatrace/aaz/__init__.py b/src/dynatrace/azext_dynatrace/aaz/__init__.py new file mode 100644 index 00000000000..5757aea3175 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/__init__.py b/src/dynatrace/azext_dynatrace/aaz/latest/__init__.py new file mode 100644 index 00000000000..5757aea3175 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/__cmd_group.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/__cmd_group.py new file mode 100644 index 00000000000..8a9b80130ab --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/__cmd_group.py @@ -0,0 +1,23 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command_group( + "dynatrace", +) +class __CMDGroup(AAZCommandGroup): + """Manage dynatrace + """ + pass + + +__all__ = ["__CMDGroup"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/__init__.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/__init__.py new file mode 100644 index 00000000000..5a9d61963d6 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/__init__.py @@ -0,0 +1,11 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from .__cmd_group import * diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/__cmd_group.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/__cmd_group.py new file mode 100644 index 00000000000..5be83770da4 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/__cmd_group.py @@ -0,0 +1,23 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command_group( + "dynatrace monitor", +) +class __CMDGroup(AAZCommandGroup): + """Manage dynatrace monitor + """ + pass + + +__all__ = ["__CMDGroup"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/__init__.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/__init__.py new file mode 100644 index 00000000000..39a7265d2b1 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/__init__.py @@ -0,0 +1,23 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from .__cmd_group import * +from ._create import * +from ._delete import * +from ._get_sso_detail import * +from ._get_vm_host_payload import * +from ._list import * +from ._list_app_service import * +from ._list_host import * +from ._list_linkable_environment import * +from ._list_monitored_resource import * +from ._show import * +from ._update import * +from ._wait import * diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_create.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_create.py new file mode 100644 index 00000000000..a1a7222a863 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_create.py @@ -0,0 +1,647 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "dynatrace monitor create" +) +class Create(AAZCommand): + """Create a monitor resource + + :example: Create a monitor + az dynatrace monitor create -g rg -n monitor --user-info "{first-name:Alice,last-name:Bobab,email-address:Alice@microsoft.com,phone-number:1234567890,country:US}" --plan-data "{usage-type:committed,billing-cycle:Monthly,plan-details:azureportalintegration_privatepreview@TIDhjdtn7tfnxcy,effective-date:2022-08-20}" --environment "{single-sign-on:{aad-domains:['abc']}}" + """ + + _aaz_info = { + "version": "2021-09-01", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors/{}", "2021-09-01"], + ] + } + + AZ_SUPPORT_NO_WAIT = True + + def _handler(self, command_args): + super()._handler(command_args) + return self.build_lro_poller(self._execute_operations, self._output) + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.monitor_name = AAZStrArg( + options=["-n", "--name", "--monitor-name"], + help="Monitor resource name", + required=True, + id_part="name", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + + # define Arg Group "Properties" + + _args_schema = cls._args_schema + _args_schema.environment = AAZObjectArg( + options=["--environment"], + arg_group="Properties", + help="Properties of the Dynatrace environment.", + ) + _args_schema.subscription_status = AAZStrArg( + options=["--subscription-status"], + arg_group="Properties", + help="Marketplace subscription status.", + enum={"Active": "Active", "Suspended": "Suspended"}, + ) + _args_schema.monitoring_status = AAZStrArg( + options=["--monitoring-status"], + arg_group="Properties", + help="Status of the monitor.", + enum={"Disabled": "Disabled", "Enabled": "Enabled"}, + ) + _args_schema.plan_data = AAZObjectArg( + options=["--plan-data"], + arg_group="Properties", + help="Billing plan information.", + ) + _args_schema.user_info = AAZObjectArg( + options=["--user-info"], + arg_group="Properties", + help="User info.", + ) + + environment = cls._args_schema.environment + environment.account_info = AAZObjectArg( + options=["account-info"], + help="Dynatrace Account Information", + ) + environment.environment_info = AAZObjectArg( + options=["environment-info"], + help="Dynatrace Environment Information", + ) + environment.single_sign_on = AAZObjectArg( + options=["single-sign-on"], + help="The details of a Dynatrace single sign-on.", + ) + environment.user_id = AAZStrArg( + options=["user-id"], + help="User id", + ) + + account_info = cls._args_schema.environment.account_info + account_info.account_id = AAZStrArg( + options=["account-id"], + help="Account Id of the account this environment is linked to", + ) + account_info.region_id = AAZStrArg( + options=["region-id"], + help="Region in which the account is created", + ) + + environment_info = cls._args_schema.environment.environment_info + environment_info.environment_id = AAZStrArg( + options=["environment-id"], + help="Id of the environment created", + ) + environment_info.ingestion_key = AAZStrArg( + 
options=["ingestion-key"], + help="Ingestion key of the environment", + ) + environment_info.landing_url = AAZStrArg( + options=["landing-url"], + help="Landing URL for Dynatrace environment", + ) + environment_info.logs_ingestion_endpoint = AAZStrArg( + options=["logs-ingestion-endpoint"], + help="Ingestion endpoint used for sending logs", + ) + + single_sign_on = cls._args_schema.environment.single_sign_on + single_sign_on.aad_domains = AAZListArg( + options=["aad-domains"], + help="array of Aad(azure active directory) domains", + ) + single_sign_on.enterprise_app_id = AAZStrArg( + options=["enterprise-app-id"], + help="Version of the Dynatrace agent installed on the VM.", + ) + single_sign_on.single_sign_on_state = AAZStrArg( + options=["single-sign-on-state"], + help="State of Single Sign On", + enum={"Disable": "Disable", "Enable": "Enable", "Existing": "Existing", "Initial": "Initial"}, + ) + single_sign_on.single_sign_on_url = AAZStrArg( + options=["single-sign-on-url"], + help="The login URL specific to this Dynatrace Environment", + ) + + aad_domains = cls._args_schema.environment.single_sign_on.aad_domains + aad_domains.Element = AAZStrArg() + + plan_data = cls._args_schema.plan_data + plan_data.billing_cycle = AAZStrArg( + options=["billing-cycle"], + help="different billing cycles like MONTHLY/WEEKLY. this could be enum", + ) + plan_data.effective_date = AAZDateTimeArg( + options=["effective-date"], + help="date when plan was applied", + ) + plan_data.plan_details = AAZStrArg( + options=["plan-details"], + help="plan id as published by Dynatrace", + ) + plan_data.usage_type = AAZStrArg( + options=["usage-type"], + help="different usage type like PAYG/COMMITTED. this could be enum", + ) + + user_info = cls._args_schema.user_info + user_info.country = AAZStrArg( + options=["country"], + help="Country of the user", + ) + user_info.email_address = AAZStrArg( + options=["email-address"], + help="Email of the user used by Dynatrace for contacting them if needed", + fmt=AAZStrArgFormat( + pattern="^[A-Za-z0-9._%+-]+@(?:[A-Za-z0-9-]+\.)+[A-Za-z]{2,}$", + ), + ) + user_info.first_name = AAZStrArg( + options=["first-name"], + help="First Name of the user", + ) + user_info.last_name = AAZStrArg( + options=["last-name"], + help="Last Name of the user", + ) + user_info.phone_number = AAZStrArg( + options=["phone-number"], + help="Phone number of the user used by Dynatrace for contacting them if needed", + fmt=AAZStrArgFormat( + max_length=40, + ), + ) + + # define Arg Group "Resource" + + _args_schema = cls._args_schema + _args_schema.identity = AAZObjectArg( + options=["--identity"], + arg_group="Resource", + help="The managed service identities assigned to this resource.", + ) + _args_schema.location = AAZResourceLocationArg( + arg_group="Resource", + help="The geo-location where the resource lives", + required=True, + fmt=AAZResourceLocationArgFormat( + resource_group_arg="resource_group", + ), + ) + _args_schema.tags = AAZDictArg( + options=["--tags"], + arg_group="Resource", + help="Resource tags.", + ) + + identity = cls._args_schema.identity + identity.type = AAZStrArg( + options=["type"], + help="The type of managed identity assigned to this resource.", + required=True, + enum={"SystemAndUserAssigned": "SystemAndUserAssigned", "SystemAssigned": "SystemAssigned", "UserAssigned": "UserAssigned"}, + ) + identity.user_assigned_identities = AAZDictArg( + options=["user-assigned-identities"], + help="The identities assigned to this resource by the user.", + ) + + user_assigned_identities 
= cls._args_schema.identity.user_assigned_identities + user_assigned_identities.Element = AAZObjectArg() + + _element = cls._args_schema.identity.user_assigned_identities.Element + _element.client_id = AAZStrArg( + options=["client-id"], + help="The active directory client identifier for this principal.", + required=True, + ) + _element.principal_id = AAZStrArg( + options=["principal-id"], + help="The active directory identifier for this principal.", + required=True, + ) + + tags = cls._args_schema.tags + tags.Element = AAZStrArg() + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + yield self.MonitorsCreateOrUpdate(ctx=self.ctx)() + self.post_operations() + + @register_callback + def pre_operations(self): + pass + + @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) + return result + + class MonitorsCreateOrUpdate(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [202]: + return self.client.build_lro_polling( + self.ctx.args.no_wait, + session, + self.on_200_201, + self.on_error, + lro_options={"final-state-via": "azure-async-operation"}, + path_format_arguments=self.url_parameters, + ) + if session.http_response.status_code in [200, 201]: + return self.client.build_lro_polling( + self.ctx.args.no_wait, + session, + self.on_200_201, + self.on_error, + lro_options={"final-state-via": "azure-async-operation"}, + path_format_arguments=self.url_parameters, + ) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors/{monitorName}", + **self.url_parameters + ) + + @property + def method(self): + return "PUT" + + @property + def error_format(self): + return "MgmtErrorFormat" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "monitorName", self.ctx.args.monitor_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2021-09-01", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Content-Type", "application/json", + ), + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + @property + def content(self): + _content_value, _builder = self.new_content_builder( + self.ctx.args, + typ=AAZObjectType, + typ_kwargs={"flags": {"required": True, "client_flatten": True}} + ) + _builder.set_prop("identity", AAZObjectType, ".identity") + _builder.set_prop("location", AAZStrType, ".location", typ_kwargs={"flags": {"required": True}}) + _builder.set_prop("properties", AAZObjectType, ".", typ_kwargs={"flags": {"required": True, "client_flatten": True}}) + _builder.set_prop("tags", AAZDictType, ".tags") + + identity = _builder.get(".identity") + if identity is not None: + identity.set_prop("type", AAZStrType, ".type", 
typ_kwargs={"flags": {"required": True}}) + identity.set_prop("userAssignedIdentities", AAZDictType, ".user_assigned_identities") + + user_assigned_identities = _builder.get(".identity.userAssignedIdentities") + if user_assigned_identities is not None: + user_assigned_identities.set_elements(AAZObjectType, ".") + + _elements = _builder.get(".identity.userAssignedIdentities{}") + if _elements is not None: + _elements.set_prop("clientId", AAZStrType, ".client_id", typ_kwargs={"flags": {"required": True}}) + _elements.set_prop("principalId", AAZStrType, ".principal_id", typ_kwargs={"flags": {"required": True}}) + + properties = _builder.get(".properties") + if properties is not None: + properties.set_prop("dynatraceEnvironmentProperties", AAZObjectType, ".environment") + properties.set_prop("marketplaceSubscriptionStatus", AAZStrType, ".subscription_status") + properties.set_prop("monitoringStatus", AAZStrType, ".monitoring_status") + properties.set_prop("planData", AAZObjectType, ".plan_data") + properties.set_prop("userInfo", AAZObjectType, ".user_info") + + dynatrace_environment_properties = _builder.get(".properties.dynatraceEnvironmentProperties") + if dynatrace_environment_properties is not None: + dynatrace_environment_properties.set_prop("accountInfo", AAZObjectType, ".account_info") + dynatrace_environment_properties.set_prop("environmentInfo", AAZObjectType, ".environment_info") + dynatrace_environment_properties.set_prop("singleSignOnProperties", AAZObjectType, ".single_sign_on") + dynatrace_environment_properties.set_prop("userId", AAZStrType, ".user_id") + + account_info = _builder.get(".properties.dynatraceEnvironmentProperties.accountInfo") + if account_info is not None: + account_info.set_prop("accountId", AAZStrType, ".account_id") + account_info.set_prop("regionId", AAZStrType, ".region_id") + + environment_info = _builder.get(".properties.dynatraceEnvironmentProperties.environmentInfo") + if environment_info is not None: + environment_info.set_prop("environmentId", AAZStrType, ".environment_id") + environment_info.set_prop("ingestionKey", AAZStrType, ".ingestion_key") + environment_info.set_prop("landingURL", AAZStrType, ".landing_url") + environment_info.set_prop("logsIngestionEndpoint", AAZStrType, ".logs_ingestion_endpoint") + + single_sign_on_properties = _builder.get(".properties.dynatraceEnvironmentProperties.singleSignOnProperties") + if single_sign_on_properties is not None: + single_sign_on_properties.set_prop("aadDomains", AAZListType, ".aad_domains") + single_sign_on_properties.set_prop("enterpriseAppId", AAZStrType, ".enterprise_app_id") + single_sign_on_properties.set_prop("singleSignOnState", AAZStrType, ".single_sign_on_state") + single_sign_on_properties.set_prop("singleSignOnUrl", AAZStrType, ".single_sign_on_url") + + aad_domains = _builder.get(".properties.dynatraceEnvironmentProperties.singleSignOnProperties.aadDomains") + if aad_domains is not None: + aad_domains.set_elements(AAZStrType, ".") + + plan_data = _builder.get(".properties.planData") + if plan_data is not None: + plan_data.set_prop("billingCycle", AAZStrType, ".billing_cycle") + plan_data.set_prop("effectiveDate", AAZStrType, ".effective_date") + plan_data.set_prop("planDetails", AAZStrType, ".plan_details") + plan_data.set_prop("usageType", AAZStrType, ".usage_type") + + user_info = _builder.get(".properties.userInfo") + if user_info is not None: + user_info.set_prop("country", AAZStrType, ".country") + user_info.set_prop("emailAddress", AAZStrType, ".email_address") + 
user_info.set_prop("firstName", AAZStrType, ".first_name") + user_info.set_prop("lastName", AAZStrType, ".last_name") + user_info.set_prop("phoneNumber", AAZStrType, ".phone_number") + + tags = _builder.get(".tags") + if tags is not None: + tags.set_elements(AAZStrType, ".") + + return self.serialize_content(_content_value) + + def on_200_201(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200_201 + ) + + _schema_on_200_201 = None + + @classmethod + def _build_schema_on_200_201(cls): + if cls._schema_on_200_201 is not None: + return cls._schema_on_200_201 + + cls._schema_on_200_201 = AAZObjectType() + + _schema_on_200_201 = cls._schema_on_200_201 + _schema_on_200_201.id = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200_201.identity = AAZObjectType() + _schema_on_200_201.location = AAZStrType( + flags={"required": True}, + ) + _schema_on_200_201.name = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200_201.properties = AAZObjectType( + flags={"required": True, "client_flatten": True}, + ) + _schema_on_200_201.system_data = AAZObjectType( + serialized_name="systemData", + flags={"read_only": True}, + ) + _schema_on_200_201.tags = AAZDictType() + _schema_on_200_201.type = AAZStrType( + flags={"read_only": True}, + ) + + identity = cls._schema_on_200_201.identity + identity.principal_id = AAZStrType( + serialized_name="principalId", + flags={"read_only": True}, + ) + identity.tenant_id = AAZStrType( + serialized_name="tenantId", + flags={"read_only": True}, + ) + identity.type = AAZStrType( + flags={"required": True}, + ) + identity.user_assigned_identities = AAZDictType( + serialized_name="userAssignedIdentities", + ) + + user_assigned_identities = cls._schema_on_200_201.identity.user_assigned_identities + user_assigned_identities.Element = AAZObjectType() + + _element = cls._schema_on_200_201.identity.user_assigned_identities.Element + _element.client_id = AAZStrType( + serialized_name="clientId", + flags={"required": True}, + ) + _element.principal_id = AAZStrType( + serialized_name="principalId", + flags={"required": True}, + ) + + properties = cls._schema_on_200_201.properties + properties.dynatrace_environment_properties = AAZObjectType( + serialized_name="dynatraceEnvironmentProperties", + ) + properties.liftr_resource_category = AAZStrType( + serialized_name="liftrResourceCategory", + ) + properties.liftr_resource_preference = AAZIntType( + serialized_name="liftrResourcePreference", + flags={"read_only": True}, + ) + properties.marketplace_subscription_status = AAZStrType( + serialized_name="marketplaceSubscriptionStatus", + ) + properties.monitoring_status = AAZStrType( + serialized_name="monitoringStatus", + ) + properties.plan_data = AAZObjectType( + serialized_name="planData", + ) + properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + properties.user_info = AAZObjectType( + serialized_name="userInfo", + ) + + dynatrace_environment_properties = cls._schema_on_200_201.properties.dynatrace_environment_properties + dynatrace_environment_properties.account_info = AAZObjectType( + serialized_name="accountInfo", + ) + dynatrace_environment_properties.environment_info = AAZObjectType( + serialized_name="environmentInfo", + ) + dynatrace_environment_properties.single_sign_on_properties = AAZObjectType( + serialized_name="singleSignOnProperties", + ) + dynatrace_environment_properties.user_id = AAZStrType( + serialized_name="userId", + 
) + + account_info = cls._schema_on_200_201.properties.dynatrace_environment_properties.account_info + account_info.account_id = AAZStrType( + serialized_name="accountId", + ) + account_info.region_id = AAZStrType( + serialized_name="regionId", + ) + + environment_info = cls._schema_on_200_201.properties.dynatrace_environment_properties.environment_info + environment_info.environment_id = AAZStrType( + serialized_name="environmentId", + ) + environment_info.ingestion_key = AAZStrType( + serialized_name="ingestionKey", + ) + environment_info.landing_url = AAZStrType( + serialized_name="landingURL", + ) + environment_info.logs_ingestion_endpoint = AAZStrType( + serialized_name="logsIngestionEndpoint", + ) + + single_sign_on_properties = cls._schema_on_200_201.properties.dynatrace_environment_properties.single_sign_on_properties + single_sign_on_properties.aad_domains = AAZListType( + serialized_name="aadDomains", + ) + single_sign_on_properties.enterprise_app_id = AAZStrType( + serialized_name="enterpriseAppId", + ) + single_sign_on_properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + single_sign_on_properties.single_sign_on_state = AAZStrType( + serialized_name="singleSignOnState", + ) + single_sign_on_properties.single_sign_on_url = AAZStrType( + serialized_name="singleSignOnUrl", + ) + + aad_domains = cls._schema_on_200_201.properties.dynatrace_environment_properties.single_sign_on_properties.aad_domains + aad_domains.Element = AAZStrType() + + plan_data = cls._schema_on_200_201.properties.plan_data + plan_data.billing_cycle = AAZStrType( + serialized_name="billingCycle", + ) + plan_data.effective_date = AAZStrType( + serialized_name="effectiveDate", + ) + plan_data.plan_details = AAZStrType( + serialized_name="planDetails", + ) + plan_data.usage_type = AAZStrType( + serialized_name="usageType", + ) + + user_info = cls._schema_on_200_201.properties.user_info + user_info.country = AAZStrType() + user_info.email_address = AAZStrType( + serialized_name="emailAddress", + ) + user_info.first_name = AAZStrType( + serialized_name="firstName", + ) + user_info.last_name = AAZStrType( + serialized_name="lastName", + ) + user_info.phone_number = AAZStrType( + serialized_name="phoneNumber", + ) + + system_data = cls._schema_on_200_201.system_data + system_data.created_at = AAZStrType( + serialized_name="createdAt", + ) + system_data.created_by = AAZStrType( + serialized_name="createdBy", + ) + system_data.created_by_type = AAZStrType( + serialized_name="createdByType", + ) + system_data.last_modified_at = AAZStrType( + serialized_name="lastModifiedAt", + ) + system_data.last_modified_by = AAZStrType( + serialized_name="lastModifiedBy", + ) + system_data.last_modified_by_type = AAZStrType( + serialized_name="lastModifiedByType", + ) + + tags = cls._schema_on_200_201.tags + tags.Element = AAZStrType() + + return cls._schema_on_200_201 + + +__all__ = ["Create"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_delete.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_delete.py new file mode 100644 index 00000000000..427fbd3b41d --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_delete.py @@ -0,0 +1,159 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "dynatrace monitor delete", + confirmation="Are you sure you want to perform this operation?", +) +class Delete(AAZCommand): + """Delete a monitor resource + + :example: Delete a monitor + az dynatrace monitor delete -n monitor -g rg -y + """ + + _aaz_info = { + "version": "2021-09-01", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors/{}", "2021-09-01"], + ] + } + + AZ_SUPPORT_NO_WAIT = True + + def _handler(self, command_args): + super()._handler(command_args) + return self.build_lro_poller(self._execute_operations, None) + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.monitor_name = AAZStrArg( + options=["-n", "--name", "--monitor-name"], + help="Monitor resource name", + required=True, + id_part="name", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + yield self.MonitorsDelete(ctx=self.ctx)() + self.post_operations() + + @register_callback + def pre_operations(self): + pass + + @register_callback + def post_operations(self): + pass + + class MonitorsDelete(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [202]: + return self.client.build_lro_polling( + self.ctx.args.no_wait, + session, + self.on_200, + self.on_error, + lro_options={"final-state-via": "azure-async-operation"}, + path_format_arguments=self.url_parameters, + ) + if session.http_response.status_code in [200]: + return self.client.build_lro_polling( + self.ctx.args.no_wait, + session, + self.on_200, + self.on_error, + lro_options={"final-state-via": "azure-async-operation"}, + path_format_arguments=self.url_parameters, + ) + if session.http_response.status_code in [204]: + return self.client.build_lro_polling( + self.ctx.args.no_wait, + session, + self.on_204, + self.on_error, + lro_options={"final-state-via": "azure-async-operation"}, + path_format_arguments=self.url_parameters, + ) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors/{monitorName}", + **self.url_parameters + ) + + @property + def method(self): + return "DELETE" + + @property + def error_format(self): + return "MgmtErrorFormat" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "monitorName", self.ctx.args.monitor_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2021-09-01", + 
required=True, + ), + } + return parameters + + def on_200(self, session): + pass + + def on_204(self, session): + pass + + +__all__ = ["Delete"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_get_sso_detail.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_get_sso_detail.py new file mode 100644 index 00000000000..1410d489dc6 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_get_sso_detail.py @@ -0,0 +1,203 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "dynatrace monitor get-sso-detail", +) +class GetSsoDetail(AAZCommand): + """Get the SSO configuration details from the partner + + :example: Get-sso-detail + az dynatrace monitor get-sso-detail -g rg --monitor-name monitor --user-principal Alice@microsoft.com + """ + + _aaz_info = { + "version": "2021-09-01", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors/{}/getssodetails", "2021-09-01"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return self._output() + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.monitor_name = AAZStrArg( + options=["--monitor-name"], + help="Monitor resource name", + required=True, + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + + # define Arg Group "Request" + + _args_schema = cls._args_schema + _args_schema.user_principal = AAZStrArg( + options=["--user-principal"], + arg_group="Request", + help="user principal id of the user", + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.MonitorsGetSSODetails(ctx=self.ctx)() + self.post_operations() + + @register_callback + def pre_operations(self): + pass + + @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) + return result + + class MonitorsGetSSODetails(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors/{monitorName}/getSSODetails", + **self.url_parameters + ) + + @property + def method(self): + return "POST" + + @property + def error_format(self): + return "MgmtErrorFormat" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "monitorName", self.ctx.args.monitor_name, + required=True, + ), + 
**self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2021-09-01", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Content-Type", "application/json", + ), + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + @property + def content(self): + _content_value, _builder = self.new_content_builder( + self.ctx.args, + typ=AAZObjectType, + typ_kwargs={"flags": {"client_flatten": True}} + ) + _builder.set_prop("userPrincipal", AAZStrType, ".user_principal") + + return self.serialize_content(_content_value) + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + _schema_on_200.aad_domains = AAZListType( + serialized_name="aadDomains", + ) + _schema_on_200.admin_users = AAZListType( + serialized_name="adminUsers", + ) + _schema_on_200.is_sso_enabled = AAZStrType( + serialized_name="isSsoEnabled", + ) + _schema_on_200.metadata_url = AAZStrType( + serialized_name="metadataUrl", + ) + _schema_on_200.single_sign_on_url = AAZStrType( + serialized_name="singleSignOnUrl", + ) + + aad_domains = cls._schema_on_200.aad_domains + aad_domains.Element = AAZStrType() + + admin_users = cls._schema_on_200.admin_users + admin_users.Element = AAZStrType() + + return cls._schema_on_200 + + +__all__ = ["GetSsoDetail"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_get_vm_host_payload.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_get_vm_host_payload.py new file mode 100644 index 00000000000..5febbd242ce --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_get_vm_host_payload.py @@ -0,0 +1,165 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "dynatrace monitor get-vm-host-payload", +) +class GetVmHostPayload(AAZCommand): + """Return the payload that need to be passed in the request body for installing Dynatrace agent on a VM + + :example: Get-vm-host-payload + az dynatrace monitor get-vm-host-payload -g rg --monitor-name monitor + """ + + _aaz_info = { + "version": "2021-09-01", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors/{}/getvmhostpayload", "2021-09-01"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return self._output() + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.monitor_name = AAZStrArg( + options=["--monitor-name"], + help="Monitor resource name", + required=True, + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.MonitorsGetVMHostPayload(ctx=self.ctx)() + self.post_operations() + + @register_callback + def pre_operations(self): + pass + + @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) + return result + + class MonitorsGetVMHostPayload(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors/{monitorName}/getVMHostPayload", + **self.url_parameters + ) + + @property + def method(self): + return "POST" + + @property + def error_format(self): + return "MgmtErrorFormat" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "monitorName", self.ctx.args.monitor_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2021-09-01", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + + 
_schema_on_200 = cls._schema_on_200
+            _schema_on_200.environment_id = AAZStrType(
+                serialized_name="environmentId",
+            )
+            _schema_on_200.ingestion_key = AAZStrType(
+                serialized_name="ingestionKey",
+            )
+
+            return cls._schema_on_200
+
+
+__all__ = ["GetVmHostPayload"]
diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_list.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_list.py
new file mode 100644
index 00000000000..c3255edb106
--- /dev/null
+++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_list.py
@@ -0,0 +1,346 @@
+# --------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+#
+# Code generated by aaz-dev-tools
+# --------------------------------------------------------------------------------------------
+
+# pylint: skip-file
+# flake8: noqa
+
+from azure.cli.core.aaz import *
+
+
+@register_command(
+    "dynatrace monitor list"
+)
+class List(AAZCommand):
+    """List all monitor resources in a resource group
+
+    :example: List monitor
+        az dynatrace monitor list -g rg
+    """
+
+    _aaz_info = {
+        "version": "2021-09-01",
+        "resources": [
+            ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors", "2021-09-01"],
+        ]
+    }
+
+    def _handler(self, command_args):
+        super()._handler(command_args)
+        return self.build_paging(self._execute_operations, self._output)
+
+    _args_schema = None
+
+    @classmethod
+    def _build_arguments_schema(cls, *args, **kwargs):
+        if cls._args_schema is not None:
+            return cls._args_schema
+        cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
+
+        # define Arg Group ""
+
+        _args_schema = cls._args_schema
+        _args_schema.resource_group = AAZResourceGroupNameArg(
+            required=True,
+        )
+        return cls._args_schema
+
+    def _execute_operations(self):
+        self.pre_operations()
+        self.MonitorsListByResourceGroup(ctx=self.ctx)()
+        self.post_operations()
+
+    @register_callback
+    def pre_operations(self):
+        pass
+
+    @register_callback
+    def post_operations(self):
+        pass
+
+    def _output(self, *args, **kwargs):
+        result = self.deserialize_output(self.ctx.vars.instance.value, client_flatten=True)
+        next_link = self.deserialize_output(self.ctx.vars.instance.next_link)
+        return result, next_link
+
+    class MonitorsListByResourceGroup(AAZHttpOperation):
+        CLIENT_TYPE = "MgmtClient"
+
+        def __call__(self, *args, **kwargs):
+            request = self.make_request()
+            session = self.client.send_request(request=request, stream=False, **kwargs)
+            if session.http_response.status_code in [200]:
+                return self.on_200(session)
+
+            return self.on_error(session.http_response)
+
+        @property
+        def url(self):
+            return self.client.format_url(
+                "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors",
+                **self.url_parameters
+            )
+
+        @property
+        def method(self):
+            return "GET"
+
+        @property
+        def error_format(self):
+            return "MgmtErrorFormat"
+
+        @property
+        def url_parameters(self):
+            parameters = {
+                **self.serialize_url_param(
+                    "resourceGroupName", self.ctx.args.resource_group,
+                    required=True,
+                ),
+                **self.serialize_url_param(
+                    "subscriptionId", self.ctx.subscription_id,
+                    required=True,
+                ),
+            }
+            return parameters
+
+        @property
+        def query_parameters(self):
+            parameters = {
+                **self.serialize_query_param(
+                    "api-version",
"2021-09-01", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + _schema_on_200.next_link = AAZStrType( + serialized_name="nextLink", + ) + _schema_on_200.value = AAZListType( + flags={"required": True}, + ) + + value = cls._schema_on_200.value + value.Element = AAZObjectType() + + _element = cls._schema_on_200.value.Element + _element.id = AAZStrType( + flags={"read_only": True}, + ) + _element.identity = AAZObjectType() + _element.location = AAZStrType( + flags={"required": True}, + ) + _element.name = AAZStrType( + flags={"read_only": True}, + ) + _element.properties = AAZObjectType( + flags={"required": True, "client_flatten": True}, + ) + _element.system_data = AAZObjectType( + serialized_name="systemData", + flags={"read_only": True}, + ) + _element.tags = AAZDictType() + _element.type = AAZStrType( + flags={"read_only": True}, + ) + + identity = cls._schema_on_200.value.Element.identity + identity.principal_id = AAZStrType( + serialized_name="principalId", + flags={"read_only": True}, + ) + identity.tenant_id = AAZStrType( + serialized_name="tenantId", + flags={"read_only": True}, + ) + identity.type = AAZStrType( + flags={"required": True}, + ) + identity.user_assigned_identities = AAZDictType( + serialized_name="userAssignedIdentities", + ) + + user_assigned_identities = cls._schema_on_200.value.Element.identity.user_assigned_identities + user_assigned_identities.Element = AAZObjectType() + + _element = cls._schema_on_200.value.Element.identity.user_assigned_identities.Element + _element.client_id = AAZStrType( + serialized_name="clientId", + flags={"required": True}, + ) + _element.principal_id = AAZStrType( + serialized_name="principalId", + flags={"required": True}, + ) + + properties = cls._schema_on_200.value.Element.properties + properties.dynatrace_environment_properties = AAZObjectType( + serialized_name="dynatraceEnvironmentProperties", + ) + properties.liftr_resource_category = AAZStrType( + serialized_name="liftrResourceCategory", + ) + properties.liftr_resource_preference = AAZIntType( + serialized_name="liftrResourcePreference", + flags={"read_only": True}, + ) + properties.marketplace_subscription_status = AAZStrType( + serialized_name="marketplaceSubscriptionStatus", + ) + properties.monitoring_status = AAZStrType( + serialized_name="monitoringStatus", + ) + properties.plan_data = AAZObjectType( + serialized_name="planData", + ) + properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + properties.user_info = AAZObjectType( + serialized_name="userInfo", + ) + + dynatrace_environment_properties = cls._schema_on_200.value.Element.properties.dynatrace_environment_properties + dynatrace_environment_properties.account_info = AAZObjectType( + serialized_name="accountInfo", + ) + dynatrace_environment_properties.environment_info = AAZObjectType( + serialized_name="environmentInfo", + ) + dynatrace_environment_properties.single_sign_on_properties = AAZObjectType( + serialized_name="singleSignOnProperties", + ) 
+ dynatrace_environment_properties.user_id = AAZStrType( + serialized_name="userId", + ) + + account_info = cls._schema_on_200.value.Element.properties.dynatrace_environment_properties.account_info + account_info.account_id = AAZStrType( + serialized_name="accountId", + ) + account_info.region_id = AAZStrType( + serialized_name="regionId", + ) + + environment_info = cls._schema_on_200.value.Element.properties.dynatrace_environment_properties.environment_info + environment_info.environment_id = AAZStrType( + serialized_name="environmentId", + ) + environment_info.ingestion_key = AAZStrType( + serialized_name="ingestionKey", + ) + environment_info.landing_url = AAZStrType( + serialized_name="landingURL", + ) + environment_info.logs_ingestion_endpoint = AAZStrType( + serialized_name="logsIngestionEndpoint", + ) + + single_sign_on_properties = cls._schema_on_200.value.Element.properties.dynatrace_environment_properties.single_sign_on_properties + single_sign_on_properties.aad_domains = AAZListType( + serialized_name="aadDomains", + ) + single_sign_on_properties.enterprise_app_id = AAZStrType( + serialized_name="enterpriseAppId", + ) + single_sign_on_properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + single_sign_on_properties.single_sign_on_state = AAZStrType( + serialized_name="singleSignOnState", + ) + single_sign_on_properties.single_sign_on_url = AAZStrType( + serialized_name="singleSignOnUrl", + ) + + aad_domains = cls._schema_on_200.value.Element.properties.dynatrace_environment_properties.single_sign_on_properties.aad_domains + aad_domains.Element = AAZStrType() + + plan_data = cls._schema_on_200.value.Element.properties.plan_data + plan_data.billing_cycle = AAZStrType( + serialized_name="billingCycle", + ) + plan_data.effective_date = AAZStrType( + serialized_name="effectiveDate", + ) + plan_data.plan_details = AAZStrType( + serialized_name="planDetails", + ) + plan_data.usage_type = AAZStrType( + serialized_name="usageType", + ) + + user_info = cls._schema_on_200.value.Element.properties.user_info + user_info.country = AAZStrType() + user_info.email_address = AAZStrType( + serialized_name="emailAddress", + ) + user_info.first_name = AAZStrType( + serialized_name="firstName", + ) + user_info.last_name = AAZStrType( + serialized_name="lastName", + ) + user_info.phone_number = AAZStrType( + serialized_name="phoneNumber", + ) + + system_data = cls._schema_on_200.value.Element.system_data + system_data.created_at = AAZStrType( + serialized_name="createdAt", + ) + system_data.created_by = AAZStrType( + serialized_name="createdBy", + ) + system_data.created_by_type = AAZStrType( + serialized_name="createdByType", + ) + system_data.last_modified_at = AAZStrType( + serialized_name="lastModifiedAt", + ) + system_data.last_modified_by = AAZStrType( + serialized_name="lastModifiedBy", + ) + system_data.last_modified_by_type = AAZStrType( + serialized_name="lastModifiedByType", + ) + + tags = cls._schema_on_200.value.Element.tags + tags.Element = AAZStrType() + + return cls._schema_on_200 + + +__all__ = ["List"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_list_app_service.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_list_app_service.py new file mode 100644 index 00000000000..d3112a66bb1 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_list_app_service.py @@ -0,0 +1,193 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) 
Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "dynatrace monitor list-app-service" +) +class ListAppService(AAZCommand): + """Get list of app services with dynatrace PaaS OneAgent enabled + + :example: List-app-service + az dynatrace monitor list-app-service -g rg --monitor-name monitor + """ + + _aaz_info = { + "version": "2021-09-01", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors/{}/listappservices", "2021-09-01"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + return self.build_paging(self._execute_operations, self._output) + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.monitor_name = AAZStrArg( + options=["--monitor-name"], + help="Monitor resource name", + required=True, + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.MonitorsListAppServices(ctx=self.ctx)() + self.post_operations() + + @register_callback + def pre_operations(self): + pass + + @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance.value, client_flatten=True) + next_link = self.deserialize_output(self.ctx.vars.instance.next_link) + return result, next_link + + class MonitorsListAppServices(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors/{monitorName}/listAppServices", + **self.url_parameters + ) + + @property + def method(self): + return "POST" + + @property + def error_format(self): + return "MgmtErrorFormat" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "monitorName", self.ctx.args.monitor_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2021-09-01", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + 
_schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + _schema_on_200.next_link = AAZStrType( + serialized_name="nextLink", + ) + _schema_on_200.value = AAZListType() + + value = cls._schema_on_200.value + value.Element = AAZObjectType() + + _element = cls._schema_on_200.value.Element + _element.auto_update_setting = AAZStrType( + serialized_name="autoUpdateSetting", + ) + _element.availability_state = AAZStrType( + serialized_name="availabilityState", + ) + _element.host_group = AAZStrType( + serialized_name="hostGroup", + ) + _element.host_name = AAZStrType( + serialized_name="hostName", + ) + _element.log_module = AAZStrType( + serialized_name="logModule", + ) + _element.monitoring_type = AAZStrType( + serialized_name="monitoringType", + ) + _element.resource_id = AAZStrType( + serialized_name="resourceId", + ) + _element.update_status = AAZStrType( + serialized_name="updateStatus", + ) + _element.version = AAZStrType() + + return cls._schema_on_200 + + +__all__ = ["ListAppService"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_list_host.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_list_host.py new file mode 100644 index 00000000000..ffff5ea9398 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_list_host.py @@ -0,0 +1,193 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "dynatrace monitor list-host" +) +class ListHost(AAZCommand): + """List the compute resources currently being monitored by the dynatrace resource + + :example: List-host + az dynatrace monitor list-host -g rg --monitor-name monitor + """ + + _aaz_info = { + "version": "2021-09-01", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors/{}/listhosts", "2021-09-01"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + return self.build_paging(self._execute_operations, self._output) + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.monitor_name = AAZStrArg( + options=["--monitor-name"], + help="Monitor resource name", + required=True, + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.MonitorsListHosts(ctx=self.ctx)() + self.post_operations() + + @register_callback + def pre_operations(self): + pass + + @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance.value, client_flatten=True) + next_link = self.deserialize_output(self.ctx.vars.instance.next_link) + return result, next_link + + class 
MonitorsListHosts(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors/{monitorName}/listHosts", + **self.url_parameters + ) + + @property + def method(self): + return "POST" + + @property + def error_format(self): + return "MgmtErrorFormat" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "monitorName", self.ctx.args.monitor_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2021-09-01", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + _schema_on_200.next_link = AAZStrType( + serialized_name="nextLink", + ) + _schema_on_200.value = AAZListType() + + value = cls._schema_on_200.value + value.Element = AAZObjectType() + + _element = cls._schema_on_200.value.Element + _element.auto_update_setting = AAZStrType( + serialized_name="autoUpdateSetting", + ) + _element.availability_state = AAZStrType( + serialized_name="availabilityState", + ) + _element.host_group = AAZStrType( + serialized_name="hostGroup", + ) + _element.host_name = AAZStrType( + serialized_name="hostName", + ) + _element.log_module = AAZStrType( + serialized_name="logModule", + ) + _element.monitoring_type = AAZStrType( + serialized_name="monitoringType", + ) + _element.resource_id = AAZStrType( + serialized_name="resourceId", + ) + _element.update_status = AAZStrType( + serialized_name="updateStatus", + ) + _element.version = AAZStrType() + + return cls._schema_on_200 + + +__all__ = ["ListHost"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_list_linkable_environment.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_list_linkable_environment.py new file mode 100644 index 00000000000..65296cfb641 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_list_linkable_environment.py @@ -0,0 +1,226 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "dynatrace monitor list-linkable-environment" +) +class ListLinkableEnvironment(AAZCommand): + """Get all the dynatrace environments that a user can link a azure resource to + + :example: List-linkable-environment + az dynatrace monitor list-linkable-environment -g rg --monitor-name monitor --user-principal Alice@microsoft.com --region eastus2euap + """ + + _aaz_info = { + "version": "2021-09-01", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors/{}/listlinkableenvironments", "2021-09-01"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + return self.build_paging(self._execute_operations, self._output) + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.monitor_name = AAZStrArg( + options=["--monitor-name"], + help="Monitor resource name", + required=True, + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + + # define Arg Group "Request" + + _args_schema = cls._args_schema + _args_schema.region = AAZStrArg( + options=["--region"], + arg_group="Request", + help="Azure region in which we want to link the environment", + ) + _args_schema.tenant_id = AAZStrArg( + options=["--tenant-id"], + arg_group="Request", + help="Tenant Id of the user in which they want to link the environment", + ) + _args_schema.user_principal = AAZStrArg( + options=["--user-principal"], + arg_group="Request", + help="user principal id of the user", + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.MonitorsListLinkableEnvironments(ctx=self.ctx)() + self.post_operations() + + @register_callback + def pre_operations(self): + pass + + @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance.value, client_flatten=True) + next_link = self.deserialize_output(self.ctx.vars.instance.next_link) + return result, next_link + + class MonitorsListLinkableEnvironments(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors/{monitorName}/listLinkableEnvironments", + **self.url_parameters + ) + + @property + def method(self): + return "POST" + + @property + def error_format(self): + return "MgmtErrorFormat" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "monitorName", self.ctx.args.monitor_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + 
), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2021-09-01", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Content-Type", "application/json", + ), + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + @property + def content(self): + _content_value, _builder = self.new_content_builder( + self.ctx.args, + typ=AAZObjectType, + typ_kwargs={"flags": {"required": True, "client_flatten": True}} + ) + _builder.set_prop("region", AAZStrType, ".region") + _builder.set_prop("tenantId", AAZStrType, ".tenant_id") + _builder.set_prop("userPrincipal", AAZStrType, ".user_principal") + + return self.serialize_content(_content_value) + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + _schema_on_200.next_link = AAZStrType( + serialized_name="nextLink", + ) + _schema_on_200.value = AAZListType() + + value = cls._schema_on_200.value + value.Element = AAZObjectType() + + _element = cls._schema_on_200.value.Element + _element.environment_id = AAZStrType( + serialized_name="environmentId", + ) + _element.environment_name = AAZStrType( + serialized_name="environmentName", + ) + _element.plan_data = AAZObjectType( + serialized_name="planData", + ) + + plan_data = cls._schema_on_200.value.Element.plan_data + plan_data.billing_cycle = AAZStrType( + serialized_name="billingCycle", + ) + plan_data.effective_date = AAZStrType( + serialized_name="effectiveDate", + ) + plan_data.plan_details = AAZStrType( + serialized_name="planDetails", + ) + plan_data.usage_type = AAZStrType( + serialized_name="usageType", + ) + + return cls._schema_on_200 + + +__all__ = ["ListLinkableEnvironment"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_list_monitored_resource.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_list_monitored_resource.py new file mode 100644 index 00000000000..a38efea915c --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_list_monitored_resource.py @@ -0,0 +1,181 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "dynatrace monitor list-monitored-resource" +) +class ListMonitoredResource(AAZCommand): + """List the resources currently being monitored by the dynatrace monitor resource + + :example: List-monitored-resource + az dynatrace monitor list-monitored-resource -g rg --monitor-name monitor + """ + + _aaz_info = { + "version": "2021-09-01", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors/{}/listmonitoredresources", "2021-09-01"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + return self.build_paging(self._execute_operations, self._output) + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.monitor_name = AAZStrArg( + options=["--monitor-name"], + help="Monitor resource name", + required=True, + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.MonitorsListMonitoredResources(ctx=self.ctx)() + self.post_operations() + + @register_callback + def pre_operations(self): + pass + + @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance.value, client_flatten=True) + next_link = self.deserialize_output(self.ctx.vars.instance.next_link) + return result, next_link + + class MonitorsListMonitoredResources(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors/{monitorName}/listMonitoredResources", + **self.url_parameters + ) + + @property + def method(self): + return "POST" + + @property + def error_format(self): + return "MgmtErrorFormat" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "monitorName", self.ctx.args.monitor_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2021-09-01", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + 
if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + _schema_on_200.next_link = AAZStrType( + serialized_name="nextLink", + ) + _schema_on_200.value = AAZListType() + + value = cls._schema_on_200.value + value.Element = AAZObjectType() + + _element = cls._schema_on_200.value.Element + _element.id = AAZStrType() + _element.reason_for_logs_status = AAZStrType( + serialized_name="reasonForLogsStatus", + ) + _element.reason_for_metrics_status = AAZStrType( + serialized_name="reasonForMetricsStatus", + ) + _element.sending_logs = AAZStrType( + serialized_name="sendingLogs", + ) + _element.sending_metrics = AAZStrType( + serialized_name="sendingMetrics", + ) + + return cls._schema_on_200 + + +__all__ = ["ListMonitoredResource"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_show.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_show.py new file mode 100644 index 00000000000..ffaab2c8352 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_show.py @@ -0,0 +1,345 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "dynatrace monitor show" +) +class Show(AAZCommand): + """Show a monitor resource + + :example: Show a monitor + az dynatrace monitor show -g rg -n monitor + """ + + _aaz_info = { + "version": "2021-09-01", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors/{}", "2021-09-01"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return self._output() + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.monitor_name = AAZStrArg( + options=["-n", "--name", "--monitor-name"], + help="Monitor resource name", + required=True, + id_part="name", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.MonitorsGet(ctx=self.ctx)() + self.post_operations() + + @register_callback + def pre_operations(self): + pass + + @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) + return result + + class MonitorsGet(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors/{monitorName}", + **self.url_parameters + ) + + @property + def method(self): + return "GET" + + @property + def error_format(self): + return "MgmtErrorFormat" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "monitorName", self.ctx.args.monitor_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2021-09-01", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + _schema_on_200.id = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.identity = AAZObjectType() + _schema_on_200.location = AAZStrType( + flags={"required": True}, + ) + _schema_on_200.name = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.properties = AAZObjectType( + flags={"required": True, "client_flatten": True}, + ) + _schema_on_200.system_data = AAZObjectType( + serialized_name="systemData", + flags={"read_only": True}, + ) + _schema_on_200.tags = AAZDictType() + _schema_on_200.type = AAZStrType( + flags={"read_only": True}, + ) + + identity = cls._schema_on_200.identity + identity.principal_id = AAZStrType( + serialized_name="principalId", + flags={"read_only": True}, + ) + identity.tenant_id = AAZStrType( + serialized_name="tenantId", + flags={"read_only": True}, + ) + identity.type = AAZStrType( + flags={"required": True}, + ) + identity.user_assigned_identities = AAZDictType( + serialized_name="userAssignedIdentities", + ) + + user_assigned_identities = cls._schema_on_200.identity.user_assigned_identities + user_assigned_identities.Element = AAZObjectType() + + _element = cls._schema_on_200.identity.user_assigned_identities.Element + _element.client_id = AAZStrType( + serialized_name="clientId", + flags={"required": True}, + ) + _element.principal_id = AAZStrType( + serialized_name="principalId", + flags={"required": True}, + ) + + properties = cls._schema_on_200.properties + properties.dynatrace_environment_properties = AAZObjectType( + serialized_name="dynatraceEnvironmentProperties", + ) + properties.liftr_resource_category = AAZStrType( + serialized_name="liftrResourceCategory", + ) + properties.liftr_resource_preference = AAZIntType( + serialized_name="liftrResourcePreference", + flags={"read_only": True}, + ) + properties.marketplace_subscription_status = AAZStrType( + serialized_name="marketplaceSubscriptionStatus", + ) + properties.monitoring_status = AAZStrType( + serialized_name="monitoringStatus", + ) + properties.plan_data = AAZObjectType( + serialized_name="planData", + ) + properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + properties.user_info = 
AAZObjectType( + serialized_name="userInfo", + ) + + dynatrace_environment_properties = cls._schema_on_200.properties.dynatrace_environment_properties + dynatrace_environment_properties.account_info = AAZObjectType( + serialized_name="accountInfo", + ) + dynatrace_environment_properties.environment_info = AAZObjectType( + serialized_name="environmentInfo", + ) + dynatrace_environment_properties.single_sign_on_properties = AAZObjectType( + serialized_name="singleSignOnProperties", + ) + dynatrace_environment_properties.user_id = AAZStrType( + serialized_name="userId", + ) + + account_info = cls._schema_on_200.properties.dynatrace_environment_properties.account_info + account_info.account_id = AAZStrType( + serialized_name="accountId", + ) + account_info.region_id = AAZStrType( + serialized_name="regionId", + ) + + environment_info = cls._schema_on_200.properties.dynatrace_environment_properties.environment_info + environment_info.environment_id = AAZStrType( + serialized_name="environmentId", + ) + environment_info.ingestion_key = AAZStrType( + serialized_name="ingestionKey", + ) + environment_info.landing_url = AAZStrType( + serialized_name="landingURL", + ) + environment_info.logs_ingestion_endpoint = AAZStrType( + serialized_name="logsIngestionEndpoint", + ) + + single_sign_on_properties = cls._schema_on_200.properties.dynatrace_environment_properties.single_sign_on_properties + single_sign_on_properties.aad_domains = AAZListType( + serialized_name="aadDomains", + ) + single_sign_on_properties.enterprise_app_id = AAZStrType( + serialized_name="enterpriseAppId", + ) + single_sign_on_properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + single_sign_on_properties.single_sign_on_state = AAZStrType( + serialized_name="singleSignOnState", + ) + single_sign_on_properties.single_sign_on_url = AAZStrType( + serialized_name="singleSignOnUrl", + ) + + aad_domains = cls._schema_on_200.properties.dynatrace_environment_properties.single_sign_on_properties.aad_domains + aad_domains.Element = AAZStrType() + + plan_data = cls._schema_on_200.properties.plan_data + plan_data.billing_cycle = AAZStrType( + serialized_name="billingCycle", + ) + plan_data.effective_date = AAZStrType( + serialized_name="effectiveDate", + ) + plan_data.plan_details = AAZStrType( + serialized_name="planDetails", + ) + plan_data.usage_type = AAZStrType( + serialized_name="usageType", + ) + + user_info = cls._schema_on_200.properties.user_info + user_info.country = AAZStrType() + user_info.email_address = AAZStrType( + serialized_name="emailAddress", + ) + user_info.first_name = AAZStrType( + serialized_name="firstName", + ) + user_info.last_name = AAZStrType( + serialized_name="lastName", + ) + user_info.phone_number = AAZStrType( + serialized_name="phoneNumber", + ) + + system_data = cls._schema_on_200.system_data + system_data.created_at = AAZStrType( + serialized_name="createdAt", + ) + system_data.created_by = AAZStrType( + serialized_name="createdBy", + ) + system_data.created_by_type = AAZStrType( + serialized_name="createdByType", + ) + system_data.last_modified_at = AAZStrType( + serialized_name="lastModifiedAt", + ) + system_data.last_modified_by = AAZStrType( + serialized_name="lastModifiedBy", + ) + system_data.last_modified_by_type = AAZStrType( + serialized_name="lastModifiedByType", + ) + + tags = cls._schema_on_200.tags + tags.Element = AAZStrType() + + return cls._schema_on_200 + + +__all__ = ["Show"] diff --git 
a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_update.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_update.py new file mode 100644 index 00000000000..b506ab52955 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_update.py @@ -0,0 +1,375 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "dynatrace monitor update" +) +class Update(AAZCommand): + """Update a monitor resource + + :example: Update monitor + az dynatrace monitor update -g {rg} -n {monitor} --tags {{env:dev}} + """ + + _aaz_info = { + "version": "2021-09-01", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors/{}", "2021-09-01"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return self._output() + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.monitor_name = AAZStrArg( + options=["-n", "--name", "--monitor-name"], + help="Monitor resource name", + required=True, + id_part="name", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + + # define Arg Group "Resource" + + _args_schema = cls._args_schema + _args_schema.tags = AAZDictArg( + options=["--tags"], + arg_group="Resource", + help="Resource tags.", + ) + + tags = cls._args_schema.tags + tags.Element = AAZStrArg() + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.MonitorsUpdate(ctx=self.ctx)() + self.post_operations() + + @register_callback + def pre_operations(self): + pass + + @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) + return result + + class MonitorsUpdate(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors/{monitorName}", + **self.url_parameters + ) + + @property + def method(self): + return "PATCH" + + @property + def error_format(self): + return "MgmtErrorFormat" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "monitorName", self.ctx.args.monitor_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def 
query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2021-09-01", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Content-Type", "application/json", + ), + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + @property + def content(self): + _content_value, _builder = self.new_content_builder( + self.ctx.args, + typ=AAZObjectType, + typ_kwargs={"flags": {"required": True, "client_flatten": True}} + ) + _builder.set_prop("tags", AAZDictType, ".tags") + + tags = _builder.get(".tags") + if tags is not None: + tags.set_elements(AAZStrType, ".") + + return self.serialize_content(_content_value) + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + _schema_on_200.id = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.identity = AAZObjectType() + _schema_on_200.location = AAZStrType( + flags={"required": True}, + ) + _schema_on_200.name = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.properties = AAZObjectType( + flags={"required": True, "client_flatten": True}, + ) + _schema_on_200.system_data = AAZObjectType( + serialized_name="systemData", + flags={"read_only": True}, + ) + _schema_on_200.tags = AAZDictType() + _schema_on_200.type = AAZStrType( + flags={"read_only": True}, + ) + + identity = cls._schema_on_200.identity + identity.principal_id = AAZStrType( + serialized_name="principalId", + flags={"read_only": True}, + ) + identity.tenant_id = AAZStrType( + serialized_name="tenantId", + flags={"read_only": True}, + ) + identity.type = AAZStrType( + flags={"required": True}, + ) + identity.user_assigned_identities = AAZDictType( + serialized_name="userAssignedIdentities", + ) + + user_assigned_identities = cls._schema_on_200.identity.user_assigned_identities + user_assigned_identities.Element = AAZObjectType() + + _element = cls._schema_on_200.identity.user_assigned_identities.Element + _element.client_id = AAZStrType( + serialized_name="clientId", + flags={"required": True}, + ) + _element.principal_id = AAZStrType( + serialized_name="principalId", + flags={"required": True}, + ) + + properties = cls._schema_on_200.properties + properties.dynatrace_environment_properties = AAZObjectType( + serialized_name="dynatraceEnvironmentProperties", + ) + properties.liftr_resource_category = AAZStrType( + serialized_name="liftrResourceCategory", + ) + properties.liftr_resource_preference = AAZIntType( + serialized_name="liftrResourcePreference", + flags={"read_only": True}, + ) + properties.marketplace_subscription_status = AAZStrType( + serialized_name="marketplaceSubscriptionStatus", + ) + properties.monitoring_status = AAZStrType( + serialized_name="monitoringStatus", + ) + properties.plan_data = AAZObjectType( + serialized_name="planData", + ) + properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + properties.user_info = AAZObjectType( + serialized_name="userInfo", + ) + + dynatrace_environment_properties = cls._schema_on_200.properties.dynatrace_environment_properties + dynatrace_environment_properties.account_info = AAZObjectType( 
+ serialized_name="accountInfo", + ) + dynatrace_environment_properties.environment_info = AAZObjectType( + serialized_name="environmentInfo", + ) + dynatrace_environment_properties.single_sign_on_properties = AAZObjectType( + serialized_name="singleSignOnProperties", + ) + dynatrace_environment_properties.user_id = AAZStrType( + serialized_name="userId", + ) + + account_info = cls._schema_on_200.properties.dynatrace_environment_properties.account_info + account_info.account_id = AAZStrType( + serialized_name="accountId", + ) + account_info.region_id = AAZStrType( + serialized_name="regionId", + ) + + environment_info = cls._schema_on_200.properties.dynatrace_environment_properties.environment_info + environment_info.environment_id = AAZStrType( + serialized_name="environmentId", + ) + environment_info.ingestion_key = AAZStrType( + serialized_name="ingestionKey", + ) + environment_info.landing_url = AAZStrType( + serialized_name="landingURL", + ) + environment_info.logs_ingestion_endpoint = AAZStrType( + serialized_name="logsIngestionEndpoint", + ) + + single_sign_on_properties = cls._schema_on_200.properties.dynatrace_environment_properties.single_sign_on_properties + single_sign_on_properties.aad_domains = AAZListType( + serialized_name="aadDomains", + ) + single_sign_on_properties.enterprise_app_id = AAZStrType( + serialized_name="enterpriseAppId", + ) + single_sign_on_properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + single_sign_on_properties.single_sign_on_state = AAZStrType( + serialized_name="singleSignOnState", + ) + single_sign_on_properties.single_sign_on_url = AAZStrType( + serialized_name="singleSignOnUrl", + ) + + aad_domains = cls._schema_on_200.properties.dynatrace_environment_properties.single_sign_on_properties.aad_domains + aad_domains.Element = AAZStrType() + + plan_data = cls._schema_on_200.properties.plan_data + plan_data.billing_cycle = AAZStrType( + serialized_name="billingCycle", + ) + plan_data.effective_date = AAZStrType( + serialized_name="effectiveDate", + ) + plan_data.plan_details = AAZStrType( + serialized_name="planDetails", + ) + plan_data.usage_type = AAZStrType( + serialized_name="usageType", + ) + + user_info = cls._schema_on_200.properties.user_info + user_info.country = AAZStrType() + user_info.email_address = AAZStrType( + serialized_name="emailAddress", + ) + user_info.first_name = AAZStrType( + serialized_name="firstName", + ) + user_info.last_name = AAZStrType( + serialized_name="lastName", + ) + user_info.phone_number = AAZStrType( + serialized_name="phoneNumber", + ) + + system_data = cls._schema_on_200.system_data + system_data.created_at = AAZStrType( + serialized_name="createdAt", + ) + system_data.created_by = AAZStrType( + serialized_name="createdBy", + ) + system_data.created_by_type = AAZStrType( + serialized_name="createdByType", + ) + system_data.last_modified_at = AAZStrType( + serialized_name="lastModifiedAt", + ) + system_data.last_modified_by = AAZStrType( + serialized_name="lastModifiedBy", + ) + system_data.last_modified_by_type = AAZStrType( + serialized_name="lastModifiedByType", + ) + + tags = cls._schema_on_200.tags + tags.Element = AAZStrType() + + return cls._schema_on_200 + + +__all__ = ["Update"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_wait.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_wait.py new file mode 100644 index 00000000000..76c90ef8c69 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/_wait.py @@ 
-0,0 +1,341 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "dynatrace monitor wait", +) +class Wait(AAZWaitCommand): + """Place the CLI in a waiting state until a condition is met. + """ + + _aaz_info = { + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors/{}", "2021-09-01"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return self._output() + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.monitor_name = AAZStrArg( + options=["-n", "--name", "--monitor-name"], + help="Monitor resource name", + required=True, + id_part="name", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.MonitorsGet(ctx=self.ctx)() + self.post_operations() + + @register_callback + def pre_operations(self): + pass + + @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance, client_flatten=False) + return result + + class MonitorsGet(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors/{monitorName}", + **self.url_parameters + ) + + @property + def method(self): + return "GET" + + @property + def error_format(self): + return "MgmtErrorFormat" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "monitorName", self.ctx.args.monitor_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2021-09-01", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + 
cls._schema_on_200 = AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + _schema_on_200.id = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.identity = AAZObjectType() + _schema_on_200.location = AAZStrType( + flags={"required": True}, + ) + _schema_on_200.name = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.properties = AAZObjectType( + flags={"required": True, "client_flatten": True}, + ) + _schema_on_200.system_data = AAZObjectType( + serialized_name="systemData", + flags={"read_only": True}, + ) + _schema_on_200.tags = AAZDictType() + _schema_on_200.type = AAZStrType( + flags={"read_only": True}, + ) + + identity = cls._schema_on_200.identity + identity.principal_id = AAZStrType( + serialized_name="principalId", + flags={"read_only": True}, + ) + identity.tenant_id = AAZStrType( + serialized_name="tenantId", + flags={"read_only": True}, + ) + identity.type = AAZStrType( + flags={"required": True}, + ) + identity.user_assigned_identities = AAZDictType( + serialized_name="userAssignedIdentities", + ) + + user_assigned_identities = cls._schema_on_200.identity.user_assigned_identities + user_assigned_identities.Element = AAZObjectType() + + _element = cls._schema_on_200.identity.user_assigned_identities.Element + _element.client_id = AAZStrType( + serialized_name="clientId", + flags={"required": True}, + ) + _element.principal_id = AAZStrType( + serialized_name="principalId", + flags={"required": True}, + ) + + properties = cls._schema_on_200.properties + properties.dynatrace_environment_properties = AAZObjectType( + serialized_name="dynatraceEnvironmentProperties", + ) + properties.liftr_resource_category = AAZStrType( + serialized_name="liftrResourceCategory", + ) + properties.liftr_resource_preference = AAZIntType( + serialized_name="liftrResourcePreference", + flags={"read_only": True}, + ) + properties.marketplace_subscription_status = AAZStrType( + serialized_name="marketplaceSubscriptionStatus", + ) + properties.monitoring_status = AAZStrType( + serialized_name="monitoringStatus", + ) + properties.plan_data = AAZObjectType( + serialized_name="planData", + ) + properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + properties.user_info = AAZObjectType( + serialized_name="userInfo", + ) + + dynatrace_environment_properties = cls._schema_on_200.properties.dynatrace_environment_properties + dynatrace_environment_properties.account_info = AAZObjectType( + serialized_name="accountInfo", + ) + dynatrace_environment_properties.environment_info = AAZObjectType( + serialized_name="environmentInfo", + ) + dynatrace_environment_properties.single_sign_on_properties = AAZObjectType( + serialized_name="singleSignOnProperties", + ) + dynatrace_environment_properties.user_id = AAZStrType( + serialized_name="userId", + ) + + account_info = cls._schema_on_200.properties.dynatrace_environment_properties.account_info + account_info.account_id = AAZStrType( + serialized_name="accountId", + ) + account_info.region_id = AAZStrType( + serialized_name="regionId", + ) + + environment_info = cls._schema_on_200.properties.dynatrace_environment_properties.environment_info + environment_info.environment_id = AAZStrType( + serialized_name="environmentId", + ) + environment_info.ingestion_key = AAZStrType( + serialized_name="ingestionKey", + ) + environment_info.landing_url = AAZStrType( + serialized_name="landingURL", + ) + environment_info.logs_ingestion_endpoint = AAZStrType( + serialized_name="logsIngestionEndpoint", + ) + + 
single_sign_on_properties = cls._schema_on_200.properties.dynatrace_environment_properties.single_sign_on_properties + single_sign_on_properties.aad_domains = AAZListType( + serialized_name="aadDomains", + ) + single_sign_on_properties.enterprise_app_id = AAZStrType( + serialized_name="enterpriseAppId", + ) + single_sign_on_properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + single_sign_on_properties.single_sign_on_state = AAZStrType( + serialized_name="singleSignOnState", + ) + single_sign_on_properties.single_sign_on_url = AAZStrType( + serialized_name="singleSignOnUrl", + ) + + aad_domains = cls._schema_on_200.properties.dynatrace_environment_properties.single_sign_on_properties.aad_domains + aad_domains.Element = AAZStrType() + + plan_data = cls._schema_on_200.properties.plan_data + plan_data.billing_cycle = AAZStrType( + serialized_name="billingCycle", + ) + plan_data.effective_date = AAZStrType( + serialized_name="effectiveDate", + ) + plan_data.plan_details = AAZStrType( + serialized_name="planDetails", + ) + plan_data.usage_type = AAZStrType( + serialized_name="usageType", + ) + + user_info = cls._schema_on_200.properties.user_info + user_info.country = AAZStrType() + user_info.email_address = AAZStrType( + serialized_name="emailAddress", + ) + user_info.first_name = AAZStrType( + serialized_name="firstName", + ) + user_info.last_name = AAZStrType( + serialized_name="lastName", + ) + user_info.phone_number = AAZStrType( + serialized_name="phoneNumber", + ) + + system_data = cls._schema_on_200.system_data + system_data.created_at = AAZStrType( + serialized_name="createdAt", + ) + system_data.created_by = AAZStrType( + serialized_name="createdBy", + ) + system_data.created_by_type = AAZStrType( + serialized_name="createdByType", + ) + system_data.last_modified_at = AAZStrType( + serialized_name="lastModifiedAt", + ) + system_data.last_modified_by = AAZStrType( + serialized_name="lastModifiedBy", + ) + system_data.last_modified_by_type = AAZStrType( + serialized_name="lastModifiedByType", + ) + + tags = cls._schema_on_200.tags + tags.Element = AAZStrType() + + return cls._schema_on_200 + + +__all__ = ["Wait"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/__cmd_group.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/__cmd_group.py new file mode 100644 index 00000000000..c9fd6de3ebc --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/__cmd_group.py @@ -0,0 +1,23 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command_group( + "dynatrace monitor sso-config", +) +class __CMDGroup(AAZCommandGroup): + """Manage monitor sso-config + """ + pass + + +__all__ = ["__CMDGroup"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/__init__.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/__init__.py new file mode 100644 index 00000000000..25812ac18ba --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/__init__.py @@ -0,0 +1,15 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from .__cmd_group import * +from ._create import * +from ._list import * +from ._show import * +from ._wait import * diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/_create.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/_create.py new file mode 100644 index 00000000000..c9da015c5c9 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/_create.py @@ -0,0 +1,296 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "dynatrace monitor sso-config create" +) +class Create(AAZCommand): + """Create a dynatrace sso-config resource + + :example: Create a sso-config + az dynatrace monitor sso-config create -g rg --monitor-name monitor -n default --aad-domains "['mpliftrdt20210811outlook.onmicrosoft.com']" --single-sign-on-url "https://www.dynatrace.io" + """ + + _aaz_info = { + "version": "2021-09-01", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors/{}/singlesignonconfigurations/{}", "2021-09-01"], + ] + } + + AZ_SUPPORT_NO_WAIT = True + + def _handler(self, command_args): + super()._handler(command_args) + return self.build_lro_poller(self._execute_operations, self._output) + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.configuration_name = AAZStrArg( + options=["-n", "--name", "--configuration-name"], + help="Single Sign On Configuration Name", + required=True, + id_part="child_name_1", + ) + _args_schema.monitor_name = AAZStrArg( + options=["--monitor-name"], + help="Monitor resource name", + required=True, + id_part="name", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + + # define Arg Group "Properties" + + _args_schema = cls._args_schema + _args_schema.aad_domains = AAZListArg( + options=["--aad-domains"], + arg_group="Properties", + help="array of Aad(azure active directory) domains", + ) + _args_schema.enterprise_app_id = AAZStrArg( + options=["--enterprise-app-id"], + arg_group="Properties", + help="The ID of the enterprise application used for Single Sign On.", + ) + _args_schema.single_sign_on_state = AAZStrArg( + options=["--single-sign-on-state"], + arg_group="Properties", + help="State of Single Sign On", + enum={"Disable": "Disable", "Enable": "Enable", "Existing": "Existing", "Initial": "Initial"}, + ) + _args_schema.single_sign_on_url = AAZStrArg( + options=["--single-sign-on-url"], + arg_group="Properties", + help="The login URL specific to this Dynatrace Environment", + ) + + aad_domains = cls._args_schema.aad_domains + aad_domains.Element = AAZStrArg() + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + yield self.SingleSignOnCreateOrUpdate(ctx=self.ctx)() + self.post_operations() + + @register_callback + def pre_operations(self): + pass + + @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) + return result + + class SingleSignOnCreateOrUpdate(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [202]: + return self.client.build_lro_polling( + self.ctx.args.no_wait, + session, + self.on_200_201, + self.on_error, + lro_options={"final-state-via": "azure-async-operation"}, + path_format_arguments=self.url_parameters, + ) + if session.http_response.status_code in [200, 201]: + 
return self.client.build_lro_polling( + self.ctx.args.no_wait, + session, + self.on_200_201, + self.on_error, + lro_options={"final-state-via": "azure-async-operation"}, + path_format_arguments=self.url_parameters, + ) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors/{monitorName}/singleSignOnConfigurations/{configurationName}", + **self.url_parameters + ) + + @property + def method(self): + return "PUT" + + @property + def error_format(self): + return "MgmtErrorFormat" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "configurationName", self.ctx.args.configuration_name, + required=True, + ), + **self.serialize_url_param( + "monitorName", self.ctx.args.monitor_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2021-09-01", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Content-Type", "application/json", + ), + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + @property + def content(self): + _content_value, _builder = self.new_content_builder( + self.ctx.args, + typ=AAZObjectType, + typ_kwargs={"flags": {"required": True, "client_flatten": True}} + ) + _builder.set_prop("properties", AAZObjectType, ".", typ_kwargs={"flags": {"required": True, "client_flatten": True}}) + + properties = _builder.get(".properties") + if properties is not None: + properties.set_prop("aadDomains", AAZListType, ".aad_domains") + properties.set_prop("enterpriseAppId", AAZStrType, ".enterprise_app_id") + properties.set_prop("singleSignOnState", AAZStrType, ".single_sign_on_state") + properties.set_prop("singleSignOnUrl", AAZStrType, ".single_sign_on_url") + + aad_domains = _builder.get(".properties.aadDomains") + if aad_domains is not None: + aad_domains.set_elements(AAZStrType, ".") + + return self.serialize_content(_content_value) + + def on_200_201(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200_201 + ) + + _schema_on_200_201 = None + + @classmethod + def _build_schema_on_200_201(cls): + if cls._schema_on_200_201 is not None: + return cls._schema_on_200_201 + + cls._schema_on_200_201 = AAZObjectType() + + _schema_on_200_201 = cls._schema_on_200_201 + _schema_on_200_201.id = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200_201.name = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200_201.properties = AAZObjectType( + flags={"required": True, "client_flatten": True}, + ) + _schema_on_200_201.system_data = AAZObjectType( + serialized_name="systemData", + flags={"read_only": True}, + ) + _schema_on_200_201.type = AAZStrType( + flags={"read_only": True}, + ) + + properties = cls._schema_on_200_201.properties + properties.aad_domains = AAZListType( + serialized_name="aadDomains", + ) + properties.enterprise_app_id = AAZStrType( + serialized_name="enterpriseAppId", + ) + properties.provisioning_state = AAZStrType( + 
serialized_name="provisioningState", + ) + properties.single_sign_on_state = AAZStrType( + serialized_name="singleSignOnState", + ) + properties.single_sign_on_url = AAZStrType( + serialized_name="singleSignOnUrl", + ) + + aad_domains = cls._schema_on_200_201.properties.aad_domains + aad_domains.Element = AAZStrType() + + system_data = cls._schema_on_200_201.system_data + system_data.created_at = AAZStrType( + serialized_name="createdAt", + ) + system_data.created_by = AAZStrType( + serialized_name="createdBy", + ) + system_data.created_by_type = AAZStrType( + serialized_name="createdByType", + ) + system_data.last_modified_at = AAZStrType( + serialized_name="lastModifiedAt", + ) + system_data.last_modified_by = AAZStrType( + serialized_name="lastModifiedBy", + ) + system_data.last_modified_by_type = AAZStrType( + serialized_name="lastModifiedByType", + ) + + return cls._schema_on_200_201 + + +__all__ = ["Create"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/_list.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/_list.py new file mode 100644 index 00000000000..3a73c93e2d9 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/_list.py @@ -0,0 +1,226 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "dynatrace monitor sso-config list" +) +class List(AAZCommand): + """List all dynatrace sso-config by monitor name + + :example: List sso-config + az dynatrace monitor sso-config list -g rg --monitor-name monitor + """ + + _aaz_info = { + "version": "2021-09-01", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors/{}/singlesignonconfigurations", "2021-09-01"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + return self.build_paging(self._execute_operations, self._output) + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.monitor_name = AAZStrArg( + options=["--monitor-name"], + help="Monitor resource name", + required=True, + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.SingleSignOnList(ctx=self.ctx)() + self.post_operations() + + @register_callback + def pre_operations(self): + pass + + @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance.value, client_flatten=True) + next_link = self.deserialize_output(self.ctx.vars.instance.next_link) + return result, next_link + + class SingleSignOnList(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if 
session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors/{monitorName}/singleSignOnConfigurations", + **self.url_parameters + ) + + @property + def method(self): + return "GET" + + @property + def error_format(self): + return "MgmtErrorFormat" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "monitorName", self.ctx.args.monitor_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2021-09-01", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + _schema_on_200.next_link = AAZStrType( + serialized_name="nextLink", + ) + _schema_on_200.value = AAZListType( + flags={"required": True}, + ) + + value = cls._schema_on_200.value + value.Element = AAZObjectType() + + _element = cls._schema_on_200.value.Element + _element.id = AAZStrType( + flags={"read_only": True}, + ) + _element.name = AAZStrType( + flags={"read_only": True}, + ) + _element.properties = AAZObjectType( + flags={"required": True, "client_flatten": True}, + ) + _element.system_data = AAZObjectType( + serialized_name="systemData", + flags={"read_only": True}, + ) + _element.type = AAZStrType( + flags={"read_only": True}, + ) + + properties = cls._schema_on_200.value.Element.properties + properties.aad_domains = AAZListType( + serialized_name="aadDomains", + ) + properties.enterprise_app_id = AAZStrType( + serialized_name="enterpriseAppId", + ) + properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + properties.single_sign_on_state = AAZStrType( + serialized_name="singleSignOnState", + ) + properties.single_sign_on_url = AAZStrType( + serialized_name="singleSignOnUrl", + ) + + aad_domains = cls._schema_on_200.value.Element.properties.aad_domains + aad_domains.Element = AAZStrType() + + system_data = cls._schema_on_200.value.Element.system_data + system_data.created_at = AAZStrType( + serialized_name="createdAt", + ) + system_data.created_by = AAZStrType( + serialized_name="createdBy", + ) + system_data.created_by_type = AAZStrType( + serialized_name="createdByType", + ) + system_data.last_modified_at = AAZStrType( + serialized_name="lastModifiedAt", + ) + system_data.last_modified_by = AAZStrType( + serialized_name="lastModifiedBy", + ) + system_data.last_modified_by_type = AAZStrType( + serialized_name="lastModifiedByType", + ) + + return cls._schema_on_200 + + +__all__ = ["List"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/_show.py 
b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/_show.py new file mode 100644 index 00000000000..177e264fa1b --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/_show.py @@ -0,0 +1,226 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "dynatrace monitor sso-config show" +) +class Show(AAZCommand): + """Show a dynatrace sso-config + + :example: Show sso-config + az dynatrace monitor sso-config show -g rg --monitor-name monitor -n default + """ + + _aaz_info = { + "version": "2021-09-01", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors/{}/singlesignonconfigurations/{}", "2021-09-01"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return self._output() + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.configuration_name = AAZStrArg( + options=["-n", "--name", "--configuration-name"], + help="Single Sign On Configuration Name", + required=True, + id_part="child_name_1", + ) + _args_schema.monitor_name = AAZStrArg( + options=["--monitor-name"], + help="Monitor resource name", + required=True, + id_part="name", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.SingleSignOnGet(ctx=self.ctx)() + self.post_operations() + + @register_callback + def pre_operations(self): + pass + + @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) + return result + + class SingleSignOnGet(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors/{monitorName}/singleSignOnConfigurations/{configurationName}", + **self.url_parameters + ) + + @property + def method(self): + return "GET" + + @property + def error_format(self): + return "MgmtErrorFormat" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "configurationName", self.ctx.args.configuration_name, + required=True, + ), + **self.serialize_url_param( + "monitorName", self.ctx.args.monitor_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", 
self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2021-09-01", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + _schema_on_200.id = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.name = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.properties = AAZObjectType( + flags={"required": True, "client_flatten": True}, + ) + _schema_on_200.system_data = AAZObjectType( + serialized_name="systemData", + flags={"read_only": True}, + ) + _schema_on_200.type = AAZStrType( + flags={"read_only": True}, + ) + + properties = cls._schema_on_200.properties + properties.aad_domains = AAZListType( + serialized_name="aadDomains", + ) + properties.enterprise_app_id = AAZStrType( + serialized_name="enterpriseAppId", + ) + properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + properties.single_sign_on_state = AAZStrType( + serialized_name="singleSignOnState", + ) + properties.single_sign_on_url = AAZStrType( + serialized_name="singleSignOnUrl", + ) + + aad_domains = cls._schema_on_200.properties.aad_domains + aad_domains.Element = AAZStrType() + + system_data = cls._schema_on_200.system_data + system_data.created_at = AAZStrType( + serialized_name="createdAt", + ) + system_data.created_by = AAZStrType( + serialized_name="createdBy", + ) + system_data.created_by_type = AAZStrType( + serialized_name="createdByType", + ) + system_data.last_modified_at = AAZStrType( + serialized_name="lastModifiedAt", + ) + system_data.last_modified_by = AAZStrType( + serialized_name="lastModifiedBy", + ) + system_data.last_modified_by_type = AAZStrType( + serialized_name="lastModifiedByType", + ) + + return cls._schema_on_200 + + +__all__ = ["Show"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/_wait.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/_wait.py new file mode 100644 index 00000000000..8d2c627ad70 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/sso_config/_wait.py @@ -0,0 +1,222 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "dynatrace monitor sso-config wait", +) +class Wait(AAZWaitCommand): + """Place the CLI in a waiting state until a condition is met. 
+ """ + + _aaz_info = { + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors/{}/singlesignonconfigurations/{}", "2021-09-01"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return self._output() + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.configuration_name = AAZStrArg( + options=["-n", "--name", "--configuration-name"], + help="Single Sign On Configuration Name", + required=True, + id_part="child_name_1", + ) + _args_schema.monitor_name = AAZStrArg( + options=["--monitor-name"], + help="Monitor resource name", + required=True, + id_part="name", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.SingleSignOnGet(ctx=self.ctx)() + self.post_operations() + + @register_callback + def pre_operations(self): + pass + + @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance, client_flatten=False) + return result + + class SingleSignOnGet(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors/{monitorName}/singleSignOnConfigurations/{configurationName}", + **self.url_parameters + ) + + @property + def method(self): + return "GET" + + @property + def error_format(self): + return "MgmtErrorFormat" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "configurationName", self.ctx.args.configuration_name, + required=True, + ), + **self.serialize_url_param( + "monitorName", self.ctx.args.monitor_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2021-09-01", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + _schema_on_200.id = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.name = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.properties = AAZObjectType( + 
flags={"required": True, "client_flatten": True}, + ) + _schema_on_200.system_data = AAZObjectType( + serialized_name="systemData", + flags={"read_only": True}, + ) + _schema_on_200.type = AAZStrType( + flags={"read_only": True}, + ) + + properties = cls._schema_on_200.properties + properties.aad_domains = AAZListType( + serialized_name="aadDomains", + ) + properties.enterprise_app_id = AAZStrType( + serialized_name="enterpriseAppId", + ) + properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + properties.single_sign_on_state = AAZStrType( + serialized_name="singleSignOnState", + ) + properties.single_sign_on_url = AAZStrType( + serialized_name="singleSignOnUrl", + ) + + aad_domains = cls._schema_on_200.properties.aad_domains + aad_domains.Element = AAZStrType() + + system_data = cls._schema_on_200.system_data + system_data.created_at = AAZStrType( + serialized_name="createdAt", + ) + system_data.created_by = AAZStrType( + serialized_name="createdBy", + ) + system_data.created_by_type = AAZStrType( + serialized_name="createdByType", + ) + system_data.last_modified_at = AAZStrType( + serialized_name="lastModifiedAt", + ) + system_data.last_modified_by = AAZStrType( + serialized_name="lastModifiedBy", + ) + system_data.last_modified_by_type = AAZStrType( + serialized_name="lastModifiedByType", + ) + + return cls._schema_on_200 + + +__all__ = ["Wait"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/__cmd_group.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/__cmd_group.py new file mode 100644 index 00000000000..92bbd3561e5 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/__cmd_group.py @@ -0,0 +1,23 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command_group( + "dynatrace monitor tag-rule", +) +class __CMDGroup(AAZCommandGroup): + """Manage dynatrace monitor tag-rule + """ + pass + + +__all__ = ["__CMDGroup"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/__init__.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/__init__.py new file mode 100644 index 00000000000..db73033039b --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/__init__.py @@ -0,0 +1,17 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from .__cmd_group import * +from ._create import * +from ._delete import * +from ._list import * +from ._show import * +from ._update import * +from ._wait import * diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_create.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_create.py new file mode 100644 index 00000000000..0cc7eda63d8 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_create.py @@ -0,0 +1,410 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "dynatrace monitor tag-rule create" +) +class Create(AAZCommand): + """Create a tag rule + + :example: Create tag-rule + az dynatrace monitor tag-rule create -g rg --monitor-name monitor -n default --log-rules "{send-aad-logs:enabled,send-subscription-logs:enabled,send-activity-logs:enabled,filtering-tags:[{name:env,value:prod,action:include},{name:env,value:dev,action:exclude}]}" --metric-rules "{filtering-tags:[{name:env,value:prod,action:include}]}" + """ + + _aaz_info = { + "version": "2021-09-01", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors/{}/tagrules/{}", "2021-09-01"], + ] + } + + AZ_SUPPORT_NO_WAIT = True + + def _handler(self, command_args): + super()._handler(command_args) + return self.build_lro_poller(self._execute_operations, self._output) + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.monitor_name = AAZStrArg( + options=["--monitor-name"], + help="Monitor resource name", + required=True, + id_part="name", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + _args_schema.rule_set_name = AAZStrArg( + options=["-n", "--name", "--rule-set-name"], + help="Monitor rule set name", + required=True, + id_part="child_name_1", + ) + + # define Arg Group "Properties" + + _args_schema = cls._args_schema + _args_schema.log_rules = AAZObjectArg( + options=["--log-rules"], + arg_group="Properties", + help="Set of rules for sending logs for the Monitor resource.", + ) + _args_schema.metric_rules = AAZObjectArg( + options=["--metric-rules"], + arg_group="Properties", + help="Set of rules for sending metrics for the Monitor resource.", + ) + + log_rules = cls._args_schema.log_rules + log_rules.filtering_tags = AAZListArg( + options=["filtering-tags"], + help="List of filtering tags to be used for capturing logs. This only takes effect if SendActivityLogs flag is enabled. If empty, all resources will be captured. If only Exclude action is specified, the rules will apply to the list of all available resources. 
If Include actions are specified, the rules will only include resources with the associated tags.", + ) + log_rules.send_aad_logs = AAZStrArg( + options=["send-aad-logs"], + help="Flag specifying if AAD logs should be sent for the Monitor resource.", + enum={"Disabled": "Disabled", "Enabled": "Enabled"}, + ) + log_rules.send_activity_logs = AAZStrArg( + options=["send-activity-logs"], + help="Flag specifying if activity logs from Azure resources should be sent for the Monitor resource.", + enum={"Disabled": "Disabled", "Enabled": "Enabled"}, + ) + log_rules.send_subscription_logs = AAZStrArg( + options=["send-subscription-logs"], + help="Flag specifying if subscription logs should be sent for the Monitor resource.", + enum={"Disabled": "Disabled", "Enabled": "Enabled"}, + ) + + filtering_tags = cls._args_schema.log_rules.filtering_tags + filtering_tags.Element = AAZObjectArg() + cls._build_args_filtering_tag_create(filtering_tags.Element) + + metric_rules = cls._args_schema.metric_rules + metric_rules.filtering_tags = AAZListArg( + options=["filtering-tags"], + help="List of filtering tags to be used for capturing metrics. If empty, all resources will be captured. If only Exclude action is specified, the rules will apply to the list of all available resources. If Include actions are specified, the rules will only include resources with the associated tags.", + ) + + filtering_tags = cls._args_schema.metric_rules.filtering_tags + filtering_tags.Element = AAZObjectArg() + cls._build_args_filtering_tag_create(filtering_tags.Element) + return cls._args_schema + + _args_filtering_tag_create = None + + @classmethod + def _build_args_filtering_tag_create(cls, _schema): + if cls._args_filtering_tag_create is not None: + _schema.action = cls._args_filtering_tag_create.action + _schema.name = cls._args_filtering_tag_create.name + _schema.value = cls._args_filtering_tag_create.value + return + + cls._args_filtering_tag_create = AAZObjectArg() + + filtering_tag_create = cls._args_filtering_tag_create + filtering_tag_create.action = AAZStrArg( + options=["action"], + help="Valid actions for a filtering tag. 
Exclusion takes priority over inclusion.", + enum={"Exclude": "Exclude", "Include": "Include"}, + ) + filtering_tag_create.name = AAZStrArg( + options=["name"], + help="The name (also known as the key) of the tag.", + ) + filtering_tag_create.value = AAZStrArg( + options=["value"], + help="The value of the tag.", + ) + + _schema.action = cls._args_filtering_tag_create.action + _schema.name = cls._args_filtering_tag_create.name + _schema.value = cls._args_filtering_tag_create.value + + def _execute_operations(self): + self.pre_operations() + yield self.TagRulesCreateOrUpdate(ctx=self.ctx)() + self.post_operations() + + @register_callback + def pre_operations(self): + pass + + @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) + return result + + class TagRulesCreateOrUpdate(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [202]: + return self.client.build_lro_polling( + self.ctx.args.no_wait, + session, + self.on_200_201, + self.on_error, + lro_options={"final-state-via": "azure-async-operation"}, + path_format_arguments=self.url_parameters, + ) + if session.http_response.status_code in [200, 201]: + return self.client.build_lro_polling( + self.ctx.args.no_wait, + session, + self.on_200_201, + self.on_error, + lro_options={"final-state-via": "azure-async-operation"}, + path_format_arguments=self.url_parameters, + ) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors/{monitorName}/tagRules/{ruleSetName}", + **self.url_parameters + ) + + @property + def method(self): + return "PUT" + + @property + def error_format(self): + return "MgmtErrorFormat" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "monitorName", self.ctx.args.monitor_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "ruleSetName", self.ctx.args.rule_set_name, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2021-09-01", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Content-Type", "application/json", + ), + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + @property + def content(self): + _content_value, _builder = self.new_content_builder( + self.ctx.args, + typ=AAZObjectType, + typ_kwargs={"flags": {"required": True, "client_flatten": True}} + ) + _builder.set_prop("properties", AAZObjectType, ".", typ_kwargs={"flags": {"required": True, "client_flatten": True}}) + + properties = _builder.get(".properties") + if properties is not None: + properties.set_prop("logRules", AAZObjectType, ".log_rules") + properties.set_prop("metricRules", AAZObjectType, ".metric_rules") + + log_rules = _builder.get(".properties.logRules") + if log_rules is not None: + 
log_rules.set_prop("filteringTags", AAZListType, ".filtering_tags") + log_rules.set_prop("sendAadLogs", AAZStrType, ".send_aad_logs") + log_rules.set_prop("sendActivityLogs", AAZStrType, ".send_activity_logs") + log_rules.set_prop("sendSubscriptionLogs", AAZStrType, ".send_subscription_logs") + + filtering_tags = _builder.get(".properties.logRules.filteringTags") + if filtering_tags is not None: + _build_schema_filtering_tag_create(filtering_tags.set_elements(AAZObjectType, ".")) + + metric_rules = _builder.get(".properties.metricRules") + if metric_rules is not None: + metric_rules.set_prop("filteringTags", AAZListType, ".filtering_tags") + + filtering_tags = _builder.get(".properties.metricRules.filteringTags") + if filtering_tags is not None: + _build_schema_filtering_tag_create(filtering_tags.set_elements(AAZObjectType, ".")) + + return self.serialize_content(_content_value) + + def on_200_201(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200_201 + ) + + _schema_on_200_201 = None + + @classmethod + def _build_schema_on_200_201(cls): + if cls._schema_on_200_201 is not None: + return cls._schema_on_200_201 + + cls._schema_on_200_201 = AAZObjectType() + + _schema_on_200_201 = cls._schema_on_200_201 + _schema_on_200_201.id = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200_201.name = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200_201.properties = AAZObjectType( + flags={"required": True, "client_flatten": True}, + ) + _schema_on_200_201.system_data = AAZObjectType( + serialized_name="systemData", + flags={"read_only": True}, + ) + _schema_on_200_201.type = AAZStrType( + flags={"read_only": True}, + ) + + properties = cls._schema_on_200_201.properties + properties.log_rules = AAZObjectType( + serialized_name="logRules", + ) + properties.metric_rules = AAZObjectType( + serialized_name="metricRules", + ) + properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + + log_rules = cls._schema_on_200_201.properties.log_rules + log_rules.filtering_tags = AAZListType( + serialized_name="filteringTags", + ) + log_rules.send_aad_logs = AAZStrType( + serialized_name="sendAadLogs", + ) + log_rules.send_activity_logs = AAZStrType( + serialized_name="sendActivityLogs", + ) + log_rules.send_subscription_logs = AAZStrType( + serialized_name="sendSubscriptionLogs", + ) + + filtering_tags = cls._schema_on_200_201.properties.log_rules.filtering_tags + filtering_tags.Element = AAZObjectType() + _build_schema_filtering_tag_read(filtering_tags.Element) + + metric_rules = cls._schema_on_200_201.properties.metric_rules + metric_rules.filtering_tags = AAZListType( + serialized_name="filteringTags", + ) + + filtering_tags = cls._schema_on_200_201.properties.metric_rules.filtering_tags + filtering_tags.Element = AAZObjectType() + _build_schema_filtering_tag_read(filtering_tags.Element) + + system_data = cls._schema_on_200_201.system_data + system_data.created_at = AAZStrType( + serialized_name="createdAt", + ) + system_data.created_by = AAZStrType( + serialized_name="createdBy", + ) + system_data.created_by_type = AAZStrType( + serialized_name="createdByType", + ) + system_data.last_modified_at = AAZStrType( + serialized_name="lastModifiedAt", + ) + system_data.last_modified_by = AAZStrType( + serialized_name="lastModifiedBy", + ) + system_data.last_modified_by_type = AAZStrType( + serialized_name="lastModifiedByType", + ) + + return cls._schema_on_200_201 + + 
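For reference, the flattened --log-rules/--metric-rules arguments defined above are turned into the tagRules request body by the content builder in TagRulesCreateOrUpdate. A minimal sketch of the payload the docstring example would produce, assuming the property names follow the serialized_name mappings in this file and that the shorthand values enabled/include map onto the Enabled/Include enum values (the variable name below is illustrative only):

example_tag_rule_body = {
    "properties": {
        "logRules": {
            "sendAadLogs": "Enabled",
            "sendActivityLogs": "Enabled",
            "sendSubscriptionLogs": "Enabled",
            "filteringTags": [
                {"name": "env", "value": "prod", "action": "Include"},
                {"name": "env", "value": "dev", "action": "Exclude"},
            ],
        },
        "metricRules": {
            "filteringTags": [{"name": "env", "value": "prod", "action": "Include"}],
        },
    },
}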
+def _build_schema_filtering_tag_create(_builder): + if _builder is None: + return + _builder.set_prop("action", AAZStrType, ".action") + _builder.set_prop("name", AAZStrType, ".name") + _builder.set_prop("value", AAZStrType, ".value") + + +_schema_filtering_tag_read = None + + +def _build_schema_filtering_tag_read(_schema): + global _schema_filtering_tag_read + if _schema_filtering_tag_read is not None: + _schema.action = _schema_filtering_tag_read.action + _schema.name = _schema_filtering_tag_read.name + _schema.value = _schema_filtering_tag_read.value + return + + _schema_filtering_tag_read = AAZObjectType() + + filtering_tag_read = _schema_filtering_tag_read + filtering_tag_read.action = AAZStrType() + filtering_tag_read.name = AAZStrType() + filtering_tag_read.value = AAZStrType() + + _schema.action = _schema_filtering_tag_read.action + _schema.name = _schema_filtering_tag_read.name + _schema.value = _schema_filtering_tag_read.value + + +__all__ = ["Create"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_delete.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_delete.py new file mode 100644 index 00000000000..f501dc605c6 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_delete.py @@ -0,0 +1,169 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "dynatrace monitor tag-rule delete", + confirmation="Are you sure you want to perform this operation?", +) +class Delete(AAZCommand): + """Delete a tag rule + + :example: Delete tag-rule + az dynatrace monitor tag-rule delete -g rg --monitor-name monitor -n default -y + """ + + _aaz_info = { + "version": "2021-09-01", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors/{}/tagrules/{}", "2021-09-01"], + ] + } + + AZ_SUPPORT_NO_WAIT = True + + def _handler(self, command_args): + super()._handler(command_args) + return self.build_lro_poller(self._execute_operations, None) + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.monitor_name = AAZStrArg( + options=["--monitor-name"], + help="Monitor resource name", + required=True, + id_part="name", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + _args_schema.rule_set_name = AAZStrArg( + options=["-n", "--name", "--rule-set-name"], + help="Monitor rule set name", + required=True, + id_part="child_name_1", + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + yield self.TagRulesDelete(ctx=self.ctx)() + self.post_operations() + + @register_callback + def pre_operations(self): + pass + + @register_callback + def post_operations(self): + pass + + class TagRulesDelete(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = 
self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [202]: + return self.client.build_lro_polling( + self.ctx.args.no_wait, + session, + self.on_200, + self.on_error, + lro_options={"final-state-via": "azure-async-operation"}, + path_format_arguments=self.url_parameters, + ) + if session.http_response.status_code in [200]: + return self.client.build_lro_polling( + self.ctx.args.no_wait, + session, + self.on_200, + self.on_error, + lro_options={"final-state-via": "azure-async-operation"}, + path_format_arguments=self.url_parameters, + ) + if session.http_response.status_code in [204]: + return self.client.build_lro_polling( + self.ctx.args.no_wait, + session, + self.on_204, + self.on_error, + lro_options={"final-state-via": "azure-async-operation"}, + path_format_arguments=self.url_parameters, + ) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors/{monitorName}/tagRules/{ruleSetName}", + **self.url_parameters + ) + + @property + def method(self): + return "DELETE" + + @property + def error_format(self): + return "MgmtErrorFormat" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "monitorName", self.ctx.args.monitor_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "ruleSetName", self.ctx.args.rule_set_name, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2021-09-01", + required=True, + ), + } + return parameters + + def on_200(self, session): + pass + + def on_204(self, session): + pass + + +__all__ = ["Delete"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_list.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_list.py new file mode 100644 index 00000000000..b3274be1776 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_list.py @@ -0,0 +1,267 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "dynatrace monitor tag-rule list" +) +class List(AAZCommand): + """List all tag rule by monitor name + + :example: List tag-rule + az dynatrace monitor tag-rule list -g rg --monitor-name monitor + """ + + _aaz_info = { + "version": "2021-09-01", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors/{}/tagrules", "2021-09-01"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + return self.build_paging(self._execute_operations, self._output) + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.monitor_name = AAZStrArg( + options=["--monitor-name"], + help="Monitor resource name", + required=True, + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.TagRulesList(ctx=self.ctx)() + self.post_operations() + + @register_callback + def pre_operations(self): + pass + + @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance.value, client_flatten=True) + next_link = self.deserialize_output(self.ctx.vars.instance.next_link) + return result, next_link + + class TagRulesList(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors/{monitorName}/tagRules", + **self.url_parameters + ) + + @property + def method(self): + return "GET" + + @property + def error_format(self): + return "MgmtErrorFormat" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "monitorName", self.ctx.args.monitor_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2021-09-01", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + 
_schema_on_200.next_link = AAZStrType( + serialized_name="nextLink", + ) + _schema_on_200.value = AAZListType( + flags={"required": True}, + ) + + value = cls._schema_on_200.value + value.Element = AAZObjectType() + + _element = cls._schema_on_200.value.Element + _element.id = AAZStrType( + flags={"read_only": True}, + ) + _element.name = AAZStrType( + flags={"read_only": True}, + ) + _element.properties = AAZObjectType( + flags={"required": True, "client_flatten": True}, + ) + _element.system_data = AAZObjectType( + serialized_name="systemData", + flags={"read_only": True}, + ) + _element.type = AAZStrType( + flags={"read_only": True}, + ) + + properties = cls._schema_on_200.value.Element.properties + properties.log_rules = AAZObjectType( + serialized_name="logRules", + ) + properties.metric_rules = AAZObjectType( + serialized_name="metricRules", + ) + properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + + log_rules = cls._schema_on_200.value.Element.properties.log_rules + log_rules.filtering_tags = AAZListType( + serialized_name="filteringTags", + ) + log_rules.send_aad_logs = AAZStrType( + serialized_name="sendAadLogs", + ) + log_rules.send_activity_logs = AAZStrType( + serialized_name="sendActivityLogs", + ) + log_rules.send_subscription_logs = AAZStrType( + serialized_name="sendSubscriptionLogs", + ) + + filtering_tags = cls._schema_on_200.value.Element.properties.log_rules.filtering_tags + filtering_tags.Element = AAZObjectType() + _build_schema_filtering_tag_read(filtering_tags.Element) + + metric_rules = cls._schema_on_200.value.Element.properties.metric_rules + metric_rules.filtering_tags = AAZListType( + serialized_name="filteringTags", + ) + + filtering_tags = cls._schema_on_200.value.Element.properties.metric_rules.filtering_tags + filtering_tags.Element = AAZObjectType() + _build_schema_filtering_tag_read(filtering_tags.Element) + + system_data = cls._schema_on_200.value.Element.system_data + system_data.created_at = AAZStrType( + serialized_name="createdAt", + ) + system_data.created_by = AAZStrType( + serialized_name="createdBy", + ) + system_data.created_by_type = AAZStrType( + serialized_name="createdByType", + ) + system_data.last_modified_at = AAZStrType( + serialized_name="lastModifiedAt", + ) + system_data.last_modified_by = AAZStrType( + serialized_name="lastModifiedBy", + ) + system_data.last_modified_by_type = AAZStrType( + serialized_name="lastModifiedByType", + ) + + return cls._schema_on_200 + + +_schema_filtering_tag_read = None + + +def _build_schema_filtering_tag_read(_schema): + global _schema_filtering_tag_read + if _schema_filtering_tag_read is not None: + _schema.action = _schema_filtering_tag_read.action + _schema.name = _schema_filtering_tag_read.name + _schema.value = _schema_filtering_tag_read.value + return + + _schema_filtering_tag_read = AAZObjectType() + + filtering_tag_read = _schema_filtering_tag_read + filtering_tag_read.action = AAZStrType() + filtering_tag_read.name = AAZStrType() + filtering_tag_read.value = AAZStrType() + + _schema.action = _schema_filtering_tag_read.action + _schema.name = _schema_filtering_tag_read.name + _schema.value = _schema_filtering_tag_read.value + + +__all__ = ["List"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_show.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_show.py new file mode 100644 index 00000000000..ad9ca4476ab --- /dev/null +++ 
b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_show.py @@ -0,0 +1,267 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "dynatrace monitor tag-rule show" +) +class Show(AAZCommand): + """Show a tag rule + + :example: Show tag-rule + az dynatrace monitor tag-rule show -g rg --monitor-name monitor -n default + """ + + _aaz_info = { + "version": "2021-09-01", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors/{}/tagrules/{}", "2021-09-01"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return self._output() + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.monitor_name = AAZStrArg( + options=["--monitor-name"], + help="Monitor resource name", + required=True, + id_part="name", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + _args_schema.rule_set_name = AAZStrArg( + options=["-n", "--name", "--rule-set-name"], + help="Monitor rule set name", + required=True, + id_part="child_name_1", + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.TagRulesGet(ctx=self.ctx)() + self.post_operations() + + @register_callback + def pre_operations(self): + pass + + @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) + return result + + class TagRulesGet(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors/{monitorName}/tagRules/{ruleSetName}", + **self.url_parameters + ) + + @property + def method(self): + return "GET" + + @property + def error_format(self): + return "MgmtErrorFormat" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "monitorName", self.ctx.args.monitor_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "ruleSetName", self.ctx.args.rule_set_name, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2021-09-01", + required=True, + ), + } + return parameters + + @property + def 
header_parameters(self): + parameters = { + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + _schema_on_200.id = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.name = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.properties = AAZObjectType( + flags={"required": True, "client_flatten": True}, + ) + _schema_on_200.system_data = AAZObjectType( + serialized_name="systemData", + flags={"read_only": True}, + ) + _schema_on_200.type = AAZStrType( + flags={"read_only": True}, + ) + + properties = cls._schema_on_200.properties + properties.log_rules = AAZObjectType( + serialized_name="logRules", + ) + properties.metric_rules = AAZObjectType( + serialized_name="metricRules", + ) + properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + + log_rules = cls._schema_on_200.properties.log_rules + log_rules.filtering_tags = AAZListType( + serialized_name="filteringTags", + ) + log_rules.send_aad_logs = AAZStrType( + serialized_name="sendAadLogs", + ) + log_rules.send_activity_logs = AAZStrType( + serialized_name="sendActivityLogs", + ) + log_rules.send_subscription_logs = AAZStrType( + serialized_name="sendSubscriptionLogs", + ) + + filtering_tags = cls._schema_on_200.properties.log_rules.filtering_tags + filtering_tags.Element = AAZObjectType() + _build_schema_filtering_tag_read(filtering_tags.Element) + + metric_rules = cls._schema_on_200.properties.metric_rules + metric_rules.filtering_tags = AAZListType( + serialized_name="filteringTags", + ) + + filtering_tags = cls._schema_on_200.properties.metric_rules.filtering_tags + filtering_tags.Element = AAZObjectType() + _build_schema_filtering_tag_read(filtering_tags.Element) + + system_data = cls._schema_on_200.system_data + system_data.created_at = AAZStrType( + serialized_name="createdAt", + ) + system_data.created_by = AAZStrType( + serialized_name="createdBy", + ) + system_data.created_by_type = AAZStrType( + serialized_name="createdByType", + ) + system_data.last_modified_at = AAZStrType( + serialized_name="lastModifiedAt", + ) + system_data.last_modified_by = AAZStrType( + serialized_name="lastModifiedBy", + ) + system_data.last_modified_by_type = AAZStrType( + serialized_name="lastModifiedByType", + ) + + return cls._schema_on_200 + + +_schema_filtering_tag_read = None + + +def _build_schema_filtering_tag_read(_schema): + global _schema_filtering_tag_read + if _schema_filtering_tag_read is not None: + _schema.action = _schema_filtering_tag_read.action + _schema.name = _schema_filtering_tag_read.name + _schema.value = _schema_filtering_tag_read.value + return + + _schema_filtering_tag_read = AAZObjectType() + + filtering_tag_read = _schema_filtering_tag_read + filtering_tag_read.action = AAZStrType() + filtering_tag_read.name = AAZStrType() + filtering_tag_read.value = AAZStrType() + + _schema.action = _schema_filtering_tag_read.action + _schema.name = _schema_filtering_tag_read.name + _schema.value = _schema_filtering_tag_read.value + + +__all__ = ["Show"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_update.py 
b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_update.py new file mode 100644 index 00000000000..54a8e818c92 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_update.py @@ -0,0 +1,389 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "dynatrace monitor tag-rule update" +) +class Update(AAZCommand): + """Update a tag rule + + :example: Update tag-rule + az dynatrace monitor tag-rule update -g rg --monitor-name monitor -n default --log-rules "{send-aad-logs:enabled,send-subscription-logs:enabled,send-activity-logs:enabled,filtering-tags:[{name:env,value:prod,action:include}]}" --metric-rules "{filtering-tags:[{name:env,value:prod,action:include}]}" + """ + + _aaz_info = { + "version": "2021-09-01", + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors/{}/tagrules/{}", "2021-09-01"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return self._output() + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.monitor_name = AAZStrArg( + options=["--monitor-name"], + help="Monitor resource name", + required=True, + id_part="name", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + _args_schema.rule_set_name = AAZStrArg( + options=["-n", "--name", "--rule-set-name"], + help="Monitor rule set name", + required=True, + id_part="child_name_1", + ) + + # define Arg Group "Resource" + + _args_schema = cls._args_schema + _args_schema.log_rules = AAZObjectArg( + options=["--log-rules"], + arg_group="Resource", + help="Set of rules for sending logs for the Monitor resource.", + ) + _args_schema.metric_rules = AAZObjectArg( + options=["--metric-rules"], + arg_group="Resource", + help="Set of rules for sending metrics for the Monitor resource.", + ) + + log_rules = cls._args_schema.log_rules + log_rules.filtering_tags = AAZListArg( + options=["filtering-tags"], + help="List of filtering tags to be used for capturing logs. This only takes effect if SendActivityLogs flag is enabled. If empty, all resources will be captured. If only Exclude action is specified, the rules will apply to the list of all available resources.
If Include actions are specified, the rules will only include resources with the associated tags.", + ) + log_rules.send_aad_logs = AAZStrArg( + options=["send-aad-logs"], + help="Flag specifying if AAD logs should be sent for the Monitor resource.", + enum={"Disabled": "Disabled", "Enabled": "Enabled"}, + ) + log_rules.send_activity_logs = AAZStrArg( + options=["send-activity-logs"], + help="Flag specifying if activity logs from Azure resources should be sent for the Monitor resource.", + enum={"Disabled": "Disabled", "Enabled": "Enabled"}, + ) + log_rules.send_subscription_logs = AAZStrArg( + options=["send-subscription-logs"], + help="Flag specifying if subscription logs should be sent for the Monitor resource.", + enum={"Disabled": "Disabled", "Enabled": "Enabled"}, + ) + + filtering_tags = cls._args_schema.log_rules.filtering_tags + filtering_tags.Element = AAZObjectArg() + cls._build_args_filtering_tag_update(filtering_tags.Element) + + metric_rules = cls._args_schema.metric_rules + metric_rules.filtering_tags = AAZListArg( + options=["filtering-tags"], + help="List of filtering tags to be used for capturing metrics. If empty, all resources will be captured. If only Exclude action is specified, the rules will apply to the list of all available resources. If Include actions are specified, the rules will only include resources with the associated tags.", + ) + + filtering_tags = cls._args_schema.metric_rules.filtering_tags + filtering_tags.Element = AAZObjectArg() + cls._build_args_filtering_tag_update(filtering_tags.Element) + return cls._args_schema + + _args_filtering_tag_update = None + + @classmethod + def _build_args_filtering_tag_update(cls, _schema): + if cls._args_filtering_tag_update is not None: + _schema.action = cls._args_filtering_tag_update.action + _schema.name = cls._args_filtering_tag_update.name + _schema.value = cls._args_filtering_tag_update.value + return + + cls._args_filtering_tag_update = AAZObjectArg() + + filtering_tag_update = cls._args_filtering_tag_update + filtering_tag_update.action = AAZStrArg( + options=["action"], + help="Valid actions for a filtering tag. 
Exclusion takes priority over inclusion.", + enum={"Exclude": "Exclude", "Include": "Include"}, + ) + filtering_tag_update.name = AAZStrArg( + options=["name"], + help="The name (also known as the key) of the tag.", + ) + filtering_tag_update.value = AAZStrArg( + options=["value"], + help="The value of the tag.", + ) + + _schema.action = cls._args_filtering_tag_update.action + _schema.name = cls._args_filtering_tag_update.name + _schema.value = cls._args_filtering_tag_update.value + + def _execute_operations(self): + self.pre_operations() + self.TagRulesUpdate(ctx=self.ctx)() + self.post_operations() + + @register_callback + def pre_operations(self): + pass + + @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True) + return result + + class TagRulesUpdate(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors/{monitorName}/tagRules/{ruleSetName}", + **self.url_parameters + ) + + @property + def method(self): + return "PATCH" + + @property + def error_format(self): + return "MgmtErrorFormat" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "monitorName", self.ctx.args.monitor_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "ruleSetName", self.ctx.args.rule_set_name, + required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2021-09-01", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Content-Type", "application/json", + ), + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + @property + def content(self): + _content_value, _builder = self.new_content_builder( + self.ctx.args, + typ=AAZObjectType, + typ_kwargs={"flags": {"required": True, "client_flatten": True}} + ) + _builder.set_prop("logRules", AAZObjectType, ".log_rules") + _builder.set_prop("metricRules", AAZObjectType, ".metric_rules") + + log_rules = _builder.get(".logRules") + if log_rules is not None: + log_rules.set_prop("filteringTags", AAZListType, ".filtering_tags") + log_rules.set_prop("sendAadLogs", AAZStrType, ".send_aad_logs") + log_rules.set_prop("sendActivityLogs", AAZStrType, ".send_activity_logs") + log_rules.set_prop("sendSubscriptionLogs", AAZStrType, ".send_subscription_logs") + + filtering_tags = _builder.get(".logRules.filteringTags") + if filtering_tags is not None: + _build_schema_filtering_tag_update(filtering_tags.set_elements(AAZObjectType, ".")) + + metric_rules = _builder.get(".metricRules") + if metric_rules is not None: + metric_rules.set_prop("filteringTags", AAZListType, ".filtering_tags") + + filtering_tags = _builder.get(".metricRules.filteringTags") + if 
filtering_tags is not None: + _build_schema_filtering_tag_update(filtering_tags.set_elements(AAZObjectType, ".")) + + return self.serialize_content(_content_value) + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + _schema_on_200.id = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.name = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.properties = AAZObjectType( + flags={"required": True, "client_flatten": True}, + ) + _schema_on_200.system_data = AAZObjectType( + serialized_name="systemData", + flags={"read_only": True}, + ) + _schema_on_200.type = AAZStrType( + flags={"read_only": True}, + ) + + properties = cls._schema_on_200.properties + properties.log_rules = AAZObjectType( + serialized_name="logRules", + ) + properties.metric_rules = AAZObjectType( + serialized_name="metricRules", + ) + properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + + log_rules = cls._schema_on_200.properties.log_rules + log_rules.filtering_tags = AAZListType( + serialized_name="filteringTags", + ) + log_rules.send_aad_logs = AAZStrType( + serialized_name="sendAadLogs", + ) + log_rules.send_activity_logs = AAZStrType( + serialized_name="sendActivityLogs", + ) + log_rules.send_subscription_logs = AAZStrType( + serialized_name="sendSubscriptionLogs", + ) + + filtering_tags = cls._schema_on_200.properties.log_rules.filtering_tags + filtering_tags.Element = AAZObjectType() + _build_schema_filtering_tag_read(filtering_tags.Element) + + metric_rules = cls._schema_on_200.properties.metric_rules + metric_rules.filtering_tags = AAZListType( + serialized_name="filteringTags", + ) + + filtering_tags = cls._schema_on_200.properties.metric_rules.filtering_tags + filtering_tags.Element = AAZObjectType() + _build_schema_filtering_tag_read(filtering_tags.Element) + + system_data = cls._schema_on_200.system_data + system_data.created_at = AAZStrType( + serialized_name="createdAt", + ) + system_data.created_by = AAZStrType( + serialized_name="createdBy", + ) + system_data.created_by_type = AAZStrType( + serialized_name="createdByType", + ) + system_data.last_modified_at = AAZStrType( + serialized_name="lastModifiedAt", + ) + system_data.last_modified_by = AAZStrType( + serialized_name="lastModifiedBy", + ) + system_data.last_modified_by_type = AAZStrType( + serialized_name="lastModifiedByType", + ) + + return cls._schema_on_200 + + +def _build_schema_filtering_tag_update(_builder): + if _builder is None: + return + _builder.set_prop("action", AAZStrType, ".action") + _builder.set_prop("name", AAZStrType, ".name") + _builder.set_prop("value", AAZStrType, ".value") + + +_schema_filtering_tag_read = None + + +def _build_schema_filtering_tag_read(_schema): + global _schema_filtering_tag_read + if _schema_filtering_tag_read is not None: + _schema.action = _schema_filtering_tag_read.action + _schema.name = _schema_filtering_tag_read.name + _schema.value = _schema_filtering_tag_read.value + return + + _schema_filtering_tag_read = AAZObjectType() + + filtering_tag_read = _schema_filtering_tag_read + filtering_tag_read.action = AAZStrType() + filtering_tag_read.name = AAZStrType() + filtering_tag_read.value = AAZStrType() + + 
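# Copy the cached filtering-tag fields onto the caller's schema; the module-level
+ # _schema_filtering_tag_read global above ensures this shared read schema is built only
+ # once and then reused for every response schema in this module that references it.
+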
_schema.action = _schema_filtering_tag_read.action + _schema.name = _schema_filtering_tag_read.name + _schema.value = _schema_filtering_tag_read.value + + +__all__ = ["Update"] diff --git a/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_wait.py b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_wait.py new file mode 100644 index 00000000000..90fa3713d39 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/aaz/latest/dynatrace/monitor/tag_rule/_wait.py @@ -0,0 +1,263 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: skip-file +# flake8: noqa + +from azure.cli.core.aaz import * + + +@register_command( + "dynatrace monitor tag-rule wait", +) +class Wait(AAZWaitCommand): + """Place the CLI in a waiting state until a condition is met. + """ + + _aaz_info = { + "resources": [ + ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/dynatrace.observability/monitors/{}/tagrules/{}", "2021-09-01"], + ] + } + + def _handler(self, command_args): + super()._handler(command_args) + self._execute_operations() + return self._output() + + _args_schema = None + + @classmethod + def _build_arguments_schema(cls, *args, **kwargs): + if cls._args_schema is not None: + return cls._args_schema + cls._args_schema = super()._build_arguments_schema(*args, **kwargs) + + # define Arg Group "" + + _args_schema = cls._args_schema + _args_schema.monitor_name = AAZStrArg( + options=["--monitor-name"], + help="Monitor resource name", + required=True, + id_part="name", + ) + _args_schema.resource_group = AAZResourceGroupNameArg( + required=True, + ) + _args_schema.rule_set_name = AAZStrArg( + options=["-n", "--name", "--rule-set-name"], + help="Monitor rule set name", + required=True, + id_part="child_name_1", + ) + return cls._args_schema + + def _execute_operations(self): + self.pre_operations() + self.TagRulesGet(ctx=self.ctx)() + self.post_operations() + + @register_callback + def pre_operations(self): + pass + + @register_callback + def post_operations(self): + pass + + def _output(self, *args, **kwargs): + result = self.deserialize_output(self.ctx.vars.instance, client_flatten=False) + return result + + class TagRulesGet(AAZHttpOperation): + CLIENT_TYPE = "MgmtClient" + + def __call__(self, *args, **kwargs): + request = self.make_request() + session = self.client.send_request(request=request, stream=False, **kwargs) + if session.http_response.status_code in [200]: + return self.on_200(session) + + return self.on_error(session.http_response) + + @property + def url(self): + return self.client.format_url( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Dynatrace.Observability/monitors/{monitorName}/tagRules/{ruleSetName}", + **self.url_parameters + ) + + @property + def method(self): + return "GET" + + @property + def error_format(self): + return "MgmtErrorFormat" + + @property + def url_parameters(self): + parameters = { + **self.serialize_url_param( + "monitorName", self.ctx.args.monitor_name, + required=True, + ), + **self.serialize_url_param( + "resourceGroupName", self.ctx.args.resource_group, + required=True, + ), + **self.serialize_url_param( + "ruleSetName", self.ctx.args.rule_set_name, + 
required=True, + ), + **self.serialize_url_param( + "subscriptionId", self.ctx.subscription_id, + required=True, + ), + } + return parameters + + @property + def query_parameters(self): + parameters = { + **self.serialize_query_param( + "api-version", "2021-09-01", + required=True, + ), + } + return parameters + + @property + def header_parameters(self): + parameters = { + **self.serialize_header_param( + "Accept", "application/json", + ), + } + return parameters + + def on_200(self, session): + data = self.deserialize_http_content(session) + self.ctx.set_var( + "instance", + data, + schema_builder=self._build_schema_on_200 + ) + + _schema_on_200 = None + + @classmethod + def _build_schema_on_200(cls): + if cls._schema_on_200 is not None: + return cls._schema_on_200 + + cls._schema_on_200 = AAZObjectType() + + _schema_on_200 = cls._schema_on_200 + _schema_on_200.id = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.name = AAZStrType( + flags={"read_only": True}, + ) + _schema_on_200.properties = AAZObjectType( + flags={"required": True, "client_flatten": True}, + ) + _schema_on_200.system_data = AAZObjectType( + serialized_name="systemData", + flags={"read_only": True}, + ) + _schema_on_200.type = AAZStrType( + flags={"read_only": True}, + ) + + properties = cls._schema_on_200.properties + properties.log_rules = AAZObjectType( + serialized_name="logRules", + ) + properties.metric_rules = AAZObjectType( + serialized_name="metricRules", + ) + properties.provisioning_state = AAZStrType( + serialized_name="provisioningState", + ) + + log_rules = cls._schema_on_200.properties.log_rules + log_rules.filtering_tags = AAZListType( + serialized_name="filteringTags", + ) + log_rules.send_aad_logs = AAZStrType( + serialized_name="sendAadLogs", + ) + log_rules.send_activity_logs = AAZStrType( + serialized_name="sendActivityLogs", + ) + log_rules.send_subscription_logs = AAZStrType( + serialized_name="sendSubscriptionLogs", + ) + + filtering_tags = cls._schema_on_200.properties.log_rules.filtering_tags + filtering_tags.Element = AAZObjectType() + _build_schema_filtering_tag_read(filtering_tags.Element) + + metric_rules = cls._schema_on_200.properties.metric_rules + metric_rules.filtering_tags = AAZListType( + serialized_name="filteringTags", + ) + + filtering_tags = cls._schema_on_200.properties.metric_rules.filtering_tags + filtering_tags.Element = AAZObjectType() + _build_schema_filtering_tag_read(filtering_tags.Element) + + system_data = cls._schema_on_200.system_data + system_data.created_at = AAZStrType( + serialized_name="createdAt", + ) + system_data.created_by = AAZStrType( + serialized_name="createdBy", + ) + system_data.created_by_type = AAZStrType( + serialized_name="createdByType", + ) + system_data.last_modified_at = AAZStrType( + serialized_name="lastModifiedAt", + ) + system_data.last_modified_by = AAZStrType( + serialized_name="lastModifiedBy", + ) + system_data.last_modified_by_type = AAZStrType( + serialized_name="lastModifiedByType", + ) + + return cls._schema_on_200 + + +_schema_filtering_tag_read = None + + +def _build_schema_filtering_tag_read(_schema): + global _schema_filtering_tag_read + if _schema_filtering_tag_read is not None: + _schema.action = _schema_filtering_tag_read.action + _schema.name = _schema_filtering_tag_read.name + _schema.value = _schema_filtering_tag_read.value + return + + _schema_filtering_tag_read = AAZObjectType() + + filtering_tag_read = _schema_filtering_tag_read + filtering_tag_read.action = AAZStrType() + filtering_tag_read.name = 
AAZStrType() + filtering_tag_read.value = AAZStrType() + + _schema.action = _schema_filtering_tag_read.action + _schema.name = _schema_filtering_tag_read.name + _schema.value = _schema_filtering_tag_read.value + + +__all__ = ["Wait"] diff --git a/src/dynatrace/azext_dynatrace/azext_metadata.json b/src/dynatrace/azext_dynatrace/azext_metadata.json new file mode 100644 index 00000000000..0a5db3c35db --- /dev/null +++ b/src/dynatrace/azext_dynatrace/azext_metadata.json @@ -0,0 +1,4 @@ +{ + "azext.isPreview": true, + "azext.minCliCoreVersion": "2.41.0" +} \ No newline at end of file diff --git a/src/dynatrace/azext_dynatrace/commands.py b/src/dynatrace/azext_dynatrace/commands.py new file mode 100644 index 00000000000..b0d842e4993 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/commands.py @@ -0,0 +1,15 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: disable=too-many-lines +# pylint: disable=too-many-statements + +# from azure.cli.core.commands import CliCommandType + + +def load_command_table(self, _): # pylint: disable=unused-argument + pass diff --git a/src/dynatrace/azext_dynatrace/custom.py b/src/dynatrace/azext_dynatrace/custom.py new file mode 100644 index 00000000000..86df1e48ef5 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/custom.py @@ -0,0 +1,14 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +# pylint: disable=too-many-lines +# pylint: disable=too-many-statements + +from knack.log import get_logger + + +logger = get_logger(__name__) diff --git a/src/dynatrace/azext_dynatrace/tests/__init__.py b/src/dynatrace/azext_dynatrace/tests/__init__.py new file mode 100644 index 00000000000..5757aea3175 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/tests/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- diff --git a/src/dynatrace/azext_dynatrace/tests/latest/__init__.py b/src/dynatrace/azext_dynatrace/tests/latest/__init__.py new file mode 100644 index 00000000000..5757aea3175 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/tests/latest/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- diff --git a/src/dynatrace/azext_dynatrace/tests/latest/credential_replacer.py b/src/dynatrace/azext_dynatrace/tests/latest/credential_replacer.py new file mode 100644 index 00000000000..7e369e37663 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/tests/latest/credential_replacer.py @@ -0,0 +1,24 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +# pylint: disable=line-too-long + +from azure.cli.testsdk.scenario_tests import RecordingProcessor + + +class ExpressRoutePortLOAContentReplacer(RecordingProcessor): + + def process_response(self, response): + import json + import base64 + + body = response['body']['string'] + + json_body = json.loads(body) + if json_body and 'ingestionKey' in json_body.keys(): + json_body['ingestionKey'] = base64.b64encode('ingestionKey content replaced by ExpressRoutePortLOAContentReplacer'.encode('utf-8')).decode('utf-8') + response['body']['string'] = json.dumps(json_body) + + return response diff --git a/src/dynatrace/azext_dynatrace/tests/latest/recordings/test_dynatrace_monitor.yaml b/src/dynatrace/azext_dynatrace/tests/latest/recordings/test_dynatrace_monitor.yaml new file mode 100644 index 00000000000..e1197385dd8 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/tests/latest/recordings/test_dynatrace_monitor.yaml @@ -0,0 +1,1002 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor create + Connection: + - keep-alive + ParameterSetName: + - -g -n --user-info --plan-data --dynatrace-environment-properties + User-Agent: + - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_dynatrace_monitor000001?api-version=2021-04-01 + response: + body: + string: '{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001", + "name": "cli_test_dynatrace_monitor000001", "type": "Microsoft.Resources/resourceGroups", + "location": "eastus2euap", "tags": {"product": "azurecli", "cause": "automation", + "date": "2022-09-14T08:43:56Z"}, "properties": {"provisioningState": "Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '364' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:44:00 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"location": "eastus2euap", "properties": {"dynatraceEnvironmentProperties": + {"singleSignOnProperties": {"aadDomains": ["abc"]}}, "planData": {"billingCycle": + "Monthly", "effectiveDate": "2022-08-19T16:00:00.000Z", "planDetails": "azureportalintegration_privatepreview@TIDhjdtn7tfnxcy", + "usageType": "committed"}, "userInfo": {"country": "US", "emailAddress": "agarwald@microsoft.com", + "firstName": "Alice", "lastName": "Bobab", "phoneNumber": 
"1234567890"}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor create + Connection: + - keep-alive + Content-Length: + - '459' + Content-Type: + - application/json + ParameterSetName: + - -g -n --user-info --plan-data --dynatrace-environment-properties + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors/monitor000002?api-version=2021-09-01 + response: + body: + string: '{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors/monitor000002", + "name": "monitor000002", "type": "dynatrace.observability/monitors", "location": + "eastus2euap", "systemData": {"createdBy": "v-jingszhang@microsoft.com", "createdByType": + "User", "createdAt": "2022-09-14T08:44:06.9550418Z", "lastModifiedBy": "v-jingszhang@microsoft.com", + "lastModifiedByType": "User", "lastModifiedAt": "2022-09-14T08:44:06.9550418Z"}, + "properties": {"monitoringStatus": "Enabled", "marketplaceSubscriptionStatus": + "Active", "dynatraceEnvironmentProperties": {"userId": null, "accountInfo": + null, "environmentInfo": null, "singleSignOnProperties": {"singleSignOnState": + "Initial", "enterpriseAppId": null, "singleSignOnUrl": null, "aadDomains": + ["abc"], "provisioningState": "Accepted"}}, "userInfo": null, "planData": + {"usageType": "committed", "billingCycle": "Monthly", "planDetails": "azureportalintegration_privatepreview@TIDhjdtn7tfnxcy", + "effectiveDate": "2022-08-19T16:00:00Z"}, "liftrResourceCategory": "MonitorLogs", + "liftrResourcePreference": 0, "provisioningState": "Accepted"}}' + headers: + azure-asyncoperation: + - https://management.azure.com/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/6515daec-1f3d-4e05-904e-99d84cc38130*9A18A00D17A801D914F8B65C518468D8EFF8CEFB2D045EE7CDD4D5E317C3BD5F?api-version=2021-09-01 + cache-control: + - no-cache + content-length: + - '1174' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:44:15 GMT + etag: + - '"a500326d-0000-0300-0000-6321945e0000"' + expires: + - '-1' + location: + - https://management.azure.com/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/6515daec-1f3d-4e05-904e-99d84cc38130*9A18A00D17A801D914F8B65C518468D8EFF8CEFB2D045EE7CDD4D5E317C3BD5F?api-version=2021-09-01 + mise-correlation-id: + - ffa7fa4e-65ad-4eaf-9d74-7169c9f3cbbc + pragma: + - no-cache + request-context: + - appId=cid-v1:fab059ea-d726-424a-9451-6beb3dc74944 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor create + Connection: + - keep-alive + ParameterSetName: + - -g -n --user-info --plan-data --dynatrace-environment-properties + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: 
https://management.azure.com/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/6515daec-1f3d-4e05-904e-99d84cc38130*9A18A00D17A801D914F8B65C518468D8EFF8CEFB2D045EE7CDD4D5E317C3BD5F?api-version=2021-09-01 + response: + body: + string: '{"id": "/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/6515daec-1f3d-4e05-904e-99d84cc38130*9A18A00D17A801D914F8B65C518468D8EFF8CEFB2D045EE7CDD4D5E317C3BD5F", + "name": "6515daec-1f3d-4e05-904e-99d84cc38130*9A18A00D17A801D914F8B65C518468D8EFF8CEFB2D045EE7CDD4D5E317C3BD5F", + "resourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors/monitor000002", + "status": "Accepted", "startTime": "2022-09-14T08:44:13.4688138Z"}' + headers: + cache-control: + - no-cache + content-length: + - '540' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:44:44 GMT + etag: + - '"03023006-0000-0300-0000-6321945d0000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor create + Connection: + - keep-alive + ParameterSetName: + - -g -n --user-info --plan-data --dynatrace-environment-properties + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/6515daec-1f3d-4e05-904e-99d84cc38130*9A18A00D17A801D914F8B65C518468D8EFF8CEFB2D045EE7CDD4D5E317C3BD5F?api-version=2021-09-01 + response: + body: + string: '{"id": "/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/6515daec-1f3d-4e05-904e-99d84cc38130*9A18A00D17A801D914F8B65C518468D8EFF8CEFB2D045EE7CDD4D5E317C3BD5F", + "name": "6515daec-1f3d-4e05-904e-99d84cc38130*9A18A00D17A801D914F8B65C518468D8EFF8CEFB2D045EE7CDD4D5E317C3BD5F", + "resourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors/monitor000002", + "status": "Succeeded", "startTime": "2022-09-14T08:44:13.4688138Z", "endTime": + "2022-09-14T08:45:10.6707115Z", "error": {}, "properties": null}' + headers: + cache-control: + - no-cache + content-length: + - '617' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:45:15 GMT + etag: + - '"03027607-0000-0300-0000-632194960000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor create + Connection: + - keep-alive + ParameterSetName: + - -g -n --user-info --plan-data --dynatrace-environment-properties + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors/monitor000002?api-version=2021-09-01 + response: + body: + string: '{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors/monitor000002", + "name": "monitor000002", "type": "dynatrace.observability/monitors", "location": + "eastus2euap", "systemData": {"createdBy": "v-jingszhang@microsoft.com", "createdByType": + "User", "createdAt": "2022-09-14T08:44:06.9550418Z", "lastModifiedBy": "v-jingszhang@microsoft.com", + "lastModifiedByType": "User", "lastModifiedAt": "2022-09-14T08:44:06.9550418Z"}, + "properties": {"monitoringStatus": "Enabled", "marketplaceSubscriptionStatus": + "Active", "dynatraceEnvironmentProperties": {"userId": "1db33da7-af65-4503-9812-6f0070e2130d", + "accountInfo": {"accountId": "df877719-854b-4025-8dd7-29d4bf29c23e", "regionId": + "eastus"}, "environmentInfo": {"environmentId": "msl44559", "logsIngestionEndpoint": + "https://msl44559.sprint.dynatracelabs.com/api/v2/logs/ingest", "landingURL": + "https://msl44559.sprint.dynatracelabs.com/"}, "singleSignOnProperties": {"singleSignOnState": + "Initial", "aadDomains": ["abc"], "provisioningState": "Accepted"}}, "userInfo": + {"firstName": "Alice", "lastName": "Bobab", "emailAddress": "agarwald@microsoft.com", + "phoneNumber": "1234567890", "country": "US"}, "planData": {"usageType": "committed", + "billingCycle": "Monthly", "planDetails": "azureportalintegration_privatepreview@TIDhjdtn7tfnxcy", + "effectiveDate": "2022-08-19T16:00:00Z"}, "liftrResourceCategory": "MonitorLogs", + "liftrResourcePreference": 0, "provisioningState": "Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '1531' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:45:16 GMT + etag: + - '"a5009b71-0000-0300-0000-632194960000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor show + Connection: + - keep-alive + ParameterSetName: + - -g -n + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors/monitor000002?api-version=2021-09-01 + response: + body: + string: '{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors/monitor000002", + "name": "monitor000002", "type": "dynatrace.observability/monitors", "location": + "eastus2euap", "systemData": {"createdBy": "v-jingszhang@microsoft.com", "createdByType": + "User", "createdAt": "2022-09-14T08:44:06.9550418Z", "lastModifiedBy": "v-jingszhang@microsoft.com", + "lastModifiedByType": "User", "lastModifiedAt": "2022-09-14T08:44:06.9550418Z"}, + "properties": {"monitoringStatus": "Enabled", "marketplaceSubscriptionStatus": + "Active", "dynatraceEnvironmentProperties": {"userId": 
"1db33da7-af65-4503-9812-6f0070e2130d", + "accountInfo": {"accountId": "df877719-854b-4025-8dd7-29d4bf29c23e", "regionId": + "eastus"}, "environmentInfo": {"environmentId": "msl44559", "logsIngestionEndpoint": + "https://msl44559.sprint.dynatracelabs.com/api/v2/logs/ingest", "landingURL": + "https://msl44559.sprint.dynatracelabs.com/"}, "singleSignOnProperties": {"singleSignOnState": + "Initial", "aadDomains": ["abc"], "provisioningState": "Accepted"}}, "userInfo": + {"firstName": "Alice", "lastName": "Bobab", "emailAddress": "agarwald@microsoft.com", + "phoneNumber": "1234567890", "country": "US"}, "planData": {"usageType": "committed", + "billingCycle": "Monthly", "planDetails": "azureportalintegration_privatepreview@TIDhjdtn7tfnxcy", + "effectiveDate": "2022-08-19T16:00:00Z"}, "liftrResourceCategory": "MonitorLogs", + "liftrResourcePreference": 0, "provisioningState": "Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '1531' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:45:18 GMT + etag: + - '"a5009b71-0000-0300-0000-632194960000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + status: + code: 200 + message: OK +- request: + body: '{"tags": {"env": "dev"}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor update + Connection: + - keep-alive + Content-Length: + - '24' + Content-Type: + - application/json + ParameterSetName: + - -g -n --tags + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: PATCH + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors/monitor000002?api-version=2021-09-01 + response: + body: + string: '{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors/monitor000002", + "name": "monitor000002", "type": "dynatrace.observability/monitors", "location": + "eastus2euap", "tags": {"env": "dev"}, "systemData": {"createdBy": "v-jingszhang@microsoft.com", + "createdByType": "User", "createdAt": "2022-09-14T08:44:06.9550418Z", "lastModifiedBy": + "v-jingszhang@microsoft.com", "lastModifiedByType": "User", "lastModifiedAt": + "2022-09-14T08:45:22.0385144Z"}, "properties": {"monitoringStatus": "Enabled", + "marketplaceSubscriptionStatus": "Active", "dynatraceEnvironmentProperties": + {"userId": "1db33da7-af65-4503-9812-6f0070e2130d", "accountInfo": {"accountId": + "df877719-854b-4025-8dd7-29d4bf29c23e", "regionId": "eastus"}, "environmentInfo": + {"environmentId": "msl44559", "logsIngestionEndpoint": "https://msl44559.sprint.dynatracelabs.com/api/v2/logs/ingest", + "landingURL": "https://msl44559.sprint.dynatracelabs.com/"}, "singleSignOnProperties": + {"singleSignOnState": "Initial", "aadDomains": ["abc"], "provisioningState": + "Accepted"}}, "userInfo": {"firstName": "Alice", "lastName": "Bobab", "emailAddress": + "agarwald@microsoft.com", "phoneNumber": "1234567890", "country": "US"}, "planData": + {"usageType": "committed", "billingCycle": "Monthly", "planDetails": "azureportalintegration_privatepreview@TIDhjdtn7tfnxcy", + "effectiveDate": "2022-08-19T16:00:00Z"}, "liftrResourceCategory": "MonitorLogs", + 
"liftrResourcePreference": 0, "provisioningState": "Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '1555' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:45:26 GMT + etag: + - '"a500bc72-0000-0300-0000-632194a20000"' + expires: + - '-1' + mise-correlation-id: + - 5be38f9e-8553-443f-baf7-1afe433d1e56 + pragma: + - no-cache + request-context: + - appId=cid-v1:fab059ea-d726-424a-9451-6beb3dc74944 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor list + Connection: + - keep-alive + ParameterSetName: + - -g + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors?api-version=2021-09-01 + response: + body: + string: '{"value": [{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors/monitor000002", + "name": "monitor000002", "type": "dynatrace.observability/monitors", "location": + "eastus2euap", "tags": {"env": "dev"}, "systemData": {"createdBy": "v-jingszhang@microsoft.com", + "createdByType": "User", "createdAt": "2022-09-14T08:44:06.9550418Z", "lastModifiedBy": + "v-jingszhang@microsoft.com", "lastModifiedByType": "User", "lastModifiedAt": + "2022-09-14T08:45:22.0385144Z"}, "properties": {"monitoringStatus": "Enabled", + "marketplaceSubscriptionStatus": "Active", "dynatraceEnvironmentProperties": + {"userId": "1db33da7-af65-4503-9812-6f0070e2130d", "accountInfo": {"accountId": + "df877719-854b-4025-8dd7-29d4bf29c23e", "regionId": "eastus"}, "environmentInfo": + {"environmentId": "msl44559", "logsIngestionEndpoint": "https://msl44559.sprint.dynatracelabs.com/api/v2/logs/ingest", + "landingURL": "https://msl44559.sprint.dynatracelabs.com/"}, "singleSignOnProperties": + {"singleSignOnState": "Initial", "aadDomains": ["abc"], "provisioningState": + "Accepted"}}, "userInfo": {"firstName": "Alice", "lastName": "Bobab", "emailAddress": + "agarwald@microsoft.com", "phoneNumber": "1234567890", "country": "US"}, "planData": + {"usageType": "committed", "billingCycle": "Monthly", "planDetails": "azureportalintegration_privatepreview@TIDhjdtn7tfnxcy", + "effectiveDate": "2022-08-19T16:00:00Z"}, "liftrResourceCategory": "MonitorLogs", + "liftrResourcePreference": 0, "provisioningState": "Succeeded"}}]}' + headers: + cache-control: + - no-cache + content-length: + - '1568' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:45:28 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-original-request-ids: + - 23e06d6d-4787-49af-ae2b-2337f645780a + - 2981a166-5f62-4b5c-a2e4-ed8d0df55155 + - b4903a0a-6748-4d25-9249-3bf03e0162a4 + - 62b4f977-b000-4d79-adfa-3dc2d5c72ae2 + - 965cf5c3-ef4f-4ba3-8558-625ee59ec38b + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - 
application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor list-app-service + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -g --monitor-name + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors/monitor000002/listAppServices?api-version=2021-09-01 + response: + body: + string: '{"value": [], "nextLink": null}' + headers: + cache-control: + - no-cache + content-length: + - '31' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:45:30 GMT + expires: + - '-1' + mise-correlation-id: + - 40a171a6-3177-4962-b969-28b4efe2a325 + pragma: + - no-cache + request-context: + - appId=cid-v1:fab059ea-d726-424a-9451-6beb3dc74944 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor list-host + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -g --monitor-name + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors/monitor000002/listHosts?api-version=2021-09-01 + response: + body: + string: '{"value": [], "nextLink": null}' + headers: + cache-control: + - no-cache + content-length: + - '31' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:45:32 GMT + expires: + - '-1' + mise-correlation-id: + - 43c5fc83-a0e4-4000-ba1d-1c5a7e0d815d + pragma: + - no-cache + request-context: + - appId=cid-v1:fab059ea-d726-424a-9451-6beb3dc74944 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor list-monitored-resource + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -g --monitor-name + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors/monitor000002/listMonitoredResources?api-version=2021-09-01 + response: + body: + string: '{"value": [], "nextLink": null}' + headers: + cache-control: + - no-cache + content-length: + - '31' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:45:35 GMT + expires: + - '-1' + mise-correlation-id: + - 6627e2fa-d11e-4ec9-84a7-f5778010de44 + pragma: + - no-cache + request-context: + - appId=cid-v1:fab059ea-d726-424a-9451-6beb3dc74944 + 
strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 200 + message: OK +- request: + body: '{"region": "eastus2euap", "tenantId": "be9927fa-821c-4178-9dae-e520c4beca74", + "userPrincipal": "agarwald@microsoft.com"}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor list-linkable-environment + Connection: + - keep-alive + Content-Length: + - '120' + Content-Type: + - application/json + ParameterSetName: + - -g --monitor-name --user-principal --region --tenant-id + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors/monitor000002/listLinkableEnvironments?api-version=2021-09-01 + response: + body: + string: '{"value": [], "nextLink": null}' + headers: + cache-control: + - no-cache + content-length: + - '31' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:45:37 GMT + expires: + - '-1' + mise-correlation-id: + - 07eb40e4-522f-4c24-8f77-2d66666f5a18 + pragma: + - no-cache + request-context: + - appId=cid-v1:fab059ea-d726-424a-9451-6beb3dc74944 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 200 + message: OK +- request: + body: '{"userPrincipal": "agarwald@microsoft.com"}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor get-sso-detail + Connection: + - keep-alive + Content-Length: + - '43' + Content-Type: + - application/json + ParameterSetName: + - -g --monitor-name --user-principal + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors/monitor000002/getSSODetails?api-version=2021-09-01 + response: + body: + string: '{"singleSignOnUrl": null, "metadataUrl": null, "isSsoEnabled": "Disabled", + "aadDomains": [], "adminUsers": ["agarwald@microsoft.com", "abhargava@microsoft.com", + "sveeravalli@microsoft.com", "v-jingszhang@microsoft.com"]}' + headers: + cache-control: + - no-cache + content-length: + - '220' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:45:39 GMT + expires: + - '-1' + mise-correlation-id: + - 1fc92fb5-473a-4082-971e-12cb5edbd2d0 + pragma: + - no-cache + request-context: + - appId=cid-v1:fab059ea-d726-424a-9451-6beb3dc74944 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor get-vm-host-payload + Connection: + - 
keep-alive + Content-Length: + - '0' + ParameterSetName: + - -g --monitor-name + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors/monitor000002/getVMHostPayload?api-version=2021-09-01 + response: + body: + string: '{"ingestionKey": "aW5nZXN0aW9uS2V5IGNvbnRlbnQgcmVwbGFjZWQgYnkgRXhwcmVzc1JvdXRlUG9ydExPQUNvbnRlbnRSZXBsYWNlcg==", + "environmentId": "msl44559"}' + headers: + cache-control: + - no-cache + content-length: + - '141' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:45:42 GMT + expires: + - '-1' + mise-correlation-id: + - b9a91276-db4e-4e5a-81e1-e7d9392f8357 + pragma: + - no-cache + request-context: + - appId=cid-v1:fab059ea-d726-424a-9451-6beb3dc74944 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -n -g -y + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors/monitor000002?api-version=2021-09-01 + response: + body: + string: 'null' + headers: + azure-asyncoperation: + - https://management.azure.com/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/3a49cea9-85ba-4ea1-90d7-6af6a7ea6911*9A18A00D17A801D914F8B65C518468D8EFF8CEFB2D045EE7CDD4D5E317C3BD5F?api-version=2021-09-01 + cache-control: + - no-cache + content-length: + - '4' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:45:46 GMT + etag: + - '"a5005774-0000-0300-0000-632194b90000"' + expires: + - '-1' + location: + - https://management.azure.com/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/3a49cea9-85ba-4ea1-90d7-6af6a7ea6911*9A18A00D17A801D914F8B65C518468D8EFF8CEFB2D045EE7CDD4D5E317C3BD5F?api-version=2021-09-01 + mise-correlation-id: + - 8199eda5-1afe-4663-b292-a60672bb544b + pragma: + - no-cache + request-context: + - appId=cid-v1:fab059ea-d726-424a-9451-6beb3dc74944 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + status: + code: 202 + message: Accepted +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor delete + Connection: + - keep-alive + ParameterSetName: + - -n -g -y + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/3a49cea9-85ba-4ea1-90d7-6af6a7ea6911*9A18A00D17A801D914F8B65C518468D8EFF8CEFB2D045EE7CDD4D5E317C3BD5F?api-version=2021-09-01 + 
response: + body: + string: '{"id": "/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/3a49cea9-85ba-4ea1-90d7-6af6a7ea6911*9A18A00D17A801D914F8B65C518468D8EFF8CEFB2D045EE7CDD4D5E317C3BD5F", + "name": "3a49cea9-85ba-4ea1-90d7-6af6a7ea6911*9A18A00D17A801D914F8B65C518468D8EFF8CEFB2D045EE7CDD4D5E317C3BD5F", + "resourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors/monitor000002", + "status": "Deleting", "startTime": "2022-09-14T08:45:45.1853627Z", "error": + {}}' + headers: + cache-control: + - no-cache + content-length: + - '553' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:46:15 GMT + etag: + - '"03027208-0000-0300-0000-632194ba0000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + status: + code: 202 + message: Accepted +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor delete + Connection: + - keep-alive + ParameterSetName: + - -n -g -y + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/3a49cea9-85ba-4ea1-90d7-6af6a7ea6911*9A18A00D17A801D914F8B65C518468D8EFF8CEFB2D045EE7CDD4D5E317C3BD5F?api-version=2021-09-01 + response: + body: + string: '{"id": "/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/3a49cea9-85ba-4ea1-90d7-6af6a7ea6911*9A18A00D17A801D914F8B65C518468D8EFF8CEFB2D045EE7CDD4D5E317C3BD5F", + "name": "3a49cea9-85ba-4ea1-90d7-6af6a7ea6911*9A18A00D17A801D914F8B65C518468D8EFF8CEFB2D045EE7CDD4D5E317C3BD5F", + "resourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors/monitor000002", + "status": "Deleting", "startTime": "2022-09-14T08:45:45.1853627Z", "error": + {}}' + headers: + cache-control: + - no-cache + content-length: + - '553' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:46:46 GMT + etag: + - '"03027208-0000-0300-0000-632194ba0000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + status: + code: 202 + message: Accepted +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor delete + Connection: + - keep-alive + ParameterSetName: + - -n -g -y + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/3a49cea9-85ba-4ea1-90d7-6af6a7ea6911*9A18A00D17A801D914F8B65C518468D8EFF8CEFB2D045EE7CDD4D5E317C3BD5F?api-version=2021-09-01 + response: + body: + string: '{"id": "/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/3a49cea9-85ba-4ea1-90d7-6af6a7ea6911*9A18A00D17A801D914F8B65C518468D8EFF8CEFB2D045EE7CDD4D5E317C3BD5F", + "name": "3a49cea9-85ba-4ea1-90d7-6af6a7ea6911*9A18A00D17A801D914F8B65C518468D8EFF8CEFB2D045EE7CDD4D5E317C3BD5F", + "resourceId": 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor000001/providers/Dynatrace.Observability/monitors/monitor000002", + "status": "Succeeded", "startTime": "2022-09-14T08:45:45.1853627Z", "endTime": + "2022-09-14T08:46:51.4240111Z", "error": {}, "properties": null}' + headers: + cache-control: + - no-cache + content-length: + - '617' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:47:16 GMT + etag: + - '"0302030a-0000-0300-0000-632194fb0000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +version: 1 diff --git a/src/dynatrace/azext_dynatrace/tests/latest/recordings/test_dynatrace_monitor_single_sign_on_configurations.yaml b/src/dynatrace/azext_dynatrace/tests/latest/recordings/test_dynatrace_monitor_single_sign_on_configurations.yaml new file mode 100644 index 00000000000..0d771407123 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/tests/latest/recordings/test_dynatrace_monitor_single_sign_on_configurations.yaml @@ -0,0 +1,564 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor create + Connection: + - keep-alive + ParameterSetName: + - -g -n --user-info --plan-data --dynatrace-environment-properties + User-Agent: + - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_dynatrace_monitor_single_sign_on_configurations000001?api-version=2021-04-01 + response: + body: + string: '{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_single_sign_on_configurations000001", + "name": "cli_test_dynatrace_monitor_single_sign_on_configurations000001", + "type": "Microsoft.Resources/resourceGroups", "location": "eastus2euap", "tags": + {"product": "azurecli", "cause": "automation", "date": "2022-09-14T08:37:30Z"}, + "properties": {"provisioningState": "Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '424' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:37:35 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"location": "eastus2euap", "properties": {"dynatraceEnvironmentProperties": + {"singleSignOnProperties": {"aadDomains": ["abc"]}}, "planData": {"billingCycle": + "Monthly", "effectiveDate": "2022-08-19T16:00:00.000Z", "planDetails": "azureportalintegration_privatepreview@TIDhjdtn7tfnxcy", + "usageType": "committed"}, "userInfo": {"country": "US", "emailAddress": "agarwald@microsoft.com", + "firstName": "Alice", "lastName": "Bobab", "phoneNumber": "1234567890"}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor create + Connection: + - keep-alive + Content-Length: + - '459' + Content-Type: + - application/json + ParameterSetName: + - -g -n --user-info --plan-data --dynatrace-environment-properties + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 
(Windows-10-10.0.22000-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_single_sign_on_configurations000001/providers/Dynatrace.Observability/monitors/monitor000002?api-version=2021-09-01 + response: + body: + string: '{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_single_sign_on_configurations000001/providers/Dynatrace.Observability/monitors/monitor000002", + "name": "monitor000002", "type": "dynatrace.observability/monitors", "location": + "eastus2euap", "systemData": {"createdBy": "v-jingszhang@microsoft.com", "createdByType": + "User", "createdAt": "2022-09-14T08:37:44.7953005Z", "lastModifiedBy": "v-jingszhang@microsoft.com", + "lastModifiedByType": "User", "lastModifiedAt": "2022-09-14T08:37:44.7953005Z"}, + "properties": {"monitoringStatus": "Enabled", "marketplaceSubscriptionStatus": + "Active", "dynatraceEnvironmentProperties": {"userId": null, "accountInfo": + null, "environmentInfo": null, "singleSignOnProperties": {"singleSignOnState": + "Initial", "enterpriseAppId": null, "singleSignOnUrl": null, "aadDomains": + ["abc"], "provisioningState": "Accepted"}}, "userInfo": null, "planData": + {"usageType": "committed", "billingCycle": "Monthly", "planDetails": "azureportalintegration_privatepreview@TIDhjdtn7tfnxcy", + "effectiveDate": "2022-08-19T16:00:00Z"}, "liftrResourceCategory": "MonitorLogs", + "liftrResourcePreference": 0, "provisioningState": "Accepted"}}' + headers: + azure-asyncoperation: + - https://management.azure.com/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/3e96b3b8-16f6-40d4-b135-1ff77a012f45*D9CDF81CC7A6445B39AD435A6BAD8A606DE750FAA38DD4B064EEC40672B76520?api-version=2021-09-01 + cache-control: + - no-cache + content-length: + - '1204' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:37:50 GMT + etag: + - '"a500554d-0000-0300-0000-632192de0000"' + expires: + - '-1' + location: + - https://management.azure.com/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/3e96b3b8-16f6-40d4-b135-1ff77a012f45*D9CDF81CC7A6445B39AD435A6BAD8A606DE750FAA38DD4B064EEC40672B76520?api-version=2021-09-01 + mise-correlation-id: + - 1a3d0805-47f0-425d-923b-20729792a587 + pragma: + - no-cache + request-context: + - appId=cid-v1:fab059ea-d726-424a-9451-6beb3dc74944 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor create + Connection: + - keep-alive + ParameterSetName: + - -g -n --user-info --plan-data --dynatrace-environment-properties + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/3e96b3b8-16f6-40d4-b135-1ff77a012f45*D9CDF81CC7A6445B39AD435A6BAD8A606DE750FAA38DD4B064EEC40672B76520?api-version=2021-09-01 + response: + body: + string: '{"id": "/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/3e96b3b8-16f6-40d4-b135-1ff77a012f45*D9CDF81CC7A6445B39AD435A6BAD8A606DE750FAA38DD4B064EEC40672B76520", + "name": 
"3e96b3b8-16f6-40d4-b135-1ff77a012f45*D9CDF81CC7A6445B39AD435A6BAD8A606DE750FAA38DD4B064EEC40672B76520", + "resourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_single_sign_on_configurations000001/providers/Dynatrace.Observability/monitors/monitor000002", + "status": "Accepted", "startTime": "2022-09-14T08:37:49.9564958Z"}' + headers: + cache-control: + - no-cache + content-length: + - '570' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:38:21 GMT + etag: + - '"020252fd-0000-0300-0000-632192dd0000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor create + Connection: + - keep-alive + ParameterSetName: + - -g -n --user-info --plan-data --dynatrace-environment-properties + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/3e96b3b8-16f6-40d4-b135-1ff77a012f45*D9CDF81CC7A6445B39AD435A6BAD8A606DE750FAA38DD4B064EEC40672B76520?api-version=2021-09-01 + response: + body: + string: '{"id": "/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/3e96b3b8-16f6-40d4-b135-1ff77a012f45*D9CDF81CC7A6445B39AD435A6BAD8A606DE750FAA38DD4B064EEC40672B76520", + "name": "3e96b3b8-16f6-40d4-b135-1ff77a012f45*D9CDF81CC7A6445B39AD435A6BAD8A606DE750FAA38DD4B064EEC40672B76520", + "resourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_single_sign_on_configurations000001/providers/Dynatrace.Observability/monitors/monitor000002", + "status": "Succeeded", "startTime": "2022-09-14T08:37:49.9564958Z", "endTime": + "2022-09-14T08:38:40.2431933Z", "error": {}, "properties": null}' + headers: + cache-control: + - no-cache + content-length: + - '647' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:38:51 GMT + etag: + - '"0202d6fe-0000-0300-0000-632193100000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor create + Connection: + - keep-alive + ParameterSetName: + - -g -n --user-info --plan-data --dynatrace-environment-properties + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_single_sign_on_configurations000001/providers/Dynatrace.Observability/monitors/monitor000002?api-version=2021-09-01 + response: + body: + string: '{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_single_sign_on_configurations000001/providers/Dynatrace.Observability/monitors/monitor000002", + "name": "monitor000002", "type": "dynatrace.observability/monitors", "location": + "eastus2euap", 
"systemData": {"createdBy": "v-jingszhang@microsoft.com", "createdByType": + "User", "createdAt": "2022-09-14T08:37:44.7953005Z", "lastModifiedBy": "v-jingszhang@microsoft.com", + "lastModifiedByType": "User", "lastModifiedAt": "2022-09-14T08:37:44.7953005Z"}, + "properties": {"monitoringStatus": "Enabled", "marketplaceSubscriptionStatus": + "Active", "dynatraceEnvironmentProperties": {"userId": "5d539180-2cd0-4ed6-bd7f-ecd8057295a7", + "accountInfo": {"accountId": "df877719-854b-4025-8dd7-29d4bf29c23e", "regionId": + "eastus"}, "environmentInfo": {"environmentId": "enj85099", "logsIngestionEndpoint": + "https://enj85099.sprint.dynatracelabs.com/api/v2/logs/ingest", "landingURL": + "https://enj85099.sprint.dynatracelabs.com/"}, "singleSignOnProperties": {"singleSignOnState": + "Initial", "aadDomains": ["abc"], "provisioningState": "Accepted"}}, "userInfo": + {"firstName": "Alice", "lastName": "Bobab", "emailAddress": "agarwald@microsoft.com", + "phoneNumber": "1234567890", "country": "US"}, "planData": {"usageType": "committed", + "billingCycle": "Monthly", "planDetails": "azureportalintegration_privatepreview@TIDhjdtn7tfnxcy", + "effectiveDate": "2022-08-19T16:00:00Z"}, "liftrResourceCategory": "MonitorLogs", + "liftrResourcePreference": 0, "provisioningState": "Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '1561' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:38:52 GMT + etag: + - '"a5002a52-0000-0300-0000-632193100000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + status: + code: 200 + message: OK +- request: + body: '{"properties": {"aadDomains": ["mpliftrdt20210811outlook.onmicrosoft.com"], + "singleSignOnUrl": "https://www.dynatrace.io"}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor sso-config create + Connection: + - keep-alive + Content-Length: + - '123' + Content-Type: + - application/json + ParameterSetName: + - -g --monitor-name -n --aad-domains --single-sign-on-url + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_single_sign_on_configurations000001/providers/Dynatrace.Observability/monitors/monitor000002/singleSignOnConfigurations/default?api-version=2021-09-01 + response: + body: + string: '{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_single_sign_on_configurations000001/providers/Dynatrace.Observability/monitors/monitor000002/singleSignOnConfigurations/default", + "name": "default", "type": "dynatrace.observability/monitors/singlesignonconfigurations", + "systemData": {"createdBy": "v-jingszhang@microsoft.com", "createdByType": + "User", "createdAt": "2022-09-14T08:38:53.8976563Z", "lastModifiedBy": "v-jingszhang@microsoft.com", + "lastModifiedByType": "User", "lastModifiedAt": "2022-09-14T08:38:53.8976563Z"}, + "properties": {"singleSignOnState": "Initial", "enterpriseAppId": null, "singleSignOnUrl": + "https://www.dynatrace.io", "aadDomains": ["mpliftrdt20210811outlook.onmicrosoft.com"], + "provisioningState": "Accepted"}}' + headers: + azure-asyncoperation: + - 
https://management.azure.com/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/38046889-1ea7-4aa0-a1e2-8758ebfee9f4*E9DEDF4CD829818BE14E4388EB27544FCA2992F7B4C49A63975D570B07B3D8D1?api-version=2021-09-01 + cache-control: + - no-cache + content-length: + - '791' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:38:54 GMT + etag: + - '"f0087736-0000-0300-0000-6321931e0000"' + expires: + - '-1' + location: + - https://management.azure.com/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/38046889-1ea7-4aa0-a1e2-8758ebfee9f4*E9DEDF4CD829818BE14E4388EB27544FCA2992F7B4C49A63975D570B07B3D8D1?api-version=2021-09-01 + mise-correlation-id: + - 9b5782d2-7350-4cfa-ad65-49e24b99bf1d + pragma: + - no-cache + request-context: + - appId=cid-v1:fab059ea-d726-424a-9451-6beb3dc74944 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor sso-config create + Connection: + - keep-alive + ParameterSetName: + - -g --monitor-name -n --aad-domains --single-sign-on-url + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/38046889-1ea7-4aa0-a1e2-8758ebfee9f4*E9DEDF4CD829818BE14E4388EB27544FCA2992F7B4C49A63975D570B07B3D8D1?api-version=2021-09-01 + response: + body: + string: '{"id": "/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/38046889-1ea7-4aa0-a1e2-8758ebfee9f4*E9DEDF4CD829818BE14E4388EB27544FCA2992F7B4C49A63975D570B07B3D8D1", + "name": "38046889-1ea7-4aa0-a1e2-8758ebfee9f4*E9DEDF4CD829818BE14E4388EB27544FCA2992F7B4C49A63975D570B07B3D8D1", + "resourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_single_sign_on_configurations000001/providers/Dynatrace.Observability/monitors/monitor000002/singleSignOnConfigurations/default", + "status": "Succeeded", "startTime": "2022-09-14T08:38:54.2937896Z", "endTime": + "2022-09-14T08:38:56.6635035Z", "error": {}, "properties": null}' + headers: + cache-control: + - no-cache + content-length: + - '682' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:39:24 GMT + etag: + - '"020231ff-0000-0300-0000-632193200000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor sso-config create + Connection: + - keep-alive + ParameterSetName: + - -g --monitor-name -n --aad-domains --single-sign-on-url + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_single_sign_on_configurations000001/providers/Dynatrace.Observability/monitors/monitor000002/singleSignOnConfigurations/default?api-version=2021-09-01 + response: + body: + string: '{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_single_sign_on_configurations000001/providers/Dynatrace.Observability/monitors/monitor000002/singleSignOnConfigurations/default", + "name": "default", "type": "dynatrace.observability/monitors/singlesignonconfigurations", + "systemData": {"createdBy": "v-jingszhang@microsoft.com", "createdByType": + "User", "createdAt": "2022-09-14T08:38:53.8976563Z", "lastModifiedBy": "v-jingszhang@microsoft.com", + "lastModifiedByType": "User", "lastModifiedAt": "2022-09-14T08:38:53.8976563Z"}, + "properties": {"singleSignOnState": "Initial", "singleSignOnUrl": "https://www.dynatrace.io", + "aadDomains": ["mpliftrdt20210811outlook.onmicrosoft.com"], "provisioningState": + "Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '767' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:39:24 GMT + etag: + - '"f0083b37-0000-0300-0000-632193200000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor sso-config show + Connection: + - keep-alive + ParameterSetName: + - -g --monitor-name -n + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_single_sign_on_configurations000001/providers/Dynatrace.Observability/monitors/monitor000002/singleSignOnConfigurations/default?api-version=2021-09-01 + response: + body: + string: '{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_single_sign_on_configurations000001/providers/Dynatrace.Observability/monitors/monitor000002/singleSignOnConfigurations/default", + "name": "default", "type": "dynatrace.observability/monitors/singlesignonconfigurations", + "systemData": {"createdBy": "v-jingszhang@microsoft.com", "createdByType": + "User", "createdAt": "2022-09-14T08:38:53.8976563Z", "lastModifiedBy": "v-jingszhang@microsoft.com", + "lastModifiedByType": "User", "lastModifiedAt": "2022-09-14T08:38:53.8976563Z"}, + "properties": {"singleSignOnState": "Initial", "singleSignOnUrl": "https://www.dynatrace.io", + "aadDomains": ["mpliftrdt20210811outlook.onmicrosoft.com"], "provisioningState": + "Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '767' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:39:27 GMT + etag: + - '"f0083b37-0000-0300-0000-632193200000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + status: + code: 200 + message: OK +- request: + 
body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor sso-config list + Connection: + - keep-alive + ParameterSetName: + - -g --monitor-name + User-Agent: + - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.25.1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_single_sign_on_configurations000001/providers/Dynatrace.Observability/monitors/monitor000002/singleSignOnConfigurations?api-version=2021-09-01 + response: + body: + string: '{"value": [{"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_single_sign_on_configurations000001/providers/Dynatrace.Observability/monitors/monitor000002/singleSignOnConfigurations/default", + "name": "default", "type": "dynatrace.observability/monitors/singlesignonconfigurations", + "systemData": {"createdBy": "v-jingszhang@microsoft.com", "createdByType": + "User", "createdAt": "2022-09-14T08:38:53.8976563Z", "lastModifiedBy": "v-jingszhang@microsoft.com", + "lastModifiedByType": "User", "lastModifiedAt": "2022-09-14T08:38:53.8976563Z"}, + "properties": {"singleSignOnState": "Initial", "singleSignOnUrl": "https://www.dynatrace.io", + "aadDomains": ["mpliftrdt20210811outlook.onmicrosoft.com"], "provisioningState": + "Succeeded"}}]}' + headers: + cache-control: + - no-cache + content-length: + - '780' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 14 Sep 2022 08:39:29 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + status: + code: 200 + message: OK +version: 1 diff --git a/src/dynatrace/azext_dynatrace/tests/latest/recordings/test_dynatrace_monitor_tag_rule.yaml b/src/dynatrace/azext_dynatrace/tests/latest/recordings/test_dynatrace_monitor_tag_rule.yaml new file mode 100644 index 00000000000..2d79455ba77 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/tests/latest/recordings/test_dynatrace_monitor_tag_rule.yaml @@ -0,0 +1,500 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor create + Connection: + - keep-alive + ParameterSetName: + - -g -n --user-info --plan-data --dynatrace-environment-properties + User-Agent: + - AZURECLI/2.39.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_dynatrace_monitor_tag_rule000001?api-version=2021-04-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_tag_rule000001","name":"cli_test_dynatrace_monitor_tag_rule000001","type":"Microsoft.Resources/resourceGroups","location":"eastus2euap","tags":{"product":"azurecli","cause":"automation","date":"2022-08-24T05:54:05Z"},"properties":{"provisioningState":"Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '365' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 24 Aug 2022 05:54:12 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + 
x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"location": "eastus2euap", "properties": {"dynatraceEnvironmentProperties": + {"singleSignOnProperties": {"aadDomains": ["abc"]}}, "planData": {"billingCycle": + "Monthly", "effectiveDate": "2022-08-19T16:00:00.000Z", "planDetails": "azureportalintegration_privatepreview@TIDhjdtn7tfnxcy", + "usageType": "committed"}, "userInfo": {"country": "US", "emailAddress": "agarwald@microsoft.com", + "firstName": "Alice", "lastName": "Bobab", "phoneNumber": "1234567890"}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor create + Connection: + - keep-alive + Content-Length: + - '459' + Content-Type: + - application/json + ParameterSetName: + - -g -n --user-info --plan-data --dynatrace-environment-properties + User-Agent: + - AZURECLI/2.39.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_tag_rule000001/providers/Dynatrace.Observability/monitors/monitor000002?api-version=2021-09-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_tag_rule000001/providers/Dynatrace.Observability/monitors/monitor000002","name":"monitor000002","type":"dynatrace.observability/monitors","location":"eastus2euap","systemData":{"createdBy":"v-jingszhang@microsoft.com","createdByType":"User","createdAt":"2022-08-24T05:54:20.5773587Z","lastModifiedBy":"v-jingszhang@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-08-24T05:54:20.5773587Z"},"properties":{"monitoringStatus":"Enabled","marketplaceSubscriptionStatus":"Active","dynatraceEnvironmentProperties":{"userId":null,"accountInfo":null,"environmentInfo":null,"singleSignOnProperties":{"singleSignOnState":"Initial","enterpriseAppId":null,"singleSignOnUrl":null,"aadDomains":["abc"],"provisioningState":"Accepted"}},"userInfo":null,"planData":{"usageType":"committed","billingCycle":"Monthly","planDetails":"azureportalintegration_privatepreview@TIDhjdtn7tfnxcy","effectiveDate":"2022-08-19T16:00:00Z"},"liftrResourceCategory":"MonitorLogs","liftrResourcePreference":0,"provisioningState":"Accepted"}}' + headers: + azure-asyncoperation: + - https://management.azure.com/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/cd7a448a-b4bc-4d2d-b6eb-fcbf5661a345*F8FFD2B340C0BF920DF0763A7326344DED253A66BDDDB5556D047379EE0B62D2?api-version=2021-09-01 + cache-control: + - no-cache + content-length: + - '1123' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 24 Aug 2022 05:54:29 GMT + etag: + - '"2500e991-0000-0300-0000-6305bd140000"' + expires: + - '-1' + location: + - https://management.azure.com/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/cd7a448a-b4bc-4d2d-b6eb-fcbf5661a345*F8FFD2B340C0BF920DF0763A7326344DED253A66BDDDB5556D047379EE0B62D2?api-version=2021-09-01 + mise-correlation-id: + - cf996d6d-a647-487b-abec-b84db2c458da + pragma: + - no-cache + request-context: + - appId=cid-v1:fab059ea-d726-424a-9451-6beb3dc74944 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 201 + message: Created +- request: + body: null + headers: + 
Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor create + Connection: + - keep-alive + ParameterSetName: + - -g -n --user-info --plan-data --dynatrace-environment-properties + User-Agent: + - AZURECLI/2.39.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/cd7a448a-b4bc-4d2d-b6eb-fcbf5661a345*F8FFD2B340C0BF920DF0763A7326344DED253A66BDDDB5556D047379EE0B62D2?api-version=2021-09-01 + response: + body: + string: '{"id":"/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/cd7a448a-b4bc-4d2d-b6eb-fcbf5661a345*F8FFD2B340C0BF920DF0763A7326344DED253A66BDDDB5556D047379EE0B62D2","name":"cd7a448a-b4bc-4d2d-b6eb-fcbf5661a345*F8FFD2B340C0BF920DF0763A7326344DED253A66BDDDB5556D047379EE0B62D2","resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_tag_rule000001/providers/Dynatrace.Observability/monitors/monitor000002","status":"Accepted","startTime":"2022-08-24T05:54:27.3761002Z"}' + headers: + cache-control: + - no-cache + content-length: + - '540' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 24 Aug 2022 05:55:00 GMT + etag: + - '"0200de74-0000-0300-0000-6305bd130000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor create + Connection: + - keep-alive + ParameterSetName: + - -g -n --user-info --plan-data --dynatrace-environment-properties + User-Agent: + - AZURECLI/2.39.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/cd7a448a-b4bc-4d2d-b6eb-fcbf5661a345*F8FFD2B340C0BF920DF0763A7326344DED253A66BDDDB5556D047379EE0B62D2?api-version=2021-09-01 + response: + body: + string: '{"id":"/providers/Dynatrace.Observability/locations/EASTUS2EUAP/operationStatuses/cd7a448a-b4bc-4d2d-b6eb-fcbf5661a345*F8FFD2B340C0BF920DF0763A7326344DED253A66BDDDB5556D047379EE0B62D2","name":"cd7a448a-b4bc-4d2d-b6eb-fcbf5661a345*F8FFD2B340C0BF920DF0763A7326344DED253A66BDDDB5556D047379EE0B62D2","resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_tag_rule000001/providers/Dynatrace.Observability/monitors/monitor000002","status":"Succeeded","startTime":"2022-08-24T05:54:27.3761002Z","endTime":"2022-08-24T05:55:16.0596553Z","error":{},"properties":null}' + headers: + cache-control: + - no-cache + content-length: + - '611' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 24 Aug 2022 05:55:29 GMT + etag: + - '"02005476-0000-0300-0000-6305bd440000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor create + Connection: + - keep-alive + ParameterSetName: + - -g -n --user-info --plan-data 
--dynatrace-environment-properties + User-Agent: + - AZURECLI/2.39.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_tag_rule000001/providers/Dynatrace.Observability/monitors/monitor000002?api-version=2021-09-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_tag_rule000001/providers/Dynatrace.Observability/monitors/monitor000002","name":"monitor000002","type":"dynatrace.observability/monitors","location":"eastus2euap","systemData":{"createdBy":"v-jingszhang@microsoft.com","createdByType":"User","createdAt":"2022-08-24T05:54:20.5773587Z","lastModifiedBy":"v-jingszhang@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-08-24T05:54:20.5773587Z"},"properties":{"monitoringStatus":"Enabled","marketplaceSubscriptionStatus":"Active","dynatraceEnvironmentProperties":{"userId":"b88c79cb-550e-45af-a013-08c135647e8a","accountInfo":{"accountId":"df877719-854b-4025-8dd7-29d4bf29c23e","regionId":"eastus"},"environmentInfo":{"environmentId":"cri57332","logsIngestionEndpoint":"https://cri57332.sprint.dynatracelabs.com/api/v2/logs/ingest","landingURL":"https://cri57332.sprint.dynatracelabs.com/"},"singleSignOnProperties":{"singleSignOnState":"Initial","aadDomains":["abc"],"provisioningState":"Accepted"}},"userInfo":{"firstName":"Alice","lastName":"Bobab","emailAddress":"agarwald@microsoft.com","phoneNumber":"1234567890","country":"US"},"planData":{"usageType":"committed","billingCycle":"Monthly","planDetails":"azureportalintegration_privatepreview@TIDhjdtn7tfnxcy","effectiveDate":"2022-08-19T16:00:00Z"},"liftrResourceCategory":"MonitorLogs","liftrResourcePreference":0,"provisioningState":"Succeeded"}}' + headers: + cache-control: + - no-cache + content-length: + - '1467' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 24 Aug 2022 05:55:30 GMT + etag: + - '"2500429c-0000-0300-0000-6305bd440000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + status: + code: 200 + message: OK +- request: + body: '{"properties": {"logRules": {"filteringTags": [{"action": "Include", "name": + "env", "value": "prod"}, {"action": "Exclude", "name": "env", "value": "dev"}], + "sendAadLogs": "Enabled", "sendActivityLogs": "Enabled", "sendSubscriptionLogs": + "Enabled"}, "metricRules": {"filteringTags": [{"action": "Include", "name": + "env", "value": "prod"}]}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor tag-rule create + Connection: + - keep-alive + Content-Length: + - '341' + Content-Type: + - application/json + ParameterSetName: + - -g --monitor-name -n --log-rules --metric-rules + User-Agent: + - AZURECLI/2.39.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_tag_rule000001/providers/Dynatrace.Observability/monitors/monitor000002/tagRules/default?api-version=2021-09-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_tag_rule000001/providers/Dynatrace.Observability/monitors/monitor000002/tagRules/default","name":"default","type":"dynatrace.observability/monitors/tagrules","systemData":{"createdBy":"v-jingszhang@microsoft.com","createdByType":"User","createdAt":"2022-08-24T05:55:32.3838733Z","lastModifiedBy":"v-jingszhang@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-08-24T05:55:32.3838733Z"},"properties":{"provisioningState":"Succeeded","logRules":{"sendAadLogs":"Enabled","sendSubscriptionLogs":"Enabled","sendActivityLogs":"Enabled","filteringTags":[{"name":"env","value":"prod","action":"Include"},{"name":"env","value":"dev","action":"Exclude"}]},"metricRules":{"filteringTags":[{"name":"env","value":"prod","action":"Include"}]}}}' + headers: + cache-control: + - no-cache + content-length: + - '847' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 24 Aug 2022 05:55:33 GMT + etag: + - '"1300293e-0000-0300-0000-6305bd550000"' + expires: + - '-1' + mise-correlation-id: + - 5e56b061-ce2f-4b0b-9549-a521424b205d + pragma: + - no-cache + request-context: + - appId=cid-v1:fab059ea-d726-424a-9451-6beb3dc74944 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 200 + message: OK +- request: + body: '{}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor tag-rule update + Connection: + - keep-alive + Content-Length: + - '2' + Content-Type: + - application/json + ParameterSetName: + - -g --monitor-name -n + User-Agent: + - AZURECLI/2.39.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: PATCH + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_tag_rule000001/providers/Dynatrace.Observability/monitors/monitor000002/tagRules/default?api-version=2021-09-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_tag_rule000001/providers/Dynatrace.Observability/monitors/monitor000002/tagRules/default","name":"default","type":"dynatrace.observability/monitors/tagrules","systemData":{"createdBy":"v-jingszhang@microsoft.com","createdByType":"User","createdAt":"2022-08-24T05:55:32.3838733Z","lastModifiedBy":"v-jingszhang@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-08-24T05:55:34.85093Z"},"properties":{"provisioningState":"Succeeded","logRules":{"sendAadLogs":"Enabled","sendSubscriptionLogs":"Enabled","sendActivityLogs":"Enabled","filteringTags":[{"name":"env","value":"prod","action":"Include"},{"name":"env","value":"dev","action":"Exclude"}]},"metricRules":{"filteringTags":[{"name":"env","value":"prod","action":"Include"}]}}}' + headers: + cache-control: + - no-cache + content-length: + - '845' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 24 Aug 2022 05:55:35 GMT + etag: + - '"13002b3e-0000-0300-0000-6305bd570000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + 
x-ms-ratelimit-remaining-subscription-writes: + - '1199' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor tag-rule show + Connection: + - keep-alive + ParameterSetName: + - -g --monitor-name -n + User-Agent: + - AZURECLI/2.39.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_tag_rule000001/providers/Dynatrace.Observability/monitors/monitor000002/tagRules/default?api-version=2021-09-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_tag_rule000001/providers/Dynatrace.Observability/monitors/monitor000002/tagRules/default","name":"default","type":"dynatrace.observability/monitors/tagrules","systemData":{"createdBy":"v-jingszhang@microsoft.com","createdByType":"User","createdAt":"2022-08-24T05:55:32.3838733Z","lastModifiedBy":"v-jingszhang@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-08-24T05:55:34.85093Z"},"properties":{"provisioningState":"Succeeded","logRules":{"sendAadLogs":"Enabled","sendSubscriptionLogs":"Enabled","sendActivityLogs":"Enabled","filteringTags":[{"name":"env","value":"prod","action":"Include"},{"name":"env","value":"dev","action":"Exclude"}]},"metricRules":{"filteringTags":[{"name":"env","value":"prod","action":"Include"}]}}}' + headers: + cache-control: + - no-cache + content-length: + - '845' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 24 Aug 2022 05:55:36 GMT + etag: + - '"13002b3e-0000-0300-0000-6305bd570000"' + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor tag-rule list + Connection: + - keep-alive + ParameterSetName: + - -g --monitor-name + User-Agent: + - AZURECLI/2.39.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_tag_rule000001/providers/Dynatrace.Observability/monitors/monitor000002/tagRules?api-version=2021-09-01 + response: + body: + string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_tag_rule000001/providers/Dynatrace.Observability/monitors/monitor000002/tagRules/default","name":"default","type":"dynatrace.observability/monitors/tagrules","systemData":{"createdBy":"v-jingszhang@microsoft.com","createdByType":"User","createdAt":"2022-08-24T05:55:32.3838733Z","lastModifiedBy":"v-jingszhang@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-08-24T05:55:34.85093Z"},"properties":{"provisioningState":"Succeeded","logRules":{"sendAadLogs":"Enabled","sendSubscriptionLogs":"Enabled","sendActivityLogs":"Enabled","filteringTags":[{"name":"env","value":"prod","action":"Include"},{"name":"env","value":"dev","action":"Exclude"}]},"metricRules":{"filteringTags":[{"name":"env","value":"prod","action":"Include"}]}}}]}' + headers: + 
cache-control: + - no-cache + content-length: + - '857' + content-type: + - application/json; charset=utf-8 + date: + - Wed, 24 Aug 2022 05:55:37 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - dynatrace monitor tag-rule delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -g --monitor-name -n -y + User-Agent: + - AZURECLI/2.39.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.9 (Windows-10-10.0.22000-SP0) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_dynatrace_monitor_tag_rule000001/providers/Dynatrace.Observability/monitors/monitor000002/tagRules/default?api-version=2021-09-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Wed, 24 Aug 2022 05:55:40 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-providerhub-traffic: + - 'True' + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + status: + code: 200 + message: OK +version: 1 diff --git a/src/dynatrace/azext_dynatrace/tests/latest/test_dynatrace.py b/src/dynatrace/azext_dynatrace/tests/latest/test_dynatrace.py new file mode 100644 index 00000000000..429352d4af5 --- /dev/null +++ b/src/dynatrace/azext_dynatrace/tests/latest/test_dynatrace.py @@ -0,0 +1,169 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +import unittest +from azure.cli.testsdk import * +from .credential_replacer import ExpressRoutePortLOAContentReplacer + + +class DynatraceScenario(ScenarioTest): + + def __init__(self, method_name): + super().__init__(method_name, recording_processors=[ + ExpressRoutePortLOAContentReplacer() + ]) + + @ResourceGroupPreparer(name_prefix='cli_test_dynatrace_monitor', location='eastus2euap') + def test_dynatrace_monitor(self, resource_group): + self.kwargs.update({ + 'monitor': self.create_random_name('monitor', 15), + }) + + self.cmd('dynatrace monitor create -g {rg} -n {monitor} --user-info {{first-name:Alice,last-name:Bobab,email-address:agarwald@microsoft.com,phone-number:1234567890,country:US}} --plan-data {{usage-type:committed,billing-cycle:Monthly,plan-details:azureportalintegration_privatepreview@TIDhjdtn7tfnxcy,effective-date:2022-08-20}} --environment {{single-sign-on:{{aad-domains:[\'abc\']}}}}') + self.cmd('dynatrace monitor show -g {rg} -n {monitor}', checks=[ + self.check('name', '{monitor}'), + self.check('resourceGroup', '{rg}'), + self.check('dynatraceEnvironmentProperties.singleSignOnProperties.aadDomains[0]', 'abc'), + self.check('dynatraceEnvironmentProperties.singleSignOnProperties.provisioningState', 'Accepted'), + self.check('dynatraceEnvironmentProperties.singleSignOnProperties.singleSignOnState', 'Initial'), + self.check('marketplaceSubscriptionStatus', 'Active'), + self.check('monitoringStatus', 'Enabled'), + self.check('planData.billingCycle', 'Monthly'), + self.check('planData.planDetails', 'azureportalintegration_privatepreview@TIDhjdtn7tfnxcy'), + self.check('planData.usageType', 'committed'), + self.check('userInfo.country', 'US'), + self.check('userInfo.emailAddress', 'agarwald@microsoft.com'), + self.check('userInfo.firstName', 'Alice'), + self.check('userInfo.lastName', 'Bobab'), + self.check('userInfo.phoneNumber', '1234567890') + ]) + self.cmd('dynatrace monitor update -g {rg} -n {monitor} --tags {{env:dev}}', checks=[ + self.check('name', '{monitor}'), + self.check('resourceGroup', '{rg}'), + self.check('tags', {'env': 'dev'}) + ]) + self.cmd('dynatrace monitor list -g {rg}', checks=[ + self.check('[0].name', '{monitor}'), + self.check('[0].resourceGroup', '{rg}'), + self.check('[0].dynatraceEnvironmentProperties.singleSignOnProperties.aadDomains[0]', 'abc'), + self.check('[0].dynatraceEnvironmentProperties.singleSignOnProperties.provisioningState', 'Accepted'), + self.check('[0].dynatraceEnvironmentProperties.singleSignOnProperties.singleSignOnState', 'Initial'), + self.check('[0].marketplaceSubscriptionStatus', 'Active'), + self.check('[0].monitoringStatus', 'Enabled'), + self.check('[0].planData.billingCycle', 'Monthly'), + self.check('[0].planData.planDetails', 'azureportalintegration_privatepreview@TIDhjdtn7tfnxcy'), + self.check('[0].planData.usageType', 'committed'), + self.check('[0].userInfo.country', 'US'), + self.check('[0].userInfo.emailAddress', 'agarwald@microsoft.com'), + self.check('[0].userInfo.firstName', 'Alice'), + self.check('[0].userInfo.lastName', 'Bobab'), + self.check('[0].userInfo.phoneNumber', '1234567890'), + ]) + self.cmd('dynatrace monitor list-app-service -g {rg} --monitor-name {monitor}') + self.cmd('dynatrace monitor list-host -g {rg} --monitor-name {monitor}') + self.cmd('dynatrace monitor list-monitored-resource -g {rg} --monitor-name {monitor}') + self.cmd('dynatrace monitor 
list-linkable-environment -g {rg} --monitor-name {monitor} --user-principal agarwald@microsoft.com --region eastus2euap --tenant-id be9927fa-821c-4178-9dae-e520c4beca74') + self.cmd('dynatrace monitor get-sso-detail -g {rg} --monitor-name {monitor} --user-principal agarwald@microsoft.com', checks=[ + self.check('adminUsers[0]', 'agarwald@microsoft.com') + ]) + self.cmd('dynatrace monitor get-vm-host-payload -g {rg} --monitor-name {monitor}', checks=[ + self.exists('environmentId'), + self.exists('ingestionKey') + ]) + self.cmd('dynatrace monitor delete -n {monitor} -g {rg} -y') + + @ResourceGroupPreparer(name_prefix='cli_test_dynatrace_monitor_single_sign_on_configurations', location='eastus2euap') + def test_dynatrace_monitor_single_sign_on_configurations(self, resource_group): + self.kwargs.update({ + 'monitor': self.create_random_name('monitor', 15), + }) + self.cmd('dynatrace monitor create -g {rg} -n {monitor} --user-info {{first-name:Alice,last-name:Bobab,email-address:agarwald@microsoft.com,phone-number:1234567890,country:US}} --plan-data {{usage-type:committed,billing-cycle:Monthly,plan-details:azureportalintegration_privatepreview@TIDhjdtn7tfnxcy,effective-date:2022-08-20}} --environment {{single-sign-on:{{aad-domains:[\'abc\']}}}}') + self.cmd('dynatrace monitor sso-config create -g {rg} --monitor-name {monitor} -n default --aad-domains [\'mpliftrdt20210811outlook.onmicrosoft.com\'] --single-sign-on-url "https://www.dynatrace.io"', checks=[ + self.check('aadDomains[0]', 'mpliftrdt20210811outlook.onmicrosoft.com'), + self.check('singleSignOnUrl', 'https://www.dynatrace.io') + ]) + self.cmd('dynatrace monitor sso-config show -g {rg} --monitor-name {monitor} -n default', checks=[ + self.check('aadDomains[0]', 'mpliftrdt20210811outlook.onmicrosoft.com'), + self.check('singleSignOnUrl', 'https://www.dynatrace.io') + ]) + self.cmd('dynatrace monitor sso-config list -g {rg} --monitor-name {monitor}', checks=[ + self.check('[0].aadDomains[0]', 'mpliftrdt20210811outlook.onmicrosoft.com'), + self.check('[0].singleSignOnUrl', 'https://www.dynatrace.io') + ]) + + @ResourceGroupPreparer(name_prefix='cli_test_dynatrace_monitor_tag_rule', location='eastus2euap') + def test_dynatrace_monitor_tag_rule(self, resource_group): + self.kwargs.update({ + 'monitor': self.create_random_name('monitor', 15), + }) + + self.cmd('dynatrace monitor create -g {rg} -n {monitor} --user-info {{first-name:Alice,last-name:Bobab,email-address:agarwald@microsoft.com,phone-number:1234567890,country:US}} --plan-data {{usage-type:committed,billing-cycle:Monthly,plan-details:azureportalintegration_privatepreview@TIDhjdtn7tfnxcy,effective-date:2022-08-20}} --environment {{single-sign-on:{{aad-domains:[\'abc\']}}}} ') + self.cmd('dynatrace monitor tag-rule create -g {rg} --monitor-name {monitor} -n default --log-rules {{send-aad-logs:enabled,send-subscription-logs:enabled,send-activity-logs:enabled,filtering-tags:[{{name:env,value:prod,action:include}},{{name:env,value:dev,action:exclude}}]}} --metric-rules {{filtering-tags:[{{name:env,value:prod,action:include}}]}}', checks=[ + self.check('name', 'default'), + self.check('resourceGroup', '{rg}'), + self.check('logRules.filteringTags[0].action', 'Include'), + self.check('logRules.filteringTags[0].name', 'env'), + self.check('logRules.filteringTags[0].value', 'prod'), + self.check('logRules.filteringTags[1].action', 'Exclude'), + self.check('logRules.filteringTags[1].name', 'env'), + self.check('logRules.filteringTags[1].value', 'dev'), + self.check('logRules.sendAadLogs', 
'Enabled'), + self.check('logRules.sendActivityLogs', 'Enabled'), + self.check('logRules.sendSubscriptionLogs', 'Enabled'), + self.check('metricRules.filteringTags[0].action', 'Include'), + self.check('metricRules.filteringTags[0].name', 'env'), + self.check('metricRules.filteringTags[0].value', 'prod') + ]) + self.cmd('dynatrace monitor tag-rule update -g {rg} --monitor-name {monitor} -n default', checks=[ + self.check('name', 'default'), + self.check('resourceGroup', '{rg}'), + self.check('logRules.filteringTags[0].action', 'Include'), + self.check('logRules.filteringTags[0].name', 'env'), + self.check('logRules.filteringTags[0].value', 'prod'), + self.check('logRules.filteringTags[1].action', 'Exclude'), + self.check('logRules.filteringTags[1].name', 'env'), + self.check('logRules.filteringTags[1].value', 'dev'), + self.check('logRules.sendAadLogs', 'Enabled'), + self.check('logRules.sendActivityLogs', 'Enabled'), + self.check('logRules.sendSubscriptionLogs', 'Enabled'), + self.check('metricRules.filteringTags[0].action', 'Include'), + self.check('metricRules.filteringTags[0].name', 'env'), + self.check('metricRules.filteringTags[0].value', 'prod') + ]) + self.cmd('dynatrace monitor tag-rule show -g {rg} --monitor-name {monitor} -n default', checks=[ + self.check('name', 'default'), + self.check('resourceGroup', '{rg}'), + self.check('logRules.filteringTags[0].action', 'Include'), + self.check('logRules.filteringTags[0].name', 'env'), + self.check('logRules.filteringTags[0].value', 'prod'), + self.check('logRules.filteringTags[1].action', 'Exclude'), + self.check('logRules.filteringTags[1].name', 'env'), + self.check('logRules.filteringTags[1].value', 'dev'), + self.check('logRules.sendAadLogs', 'Enabled'), + self.check('logRules.sendActivityLogs', 'Enabled'), + self.check('logRules.sendSubscriptionLogs', 'Enabled'), + self.check('metricRules.filteringTags[0].action', 'Include'), + self.check('metricRules.filteringTags[0].name', 'env'), + self.check('metricRules.filteringTags[0].value', 'prod') + ]) + self.cmd('dynatrace monitor tag-rule list -g {rg} --monitor-name {monitor}', checks=[ + self.check('[0].name', 'default'), + self.check('[0].resourceGroup', '{rg}'), + self.check('[0].logRules.filteringTags[0].action', 'Include'), + self.check('[0].logRules.filteringTags[0].name', 'env'), + self.check('[0].logRules.filteringTags[0].value', 'prod'), + self.check('[0].logRules.filteringTags[1].action', 'Exclude'), + self.check('[0].logRules.filteringTags[1].name', 'env'), + self.check('[0].logRules.filteringTags[1].value', 'dev'), + self.check('[0].logRules.sendAadLogs', 'Enabled'), + self.check('[0].logRules.sendActivityLogs', 'Enabled'), + self.check('[0].logRules.sendSubscriptionLogs', 'Enabled'), + self.check('[0].metricRules.filteringTags[0].action', 'Include'), + self.check('[0].metricRules.filteringTags[0].name', 'env'), + self.check('[0].metricRules.filteringTags[0].value', 'prod') + ]) + self.cmd('dynatrace monitor tag-rule delete -g {rg} --monitor-name {monitor} -n default -y') \ No newline at end of file diff --git a/src/dynatrace/setup.cfg b/src/dynatrace/setup.cfg new file mode 100644 index 00000000000..2fdd96e5d39 --- /dev/null +++ b/src/dynatrace/setup.cfg @@ -0,0 +1 @@ +#setup.cfg \ No newline at end of file diff --git a/src/dynatrace/setup.py b/src/dynatrace/setup.py new file mode 100644 index 00000000000..e9169c80b07 --- /dev/null +++ b/src/dynatrace/setup.py @@ -0,0 +1,49 @@ +# -------------------------------------------------------------------------------------------- +# 
Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# +# Code generated by aaz-dev-tools +# -------------------------------------------------------------------------------------------- + +from codecs import open +from setuptools import setup, find_packages + + +# HISTORY.rst entry. +VERSION = '0.1.0' + +# The full list of classifiers is available at +# https://pypi.python.org/pypi?%3Aaction=list_classifiers +CLASSIFIERS = [ + 'Development Status :: 4 - Beta', + 'Intended Audience :: Developers', + 'Intended Audience :: System Administrators', + 'Programming Language :: Python', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'License :: OSI Approved :: MIT License', +] + +DEPENDENCIES = [] + +with open('README.md', 'r', encoding='utf-8') as f: + README = f.read() +with open('HISTORY.rst', 'r', encoding='utf-8') as f: + HISTORY = f.read() + +setup( + name='dynatrace', + version=VERSION, + description='Microsoft Azure Command-Line Tools Dynatrace Extension.', + long_description=README + '\n\n' + HISTORY, + license='MIT', + author='Microsoft Corporation', + author_email='azpycli@microsoft.com', + url='https://github.com/Azure/azure-cli-extensions/tree/main/src/dynatrace', + classifiers=CLASSIFIERS, + packages=find_packages(exclude=["tests"]), + package_data={'azext_dynatrace': ['azext_metadata.json']}, + install_requires=DEPENDENCIES +) diff --git a/src/service_name.json b/src/service_name.json index 0e80eb1edfc..d921e8d9265 100644 --- a/src/service_name.json +++ b/src/service_name.json @@ -648,5 +648,10 @@ "Command": "az hybridaks", "AzureServiceName": "Hybrid Container Service", "URL": "https://learn.microsoft.com/en-us/azure-stack/aks-hci/aks-hybrid-preview-overview" + }, + { + "Command": "az dynatrace", + "AzureServiceName": "Dynatrace Observability", + "URL": "https://docs.microsoft.com/en-us/azure/partner-solutions/dynatrace/" } ] From 08cf53e168326a97a78d3e635df5689a010773dc Mon Sep 17 00:00:00 2001 From: Xiaoyun Ding Date: Tue, 1 Nov 2022 14:25:22 +0800 Subject: [PATCH 54/85] add 2022-1101preview sdk (#5505) Co-authored-by: Xiaoyun Ding --- src/spring/azext_spring/_client_factory.py | 11 +- src/spring/azext_spring/commands.py | 3 +- .../v2022_11_01_preview/__init__.py | 24 + .../_app_platform_management_client.py | 266 + .../v2022_11_01_preview/_configuration.py | 70 + .../appplatform/v2022_11_01_preview/_patch.py | 20 + .../v2022_11_01_preview/_vendor.py | 27 + .../v2022_11_01_preview/_version.py | 9 + .../v2022_11_01_preview/aio/__init__.py | 21 + .../aio/_app_platform_management_client.py | 263 + .../v2022_11_01_preview/aio/_configuration.py | 67 + .../v2022_11_01_preview/aio/_patch.py | 20 + .../aio/operations/__init__.py | 77 + .../_api_portal_custom_domains_operations.py | 588 ++ .../aio/operations/_api_portals_operations.py | 714 ++ .../_application_accelerators_operations.py | 566 + .../_application_live_view_operations.py | 165 + .../_application_live_views_operations.py | 451 + .../aio/operations/_apps_operations.py | 1261 +++ .../aio/operations/_bindings_operations.py | 836 ++ .../_build_service_agent_pool_operations.py | 469 + .../_build_service_builder_operations.py | 652 ++ .../operations/_build_service_operations.py | 1116 ++ .../_buildpack_binding_operations.py | 628 ++ .../operations/_certificates_operations.py | 566 + 
.../operations/_config_servers_operations.py | 794 ++ .../_configuration_services_operations.py | 806 ++ .../operations/_custom_domains_operations.py | 837 ++ .../_customized_accelerators_operations.py | 770 ++ .../aio/operations/_deployments_operations.py | 2512 +++++ .../operations/_dev_tool_portal_operations.py | 165 + .../_dev_tool_portals_operations.py | 451 + .../_gateway_custom_domains_operations.py | 588 ++ .../_gateway_route_configs_operations.py | 591 ++ .../aio/operations/_gateways_operations.py | 779 ++ .../_monitoring_settings_operations.py | 568 + .../aio/operations/_operations.py | 132 + .../aio/operations/_patch.py | 20 + .../_predefined_accelerators_operations.py | 478 + .../_runtime_versions_operations.py | 102 + .../_service_registries_operations.py | 451 + .../aio/operations/_services_operations.py | 1482 +++ .../aio/operations/_skus_operations.py | 133 + .../aio/operations/_storages_operations.py | 565 + .../v2022_11_01_preview/models/__init__.py | 543 + .../_app_platform_management_client_enums.py | 439 + .../v2022_11_01_preview/models/_models_py3.py | 9165 +++++++++++++++++ .../v2022_11_01_preview/models/_patch.py | 20 + .../operations/__init__.py | 77 + .../_api_portal_custom_domains_operations.py | 734 ++ .../operations/_api_portals_operations.py | 871 ++ .../_application_accelerators_operations.py | 697 ++ .../_application_live_view_operations.py | 200 + .../_application_live_views_operations.py | 545 + .../operations/_apps_operations.py | 1527 +++ .../operations/_bindings_operations.py | 1002 ++ .../_build_service_agent_pool_operations.py | 578 ++ .../_build_service_builder_operations.py | 835 ++ .../operations/_build_service_operations.py | 1567 +++ .../_buildpack_binding_operations.py | 786 ++ .../operations/_certificates_operations.py | 691 ++ .../operations/_config_servers_operations.py | 923 ++ .../_configuration_services_operations.py | 965 ++ .../operations/_custom_domains_operations.py | 1003 ++ .../_customized_accelerators_operations.py | 966 ++ .../operations/_deployments_operations.py | 3125 ++++++ .../operations/_dev_tool_portal_operations.py | 200 + .../_dev_tool_portals_operations.py | 545 + .../_gateway_custom_domains_operations.py | 734 ++ .../_gateway_route_configs_operations.py | 737 ++ .../operations/_gateways_operations.py | 967 ++ .../_monitoring_settings_operations.py | 662 ++ .../operations/_operations.py | 154 + .../v2022_11_01_preview/operations/_patch.py | 20 + .../_predefined_accelerators_operations.py | 629 ++ .../_runtime_versions_operations.py | 124 + .../_service_registries_operations.py | 573 ++ .../operations/_services_operations.py | 1866 ++++ .../operations/_skus_operations.py | 160 + .../operations/_storages_operations.py | 688 ++ 80 files changed, 56425 insertions(+), 7 deletions(-) create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/__init__.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/_app_platform_management_client.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/_configuration.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/_patch.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/_vendor.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/_version.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/__init__.py create mode 100644 
src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/_app_platform_management_client.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/_configuration.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/_patch.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/__init__.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_api_portal_custom_domains_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_api_portals_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_application_accelerators_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_application_live_view_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_application_live_views_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_apps_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_bindings_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_build_service_agent_pool_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_build_service_builder_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_build_service_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_buildpack_binding_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_certificates_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_config_servers_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_configuration_services_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_custom_domains_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_customized_accelerators_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_deployments_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_dev_tool_portal_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_dev_tool_portals_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_gateway_custom_domains_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_gateway_route_configs_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_gateways_operations.py create mode 100644 
src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_monitoring_settings_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_patch.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_predefined_accelerators_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_runtime_versions_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_service_registries_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_services_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_skus_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_storages_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/models/__init__.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/models/_app_platform_management_client_enums.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/models/_models_py3.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/models/_patch.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/__init__.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_api_portal_custom_domains_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_api_portals_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_application_accelerators_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_application_live_view_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_application_live_views_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_apps_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_bindings_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_build_service_agent_pool_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_build_service_builder_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_build_service_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_buildpack_binding_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_certificates_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_config_servers_operations.py create mode 100644 
src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_configuration_services_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_custom_domains_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_customized_accelerators_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_deployments_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_dev_tool_portal_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_dev_tool_portals_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_gateway_custom_domains_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_gateway_route_configs_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_gateways_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_monitoring_settings_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_patch.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_predefined_accelerators_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_runtime_versions_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_service_registries_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_services_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_skus_operations.py create mode 100644 src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_storages_operations.py diff --git a/src/spring/azext_spring/_client_factory.py b/src/spring/azext_spring/_client_factory.py index dcac7143265..4e3e8b1de05 100644 --- a/src/spring/azext_spring/_client_factory.py +++ b/src/spring/azext_spring/_client_factory.py @@ -32,14 +32,13 @@ from .vendored_sdks.appplatform.v2022_09_01_preview import ( AppPlatformManagementClient as AppPlatformManagementClient_20220901preview ) +from .vendored_sdks.appplatform.v2022_11_01_preview import ( + AppPlatformManagementClient as AppPlatformManagementClient_20221101preview +) -def cf_spring_20220901preview(cli_ctx, *_): - return get_mgmt_service_client(cli_ctx, AppPlatformManagementClient_20220901preview) - - -def cf_spring_20220901preview(cli_ctx, *_): - return get_mgmt_service_client(cli_ctx, AppPlatformManagementClient_20220901preview) +def cf_spring_20221101preview(cli_ctx, *_): + return get_mgmt_service_client(cli_ctx, AppPlatformManagementClient_20221101preview) def cf_spring_20220901preview(cli_ctx, *_): diff --git a/src/spring/azext_spring/commands.py b/src/spring/azext_spring/commands.py index 78017be9546..f3652d53c09 100644 --- a/src/spring/azext_spring/commands.py +++ b/src/spring/azext_spring/commands.py @@ -7,7 +7,8 @@ from azure.cli.core.commands import 
CliCommandType from azext_spring._utils import handle_asc_exception -from ._client_factory import (cf_spring_20220901preview, +from ._client_factory import (cf_spring_20221101preview, + cf_spring_20220901preview, cf_spring_20220501preview, cf_spring_20220301preview, cf_spring_20220101preview, diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/__init__.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/__init__.py new file mode 100644 index 00000000000..c95d02df601 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/__init__.py @@ -0,0 +1,24 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._app_platform_management_client import AppPlatformManagementClient +from ._version import VERSION + +__version__ = VERSION + +try: + from ._patch import __all__ as _patch_all + from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = ["AppPlatformManagementClient"] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/_app_platform_management_client.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/_app_platform_management_client.py new file mode 100644 index 00000000000..75944123e4b --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/_app_platform_management_client.py @@ -0,0 +1,266 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, TYPE_CHECKING + +from azure.core.rest import HttpRequest, HttpResponse +from azure.mgmt.core import ARMPipelineClient + +from . 
import models +from .._serialization import Deserializer, Serializer +from ._configuration import AppPlatformManagementClientConfiguration +from .operations import ( + ApiPortalCustomDomainsOperations, + ApiPortalsOperations, + ApplicationAcceleratorsOperations, + ApplicationLiveViewOperations, + ApplicationLiveViewsOperations, + AppsOperations, + BindingsOperations, + BuildServiceAgentPoolOperations, + BuildServiceBuilderOperations, + BuildServiceOperations, + BuildpackBindingOperations, + CertificatesOperations, + ConfigServersOperations, + ConfigurationServicesOperations, + CustomDomainsOperations, + CustomizedAcceleratorsOperations, + DeploymentsOperations, + DevToolPortalOperations, + DevToolPortalsOperations, + GatewayCustomDomainsOperations, + GatewayRouteConfigsOperations, + GatewaysOperations, + MonitoringSettingsOperations, + Operations, + PredefinedAcceleratorsOperations, + RuntimeVersionsOperations, + ServiceRegistriesOperations, + ServicesOperations, + SkusOperations, + StoragesOperations, +) + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials import TokenCredential + + +class AppPlatformManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes + """REST API for Azure Spring Apps. + + :ivar services: ServicesOperations operations + :vartype services: azure.mgmt.appplatform.v2022_11_01_preview.operations.ServicesOperations + :ivar config_servers: ConfigServersOperations operations + :vartype config_servers: + azure.mgmt.appplatform.v2022_11_01_preview.operations.ConfigServersOperations + :ivar configuration_services: ConfigurationServicesOperations operations + :vartype configuration_services: + azure.mgmt.appplatform.v2022_11_01_preview.operations.ConfigurationServicesOperations + :ivar service_registries: ServiceRegistriesOperations operations + :vartype service_registries: + azure.mgmt.appplatform.v2022_11_01_preview.operations.ServiceRegistriesOperations + :ivar application_live_views: ApplicationLiveViewsOperations operations + :vartype application_live_views: + azure.mgmt.appplatform.v2022_11_01_preview.operations.ApplicationLiveViewsOperations + :ivar application_live_view: ApplicationLiveViewOperations operations + :vartype application_live_view: + azure.mgmt.appplatform.v2022_11_01_preview.operations.ApplicationLiveViewOperations + :ivar dev_tool_portals: DevToolPortalsOperations operations + :vartype dev_tool_portals: + azure.mgmt.appplatform.v2022_11_01_preview.operations.DevToolPortalsOperations + :ivar dev_tool_portal: DevToolPortalOperations operations + :vartype dev_tool_portal: + azure.mgmt.appplatform.v2022_11_01_preview.operations.DevToolPortalOperations + :ivar build_service: BuildServiceOperations operations + :vartype build_service: + azure.mgmt.appplatform.v2022_11_01_preview.operations.BuildServiceOperations + :ivar buildpack_binding: BuildpackBindingOperations operations + :vartype buildpack_binding: + azure.mgmt.appplatform.v2022_11_01_preview.operations.BuildpackBindingOperations + :ivar build_service_builder: BuildServiceBuilderOperations operations + :vartype build_service_builder: + azure.mgmt.appplatform.v2022_11_01_preview.operations.BuildServiceBuilderOperations + :ivar build_service_agent_pool: BuildServiceAgentPoolOperations operations + :vartype build_service_agent_pool: + azure.mgmt.appplatform.v2022_11_01_preview.operations.BuildServiceAgentPoolOperations + :ivar monitoring_settings: MonitoringSettingsOperations operations + :vartype 
monitoring_settings: + azure.mgmt.appplatform.v2022_11_01_preview.operations.MonitoringSettingsOperations + :ivar apps: AppsOperations operations + :vartype apps: azure.mgmt.appplatform.v2022_11_01_preview.operations.AppsOperations + :ivar bindings: BindingsOperations operations + :vartype bindings: azure.mgmt.appplatform.v2022_11_01_preview.operations.BindingsOperations + :ivar storages: StoragesOperations operations + :vartype storages: azure.mgmt.appplatform.v2022_11_01_preview.operations.StoragesOperations + :ivar certificates: CertificatesOperations operations + :vartype certificates: + azure.mgmt.appplatform.v2022_11_01_preview.operations.CertificatesOperations + :ivar custom_domains: CustomDomainsOperations operations + :vartype custom_domains: + azure.mgmt.appplatform.v2022_11_01_preview.operations.CustomDomainsOperations + :ivar deployments: DeploymentsOperations operations + :vartype deployments: + azure.mgmt.appplatform.v2022_11_01_preview.operations.DeploymentsOperations + :ivar operations: Operations operations + :vartype operations: azure.mgmt.appplatform.v2022_11_01_preview.operations.Operations + :ivar runtime_versions: RuntimeVersionsOperations operations + :vartype runtime_versions: + azure.mgmt.appplatform.v2022_11_01_preview.operations.RuntimeVersionsOperations + :ivar skus: SkusOperations operations + :vartype skus: azure.mgmt.appplatform.v2022_11_01_preview.operations.SkusOperations + :ivar gateways: GatewaysOperations operations + :vartype gateways: azure.mgmt.appplatform.v2022_11_01_preview.operations.GatewaysOperations + :ivar gateway_route_configs: GatewayRouteConfigsOperations operations + :vartype gateway_route_configs: + azure.mgmt.appplatform.v2022_11_01_preview.operations.GatewayRouteConfigsOperations + :ivar gateway_custom_domains: GatewayCustomDomainsOperations operations + :vartype gateway_custom_domains: + azure.mgmt.appplatform.v2022_11_01_preview.operations.GatewayCustomDomainsOperations + :ivar api_portals: ApiPortalsOperations operations + :vartype api_portals: + azure.mgmt.appplatform.v2022_11_01_preview.operations.ApiPortalsOperations + :ivar api_portal_custom_domains: ApiPortalCustomDomainsOperations operations + :vartype api_portal_custom_domains: + azure.mgmt.appplatform.v2022_11_01_preview.operations.ApiPortalCustomDomainsOperations + :ivar application_accelerators: ApplicationAcceleratorsOperations operations + :vartype application_accelerators: + azure.mgmt.appplatform.v2022_11_01_preview.operations.ApplicationAcceleratorsOperations + :ivar customized_accelerators: CustomizedAcceleratorsOperations operations + :vartype customized_accelerators: + azure.mgmt.appplatform.v2022_11_01_preview.operations.CustomizedAcceleratorsOperations + :ivar predefined_accelerators: PredefinedAcceleratorsOperations operations + :vartype predefined_accelerators: + azure.mgmt.appplatform.v2022_11_01_preview.operations.PredefinedAcceleratorsOperations + :param credential: Credential needed for the client to connect to Azure. Required. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: Gets subscription ID which uniquely identify the Microsoft Azure + subscription. The subscription ID forms part of the URI for every service call. Required. + :type subscription_id: str + :param base_url: Service URL. Default value is "https://management.azure.com". + :type base_url: str + :keyword api_version: Api Version. Default value is "2022-11-01-preview". Note that overriding + this default value may result in unsupported behavior. 
+ :paramtype api_version: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + """ + + def __init__( + self, + credential: "TokenCredential", + subscription_id: str, + base_url: str = "https://management.azure.com", + **kwargs: Any + ) -> None: + self._config = AppPlatformManagementClientConfiguration( + credential=credential, subscription_id=subscription_id, **kwargs + ) + self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + self._serialize.client_side_validation = False + self.services = ServicesOperations(self._client, self._config, self._serialize, self._deserialize) + self.config_servers = ConfigServersOperations(self._client, self._config, self._serialize, self._deserialize) + self.configuration_services = ConfigurationServicesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.service_registries = ServiceRegistriesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.application_live_views = ApplicationLiveViewsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.application_live_view = ApplicationLiveViewOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.dev_tool_portals = DevToolPortalsOperations(self._client, self._config, self._serialize, self._deserialize) + self.dev_tool_portal = DevToolPortalOperations(self._client, self._config, self._serialize, self._deserialize) + self.build_service = BuildServiceOperations(self._client, self._config, self._serialize, self._deserialize) + self.buildpack_binding = BuildpackBindingOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.build_service_builder = BuildServiceBuilderOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.build_service_agent_pool = BuildServiceAgentPoolOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.monitoring_settings = MonitoringSettingsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.apps = AppsOperations(self._client, self._config, self._serialize, self._deserialize) + self.bindings = BindingsOperations(self._client, self._config, self._serialize, self._deserialize) + self.storages = StoragesOperations(self._client, self._config, self._serialize, self._deserialize) + self.certificates = CertificatesOperations(self._client, self._config, self._serialize, self._deserialize) + self.custom_domains = CustomDomainsOperations(self._client, self._config, self._serialize, self._deserialize) + self.deployments = DeploymentsOperations(self._client, self._config, self._serialize, self._deserialize) + self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) + self.runtime_versions = RuntimeVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.skus = SkusOperations(self._client, self._config, self._serialize, self._deserialize) + self.gateways = GatewaysOperations(self._client, self._config, self._serialize, self._deserialize) + self.gateway_route_configs = GatewayRouteConfigsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + 
self.gateway_custom_domains = GatewayCustomDomainsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.api_portals = ApiPortalsOperations(self._client, self._config, self._serialize, self._deserialize) + self.api_portal_custom_domains = ApiPortalCustomDomainsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.application_accelerators = ApplicationAcceleratorsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.customized_accelerators = CustomizedAcceleratorsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.predefined_accelerators = PredefinedAcceleratorsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + + def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.HttpResponse + """ + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, **kwargs) + + def close(self): + # type: () -> None + self._client.close() + + def __enter__(self): + # type: () -> AppPlatformManagementClient + self._client.__enter__() + return self + + def __exit__(self, *exc_details): + # type: (Any) -> None + self._client.__exit__(*exc_details) diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/_configuration.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/_configuration.py new file mode 100644 index 00000000000..ddfc9041cc9 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/_configuration.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy + +from ._version import VERSION + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials import TokenCredential + + +class AppPlatformManagementClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes + """Configuration for AppPlatformManagementClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. Required. 
+ :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: Gets subscription ID which uniquely identify the Microsoft Azure + subscription. The subscription ID forms part of the URI for every service call. Required. + :type subscription_id: str + :keyword api_version: Api Version. Default value is "2022-11-01-preview". Note that overriding + this default value may result in unsupported behavior. + :paramtype api_version: str + """ + + def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None: + super(AppPlatformManagementClientConfiguration, self).__init__(**kwargs) + api_version = kwargs.pop("api_version", "2022-11-01-preview") # type: str + + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + + self.credential = credential + self.subscription_id = subscription_id + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "mgmt-appplatform/{}".format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, **kwargs # type: Any + ): + # type: (...) -> None + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = ARMChallengeAuthenticationPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/_patch.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/_patch.py new file mode 100644 index 00000000000..f7dd3251033 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/_vendor.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/_vendor.py new file mode 100644 index 00000000000..9aad73fc743 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/_vendor.py @@ -0,0 +1,27 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.core.pipeline.transport import HttpRequest + + +def _convert_request(request, files=None): + data = request.content if not files else None + request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) + if files: + request.set_formdata_body(files) + return request + + +def _format_url_section(template, **kwargs): + components = template.split("/") + while components: + try: + return template.format(**kwargs) + except KeyError as key: + formatted_components = template.split("/") + components = [c for c in formatted_components if "{}".format(key.args[0]) not in c] + template = "/".join(components) diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/_version.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/_version.py new file mode 100644 index 00000000000..92453d8691d --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +VERSION = "6.1.0" diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/__init__.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/__init__.py new file mode 100644 index 00000000000..f70117d785d --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/__init__.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from ._app_platform_management_client import AppPlatformManagementClient + +try: + from ._patch import __all__ as _patch_all + from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = ["AppPlatformManagementClient"] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/_app_platform_management_client.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/_app_platform_management_client.py new file mode 100644 index 00000000000..e0099f366c4 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/_app_platform_management_client.py @@ -0,0 +1,263 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, Awaitable, TYPE_CHECKING + +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.mgmt.core import AsyncARMPipelineClient + +from .. import models +from ..._serialization import Deserializer, Serializer +from ._configuration import AppPlatformManagementClientConfiguration +from .operations import ( + ApiPortalCustomDomainsOperations, + ApiPortalsOperations, + ApplicationAcceleratorsOperations, + ApplicationLiveViewOperations, + ApplicationLiveViewsOperations, + AppsOperations, + BindingsOperations, + BuildServiceAgentPoolOperations, + BuildServiceBuilderOperations, + BuildServiceOperations, + BuildpackBindingOperations, + CertificatesOperations, + ConfigServersOperations, + ConfigurationServicesOperations, + CustomDomainsOperations, + CustomizedAcceleratorsOperations, + DeploymentsOperations, + DevToolPortalOperations, + DevToolPortalsOperations, + GatewayCustomDomainsOperations, + GatewayRouteConfigsOperations, + GatewaysOperations, + MonitoringSettingsOperations, + Operations, + PredefinedAcceleratorsOperations, + RuntimeVersionsOperations, + ServiceRegistriesOperations, + ServicesOperations, + SkusOperations, + StoragesOperations, +) + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + + +class AppPlatformManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes + """REST API for Azure Spring Apps. 
+ + :ivar services: ServicesOperations operations + :vartype services: azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.ServicesOperations + :ivar config_servers: ConfigServersOperations operations + :vartype config_servers: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.ConfigServersOperations + :ivar configuration_services: ConfigurationServicesOperations operations + :vartype configuration_services: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.ConfigurationServicesOperations + :ivar service_registries: ServiceRegistriesOperations operations + :vartype service_registries: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.ServiceRegistriesOperations + :ivar application_live_views: ApplicationLiveViewsOperations operations + :vartype application_live_views: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.ApplicationLiveViewsOperations + :ivar application_live_view: ApplicationLiveViewOperations operations + :vartype application_live_view: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.ApplicationLiveViewOperations + :ivar dev_tool_portals: DevToolPortalsOperations operations + :vartype dev_tool_portals: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.DevToolPortalsOperations + :ivar dev_tool_portal: DevToolPortalOperations operations + :vartype dev_tool_portal: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.DevToolPortalOperations + :ivar build_service: BuildServiceOperations operations + :vartype build_service: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.BuildServiceOperations + :ivar buildpack_binding: BuildpackBindingOperations operations + :vartype buildpack_binding: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.BuildpackBindingOperations + :ivar build_service_builder: BuildServiceBuilderOperations operations + :vartype build_service_builder: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.BuildServiceBuilderOperations + :ivar build_service_agent_pool: BuildServiceAgentPoolOperations operations + :vartype build_service_agent_pool: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.BuildServiceAgentPoolOperations + :ivar monitoring_settings: MonitoringSettingsOperations operations + :vartype monitoring_settings: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.MonitoringSettingsOperations + :ivar apps: AppsOperations operations + :vartype apps: azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.AppsOperations + :ivar bindings: BindingsOperations operations + :vartype bindings: azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.BindingsOperations + :ivar storages: StoragesOperations operations + :vartype storages: azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.StoragesOperations + :ivar certificates: CertificatesOperations operations + :vartype certificates: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.CertificatesOperations + :ivar custom_domains: CustomDomainsOperations operations + :vartype custom_domains: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.CustomDomainsOperations + :ivar deployments: DeploymentsOperations operations + :vartype deployments: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.DeploymentsOperations + :ivar operations: Operations operations + :vartype operations: azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.Operations + :ivar runtime_versions: RuntimeVersionsOperations operations + :vartype runtime_versions: + 
azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.RuntimeVersionsOperations + :ivar skus: SkusOperations operations + :vartype skus: azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.SkusOperations + :ivar gateways: GatewaysOperations operations + :vartype gateways: azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.GatewaysOperations + :ivar gateway_route_configs: GatewayRouteConfigsOperations operations + :vartype gateway_route_configs: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.GatewayRouteConfigsOperations + :ivar gateway_custom_domains: GatewayCustomDomainsOperations operations + :vartype gateway_custom_domains: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.GatewayCustomDomainsOperations + :ivar api_portals: ApiPortalsOperations operations + :vartype api_portals: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.ApiPortalsOperations + :ivar api_portal_custom_domains: ApiPortalCustomDomainsOperations operations + :vartype api_portal_custom_domains: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.ApiPortalCustomDomainsOperations + :ivar application_accelerators: ApplicationAcceleratorsOperations operations + :vartype application_accelerators: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.ApplicationAcceleratorsOperations + :ivar customized_accelerators: CustomizedAcceleratorsOperations operations + :vartype customized_accelerators: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.CustomizedAcceleratorsOperations + :ivar predefined_accelerators: PredefinedAcceleratorsOperations operations + :vartype predefined_accelerators: + azure.mgmt.appplatform.v2022_11_01_preview.aio.operations.PredefinedAcceleratorsOperations + :param credential: Credential needed for the client to connect to Azure. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: Gets subscription ID which uniquely identify the Microsoft Azure + subscription. The subscription ID forms part of the URI for every service call. Required. + :type subscription_id: str + :param base_url: Service URL. Default value is "https://management.azure.com". + :type base_url: str + :keyword api_version: Api Version. Default value is "2022-11-01-preview". Note that overriding + this default value may result in unsupported behavior. + :paramtype api_version: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
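+
+    A minimal usage sketch, assuming ``DefaultAzureCredential`` from ``azure-identity`` and the
+    ``list_by_subscription`` operation on the ``services`` group (both are assumptions shown for
+    illustration only, not part of this file's generated surface):
+
+    .. code-block:: python
+
+        from azure.identity.aio import DefaultAzureCredential
+        from azure.mgmt.appplatform.v2022_11_01_preview.aio import AppPlatformManagementClient
+
+        async def list_spring_services(subscription_id: str) -> None:
+            # The client is an async context manager; leaving the block closes the pipeline transport.
+            async with AppPlatformManagementClient(DefaultAzureCredential(), subscription_id) as client:
+                async for service in client.services.list_by_subscription():
+                    print(service.name)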
+ """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + base_url: str = "https://management.azure.com", + **kwargs: Any + ) -> None: + self._config = AppPlatformManagementClientConfiguration( + credential=credential, subscription_id=subscription_id, **kwargs + ) + self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + self._serialize.client_side_validation = False + self.services = ServicesOperations(self._client, self._config, self._serialize, self._deserialize) + self.config_servers = ConfigServersOperations(self._client, self._config, self._serialize, self._deserialize) + self.configuration_services = ConfigurationServicesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.service_registries = ServiceRegistriesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.application_live_views = ApplicationLiveViewsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.application_live_view = ApplicationLiveViewOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.dev_tool_portals = DevToolPortalsOperations(self._client, self._config, self._serialize, self._deserialize) + self.dev_tool_portal = DevToolPortalOperations(self._client, self._config, self._serialize, self._deserialize) + self.build_service = BuildServiceOperations(self._client, self._config, self._serialize, self._deserialize) + self.buildpack_binding = BuildpackBindingOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.build_service_builder = BuildServiceBuilderOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.build_service_agent_pool = BuildServiceAgentPoolOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.monitoring_settings = MonitoringSettingsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.apps = AppsOperations(self._client, self._config, self._serialize, self._deserialize) + self.bindings = BindingsOperations(self._client, self._config, self._serialize, self._deserialize) + self.storages = StoragesOperations(self._client, self._config, self._serialize, self._deserialize) + self.certificates = CertificatesOperations(self._client, self._config, self._serialize, self._deserialize) + self.custom_domains = CustomDomainsOperations(self._client, self._config, self._serialize, self._deserialize) + self.deployments = DeploymentsOperations(self._client, self._config, self._serialize, self._deserialize) + self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) + self.runtime_versions = RuntimeVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.skus = SkusOperations(self._client, self._config, self._serialize, self._deserialize) + self.gateways = GatewaysOperations(self._client, self._config, self._serialize, self._deserialize) + self.gateway_route_configs = GatewayRouteConfigsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.gateway_custom_domains = GatewayCustomDomainsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.api_portals = 
ApiPortalsOperations(self._client, self._config, self._serialize, self._deserialize) + self.api_portal_custom_domains = ApiPortalCustomDomainsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.application_accelerators = ApplicationAcceleratorsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.customized_accelerators = CustomizedAcceleratorsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.predefined_accelerators = PredefinedAcceleratorsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + + def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, **kwargs) + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> "AppPlatformManagementClient": + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details) -> None: + await self._client.__aexit__(*exc_details) diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/_configuration.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/_configuration.py new file mode 100644 index 00000000000..4745d7df76c --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/_configuration.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy + +from .._version import VERSION + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + + +class AppPlatformManagementClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes + """Configuration for AppPlatformManagementClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. Required. 
+ :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: Gets subscription ID which uniquely identify the Microsoft Azure + subscription. The subscription ID forms part of the URI for every service call. Required. + :type subscription_id: str + :keyword api_version: Api Version. Default value is "2022-11-01-preview". Note that overriding + this default value may result in unsupported behavior. + :paramtype api_version: str + """ + + def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None: + super(AppPlatformManagementClientConfiguration, self).__init__(**kwargs) + api_version = kwargs.pop("api_version", "2022-11-01-preview") # type: str + + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + + self.credential = credential + self.subscription_id = subscription_id + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "mgmt-appplatform/{}".format(VERSION)) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = AsyncARMChallengeAuthenticationPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/_patch.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/_patch.py new file mode 100644 index 00000000000..f7dd3251033 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/__init__.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/__init__.py new file mode 100644 index 00000000000..3d933300029 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/__init__.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._services_operations import ServicesOperations +from ._config_servers_operations import ConfigServersOperations +from ._configuration_services_operations import ConfigurationServicesOperations +from ._service_registries_operations import ServiceRegistriesOperations +from ._application_live_views_operations import ApplicationLiveViewsOperations +from ._application_live_view_operations import ApplicationLiveViewOperations +from ._dev_tool_portals_operations import DevToolPortalsOperations +from ._dev_tool_portal_operations import DevToolPortalOperations +from ._build_service_operations import BuildServiceOperations +from ._buildpack_binding_operations import BuildpackBindingOperations +from ._build_service_builder_operations import BuildServiceBuilderOperations +from ._build_service_agent_pool_operations import BuildServiceAgentPoolOperations +from ._monitoring_settings_operations import MonitoringSettingsOperations +from ._apps_operations import AppsOperations +from ._bindings_operations import BindingsOperations +from ._storages_operations import StoragesOperations +from ._certificates_operations import CertificatesOperations +from ._custom_domains_operations import CustomDomainsOperations +from ._deployments_operations import DeploymentsOperations +from ._operations import Operations +from ._runtime_versions_operations import RuntimeVersionsOperations +from ._skus_operations import SkusOperations +from ._gateways_operations import GatewaysOperations +from ._gateway_route_configs_operations import GatewayRouteConfigsOperations +from ._gateway_custom_domains_operations import GatewayCustomDomainsOperations +from ._api_portals_operations import ApiPortalsOperations +from ._api_portal_custom_domains_operations import ApiPortalCustomDomainsOperations +from ._application_accelerators_operations import ApplicationAcceleratorsOperations +from ._customized_accelerators_operations import CustomizedAcceleratorsOperations +from ._predefined_accelerators_operations import PredefinedAcceleratorsOperations + +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "ServicesOperations", + "ConfigServersOperations", + "ConfigurationServicesOperations", + "ServiceRegistriesOperations", + "ApplicationLiveViewsOperations", + "ApplicationLiveViewOperations", + "DevToolPortalsOperations", + "DevToolPortalOperations", + 
"BuildServiceOperations", + "BuildpackBindingOperations", + "BuildServiceBuilderOperations", + "BuildServiceAgentPoolOperations", + "MonitoringSettingsOperations", + "AppsOperations", + "BindingsOperations", + "StoragesOperations", + "CertificatesOperations", + "CustomDomainsOperations", + "DeploymentsOperations", + "Operations", + "RuntimeVersionsOperations", + "SkusOperations", + "GatewaysOperations", + "GatewayRouteConfigsOperations", + "GatewayCustomDomainsOperations", + "ApiPortalsOperations", + "ApiPortalCustomDomainsOperations", + "ApplicationAcceleratorsOperations", + "CustomizedAcceleratorsOperations", + "PredefinedAcceleratorsOperations", +] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_api_portal_custom_domains_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_api_portal_custom_domains_operations.py new file mode 100644 index 00000000000..b5158d3c5d2 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_api_portal_custom_domains_operations.py @@ -0,0 +1,588 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._api_portal_custom_domains_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class ApiPortalCustomDomainsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`api_portal_custom_domains` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, service_name: str, api_portal_name: str, domain_name: str, **kwargs: Any + ) -> _models.ApiPortalCustomDomainResource: + """Get the API portal custom domain. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :param domain_name: The name of the API portal custom domain. Required. + :type domain_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ApiPortalCustomDomainResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalCustomDomainResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApiPortalCustomDomainResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + domain_name=domain_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ApiPortalCustomDomainResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}/domains/{domainName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + domain_name: str, + api_portal_custom_domain_resource: Union[_models.ApiPortalCustomDomainResource, IO], + **kwargs: Any + ) -> _models.ApiPortalCustomDomainResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, 
+ 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApiPortalCustomDomainResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(api_portal_custom_domain_resource, (IO, bytes)): + _content = api_portal_custom_domain_resource + else: + _json = self._serialize.body(api_portal_custom_domain_resource, "ApiPortalCustomDomainResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + domain_name=domain_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ApiPortalCustomDomainResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ApiPortalCustomDomainResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}/domains/{domainName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + domain_name: str, + api_portal_custom_domain_resource: _models.ApiPortalCustomDomainResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ApiPortalCustomDomainResource]: + """Create or update the API portal custom domain. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :param domain_name: The name of the API portal custom domain. Required. + :type domain_name: str + :param api_portal_custom_domain_resource: The API portal custom domain for the create or update + operation. Required. + :type api_portal_custom_domain_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalCustomDomainResource + :keyword content_type: Body Parameter content-type. 
Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ApiPortalCustomDomainResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalCustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + domain_name: str, + api_portal_custom_domain_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ApiPortalCustomDomainResource]: + """Create or update the API portal custom domain. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :param domain_name: The name of the API portal custom domain. Required. + :type domain_name: str + :param api_portal_custom_domain_resource: The API portal custom domain for the create or update + operation. Required. + :type api_portal_custom_domain_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ApiPortalCustomDomainResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalCustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + domain_name: str, + api_portal_custom_domain_resource: Union[_models.ApiPortalCustomDomainResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.ApiPortalCustomDomainResource]: + """Create or update the API portal custom domain. 
+ + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :param domain_name: The name of the API portal custom domain. Required. + :type domain_name: str + :param api_portal_custom_domain_resource: The API portal custom domain for the create or update + operation. Is either a model type or a IO type. Required. + :type api_portal_custom_domain_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalCustomDomainResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ApiPortalCustomDomainResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalCustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApiPortalCustomDomainResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + domain_name=domain_name, + api_portal_custom_domain_resource=api_portal_custom_domain_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ApiPortalCustomDomainResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + 
continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}/domains/{domainName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, api_portal_name: str, domain_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + domain_name=domain_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}/domains/{domainName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, service_name: str, api_portal_name: str, domain_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete the API portal custom domain. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :param domain_name: The name of the API portal custom domain. Required. + :type domain_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
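For context, a minimal sketch of how a caller might drive this delete LRO and its polling keywords from the async client; the credential, subscription ID, resource names, and the api_portal_custom_domains attribute name below are illustrative assumptions, not taken from this patch:

    # Sketch only: all names are placeholders; the operations-group attribute is assumed.
    import asyncio
    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.appplatform.v2022_11_01_preview.aio import AppPlatformManagementClient

    async def delete_custom_domain() -> None:
        async with DefaultAzureCredential() as credential:
            async with AppPlatformManagementClient(credential, "<subscription-id>") as client:
                poller = await client.api_portal_custom_domains.begin_delete(
                    resource_group_name="my-rg",
                    service_name="my-spring-service",
                    api_portal_name="default",
                    domain_name="portal.contoso.com",
                    polling_interval=10,  # seconds between polls when no Retry-After header is returned
                )
                await poller.result()  # completes once the service reports the domain is deleted

    asyncio.run(delete_custom_domain())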
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + domain_name=domain_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}/domains/{domainName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, api_portal_name: str, **kwargs: Any + ) -> AsyncIterable["_models.ApiPortalCustomDomainResource"]: + """Handle requests to list all API portal custom domains. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. 
+ :type api_portal_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ApiPortalCustomDomainResource or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalCustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApiPortalCustomDomainResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ApiPortalCustomDomainResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}/domains"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_api_portals_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_api_portals_operations.py new file mode 100644 index 00000000000..5e9ee7c8a60 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_api_portals_operations.py @@ -0,0 +1,714 @@ +# pylint: 
disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._api_portals_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, + build_validate_domain_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class ApiPortalsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`api_portals` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, service_name: str, api_portal_name: str, **kwargs: Any + ) -> _models.ApiPortalResource: + """Get the API portal and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. 
+ :type api_portal_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ApiPortalResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApiPortalResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ApiPortalResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + api_portal_resource: Union[_models.ApiPortalResource, IO], + **kwargs: Any + ) -> _models.ApiPortalResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApiPortalResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(api_portal_resource, (IO, bytes)): + _content = api_portal_resource + else: + _json = self._serialize.body(api_portal_resource, "ApiPortalResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) 
# type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ApiPortalResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ApiPortalResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + api_portal_resource: _models.ApiPortalResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ApiPortalResource]: + """Create the default API portal or update the existing API portal. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :param api_portal_resource: The API portal for the create or update operation. Required. + :type api_portal_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ApiPortalResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + api_portal_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ApiPortalResource]: + """Create the default API portal or update the existing API portal. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :param api_portal_resource: The API portal for the create or update operation. Required. + :type api_portal_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ApiPortalResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + api_portal_resource: Union[_models.ApiPortalResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.ApiPortalResource]: + """Create the default API portal or update the existing API portal. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :param api_portal_resource: The API portal for the create or update operation. Is either a + model type or a IO type. Required. + :type api_portal_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalResource + or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
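As a rough usage sketch of this create-or-update LRO, assuming an already-constructed async AppPlatformManagementClient named client; the resource names and the public=True property are illustrative assumptions:

    # Sketch only: `client` is assumed to be an authenticated async AppPlatformManagementClient.
    from azure.mgmt.appplatform.v2022_11_01_preview import models

    async def enable_api_portal(client) -> models.ApiPortalResource:
        poller = await client.api_portals.begin_create_or_update(
            resource_group_name="my-rg",
            service_name="my-spring-service",
            api_portal_name="default",
            api_portal_resource=models.ApiPortalResource(
                properties=models.ApiPortalProperties(public=True),  # assumed example payload
            ),
        )
        return await poller.result()  # deserialized ApiPortalResource once the operation finishes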
+ :return: An instance of AsyncLROPoller that returns either ApiPortalResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApiPortalResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + api_portal_resource=api_portal_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ApiPortalResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, api_portal_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response 
= pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, service_name: str, api_portal_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete the default API portal. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> AsyncIterable["_models.ApiPortalResource"]: + """Handles requests to list all resources in a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ApiPortalResource or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApiPortalResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ApiPortalResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals"} # type: ignore + + @overload + async def validate_domain( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + validate_payload: _models.CustomDomainValidatePayload, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CustomDomainValidateResult: + """Check the domains are valid as well as not in use. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :param validate_payload: Custom domain payload to be validated. Required. + :type validate_payload: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidatePayload + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomDomainValidateResult or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidateResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def validate_domain( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + validate_payload: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CustomDomainValidateResult: + """Check the domains are valid as well as not in use. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :param validate_payload: Custom domain payload to be validated. Required. + :type validate_payload: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomDomainValidateResult or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidateResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def validate_domain( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + validate_payload: Union[_models.CustomDomainValidatePayload, IO], + **kwargs: Any + ) -> _models.CustomDomainValidateResult: + """Check the domains are valid as well as not in use. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. 
+ :type api_portal_name: str + :param validate_payload: Custom domain payload to be validated. Is either a model type or a IO + type. Required. + :type validate_payload: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidatePayload or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomDomainValidateResult or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidateResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomDomainValidateResult] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(validate_payload, (IO, bytes)): + _content = validate_payload + else: + _json = self._serialize.body(validate_payload, "CustomDomainValidatePayload") + + request = build_validate_domain_request( + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.validate_domain.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("CustomDomainValidateResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + validate_domain.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}/validateDomain"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_application_accelerators_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_application_accelerators_operations.py new file mode 100644 index 00000000000..54019f7262f --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_application_accelerators_operations.py @@ -0,0 +1,566 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._application_accelerators_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class ApplicationAcceleratorsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`application_accelerators` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> AsyncIterable["_models.ApplicationAcceleratorResource"]: + """Handle requests to list all application accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
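A brief sketch of consuming the pager returned by list(); the client object and resource names are assumptions used only for illustration:

    # Sketch only: list() returns an AsyncItemPaged, so it is iterated with `async for`,
    # not awaited directly.
    async def print_accelerators(client) -> None:
        async for accelerator in client.application_accelerators.list(
            resource_group_name="my-rg",
            service_name="my-spring-service",
        ):
            print(accelerator.name)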
+ :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ApplicationAcceleratorResource or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApplicationAcceleratorResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ApplicationAcceleratorResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, service_name: str, application_accelerator_name: str, **kwargs: Any + ) -> _models.ApplicationAcceleratorResource: + """Get the application accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ApplicationAcceleratorResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApplicationAcceleratorResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ApplicationAcceleratorResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + application_accelerator_resource: Union[_models.ApplicationAcceleratorResource, IO], + **kwargs: Any + ) -> _models.ApplicationAcceleratorResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApplicationAcceleratorResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(application_accelerator_resource, (IO, bytes)): + _content = application_accelerator_resource + else: + _json = self._serialize.body(application_accelerator_resource, "ApplicationAcceleratorResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + 
application_accelerator_name=application_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ApplicationAcceleratorResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ApplicationAcceleratorResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + application_accelerator_resource: _models.ApplicationAcceleratorResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ApplicationAcceleratorResource]: + """Create or update the application accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param application_accelerator_resource: The application accelerator for the create or update + operation. Required. + :type application_accelerator_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
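And a sketch of the continuation-token pattern these keywords describe, i.e. persisting a poller's state and resuming it later; client, resource names, and the accelerator payload are placeholder assumptions:

    # Sketch only: save the poller state, then rebuild the poller from it in a later call.
    async def start_and_resume(client, accelerator_resource) -> None:
        poller = await client.application_accelerators.begin_create_or_update(
            resource_group_name="my-rg",
            service_name="my-spring-service",
            application_accelerator_name="default",
            application_accelerator_resource=accelerator_resource,
        )
        token = poller.continuation_token()  # serializable state that can be stored elsewhere

        resumed = await client.application_accelerators.begin_create_or_update(
            resource_group_name="my-rg",
            service_name="my-spring-service",
            application_accelerator_name="default",
            application_accelerator_resource=accelerator_resource,
            continuation_token=token,  # restarts polling from the saved state
        )
        await resumed.result()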
+ :return: An instance of AsyncLROPoller that returns either ApplicationAcceleratorResource or + the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + application_accelerator_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ApplicationAcceleratorResource]: + """Create or update the application accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param application_accelerator_resource: The application accelerator for the create or update + operation. Required. + :type application_accelerator_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ApplicationAcceleratorResource or + the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + application_accelerator_resource: Union[_models.ApplicationAcceleratorResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.ApplicationAcceleratorResource]: + """Create or update the application accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param application_accelerator_resource: The application accelerator for the create or update + operation. Is either a model type or a IO type. Required. + :type application_accelerator_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. 
+ Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ApplicationAcceleratorResource or + the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApplicationAcceleratorResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + application_accelerator_resource=application_accelerator_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ApplicationAcceleratorResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, application_accelerator_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, service_name: str, application_accelerator_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete the application accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
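The begin_* operations in this generated client all follow the standard azure-core async long-running-operation pattern: awaiting the begin_* call returns an AsyncLROPoller, and awaiting poller.result() waits for the service to reach a terminal state. A minimal usage sketch, assuming the public azure.mgmt.appplatform.v2022_11_01_preview namespace that these docstrings reference (the vendored copy under azext_spring.vendored_sdks exposes the same classes) and placeholder subscription, resource group, service, and accelerator names:

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.appplatform.v2022_11_01_preview.aio import AppPlatformManagementClient


async def delete_accelerator() -> None:
    # Placeholder identifiers; substitute real values.
    async with DefaultAzureCredential() as credential:
        async with AppPlatformManagementClient(credential, "<subscription-id>") as client:
            # begin_delete returns an AsyncLROPoller[None]; result() resolves once the
            # service reports the delete as finished, or raises HttpResponseError.
            poller = await client.application_accelerators.begin_delete(
                resource_group_name="my-rg",
                service_name="my-spring-service",
                application_accelerator_name="default",
            )
            await poller.result()


asyncio.run(delete_accelerator())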
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_application_live_view_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_application_live_view_operations.py new file mode 100644 index 00000000000..7d654ca319a --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_application_live_view_operations.py @@ -0,0 +1,165 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Optional, TypeVar, Union, cast + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._application_live_view_operations import build_delete_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class ApplicationLiveViewOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`application_live_view` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, application_live_view_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_live_view_name=application_live_view_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationLiveViews/{applicationLiveViewName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, service_name: str, application_live_view_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Disable the default Application Live View. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_live_view_name: The name of Application Live View. Required. + :type application_live_view_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + application_live_view_name=application_live_view_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationLiveViews/{applicationLiveViewName}"} # type: ignore diff --git 
a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_application_live_views_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_application_live_views_operations.py new file mode 100644 index 00000000000..ebbba77bbfb --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_application_live_views_operations.py @@ -0,0 +1,451 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._application_live_views_operations import ( + build_create_or_update_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class ApplicationLiveViewsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`application_live_views` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> AsyncIterable["_models.ApplicationLiveViewResource"]: + """Handles requests to list all resources in a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ApplicationLiveViewResource or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApplicationLiveViewResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ApplicationLiveViewResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationLiveViews"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, service_name: str, application_live_view_name: str, **kwargs: Any + ) -> _models.ApplicationLiveViewResource: + """Get the Application Live and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
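The list() helper above is not awaited itself; it returns an AsyncItemPaged that is consumed with "async for" and transparently follows the collection's next_link. A short sketch under the same import-path assumption as the earlier example, with placeholder resource names:

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.appplatform.v2022_11_01_preview.aio import AppPlatformManagementClient


async def show_live_views() -> None:
    async with DefaultAzureCredential() as credential:
        async with AppPlatformManagementClient(credential, "<subscription-id>") as client:
            # The pager issues the initial list request lazily, then keeps requesting
            # next_link pages until the service stops returning one.
            async for live_view in client.application_live_views.list("my-rg", "my-spring-service"):
                print(live_view.name)


asyncio.run(show_live_views())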
+ :type service_name: str + :param application_live_view_name: The name of Application Live View. Required. + :type application_live_view_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ApplicationLiveViewResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApplicationLiveViewResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_live_view_name=application_live_view_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ApplicationLiveViewResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationLiveViews/{applicationLiveViewName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + application_live_view_name: str, + application_live_view_resource: Union[_models.ApplicationLiveViewResource, IO], + **kwargs: Any + ) -> _models.ApplicationLiveViewResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApplicationLiveViewResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(application_live_view_resource, (IO, bytes)): + _content = application_live_view_resource + else: + _json = self._serialize.body(application_live_view_resource, "ApplicationLiveViewResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_live_view_name=application_live_view_name, + 
subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ApplicationLiveViewResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ApplicationLiveViewResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationLiveViews/{applicationLiveViewName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + application_live_view_name: str, + application_live_view_resource: _models.ApplicationLiveViewResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ApplicationLiveViewResource]: + """Create the default Application Live View or update the existing Application Live View. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_live_view_name: The name of Application Live View. Required. + :type application_live_view_name: str + :param application_live_view_resource: Parameters for the update operation. Required. + :type application_live_view_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either ApplicationLiveViewResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + application_live_view_name: str, + application_live_view_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ApplicationLiveViewResource]: + """Create the default Application Live View or update the existing Application Live View. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_live_view_name: The name of Application Live View. Required. + :type application_live_view_name: str + :param application_live_view_resource: Parameters for the update operation. Required. + :type application_live_view_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ApplicationLiveViewResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + application_live_view_name: str, + application_live_view_resource: Union[_models.ApplicationLiveViewResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.ApplicationLiveViewResource]: + """Create the default Application Live View or update the existing Application Live View. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_live_view_name: The name of Application Live View. Required. + :type application_live_view_name: str + :param application_live_view_resource: Parameters for the update operation. Is either a model + type or a IO type. Required. + :type application_live_view_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
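For readers comparing the two overloads documented above (typed model versus raw IO payload), here is a sketch of both call shapes. It assumes a client constructed as in the earlier examples; both calls target the same resource purely to contrast the shapes, and the minimal request bodies are placeholders, so check the service documentation for the properties a real Application Live View needs.

from azure.mgmt.appplatform.v2022_11_01_preview import models


async def enable_live_view(client) -> None:
    # Typed-model overload: the SDK serializes the model to JSON and sends
    # Content-Type: application/json by default.
    poller = await client.application_live_views.begin_create_or_update(
        "my-rg", "my-spring-service", "default",
        models.ApplicationLiveViewResource(),
    )
    print((await poller.result()).id)

    # Raw-payload overload: pass bytes (or a file-like object) yourself and,
    # if needed, state the content type explicitly.
    poller = await client.application_live_views.begin_create_or_update(
        "my-rg", "my-spring-service", "default",
        b'{"properties": {}}',
        content_type="application/json",
    )
    print((await poller.result()).id)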
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ApplicationLiveViewResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApplicationLiveViewResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + application_live_view_name=application_live_view_name, + application_live_view_resource=application_live_view_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ApplicationLiveViewResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationLiveViews/{applicationLiveViewName}"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_apps_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_apps_operations.py new file mode 100644 index 00000000000..407b8851a2e --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_apps_operations.py @@ -0,0 +1,1261 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._apps_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_get_resource_upload_url_request, + build_list_request, + build_set_active_deployments_request, + build_update_request, + build_validate_domain_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class AppsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`apps` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + service_name: str, + app_name: str, + sync_status: Optional[str] = None, + **kwargs: Any + ) -> _models.AppResource: + """Get an App and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param sync_status: Indicates whether sync status. Default value is None. 
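In contrast to the begin_* methods, get() here is a plain coroutine that returns the deserialized AppResource directly, and sync_status is an optional query parameter. A sketch only, reusing a client from the earlier examples and placeholder names:

async def show_app(client) -> None:
    # Non-LRO call: awaiting it performs a single GET and deserializes the response.
    app = await client.apps.get(
        resource_group_name="my-rg",
        service_name="my-spring-service",
        app_name="my-app",
        sync_status="true",  # illustrative; omit to skip the syncStatus query parameter
    )
    print(app.name, app.properties.provisioning_state)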
+ :type sync_status: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AppResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.AppResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + subscription_id=self._config.subscription_id, + sync_status=sync_status, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("AppResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + app_name: str, + app_resource: Union[_models.AppResource, IO], + **kwargs: Any + ) -> _models.AppResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AppResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(app_resource, (IO, bytes)): + _content = app_resource + else: + _json = self._serialize.body(app_resource, "AppResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: 
disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("AppResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("AppResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("AppResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + app_resource: _models.AppResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.AppResource]: + """Create a new App or update an exiting App. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param app_resource: Parameters for the create or update operation. Required. + :type app_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either AppResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + app_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.AppResource]: + """Create a new App or update an exiting App. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. 
+ :type app_name: str + :param app_resource: Parameters for the create or update operation. Required. + :type app_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either AppResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + app_resource: Union[_models.AppResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.AppResource]: + """Create a new App or update an exiting App. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param app_resource: Parameters for the create or update operation. Is either a model type or a + IO type. Required. + :type app_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
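Tying the create/update overloads above to the poller they return, one more sketch. The import path and resource names follow the same assumptions as the earlier examples, and the AppResource payload shown is a minimal illustrative one rather than a recommended configuration:

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.appplatform.v2022_11_01_preview import models
from azure.mgmt.appplatform.v2022_11_01_preview.aio import AppPlatformManagementClient


async def create_app() -> None:
    async with DefaultAzureCredential() as credential:
        async with AppPlatformManagementClient(credential, "<subscription-id>") as client:
            poller = await client.apps.begin_create_or_update(
                resource_group_name="my-rg",
                service_name="my-spring-service",
                app_name="my-app",
                app_resource=models.AppResource(
                    properties=models.AppResourceProperties(public=True)
                ),
            )
            app = await poller.result()  # AppResource once the LRO reaches a terminal state
            print(app.id)


asyncio.run(create_app())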
+ :return: An instance of AsyncLROPoller that returns either AppResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AppResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + app_resource=app_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("AppResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, app_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: 
+ map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, service_name: str, app_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Operation to delete an App. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}"} # type: ignore + + async def _update_initial( + self, + 
resource_group_name: str, + service_name: str, + app_name: str, + app_resource: Union[_models.AppResource, IO], + **kwargs: Any + ) -> _models.AppResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AppResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(app_resource, (IO, bytes)): + _content = app_resource + else: + _json = self._serialize.body(app_resource, "AppResource") + + request = build_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("AppResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("AppResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}"} # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + app_resource: _models.AppResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.AppResource]: + """Operation to update an exiting App. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param app_resource: Parameters for the update operation. Required. + :type app_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
+ :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either AppResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + app_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.AppResource]: + """Operation to update an exiting App. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param app_resource: Parameters for the update operation. Required. + :type app_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either AppResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + app_resource: Union[_models.AppResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.AppResource]: + """Operation to update an exiting App. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param app_resource: Parameters for the update operation. Is either a model type or a IO type. + Required. + :type app_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either AppResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AppResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + app_resource=app_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("AppResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}"} # type: ignore + + @distributed_trace + def list(self, resource_group_name: str, service_name: str, **kwargs: Any) -> AsyncIterable["_models.AppResource"]: + """Handles requests to list all resources in a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either AppResource or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.AppResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("AppResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps"} # type: ignore + + @distributed_trace_async + async def get_resource_upload_url( + self, resource_group_name: str, service_name: str, app_name: str, **kwargs: Any + ) -> _models.ResourceUploadDefinition: + """Get an resource upload URL for an App, which may be artifacts or source archive. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ResourceUploadDefinition or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceUploadDefinition + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ResourceUploadDefinition] + + request = build_get_resource_upload_url_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_resource_upload_url.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ResourceUploadDefinition", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_resource_upload_url.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/getResourceUploadUrl"} # type: ignore + + async def _set_active_deployments_initial( + self, + resource_group_name: str, + service_name: str, + app_name: str, + active_deployment_collection: Union[_models.ActiveDeploymentCollection, IO], + **kwargs: Any + ) -> _models.AppResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AppResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(active_deployment_collection, (IO, bytes)): + _content = active_deployment_collection + else: + _json = self._serialize.body(active_deployment_collection, "ActiveDeploymentCollection") + + request = build_set_active_deployments_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + 
template_url=self._set_active_deployments_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("AppResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("AppResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _set_active_deployments_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/setActiveDeployments"} # type: ignore + + @overload + async def begin_set_active_deployments( + self, + resource_group_name: str, + service_name: str, + app_name: str, + active_deployment_collection: _models.ActiveDeploymentCollection, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.AppResource]: + """Set existing Deployment under the app as active. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param active_deployment_collection: A list of Deployment name to be active. Required. + :type active_deployment_collection: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ActiveDeploymentCollection + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either AppResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_set_active_deployments( + self, + resource_group_name: str, + service_name: str, + app_name: str, + active_deployment_collection: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.AppResource]: + """Set existing Deployment under the app as active. + + :param resource_group_name: The name of the resource group that contains the resource. 
You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param active_deployment_collection: A list of Deployment name to be active. Required. + :type active_deployment_collection: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either AppResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_set_active_deployments( + self, + resource_group_name: str, + service_name: str, + app_name: str, + active_deployment_collection: Union[_models.ActiveDeploymentCollection, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.AppResource]: + """Set existing Deployment under the app as active. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param active_deployment_collection: A list of Deployment name to be active. Is either a model + type or a IO type. Required. + :type active_deployment_collection: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ActiveDeploymentCollection or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either AppResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AppResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._set_active_deployments_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + active_deployment_collection=active_deployment_collection, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("AppResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_set_active_deployments.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/setActiveDeployments"} # type: ignore + + @overload + async def validate_domain( + self, + resource_group_name: str, + service_name: str, + app_name: str, + validate_payload: _models.CustomDomainValidatePayload, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CustomDomainValidateResult: + """Check the resource name is valid as well as not in use. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param validate_payload: Custom domain payload to be validated. Required. + :type validate_payload: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidatePayload + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomDomainValidateResult or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidateResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def validate_domain( + self, + resource_group_name: str, + service_name: str, + app_name: str, + validate_payload: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CustomDomainValidateResult: + """Check the resource name is valid as well as not in use. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param validate_payload: Custom domain payload to be validated. Required. + :type validate_payload: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomDomainValidateResult or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidateResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def validate_domain( + self, + resource_group_name: str, + service_name: str, + app_name: str, + validate_payload: Union[_models.CustomDomainValidatePayload, IO], + **kwargs: Any + ) -> _models.CustomDomainValidateResult: + """Check the resource name is valid as well as not in use. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param validate_payload: Custom domain payload to be validated. Is either a model type or a IO + type. Required. + :type validate_payload: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidatePayload or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomDomainValidateResult or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidateResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomDomainValidateResult] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(validate_payload, (IO, bytes)): + _content = validate_payload + else: + _json = self._serialize.body(validate_payload, "CustomDomainValidatePayload") + + request = build_validate_domain_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.validate_domain.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("CustomDomainValidateResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + validate_domain.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/validateDomain"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_bindings_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_bindings_operations.py new file mode 100644 index 00000000000..41508111e7e --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_bindings_operations.py @@ -0,0 +1,836 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
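[Note, not part of the patch: the AppsOperations class above and the BindingsOperations class introduced in the next file both follow the same AutoRest async pattern — `begin_*` coroutines return an AsyncLROPoller, and `list` returns an AsyncItemPaged. The sketch below is a hypothetical caller-side illustration only; the subscription ID, resource group, service, and app names are placeholders, and the AppResource payload is a minimal example rather than a prescribed update body.]

    import asyncio

    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.appplatform.v2022_11_01_preview.aio import AppPlatformManagementClient
    from azure.mgmt.appplatform.v2022_11_01_preview import models

    async def main() -> None:
        # Placeholder identifiers; substitute real values before running.
        subscription_id = "00000000-0000-0000-0000-000000000000"
        resource_group = "my-rg"
        service_name = "my-spring-service"
        app_name = "my-app"

        async with DefaultAzureCredential() as credential:
            async with AppPlatformManagementClient(credential, subscription_id) as client:
                # Awaiting begin_update yields an AsyncLROPoller; result() polls until the LRO finishes.
                poller = await client.apps.begin_update(
                    resource_group,
                    service_name,
                    app_name,
                    models.AppResource(properties=models.AppResourceProperties(public=True)),
                )
                app = await poller.result()
                print(app.name)

                # list() returns an AsyncItemPaged; page continuation is handled transparently.
                async for item in client.apps.list(resource_group, service_name):
                    print(item.name)

    asyncio.run(main())

[End of illustrative note.]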
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._bindings_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, + build_update_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class BindingsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`bindings` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, service_name: str, app_name: str, binding_name: str, **kwargs: Any + ) -> _models.BindingResource: + """Get a Binding and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param binding_name: The name of the Binding resource. Required. 
+ :type binding_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BindingResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BindingResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + binding_name=binding_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("BindingResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/bindings/{bindingName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + app_name: str, + binding_name: str, + binding_resource: Union[_models.BindingResource, IO], + **kwargs: Any + ) -> _models.BindingResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BindingResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(binding_resource, (IO, bytes)): + _content = binding_resource + else: + _json = self._serialize.body(binding_resource, "BindingResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + binding_name=binding_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("BindingResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("BindingResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("BindingResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/bindings/{bindingName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + binding_name: str, + binding_resource: _models.BindingResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BindingResource]: + """Create a new Binding or update an exiting Binding. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param binding_name: The name of the Binding resource. Required. + :type binding_name: str + :param binding_resource: Parameters for the create or update operation. Required. + :type binding_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BindingResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + binding_name: str, + binding_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BindingResource]: + """Create a new Binding or update an exiting Binding. 
+ + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param binding_name: The name of the Binding resource. Required. + :type binding_name: str + :param binding_resource: Parameters for the create or update operation. Required. + :type binding_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BindingResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + binding_name: str, + binding_resource: Union[_models.BindingResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.BindingResource]: + """Create a new Binding or update an exiting Binding. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param binding_name: The name of the Binding resource. Required. + :type binding_name: str + :param binding_resource: Parameters for the create or update operation. Is either a model type + or a IO type. Required. + :type binding_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either BindingResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BindingResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + binding_name=binding_name, + binding_resource=binding_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("BindingResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/bindings/{bindingName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, app_name: str, binding_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + binding_name=binding_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, 
stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/bindings/{bindingName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, service_name: str, app_name: str, binding_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Operation to delete a Binding. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param binding_name: The name of the Binding resource. Required. + :type binding_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + binding_name=binding_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/bindings/{bindingName}"} # type: ignore + + async def _update_initial( + self, + resource_group_name: str, + service_name: str, + app_name: str, + binding_name: str, + binding_resource: Union[_models.BindingResource, IO], + **kwargs: Any + ) -> _models.BindingResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BindingResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(binding_resource, (IO, bytes)): + _content = binding_resource + else: + _json = self._serialize.body(binding_resource, "BindingResource") + + request = build_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + binding_name=binding_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: 
ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("BindingResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("BindingResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/bindings/{bindingName}"} # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + binding_name: str, + binding_resource: _models.BindingResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BindingResource]: + """Operation to update an exiting Binding. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param binding_name: The name of the Binding resource. Required. + :type binding_name: str + :param binding_resource: Parameters for the update operation. Required. + :type binding_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BindingResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + binding_name: str, + binding_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BindingResource]: + """Operation to update an exiting Binding. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param binding_name: The name of the Binding resource. Required. + :type binding_name: str + :param binding_resource: Parameters for the update operation. Required. + :type binding_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BindingResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + binding_name: str, + binding_resource: Union[_models.BindingResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.BindingResource]: + """Operation to update an exiting Binding. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param binding_name: The name of the Binding resource. Required. + :type binding_name: str + :param binding_resource: Parameters for the update operation. Is either a model type or a IO + type. Required. + :type binding_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either BindingResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BindingResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + binding_name=binding_name, + binding_resource=binding_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("BindingResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/bindings/{bindingName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, app_name: str, **kwargs: Any + ) -> AsyncIterable["_models.BindingResource"]: + """Handles requests to list all resources in an App. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. 
+ :type app_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BindingResource or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BindingResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("BindingResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/bindings"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_build_service_agent_pool_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_build_service_agent_pool_operations.py new file mode 100644 index 00000000000..07aa7e87245 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_build_service_agent_pool_operations.py @@ -0,0 +1,469 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._build_service_agent_pool_operations import ( + build_get_request, + build_list_request, + build_update_put_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class BuildServiceAgentPoolOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`build_service_agent_pool` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, build_service_name: str, **kwargs: Any + ) -> AsyncIterable["_models.BuildServiceAgentPoolResource"]: + """List build service agent pool. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. 
+ :type build_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BuildServiceAgentPoolResource or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceAgentPoolResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildServiceAgentPoolResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("BuildServiceAgentPoolResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/agentPools"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, service_name: str, build_service_name: str, agent_pool_name: str, **kwargs: Any + ) -> _models.BuildServiceAgentPoolResource: + """Get build service agent pool. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. 
+ :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param agent_pool_name: The name of the build service agent pool resource. Required. + :type agent_pool_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BuildServiceAgentPoolResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceAgentPoolResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildServiceAgentPoolResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + agent_pool_name=agent_pool_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("BuildServiceAgentPoolResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/agentPools/{agentPoolName}"} # type: ignore + + async def _update_put_initial( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + agent_pool_name: str, + agent_pool_resource: Union[_models.BuildServiceAgentPoolResource, IO], + **kwargs: Any + ) -> _models.BuildServiceAgentPoolResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildServiceAgentPoolResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(agent_pool_resource, (IO, bytes)): + _content = agent_pool_resource + else: + _json = self._serialize.body(agent_pool_resource, 
"BuildServiceAgentPoolResource") + + request = build_update_put_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + agent_pool_name=agent_pool_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_put_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("BuildServiceAgentPoolResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("BuildServiceAgentPoolResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_put_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/agentPools/{agentPoolName}"} # type: ignore + + @overload + async def begin_update_put( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + agent_pool_name: str, + agent_pool_resource: _models.BuildServiceAgentPoolResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BuildServiceAgentPoolResource]: + """Create or update build service agent pool. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param agent_pool_name: The name of the build service agent pool resource. Required. + :type agent_pool_name: str + :param agent_pool_resource: Parameters for the update operation. Required. + :type agent_pool_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceAgentPoolResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either BuildServiceAgentPoolResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceAgentPoolResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update_put( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + agent_pool_name: str, + agent_pool_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BuildServiceAgentPoolResource]: + """Create or update build service agent pool. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param agent_pool_name: The name of the build service agent pool resource. Required. + :type agent_pool_name: str + :param agent_pool_resource: Parameters for the update operation. Required. + :type agent_pool_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BuildServiceAgentPoolResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceAgentPoolResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update_put( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + agent_pool_name: str, + agent_pool_resource: Union[_models.BuildServiceAgentPoolResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.BuildServiceAgentPoolResource]: + """Create or update build service agent pool. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param agent_pool_name: The name of the build service agent pool resource. Required. + :type agent_pool_name: str + :param agent_pool_resource: Parameters for the update operation. Is either a model type or a IO + type. Required. 
+ :type agent_pool_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceAgentPoolResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BuildServiceAgentPoolResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceAgentPoolResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildServiceAgentPoolResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_put_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + agent_pool_name=agent_pool_name, + agent_pool_resource=agent_pool_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("BuildServiceAgentPoolResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update_put.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/agentPools/{agentPoolName}"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_build_service_builder_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_build_service_builder_operations.py new 
file mode 100644 index 00000000000..33a1e0c1a9d --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_build_service_builder_operations.py @@ -0,0 +1,652 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._build_service_builder_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_deployments_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class BuildServiceBuilderOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`build_service_builder` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, service_name: str, build_service_name: str, builder_name: str, **kwargs: Any + ) -> _models.BuilderResource: + """Get a KPack builder. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. 
+ :type builder_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BuilderResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuilderResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuilderResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("BuilderResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + builder_resource: Union[_models.BuilderResource, IO], + **kwargs: Any + ) -> _models.BuilderResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuilderResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(builder_resource, (IO, bytes)): + _content = builder_resource + else: + _json = self._serialize.body(builder_resource, "BuilderResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) 
+ request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("BuilderResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("BuilderResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + builder_resource: _models.BuilderResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BuilderResource]: + """Create or update a KPack builder. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. + :type builder_name: str + :param builder_resource: The target builder for the create or update operation. Required. + :type builder_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuilderResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BuilderResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuilderResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + builder_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BuilderResource]: + """Create or update a KPack builder. 
+ + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. + :type builder_name: str + :param builder_resource: The target builder for the create or update operation. Required. + :type builder_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BuilderResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuilderResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + builder_resource: Union[_models.BuilderResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.BuilderResource]: + """Create or update a KPack builder. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. + :type builder_name: str + :param builder_resource: The target builder for the create or update operation. Is either a + model type or a IO type. Required. + :type builder_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuilderResource or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either BuilderResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuilderResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuilderResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + builder_resource=builder_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("BuilderResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, build_service_name: str, builder_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await 
self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, service_name: str, build_service_name: str, builder_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete a KPack builder. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. + :type builder_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, build_service_name: str, **kwargs: Any + ) -> AsyncIterable["_models.BuilderResource"]: + """List KPack builders result. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. 
+ :type build_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BuilderResource or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuilderResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuilderResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("BuilderResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders"} # type: ignore + + @distributed_trace_async + async def list_deployments( + self, resource_group_name: str, service_name: str, build_service_name: str, builder_name: str, **kwargs: Any + ) -> _models.DeploymentList: + """List deployments that are using the builder. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. 
+ :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. + :type builder_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DeploymentList or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentList + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DeploymentList] + + request = build_list_deployments_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_deployments.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("DeploymentList", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_deployments.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}/listUsingDeployments"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_build_service_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_build_service_operations.py new file mode 100644 index 00000000000..69e9becf480 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_build_service_operations.py @@ -0,0 +1,1116 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._build_service_operations import ( + build_create_or_update_build_request, + build_get_build_request, + build_get_build_result_log_request, + build_get_build_result_request, + build_get_build_service_request, + build_get_resource_upload_url_request, + build_get_supported_buildpack_request, + build_get_supported_stack_request, + build_list_build_results_request, + build_list_build_services_request, + build_list_builds_request, + build_list_supported_buildpacks_request, + build_list_supported_stacks_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class BuildServiceOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`build_service` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list_build_services( + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> AsyncIterable["_models.BuildService"]: + """List build services resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BuildService or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildServiceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_build_services_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_build_services.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("BuildServiceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list_build_services.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices"} # type: ignore + + @distributed_trace_async + async def get_build_service( + self, resource_group_name: str, service_name: str, build_service_name: str, **kwargs: Any + ) -> _models.BuildService: + """Get a build service resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BuildService or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildService + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildService] + + request = build_get_build_service_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_build_service.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("BuildService", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_build_service.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}"} # type: ignore + + @distributed_trace + def list_builds( + self, resource_group_name: str, service_name: str, build_service_name: str, **kwargs: Any + ) -> AsyncIterable["_models.Build"]: + """List KPack builds. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. 
+ :type build_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either Build or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.Build] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_builds_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_builds.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("BuildCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list_builds.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builds"} # type: ignore + + @distributed_trace_async + async def get_build( + self, resource_group_name: str, service_name: str, build_service_name: str, build_name: str, **kwargs: Any + ) -> _models.Build: + """Get a KPack build. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param build_name: The name of the build resource. Required. + :type build_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Build or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Build + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.Build] + + request = build_get_build_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + build_name=build_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_build.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("Build", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_build.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builds/{buildName}"} # type: ignore + + @overload + async def create_or_update_build( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + build_name: str, + build: _models.Build, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Build: + """Create or update a KPack build. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param build_name: The name of the build resource. Required. + :type build_name: str + :param build: Parameters for the create or update operation. Required. + :type build: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Build + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Build or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Build + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_build( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + build_name: str, + build: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Build: + """Create or update a KPack build. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param build_name: The name of the build resource. Required. + :type build_name: str + :param build: Parameters for the create or update operation. Required. + :type build: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Build or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Build + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update_build( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + build_name: str, + build: Union[_models.Build, IO], + **kwargs: Any + ) -> _models.Build: + """Create or update a KPack build. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param build_name: The name of the build resource. Required. + :type build_name: str + :param build: Parameters for the create or update operation. Is either a model type or a IO + type. Required. + :type build: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Build or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Build or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Build + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Build] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(build, (IO, bytes)): + _content = build + else: + _json = self._serialize.body(build, "Build") + + request = build_create_or_update_build_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + build_name=build_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update_build.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("Build", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("Build", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update_build.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builds/{buildName}"} # type: ignore + + @distributed_trace + def list_build_results( + self, resource_group_name: str, service_name: str, build_service_name: str, build_name: str, **kwargs: Any + ) -> AsyncIterable["_models.BuildResult"]: + """List KPack build results. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param build_name: The name of the build resource. Required. 
+ :type build_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BuildResult or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildResultCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_build_results_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + build_name=build_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_build_results.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("BuildResultCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list_build_results.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builds/{buildName}/results"} # type: ignore + + @distributed_trace_async + async def get_build_result( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + build_name: str, + build_result_name: str, + **kwargs: Any + ) -> _models.BuildResult: + """Get a KPack build result. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. 
+ :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param build_name: The name of the build resource. Required. + :type build_name: str + :param build_result_name: The name of the build result resource. Required. + :type build_result_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BuildResult or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildResult] + + request = build_get_build_result_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + build_name=build_name, + build_result_name=build_result_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_build_result.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("BuildResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_build_result.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builds/{buildName}/results/{buildResultName}"} # type: ignore + + @distributed_trace_async + async def get_build_result_log( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + build_name: str, + build_result_name: str, + **kwargs: Any + ) -> _models.BuildResultLog: + """Get a KPack build result log download URL. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param build_name: The name of the build resource. Required. + :type build_name: str + :param build_result_name: The name of the build result resource. Required. 
+ :type build_result_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BuildResultLog or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildResultLog + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildResultLog] + + request = build_get_build_result_log_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + build_name=build_name, + build_result_name=build_result_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_build_result_log.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("BuildResultLog", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_build_result_log.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builds/{buildName}/results/{buildResultName}/getLogFileUrl"} # type: ignore + + @distributed_trace_async + async def get_resource_upload_url( + self, resource_group_name: str, service_name: str, build_service_name: str, **kwargs: Any + ) -> _models.ResourceUploadDefinition: + """Get an resource upload URL for build service, which may be artifacts or source archive. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. 
+ :type build_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ResourceUploadDefinition or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceUploadDefinition + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ResourceUploadDefinition] + + request = build_get_resource_upload_url_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_resource_upload_url.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ResourceUploadDefinition", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_resource_upload_url.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/getResourceUploadUrl"} # type: ignore + + @distributed_trace_async + async def list_supported_buildpacks( + self, resource_group_name: str, service_name: str, build_service_name: str, **kwargs: Any + ) -> _models.SupportedBuildpacksCollection: + """Get all supported buildpacks. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. 
+ :type build_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SupportedBuildpacksCollection or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SupportedBuildpacksCollection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.SupportedBuildpacksCollection] + + request = build_list_supported_buildpacks_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_supported_buildpacks.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("SupportedBuildpacksCollection", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_supported_buildpacks.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/supportedBuildpacks"} # type: ignore + + @distributed_trace_async + async def get_supported_buildpack( + self, resource_group_name: str, service_name: str, build_service_name: str, buildpack_name: str, **kwargs: Any + ) -> _models.SupportedBuildpackResource: + """Get the supported buildpack resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param buildpack_name: The name of the buildpack resource. Required. 
+ :type buildpack_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SupportedBuildpackResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SupportedBuildpackResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.SupportedBuildpackResource] + + request = build_get_supported_buildpack_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + buildpack_name=buildpack_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_supported_buildpack.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("SupportedBuildpackResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_supported_buildpack.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/supportedBuildpacks/{buildpackName}"} # type: ignore + + @distributed_trace_async + async def list_supported_stacks( + self, resource_group_name: str, service_name: str, build_service_name: str, **kwargs: Any + ) -> _models.SupportedStacksCollection: + """Get all supported stacks. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. 
+ :type build_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SupportedStacksCollection or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SupportedStacksCollection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.SupportedStacksCollection] + + request = build_list_supported_stacks_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_supported_stacks.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("SupportedStacksCollection", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_supported_stacks.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/supportedStacks"} # type: ignore + + @distributed_trace_async + async def get_supported_stack( + self, resource_group_name: str, service_name: str, build_service_name: str, stack_name: str, **kwargs: Any + ) -> _models.SupportedStackResource: + """Get the supported stack resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param stack_name: The name of the stack resource. Required. 
+ :type stack_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SupportedStackResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SupportedStackResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.SupportedStackResource] + + request = build_get_supported_stack_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + stack_name=stack_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_supported_stack.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("SupportedStackResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_supported_stack.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/supportedStacks/{stackName}"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_buildpack_binding_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_buildpack_binding_operations.py new file mode 100644 index 00000000000..e062b659f73 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_buildpack_binding_operations.py @@ -0,0 +1,628 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._buildpack_binding_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class BuildpackBindingOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`buildpack_binding` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + buildpack_binding_name: str, + **kwargs: Any + ) -> _models.BuildpackBindingResource: + """Get a buildpack binding by name. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. + :type builder_name: str + :param buildpack_binding_name: The name of the Buildpack Binding Name. Required. 
+ :type buildpack_binding_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BuildpackBindingResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackBindingResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildpackBindingResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + buildpack_binding_name=buildpack_binding_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("BuildpackBindingResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}/buildpackBindings/{buildpackBindingName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + buildpack_binding_name: str, + buildpack_binding: Union[_models.BuildpackBindingResource, IO], + **kwargs: Any + ) -> _models.BuildpackBindingResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildpackBindingResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(buildpack_binding, (IO, bytes)): + _content = buildpack_binding + else: + _json = self._serialize.body(buildpack_binding, "BuildpackBindingResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + 
buildpack_binding_name=buildpack_binding_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("BuildpackBindingResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("BuildpackBindingResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}/buildpackBindings/{buildpackBindingName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + buildpack_binding_name: str, + buildpack_binding: _models.BuildpackBindingResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BuildpackBindingResource]: + """Create or update a buildpack binding. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. + :type builder_name: str + :param buildpack_binding_name: The name of the Buildpack Binding Name. Required. + :type buildpack_binding_name: str + :param buildpack_binding: The target buildpack binding for the create or update operation. + Required. + :type buildpack_binding: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackBindingResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either BuildpackBindingResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackBindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + buildpack_binding_name: str, + buildpack_binding: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BuildpackBindingResource]: + """Create or update a buildpack binding. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. + :type builder_name: str + :param buildpack_binding_name: The name of the Buildpack Binding Name. Required. + :type buildpack_binding_name: str + :param buildpack_binding: The target buildpack binding for the create or update operation. + Required. + :type buildpack_binding: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BuildpackBindingResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackBindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + buildpack_binding_name: str, + buildpack_binding: Union[_models.BuildpackBindingResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.BuildpackBindingResource]: + """Create or update a buildpack binding. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. + :type builder_name: str + :param buildpack_binding_name: The name of the Buildpack Binding Name. Required. 
+ :type buildpack_binding_name: str + :param buildpack_binding: The target buildpack binding for the create or update operation. Is + either a model type or a IO type. Required. + :type buildpack_binding: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackBindingResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BuildpackBindingResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackBindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildpackBindingResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + buildpack_binding_name=buildpack_binding_name, + buildpack_binding=buildpack_binding, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("BuildpackBindingResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}/buildpackBindings/{buildpackBindingName}"} # type: ignore + + async def 
_delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + buildpack_binding_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + buildpack_binding_name=buildpack_binding_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}/buildpackBindings/{buildpackBindingName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + buildpack_binding_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Operation to delete a Buildpack Binding. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. + :type builder_name: str + :param buildpack_binding_name: The name of the Buildpack Binding Name. Required. + :type buildpack_binding_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
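+
+        Illustrative usage (editor's sketch, not part of the generated code; the
+        ``buildpack_binding`` client attribute name and all argument values below are
+        assumptions)::
+
+            poller = await client.buildpack_binding.begin_delete(
+                resource_group_name="my-rg",
+                service_name="my-spring-service",
+                build_service_name="default",
+                builder_name="default",
+                buildpack_binding_name="my-binding",
+            )
+            await poller.result()  # returns None once the binding is deleted
+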
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + buildpack_binding_name=buildpack_binding_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}/buildpackBindings/{buildpackBindingName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, build_service_name: str, builder_name: str, **kwargs: Any + ) -> AsyncIterable["_models.BuildpackBindingResource"]: + """Handles requests to list all buildpack bindings in a builder. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. 
+ :type builder_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BuildpackBindingResource or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackBindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildpackBindingResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("BuildpackBindingResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}/buildpackBindings"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_certificates_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_certificates_operations.py new file mode 100644 index 00000000000..d76b9d7c662 --- /dev/null +++ 
b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_certificates_operations.py @@ -0,0 +1,566 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._certificates_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class CertificatesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`certificates` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, service_name: str, certificate_name: str, **kwargs: Any + ) -> _models.CertificateResource: + """Get the certificate resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param certificate_name: The name of the certificate resource. Required. 
+ :type certificate_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CertificateResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CertificateResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CertificateResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + certificate_name=certificate_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("CertificateResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/certificates/{certificateName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + certificate_name: str, + certificate_resource: Union[_models.CertificateResource, IO], + **kwargs: Any + ) -> _models.CertificateResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CertificateResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(certificate_resource, (IO, bytes)): + _content = certificate_resource + else: + _json = self._serialize.body(certificate_resource, "CertificateResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + certificate_name=certificate_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("CertificateResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("CertificateResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("CertificateResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/certificates/{certificateName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + certificate_name: str, + certificate_resource: _models.CertificateResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.CertificateResource]: + """Create or update certificate resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param certificate_name: The name of the certificate resource. Required. + :type certificate_name: str + :param certificate_resource: Parameters for the create or update operation. Required. + :type certificate_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CertificateResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either CertificateResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CertificateResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + certificate_name: str, + certificate_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.CertificateResource]: + """Create or update certificate resource. + + :param resource_group_name: The name of the resource group that contains the resource. 
You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param certificate_name: The name of the certificate resource. Required. + :type certificate_name: str + :param certificate_resource: Parameters for the create or update operation. Required. + :type certificate_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either CertificateResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CertificateResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + certificate_name: str, + certificate_resource: Union[_models.CertificateResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.CertificateResource]: + """Create or update certificate resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param certificate_name: The name of the certificate resource. Required. + :type certificate_name: str + :param certificate_resource: Parameters for the create or update operation. Is either a model + type or a IO type. Required. + :type certificate_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CertificateResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
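+
+        Illustrative usage (editor's sketch, not part of the generated code; the client
+        construction and the ``cert_resource`` value are assumptions)::
+
+            # cert_resource is a CertificateResource model or a JSON byte stream (IO)
+            poller = await client.certificates.begin_create_or_update(
+                resource_group_name="my-rg",
+                service_name="my-spring-service",
+                certificate_name="my-cert",
+                certificate_resource=cert_resource,
+            )
+            certificate = await poller.result()  # deserialized CertificateResource
+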
+ :return: An instance of AsyncLROPoller that returns either CertificateResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CertificateResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CertificateResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + certificate_name=certificate_name, + certificate_resource=certificate_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("CertificateResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/certificates/{certificateName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, certificate_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + certificate_name=certificate_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, 
**kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/certificates/{certificateName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, service_name: str, certificate_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete the certificate resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param certificate_name: The name of the certificate resource. Required. + :type certificate_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + certificate_name=certificate_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/certificates/{certificateName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> AsyncIterable["_models.CertificateResource"]: + """List all the certificates of one user. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either CertificateResource or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.CertificateResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CertificateResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("CertificateResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/certificates"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_config_servers_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_config_servers_operations.py new file mode 100644 index 00000000000..0fd9506c739 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_config_servers_operations.py @@ -0,0 +1,794 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._config_servers_operations import ( + build_get_request, + build_update_patch_request, + build_update_put_request, + build_validate_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class ConfigServersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`config_servers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get(self, resource_group_name: str, service_name: str, **kwargs: Any) -> _models.ConfigServerResource: + """Get the config server and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ConfigServerResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigServerResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ConfigServerResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configServers/default"} # type: ignore + + async def _update_put_initial( + self, + resource_group_name: str, + service_name: str, + config_server_resource: Union[_models.ConfigServerResource, IO], + **kwargs: Any + ) -> _models.ConfigServerResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigServerResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(config_server_resource, (IO, bytes)): + _content = config_server_resource + else: + _json = self._serialize.body(config_server_resource, "ConfigServerResource") + + request = build_update_put_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_put_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: 
ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ConfigServerResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("ConfigServerResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_put_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configServers/default"} # type: ignore + + @overload + async def begin_update_put( + self, + resource_group_name: str, + service_name: str, + config_server_resource: _models.ConfigServerResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ConfigServerResource]: + """Update the config server. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param config_server_resource: Parameters for the update operation. Required. + :type config_server_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ConfigServerResource or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update_put( + self, + resource_group_name: str, + service_name: str, + config_server_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ConfigServerResource]: + """Update the config server. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param config_server_resource: Parameters for the update operation. Required. + :type config_server_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ConfigServerResource or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update_put( + self, + resource_group_name: str, + service_name: str, + config_server_resource: Union[_models.ConfigServerResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.ConfigServerResource]: + """Update the config server. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param config_server_resource: Parameters for the update operation. Is either a model type or a + IO type. Required. + :type config_server_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
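+
+        Illustrative usage (editor's sketch, not part of the generated code; the
+        ``config_server_resource`` value is an assumption). ``begin_update_patch`` below
+        follows the same calling pattern::
+
+            # config_server_resource is a ConfigServerResource model or an IO stream
+            poller = await client.config_servers.begin_update_put(
+                resource_group_name="my-rg",
+                service_name="my-spring-service",
+                config_server_resource=config_server_resource,
+            )
+            updated = await poller.result()  # deserialized ConfigServerResource
+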
+ :return: An instance of AsyncLROPoller that returns either ConfigServerResource or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigServerResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_put_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + config_server_resource=config_server_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ConfigServerResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update_put.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configServers/default"} # type: ignore + + async def _update_patch_initial( + self, + resource_group_name: str, + service_name: str, + config_server_resource: Union[_models.ConfigServerResource, IO], + **kwargs: Any + ) -> _models.ConfigServerResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigServerResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(config_server_resource, (IO, bytes)): + _content = config_server_resource + else: + _json = self._serialize.body(config_server_resource, "ConfigServerResource") + + request = build_update_patch_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + 
api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_patch_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ConfigServerResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("ConfigServerResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_patch_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configServers/default"} # type: ignore + + @overload + async def begin_update_patch( + self, + resource_group_name: str, + service_name: str, + config_server_resource: _models.ConfigServerResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ConfigServerResource]: + """Update the config server. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param config_server_resource: Parameters for the update operation. Required. + :type config_server_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ConfigServerResource or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update_patch( + self, + resource_group_name: str, + service_name: str, + config_server_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ConfigServerResource]: + """Update the config server. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. 
+ :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param config_server_resource: Parameters for the update operation. Required. + :type config_server_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ConfigServerResource or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update_patch( + self, + resource_group_name: str, + service_name: str, + config_server_resource: Union[_models.ConfigServerResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.ConfigServerResource]: + """Update the config server. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param config_server_resource: Parameters for the update operation. Is either a model type or a + IO type. Required. + :type config_server_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either ConfigServerResource or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigServerResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_patch_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + config_server_resource=config_server_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ConfigServerResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update_patch.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configServers/default"} # type: ignore + + async def _validate_initial( + self, + resource_group_name: str, + service_name: str, + config_server_settings: Union[_models.ConfigServerSettings, IO], + **kwargs: Any + ) -> _models.ConfigServerSettingsValidateResult: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigServerSettingsValidateResult] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(config_server_settings, (IO, bytes)): + _content = config_server_settings + else: + _json = self._serialize.body(config_server_settings, "ConfigServerSettings") + + request = build_validate_request( + resource_group_name=resource_group_name, + service_name=service_name, + 
subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._validate_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ConfigServerSettingsValidateResult", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("ConfigServerSettingsValidateResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _validate_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configServers/validate"} # type: ignore + + @overload + async def begin_validate( + self, + resource_group_name: str, + service_name: str, + config_server_settings: _models.ConfigServerSettings, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ConfigServerSettingsValidateResult]: + """Check if the config server settings are valid. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param config_server_settings: Config server settings to be validated. Required. + :type config_server_settings: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerSettings + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ConfigServerSettingsValidateResult + or the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerSettingsValidateResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_validate( + self, + resource_group_name: str, + service_name: str, + config_server_settings: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ConfigServerSettingsValidateResult]: + """Check if the config server settings are valid. 
+ + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param config_server_settings: Config server settings to be validated. Required. + :type config_server_settings: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ConfigServerSettingsValidateResult + or the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerSettingsValidateResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_validate( + self, + resource_group_name: str, + service_name: str, + config_server_settings: Union[_models.ConfigServerSettings, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.ConfigServerSettingsValidateResult]: + """Check if the config server settings are valid. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param config_server_settings: Config server settings to be validated. Is either a model type + or a IO type. Required. + :type config_server_settings: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerSettings or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either ConfigServerSettingsValidateResult + or the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerSettingsValidateResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigServerSettingsValidateResult] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._validate_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + config_server_settings=config_server_settings, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ConfigServerSettingsValidateResult", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_validate.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configServers/validate"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_configuration_services_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_configuration_services_operations.py new file mode 100644 index 00000000000..5baba60cdae --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_configuration_services_operations.py @@ -0,0 +1,806 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._configuration_services_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, + build_validate_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class ConfigurationServicesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`configuration_services` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, service_name: str, configuration_service_name: str, **kwargs: Any + ) -> _models.ConfigurationServiceResource: + """Get the Application Configuration Service and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param configuration_service_name: The name of Application Configuration Service. Required. 
+ :type configuration_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ConfigurationServiceResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigurationServiceResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + configuration_service_name=configuration_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ConfigurationServiceResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configurationServices/{configurationServiceName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + configuration_service_name: str, + configuration_service_resource: Union[_models.ConfigurationServiceResource, IO], + **kwargs: Any + ) -> _models.ConfigurationServiceResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigurationServiceResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(configuration_service_resource, (IO, bytes)): + _content = configuration_service_resource + else: + _json = self._serialize.body(configuration_service_resource, "ConfigurationServiceResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + configuration_service_name=configuration_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + 
json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ConfigurationServiceResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ConfigurationServiceResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configurationServices/{configurationServiceName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + configuration_service_name: str, + configuration_service_resource: _models.ConfigurationServiceResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ConfigurationServiceResource]: + """Create the default Application Configuration Service or update the existing Application + Configuration Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param configuration_service_name: The name of Application Configuration Service. Required. + :type configuration_service_name: str + :param configuration_service_resource: Parameters for the update operation. Required. + :type configuration_service_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either ConfigurationServiceResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + configuration_service_name: str, + configuration_service_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ConfigurationServiceResource]: + """Create the default Application Configuration Service or update the existing Application + Configuration Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param configuration_service_name: The name of Application Configuration Service. Required. + :type configuration_service_name: str + :param configuration_service_resource: Parameters for the update operation. Required. + :type configuration_service_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ConfigurationServiceResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + configuration_service_name: str, + configuration_service_resource: Union[_models.ConfigurationServiceResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.ConfigurationServiceResource]: + """Create the default Application Configuration Service or update the existing Application + Configuration Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param configuration_service_name: The name of Application Configuration Service. Required. + :type configuration_service_name: str + :param configuration_service_resource: Parameters for the update operation. Is either a model + type or a IO type. Required. + :type configuration_service_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceResource or IO + :keyword content_type: Body Parameter content-type. 
Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ConfigurationServiceResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigurationServiceResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + configuration_service_name=configuration_service_name, + configuration_service_resource=configuration_service_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ConfigurationServiceResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configurationServices/{configurationServiceName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, configuration_service_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or 
{} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + configuration_service_name=configuration_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configurationServices/{configurationServiceName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, service_name: str, configuration_service_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Disable the default Application Configuration Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param configuration_service_name: The name of Application Configuration Service. Required. + :type configuration_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + configuration_service_name=configuration_service_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configurationServices/{configurationServiceName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> AsyncIterable["_models.ConfigurationServiceResource"]: + """Handles requests to list all resources in a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ConfigurationServiceResource or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigurationServiceResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ConfigurationServiceResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configurationServices"} # type: ignore + + async def _validate_initial( + self, + resource_group_name: str, + service_name: str, + configuration_service_name: str, + settings: Union[_models.ConfigurationServiceSettings, IO], + **kwargs: Any + ) -> _models.ConfigurationServiceSettingsValidateResult: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) 
or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigurationServiceSettingsValidateResult] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(settings, (IO, bytes)): + _content = settings + else: + _json = self._serialize.body(settings, "ConfigurationServiceSettings") + + request = build_validate_request( + resource_group_name=resource_group_name, + service_name=service_name, + configuration_service_name=configuration_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._validate_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ConfigurationServiceSettingsValidateResult", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("ConfigurationServiceSettingsValidateResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _validate_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configurationServices/{configurationServiceName}/validate"} # type: ignore + + @overload + async def begin_validate( + self, + resource_group_name: str, + service_name: str, + configuration_service_name: str, + settings: _models.ConfigurationServiceSettings, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ConfigurationServiceSettingsValidateResult]: + """Check if the Application Configuration Service settings are valid. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param configuration_service_name: The name of Application Configuration Service. Required. + :type configuration_service_name: str + :param settings: Application Configuration Service settings to be validated. Required. + :type settings: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceSettings + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
+ :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either + ConfigurationServiceSettingsValidateResult or the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceSettingsValidateResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_validate( + self, + resource_group_name: str, + service_name: str, + configuration_service_name: str, + settings: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ConfigurationServiceSettingsValidateResult]: + """Check if the Application Configuration Service settings are valid. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param configuration_service_name: The name of Application Configuration Service. Required. + :type configuration_service_name: str + :param settings: Application Configuration Service settings to be validated. Required. + :type settings: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either + ConfigurationServiceSettingsValidateResult or the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceSettingsValidateResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_validate( + self, + resource_group_name: str, + service_name: str, + configuration_service_name: str, + settings: Union[_models.ConfigurationServiceSettings, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.ConfigurationServiceSettingsValidateResult]: + """Check if the Application Configuration Service settings are valid. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param configuration_service_name: The name of Application Configuration Service. Required. 
+ :type configuration_service_name: str + :param settings: Application Configuration Service settings to be validated. Is either a model + type or a IO type. Required. + :type settings: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceSettings + or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either + ConfigurationServiceSettingsValidateResult or the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceSettingsValidateResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigurationServiceSettingsValidateResult] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._validate_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + configuration_service_name=configuration_service_name, + settings=settings, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ConfigurationServiceSettingsValidateResult", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_validate.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configurationServices/{configurationServiceName}/validate"} # type: ignore diff --git 
a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_custom_domains_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_custom_domains_operations.py new file mode 100644 index 00000000000..f34c24c0f89 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_custom_domains_operations.py @@ -0,0 +1,837 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._custom_domains_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, + build_update_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class CustomDomainsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`custom_domains` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, service_name: str, app_name: str, domain_name: str, **kwargs: Any + ) -> _models.CustomDomainResource: + """Get the custom domain of one lifecycle application. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. 
Required. + :type app_name: str + :param domain_name: The name of the custom domain resource. Required. + :type domain_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomDomainResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomDomainResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + domain_name=domain_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("CustomDomainResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/domains/{domainName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + app_name: str, + domain_name: str, + domain_resource: Union[_models.CustomDomainResource, IO], + **kwargs: Any + ) -> _models.CustomDomainResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomDomainResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(domain_resource, (IO, bytes)): + _content = domain_resource + else: + _json = self._serialize.body(domain_resource, "CustomDomainResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + domain_name=domain_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + 
template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("CustomDomainResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("CustomDomainResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("CustomDomainResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/domains/{domainName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + domain_name: str, + domain_resource: _models.CustomDomainResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.CustomDomainResource]: + """Create or update custom domain of one lifecycle application. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param domain_name: The name of the custom domain resource. Required. + :type domain_name: str + :param domain_resource: Parameters for the create or update operation. Required. + :type domain_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either CustomDomainResource or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + domain_name: str, + domain_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.CustomDomainResource]: + """Create or update custom domain of one lifecycle application. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param domain_name: The name of the custom domain resource. Required. + :type domain_name: str + :param domain_resource: Parameters for the create or update operation. Required. + :type domain_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either CustomDomainResource or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + domain_name: str, + domain_resource: Union[_models.CustomDomainResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.CustomDomainResource]: + """Create or update custom domain of one lifecycle application. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param domain_name: The name of the custom domain resource. Required. + :type domain_name: str + :param domain_resource: Parameters for the create or update operation. Is either a model type + or a IO type. Required. + :type domain_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource + or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either CustomDomainResource or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomDomainResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + domain_name=domain_name, + domain_resource=domain_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("CustomDomainResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/domains/{domainName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, app_name: str, domain_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", 
"2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + domain_name=domain_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/domains/{domainName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, service_name: str, app_name: str, domain_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete the custom domain of one lifecycle application. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param domain_name: The name of the custom domain resource. Required. + :type domain_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + domain_name=domain_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/domains/{domainName}"} # type: ignore + + async def _update_initial( + self, + resource_group_name: str, + service_name: str, + app_name: str, + domain_name: str, + domain_resource: Union[_models.CustomDomainResource, IO], + **kwargs: Any + ) -> _models.CustomDomainResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomDomainResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(domain_resource, (IO, bytes)): + _content = domain_resource + else: + _json = self._serialize.body(domain_resource, "CustomDomainResource") + + request = build_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + domain_name=domain_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # 
type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("CustomDomainResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("CustomDomainResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/domains/{domainName}"} # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + domain_name: str, + domain_resource: _models.CustomDomainResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.CustomDomainResource]: + """Update custom domain of one lifecycle application. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param domain_name: The name of the custom domain resource. Required. + :type domain_name: str + :param domain_resource: Parameters for the create or update operation. Required. + :type domain_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either CustomDomainResource or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + domain_name: str, + domain_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.CustomDomainResource]: + """Update custom domain of one lifecycle application. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. 
+ :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param domain_name: The name of the custom domain resource. Required. + :type domain_name: str + :param domain_resource: Parameters for the create or update operation. Required. + :type domain_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either CustomDomainResource or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + domain_name: str, + domain_resource: Union[_models.CustomDomainResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.CustomDomainResource]: + """Update custom domain of one lifecycle application. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param domain_name: The name of the custom domain resource. Required. + :type domain_name: str + :param domain_resource: Parameters for the create or update operation. Is either a model type + or a IO type. Required. + :type domain_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource + or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either CustomDomainResource or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomDomainResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + domain_name=domain_name, + domain_resource=domain_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("CustomDomainResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/domains/{domainName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, app_name: str, **kwargs: Any + ) -> AsyncIterable["_models.CustomDomainResource"]: + """List the custom domains of one lifecycle application. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. 
+ :type app_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either CustomDomainResource or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomDomainResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("CustomDomainResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/domains"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_customized_accelerators_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_customized_accelerators_operations.py new file mode 100644 index 00000000000..940389eb4dc --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_customized_accelerators_operations.py @@ -0,0 +1,770 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._customized_accelerators_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, + build_validate_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class CustomizedAcceleratorsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`customized_accelerators` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, application_accelerator_name: str, **kwargs: Any + ) -> AsyncIterable["_models.CustomizedAcceleratorResource"]: + """Handle requests to list all customized accelerators. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. 
+ :type application_accelerator_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either CustomizedAcceleratorResource or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomizedAcceleratorResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("CustomizedAcceleratorResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/customizedAccelerators"} # type: ignore + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + **kwargs: Any + ) -> _models.CustomizedAcceleratorResource: + """Get the customized accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. 
You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param customized_accelerator_name: The name of the customized accelerator. Required. + :type customized_accelerator_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomizedAcceleratorResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomizedAcceleratorResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + customized_accelerator_name=customized_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("CustomizedAcceleratorResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/customizedAccelerators/{customizedAcceleratorName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + customized_accelerator_resource: Union[_models.CustomizedAcceleratorResource, IO], + **kwargs: Any + ) -> _models.CustomizedAcceleratorResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: 
ClsType[_models.CustomizedAcceleratorResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(customized_accelerator_resource, (IO, bytes)): + _content = customized_accelerator_resource + else: + _json = self._serialize.body(customized_accelerator_resource, "CustomizedAcceleratorResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + customized_accelerator_name=customized_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("CustomizedAcceleratorResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("CustomizedAcceleratorResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/customizedAccelerators/{customizedAcceleratorName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + customized_accelerator_resource: _models.CustomizedAcceleratorResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.CustomizedAcceleratorResource]: + """Create or update the customized accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param customized_accelerator_name: The name of the customized accelerator. Required. + :type customized_accelerator_name: str + :param customized_accelerator_resource: The customized accelerator for the create or update + operation. Required. + :type customized_accelerator_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
+ :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either CustomizedAcceleratorResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + customized_accelerator_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.CustomizedAcceleratorResource]: + """Create or update the customized accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param customized_accelerator_name: The name of the customized accelerator. Required. + :type customized_accelerator_name: str + :param customized_accelerator_resource: The customized accelerator for the create or update + operation. Required. + :type customized_accelerator_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either CustomizedAcceleratorResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + customized_accelerator_resource: Union[_models.CustomizedAcceleratorResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.CustomizedAcceleratorResource]: + """Create or update the customized accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. 
+ :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param customized_accelerator_name: The name of the customized accelerator. Required. + :type customized_accelerator_name: str + :param customized_accelerator_resource: The customized accelerator for the create or update + operation. Is either a model type or a IO type. Required. + :type customized_accelerator_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either CustomizedAcceleratorResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomizedAcceleratorResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + customized_accelerator_name=customized_accelerator_name, + customized_accelerator_resource=customized_accelerator_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("CustomizedAcceleratorResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + 
deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/customizedAccelerators/{customizedAcceleratorName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + customized_accelerator_name=customized_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/customizedAccelerators/{customizedAcceleratorName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete the customized accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param customized_accelerator_name: The name of the customized accelerator. Required. + :type customized_accelerator_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + customized_accelerator_name=customized_accelerator_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/customizedAccelerators/{customizedAcceleratorName}"} # type: ignore + + @overload + async def validate( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + properties: _models.CustomizedAcceleratorProperties, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> Optional[_models.CustomizedAcceleratorValidateResult]: + """Check the customized accelerator are valid. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param customized_accelerator_name: The name of the customized accelerator. Required. + :type customized_accelerator_name: str + :param properties: Customized accelerator properties to be validated. Required. 
+ :type properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorProperties + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomizedAcceleratorValidateResult or None or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorValidateResult + or None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def validate( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + properties: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> Optional[_models.CustomizedAcceleratorValidateResult]: + """Check the customized accelerator are valid. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param customized_accelerator_name: The name of the customized accelerator. Required. + :type customized_accelerator_name: str + :param properties: Customized accelerator properties to be validated. Required. + :type properties: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomizedAcceleratorValidateResult or None or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorValidateResult + or None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def validate( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + properties: Union[_models.CustomizedAcceleratorProperties, IO], + **kwargs: Any + ) -> Optional[_models.CustomizedAcceleratorValidateResult]: + """Check the customized accelerator are valid. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param customized_accelerator_name: The name of the customized accelerator. Required. + :type customized_accelerator_name: str + :param properties: Customized accelerator properties to be validated. Is either a model type or + a IO type. Required. + :type properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorProperties or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomizedAcceleratorValidateResult or None or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorValidateResult + or None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.CustomizedAcceleratorValidateResult]] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(properties, (IO, bytes)): + _content = properties + else: + _json = self._serialize.body(properties, "CustomizedAcceleratorProperties") + + request = build_validate_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + customized_accelerator_name=customized_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.validate.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize("CustomizedAcceleratorValidateResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + validate.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/customizedAccelerators/{customizedAcceleratorName}/validate"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_deployments_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_deployments_operations.py new file mode 100644 index 00000000000..9748c3d0754 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_deployments_operations.py @@ -0,0 +1,2512 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, List, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._deployments_operations import ( + build_create_or_update_request, + build_delete_request, + build_disable_remote_debugging_request, + build_enable_remote_debugging_request, + build_generate_heap_dump_request, + build_generate_thread_dump_request, + build_get_log_file_url_request, + build_get_remote_debugging_config_request, + build_get_request, + build_list_for_cluster_request, + build_list_request, + build_restart_request, + build_start_jfr_request, + build_start_request, + build_stop_request, + build_update_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class DeploymentsOperations: # pylint: disable=too-many-public-methods + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`deployments` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> _models.DeploymentResource: + """Get a Deployment and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. 
+ :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DeploymentResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DeploymentResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("DeploymentResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + deployment_resource: Union[_models.DeploymentResource, IO], + **kwargs: Any + ) -> _models.DeploymentResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DeploymentResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(deployment_resource, (IO, bytes)): + _content = deployment_resource + else: + _json = self._serialize.body(deployment_resource, "DeploymentResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + 
request = _convert_request(request)
+ request.url = self._client.format_url(request.url) # type: ignore
+
+ pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
+ request, stream=False, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ if response.status_code == 200:
+ deserialized = self._deserialize("DeploymentResource", pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize("DeploymentResource", pipeline_response)
+
+ if response.status_code == 202:
+ deserialized = self._deserialize("DeploymentResource", pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+
+ _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}"} # type: ignore
+
+ @overload
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ service_name: str,
+ app_name: str,
+ deployment_name: str,
+ deployment_resource: _models.DeploymentResource,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.DeploymentResource]:
+ """Create a new Deployment or update an existing Deployment.
+
+ :param resource_group_name: The name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param service_name: The name of the Service resource. Required.
+ :type service_name: str
+ :param app_name: The name of the App resource. Required.
+ :type app_name: str
+ :param deployment_name: The name of the Deployment resource. Required.
+ :type deployment_name: str
+ :param deployment_resource: Parameters for the create or update operation. Required.
+ :type deployment_resource:
+ ~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+ this operation to not poll, or pass in your own initialized polling object for a personal
+ polling strategy.
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+ Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either DeploymentResource or the result of
+ cls(response)
+ :rtype:
+ ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ service_name: str,
+ app_name: str,
+ deployment_name: str,
+ deployment_resource: IO,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.DeploymentResource]:
+ """Create a new Deployment or update an existing Deployment.
+
+ :param resource_group_name: The name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param service_name: The name of the Service resource. Required.
+ :type service_name: str
+ :param app_name: The name of the App resource. Required.
+ :type app_name: str
+ :param deployment_name: The name of the Deployment resource. Required.
+ :type deployment_name: str
+ :param deployment_resource: Parameters for the create or update operation. Required.
+ :type deployment_resource: IO
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+ this operation to not poll, or pass in your own initialized polling object for a personal
+ polling strategy.
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+ Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either DeploymentResource or the result of
+ cls(response)
+ :rtype:
+ ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace_async
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ service_name: str,
+ app_name: str,
+ deployment_name: str,
+ deployment_resource: Union[_models.DeploymentResource, IO],
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.DeploymentResource]:
+ """Create a new Deployment or update an existing Deployment.
+
+ :param resource_group_name: The name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param service_name: The name of the Service resource. Required.
+ :type service_name: str
+ :param app_name: The name of the App resource. Required.
+ :type app_name: str
+ :param deployment_name: The name of the Deployment resource. Required.
+ :type deployment_name: str
+ :param deployment_resource: Parameters for the create or update operation. Is either a model
+ type or an IO type. Required.
+ :type deployment_resource:
+ ~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource or IO
+ :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+ Default value is None.
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either DeploymentResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DeploymentResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + deployment_resource=deployment_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("DeploymentResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", 
_params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Operation to delete a Deployment. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}"} # type: ignore + + async def _update_initial( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + deployment_resource: Union[_models.DeploymentResource, IO], + **kwargs: Any + ) -> _models.DeploymentResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DeploymentResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(deployment_resource, (IO, bytes)): + _content = deployment_resource + else: + _json = self._serialize.body(deployment_resource, "DeploymentResource") + + request = build_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) # type: ignore
+
+ pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
+ request, stream=False, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ if response.status_code == 200:
+ deserialized = self._deserialize("DeploymentResource", pipeline_response)
+
+ if response.status_code == 202:
+ deserialized = self._deserialize("DeploymentResource", pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+
+ _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}"} # type: ignore
+
+ @overload
+ async def begin_update(
+ self,
+ resource_group_name: str,
+ service_name: str,
+ app_name: str,
+ deployment_name: str,
+ deployment_resource: _models.DeploymentResource,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.DeploymentResource]:
+ """Operation to update an existing Deployment.
+
+ :param resource_group_name: The name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param service_name: The name of the Service resource. Required.
+ :type service_name: str
+ :param app_name: The name of the App resource. Required.
+ :type app_name: str
+ :param deployment_name: The name of the Deployment resource. Required.
+ :type deployment_name: str
+ :param deployment_resource: Parameters for the update operation. Required.
+ :type deployment_resource:
+ ~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+ this operation to not poll, or pass in your own initialized polling object for a personal
+ polling strategy.
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+ Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either DeploymentResource or the result of
+ cls(response)
+ :rtype:
+ ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ async def begin_update(
+ self,
+ resource_group_name: str,
+ service_name: str,
+ app_name: str,
+ deployment_name: str,
+ deployment_resource: IO,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.DeploymentResource]:
+ """Operation to update an existing Deployment.
+
+ :param resource_group_name: The name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param service_name: The name of the Service resource. Required.
+ :type service_name: str
+ :param app_name: The name of the App resource. Required.
+ :type app_name: str
+ :param deployment_name: The name of the Deployment resource. Required.
+ :type deployment_name: str
+ :param deployment_resource: Parameters for the update operation. Required.
+ :type deployment_resource: IO
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+ this operation to not poll, or pass in your own initialized polling object for a personal
+ polling strategy.
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+ Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either DeploymentResource or the result of
+ cls(response)
+ :rtype:
+ ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace_async
+ async def begin_update(
+ self,
+ resource_group_name: str,
+ service_name: str,
+ app_name: str,
+ deployment_name: str,
+ deployment_resource: Union[_models.DeploymentResource, IO],
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.DeploymentResource]:
+ """Operation to update an existing Deployment.
+
+ :param resource_group_name: The name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param service_name: The name of the Service resource. Required.
+ :type service_name: str
+ :param app_name: The name of the App resource. Required.
+ :type app_name: str
+ :param deployment_name: The name of the Deployment resource. Required.
+ :type deployment_name: str
+ :param deployment_resource: Parameters for the update operation. Is either a model type or an IO
+ type. Required.
+ :type deployment_resource:
+ ~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource or IO
+ :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+ Default value is None.
+ :paramtype content_type: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+ this operation to not poll, or pass in your own initialized polling object for a personal
+ polling strategy.
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+ Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either DeploymentResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DeploymentResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + deployment_resource=deployment_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("DeploymentResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}"} # type: ignore + + @distributed_trace + def list( + self, + resource_group_name: str, + service_name: str, + app_name: str, + version: Optional[List[str]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.DeploymentResource"]: + """Handles requests to list all resources in an App. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param version: Version of the deployments to be listed. Default value is None. 
+ :type version: list[str] + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DeploymentResource or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DeploymentResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + subscription_id=self._config.subscription_id, + version=version, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("DeploymentResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments"} # type: ignore + + @distributed_trace + def list_for_cluster( + self, resource_group_name: str, service_name: str, version: Optional[List[str]] = None, **kwargs: Any + ) -> AsyncIterable["_models.DeploymentResource"]: + """List deployments for a certain service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. 
Required. + :type service_name: str + :param version: Version of the deployments to be listed. Default value is None. + :type version: list[str] + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DeploymentResource or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DeploymentResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_for_cluster_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + version=version, + api_version=api_version, + template_url=self.list_for_cluster.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("DeploymentResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list_for_cluster.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/deployments"} # type: ignore + + async def _start_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + 
_headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_start_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._start_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _start_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/start"} # type: ignore + + @distributed_trace_async + async def begin_start( + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Start the deployment. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._start_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_start.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/start"} # type: ignore + + async def _stop_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_stop_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._stop_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + 
_stop_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/stop"} # type: ignore + + @distributed_trace_async + async def begin_stop( + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Stop the deployment. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._stop_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_stop.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/stop"} # type: ignore + + async def _restart_initial( # pylint: 
disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_restart_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._restart_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _restart_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/restart"} # type: ignore + + @distributed_trace_async + async def begin_restart( + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Restart the deployment. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._restart_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_restart.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/restart"} # type: ignore + + async def _enable_remote_debugging_initial( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + remote_debugging_payload: Optional[Union[_models.RemoteDebuggingPayload, IO]] = None, + **kwargs: Any + ) -> _models.RemoteDebugging: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.RemoteDebugging] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(remote_debugging_payload, (IO, bytes)): + _content = remote_debugging_payload + else: + if remote_debugging_payload is not None: + _json = self._serialize.body(remote_debugging_payload, "RemoteDebuggingPayload") + else: + _json = None + + request = build_enable_remote_debugging_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + 
template_url=self._enable_remote_debugging_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("RemoteDebugging", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("RemoteDebugging", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _enable_remote_debugging_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/enableRemoteDebugging"} # type: ignore + + @overload + async def begin_enable_remote_debugging( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + remote_debugging_payload: Optional[_models.RemoteDebuggingPayload] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.RemoteDebugging]: + """Enable remote debugging. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param remote_debugging_payload: Parameters for enable remote debugging. Default value is None. + :type remote_debugging_payload: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.RemoteDebuggingPayload + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either RemoteDebugging or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.RemoteDebugging] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_enable_remote_debugging( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + remote_debugging_payload: Optional[IO] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.RemoteDebugging]: + """Enable remote debugging. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param remote_debugging_payload: Parameters for enable remote debugging. Default value is None. + :type remote_debugging_payload: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either RemoteDebugging or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.RemoteDebugging] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_enable_remote_debugging( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + remote_debugging_payload: Optional[Union[_models.RemoteDebuggingPayload, IO]] = None, + **kwargs: Any + ) -> AsyncLROPoller[_models.RemoteDebugging]: + """Enable remote debugging. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param remote_debugging_payload: Parameters for enable remote debugging. Is either a model type + or a IO type. Default value is None. + :type remote_debugging_payload: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.RemoteDebuggingPayload or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either RemoteDebugging or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.RemoteDebugging] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.RemoteDebugging] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._enable_remote_debugging_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + remote_debugging_payload=remote_debugging_payload, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("RemoteDebugging", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_enable_remote_debugging.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/enableRemoteDebugging"} # type: ignore + + async def _disable_remote_debugging_initial( + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> _models.RemoteDebugging: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = 
kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.RemoteDebugging] + + request = build_disable_remote_debugging_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._disable_remote_debugging_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("RemoteDebugging", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("RemoteDebugging", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _disable_remote_debugging_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/disableRemoteDebugging"} # type: ignore + + @distributed_trace_async + async def begin_disable_remote_debugging( + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> AsyncLROPoller[_models.RemoteDebugging]: + """Disable remote debugging. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either RemoteDebugging or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.RemoteDebugging] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.RemoteDebugging] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._disable_remote_debugging_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("RemoteDebugging", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_disable_remote_debugging.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/disableRemoteDebugging"} # type: ignore + + @distributed_trace_async + async def get_remote_debugging_config( + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> _models.RemoteDebugging: + """Get remote debugging config. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. 
+ :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: RemoteDebugging or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.RemoteDebugging + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.RemoteDebugging] + + request = build_get_remote_debugging_config_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_remote_debugging_config.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("RemoteDebugging", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_remote_debugging_config.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/getRemoteDebuggingConfig"} # type: ignore + + @distributed_trace_async + async def get_log_file_url( + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> Optional[_models.LogFileUrlResponse]: + """Get deployment log file URL. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. 
+ :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LogFileUrlResponse or None or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.LogFileUrlResponse or None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.LogFileUrlResponse]] + + request = build_get_log_file_url_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_log_file_url.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize("LogFileUrlResponse", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_log_file_url.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/getLogFileUrl"} # type: ignore + + async def _generate_heap_dump_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: Union[_models.DiagnosticParameters, IO], + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(diagnostic_parameters, (IO, bytes)): + _content = diagnostic_parameters + else: + _json = self._serialize.body(diagnostic_parameters, "DiagnosticParameters") + + request = build_generate_heap_dump_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + 
content_type=content_type, + json=_json, + content=_content, + template_url=self._generate_heap_dump_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _generate_heap_dump_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/generateHeapDump"} # type: ignore + + @overload + async def begin_generate_heap_dump( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: _models.DiagnosticParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Generate Heap Dump. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param diagnostic_parameters: Parameters for the diagnostic operation. Required. + :type diagnostic_parameters: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DiagnosticParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_generate_heap_dump( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Generate Heap Dump. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param diagnostic_parameters: Parameters for the diagnostic operation. Required. + :type diagnostic_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_generate_heap_dump( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: Union[_models.DiagnosticParameters, IO], + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Generate Heap Dump. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param diagnostic_parameters: Parameters for the diagnostic operation. Is either a model type + or a IO type. Required. + :type diagnostic_parameters: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DiagnosticParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
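        # Commented-out sketch of requesting a heap dump. The DiagnosticParameters field names
        # (app_instance, file_path) and the ``client.deployments`` attribute are assumptions based
        # on the corresponding CLI options, not on anything shown in this file.
        #
        #   params = models.DiagnosticParameters(
        #       app_instance="<app-instance-name>",
        #       file_path="/byos/dumps",  # a path on mounted persistent storage
        #   )
        #   poller = await client.deployments.begin_generate_heap_dump(
        #       "<resource-group>", "<service>", "<app>", "<deployment>", params)
        #   await poller.result()  # resolves to None; the dump file is written to file_path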
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._generate_heap_dump_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + diagnostic_parameters=diagnostic_parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_generate_heap_dump.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/generateHeapDump"} # type: ignore + + async def _generate_thread_dump_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: Union[_models.DiagnosticParameters, IO], + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(diagnostic_parameters, (IO, bytes)): + _content = diagnostic_parameters + else: + _json = self._serialize.body(diagnostic_parameters, "DiagnosticParameters") + + request = build_generate_thread_dump_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + 
subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._generate_thread_dump_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _generate_thread_dump_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/generateThreadDump"} # type: ignore + + @overload + async def begin_generate_thread_dump( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: _models.DiagnosticParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Generate Thread Dump. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param diagnostic_parameters: Parameters for the diagnostic operation. Required. + :type diagnostic_parameters: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DiagnosticParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_generate_thread_dump( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Generate Thread Dump. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. 
+ :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param diagnostic_parameters: Parameters for the diagnostic operation. Required. + :type diagnostic_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_generate_thread_dump( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: Union[_models.DiagnosticParameters, IO], + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Generate Thread Dump. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param diagnostic_parameters: Parameters for the diagnostic operation. Is either a model type + or a IO type. Required. + :type diagnostic_parameters: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DiagnosticParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._generate_thread_dump_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + diagnostic_parameters=diagnostic_parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_generate_thread_dump.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/generateThreadDump"} # type: ignore + + async def _start_jfr_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: Union[_models.DiagnosticParameters, IO], + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(diagnostic_parameters, (IO, bytes)): + _content = diagnostic_parameters + else: + _json = self._serialize.body(diagnostic_parameters, "DiagnosticParameters") + + request = build_start_jfr_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + 
subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._start_jfr_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _start_jfr_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/startJFR"} # type: ignore + + @overload + async def begin_start_jfr( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: _models.DiagnosticParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Start JFR. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param diagnostic_parameters: Parameters for the diagnostic operation. Required. + :type diagnostic_parameters: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DiagnosticParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_start_jfr( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Start JFR. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param diagnostic_parameters: Parameters for the diagnostic operation. Required. + :type diagnostic_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_start_jfr( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: Union[_models.DiagnosticParameters, IO], + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Start JFR. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param diagnostic_parameters: Parameters for the diagnostic operation. Is either a model type + or a IO type. Required. + :type diagnostic_parameters: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DiagnosticParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
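        # Commented-out sketch of starting a Java Flight Recorder (JFR) session. The
        # DiagnosticParameters fields (app_instance, file_path, duration) are assumptions based on
        # the corresponding CLI options, not on anything shown in this file.
        #
        #   params = models.DiagnosticParameters(
        #       app_instance="<app-instance-name>",
        #       file_path="/byos/jfr",
        #       duration="60s",
        #   )
        #   poller = await client.deployments.begin_start_jfr(
        #       "<resource-group>", "<service>", "<app>", "<deployment>", params)
        #   await poller.result()  # resolves to None; the recording is saved under file_path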
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._start_jfr_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + diagnostic_parameters=diagnostic_parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_start_jfr.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/startJFR"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_dev_tool_portal_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_dev_tool_portal_operations.py new file mode 100644 index 00000000000..f63f3daf839 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_dev_tool_portal_operations.py @@ -0,0 +1,165 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Optional, TypeVar, Union, cast + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._dev_tool_portal_operations import build_delete_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class DevToolPortalOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`dev_tool_portal` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, dev_tool_portal_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + dev_tool_portal_name=dev_tool_portal_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/DevToolPortals/{devToolPortalName}"} # 
type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, service_name: str, dev_tool_portal_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Disable the default Dev Tool Portal. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param dev_tool_portal_name: The name of Dev Tool Portal. Required. + :type dev_tool_portal_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + dev_tool_portal_name=dev_tool_portal_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/DevToolPortals/{devToolPortalName}"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_dev_tool_portals_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_dev_tool_portals_operations.py new file mode 100644 index 00000000000..b2ad75c4a65 --- /dev/null +++ 
b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_dev_tool_portals_operations.py @@ -0,0 +1,451 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._dev_tool_portals_operations import ( + build_create_or_update_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class DevToolPortalsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`dev_tool_portals` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> AsyncIterable["_models.DevToolPortalResource"]: + """Handles requests to list all resources in a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DevToolPortalResource or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DevToolPortalResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("DevToolPortalResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/DevToolPortals"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, service_name: str, dev_tool_portal_name: str, **kwargs: Any + ) -> _models.DevToolPortalResource: + """Get the Application Live and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :param dev_tool_portal_name: The name of Dev Tool Portal. Required. + :type dev_tool_portal_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DevToolPortalResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DevToolPortalResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + dev_tool_portal_name=dev_tool_portal_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("DevToolPortalResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/DevToolPortals/{devToolPortalName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + dev_tool_portal_name: str, + dev_tool_portal_resource: Union[_models.DevToolPortalResource, IO], + **kwargs: Any + ) -> _models.DevToolPortalResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DevToolPortalResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(dev_tool_portal_resource, (IO, bytes)): + _content = dev_tool_portal_resource + else: + _json = self._serialize.body(dev_tool_portal_resource, "DevToolPortalResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + dev_tool_portal_name=dev_tool_portal_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + 
template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("DevToolPortalResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("DevToolPortalResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/DevToolPortals/{devToolPortalName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + dev_tool_portal_name: str, + dev_tool_portal_resource: _models.DevToolPortalResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DevToolPortalResource]: + """Create the default Dev Tool Portal or update the existing Dev Tool Portal. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param dev_tool_portal_name: The name of Dev Tool Portal. Required. + :type dev_tool_portal_name: str + :param dev_tool_portal_resource: Parameters for the create or update operation. Required. + :type dev_tool_portal_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either DevToolPortalResource or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + dev_tool_portal_name: str, + dev_tool_portal_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DevToolPortalResource]: + """Create the default Dev Tool Portal or update the existing Dev Tool Portal. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param dev_tool_portal_name: The name of Dev Tool Portal. Required. + :type dev_tool_portal_name: str + :param dev_tool_portal_resource: Parameters for the create or update operation. Required. + :type dev_tool_portal_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either DevToolPortalResource or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + dev_tool_portal_name: str, + dev_tool_portal_resource: Union[_models.DevToolPortalResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.DevToolPortalResource]: + """Create the default Dev Tool Portal or update the existing Dev Tool Portal. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param dev_tool_portal_name: The name of Dev Tool Portal. Required. + :type dev_tool_portal_name: str + :param dev_tool_portal_resource: Parameters for the create or update operation. Is either a + model type or a IO type. Required. + :type dev_tool_portal_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either DevToolPortalResource or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DevToolPortalResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + dev_tool_portal_name=dev_tool_portal_name, + dev_tool_portal_resource=dev_tool_portal_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("DevToolPortalResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/DevToolPortals/{devToolPortalName}"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_gateway_custom_domains_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_gateway_custom_domains_operations.py new file mode 100644 index 00000000000..7813eefdd58 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_gateway_custom_domains_operations.py @@ -0,0 +1,588 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._gateway_custom_domains_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class GatewayCustomDomainsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`gateway_custom_domains` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, service_name: str, gateway_name: str, domain_name: str, **kwargs: Any + ) -> _models.GatewayCustomDomainResource: + """Get the Spring Cloud Gateway custom domain. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param domain_name: The name of the Spring Cloud Gateway custom domain. Required. 
+ :type domain_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GatewayCustomDomainResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayCustomDomainResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayCustomDomainResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + domain_name=domain_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("GatewayCustomDomainResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/domains/{domainName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + domain_name: str, + gateway_custom_domain_resource: Union[_models.GatewayCustomDomainResource, IO], + **kwargs: Any + ) -> _models.GatewayCustomDomainResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayCustomDomainResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(gateway_custom_domain_resource, (IO, bytes)): + _content = gateway_custom_domain_resource + else: + _json = self._serialize.body(gateway_custom_domain_resource, "GatewayCustomDomainResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + domain_name=domain_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, 
+ template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("GatewayCustomDomainResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("GatewayCustomDomainResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/domains/{domainName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + domain_name: str, + gateway_custom_domain_resource: _models.GatewayCustomDomainResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.GatewayCustomDomainResource]: + """Create or update the Spring Cloud Gateway custom domain. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param domain_name: The name of the Spring Cloud Gateway custom domain. Required. + :type domain_name: str + :param gateway_custom_domain_resource: The gateway custom domain resource for the create or + update operation. Required. + :type gateway_custom_domain_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayCustomDomainResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either GatewayCustomDomainResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayCustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + domain_name: str, + gateway_custom_domain_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.GatewayCustomDomainResource]: + """Create or update the Spring Cloud Gateway custom domain. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param domain_name: The name of the Spring Cloud Gateway custom domain. Required. + :type domain_name: str + :param gateway_custom_domain_resource: The gateway custom domain resource for the create or + update operation. Required. + :type gateway_custom_domain_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either GatewayCustomDomainResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayCustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + domain_name: str, + gateway_custom_domain_resource: Union[_models.GatewayCustomDomainResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.GatewayCustomDomainResource]: + """Create or update the Spring Cloud Gateway custom domain. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param domain_name: The name of the Spring Cloud Gateway custom domain. Required. + :type domain_name: str + :param gateway_custom_domain_resource: The gateway custom domain resource for the create or + update operation. Is either a model type or a IO type. Required. 
+ :type gateway_custom_domain_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayCustomDomainResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either GatewayCustomDomainResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayCustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayCustomDomainResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + domain_name=domain_name, + gateway_custom_domain_resource=gateway_custom_domain_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("GatewayCustomDomainResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/domains/{domainName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, gateway_name: str, domain_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: 
ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + domain_name=domain_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/domains/{domainName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, service_name: str, gateway_name: str, domain_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete the Spring Cloud Gateway custom domain. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param domain_name: The name of the Spring Cloud Gateway custom domain. Required. + :type domain_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + domain_name=domain_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/domains/{domainName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, gateway_name: str, **kwargs: Any + ) -> AsyncIterable["_models.GatewayCustomDomainResource"]: + """Handle requests to list all Spring Cloud Gateway custom domains. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. 
+ :type gateway_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either GatewayCustomDomainResource or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayCustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayCustomDomainResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("GatewayCustomDomainResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/domains"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_gateway_route_configs_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_gateway_route_configs_operations.py new file mode 100644 index 00000000000..a659db85552 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_gateway_route_configs_operations.py @@ -0,0 +1,591 @@ +# pylint: 
disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._gateway_route_configs_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class GatewayRouteConfigsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`gateway_route_configs` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, service_name: str, gateway_name: str, route_config_name: str, **kwargs: Any + ) -> _models.GatewayRouteConfigResource: + """Get the Spring Cloud Gateway route configs. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param route_config_name: The name of the Spring Cloud Gateway route config. Required. 
+ :type route_config_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GatewayRouteConfigResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayRouteConfigResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + route_config_name=route_config_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("GatewayRouteConfigResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/routeConfigs/{routeConfigName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + route_config_name: str, + gateway_route_config_resource: Union[_models.GatewayRouteConfigResource, IO], + **kwargs: Any + ) -> _models.GatewayRouteConfigResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayRouteConfigResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(gateway_route_config_resource, (IO, bytes)): + _content = gateway_route_config_resource + else: + _json = self._serialize.body(gateway_route_config_resource, "GatewayRouteConfigResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + route_config_name=route_config_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, 
+ json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("GatewayRouteConfigResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("GatewayRouteConfigResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/routeConfigs/{routeConfigName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + route_config_name: str, + gateway_route_config_resource: _models.GatewayRouteConfigResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.GatewayRouteConfigResource]: + """Create the default Spring Cloud Gateway route configs or update the existing Spring Cloud + Gateway route configs. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param route_config_name: The name of the Spring Cloud Gateway route config. Required. + :type route_config_name: str + :param gateway_route_config_resource: The Spring Cloud Gateway route config for the create or + update operation. Required. + :type gateway_route_config_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either GatewayRouteConfigResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + route_config_name: str, + gateway_route_config_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.GatewayRouteConfigResource]: + """Create the default Spring Cloud Gateway route configs or update the existing Spring Cloud + Gateway route configs. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param route_config_name: The name of the Spring Cloud Gateway route config. Required. + :type route_config_name: str + :param gateway_route_config_resource: The Spring Cloud Gateway route config for the create or + update operation. Required. + :type gateway_route_config_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either GatewayRouteConfigResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + route_config_name: str, + gateway_route_config_resource: Union[_models.GatewayRouteConfigResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.GatewayRouteConfigResource]: + """Create the default Spring Cloud Gateway route configs or update the existing Spring Cloud + Gateway route configs. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param route_config_name: The name of the Spring Cloud Gateway route config. Required. 
+ :type route_config_name: str + :param gateway_route_config_resource: The Spring Cloud Gateway route config for the create or + update operation. Is either a model type or a IO type. Required. + :type gateway_route_config_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either GatewayRouteConfigResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayRouteConfigResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + route_config_name=route_config_name, + gateway_route_config_resource=gateway_route_config_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("GatewayRouteConfigResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/routeConfigs/{routeConfigName}"} # type: ignore + + async def _delete_initial( # pylint: 
disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, gateway_name: str, route_config_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + route_config_name=route_config_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/routeConfigs/{routeConfigName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, service_name: str, gateway_name: str, route_config_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete the Spring Cloud Gateway route config. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param route_config_name: The name of the Spring Cloud Gateway route config. Required. + :type route_config_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + route_config_name=route_config_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/routeConfigs/{routeConfigName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, gateway_name: str, **kwargs: Any + ) -> AsyncIterable["_models.GatewayRouteConfigResource"]: + """Handle requests to list all Spring Cloud Gateway route configs. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. 
+ :type gateway_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either GatewayRouteConfigResource or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayRouteConfigResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("GatewayRouteConfigResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/routeConfigs"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_gateways_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_gateways_operations.py new file mode 100644 index 00000000000..b49e9decca5 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_gateways_operations.py @@ -0,0 +1,779 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._gateways_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_env_secrets_request, + build_list_request, + build_validate_domain_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class GatewaysOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`gateways` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, service_name: str, gateway_name: str, **kwargs: Any + ) -> _models.GatewayResource: + """Get the Spring Cloud Gateway and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. 
+ :type gateway_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GatewayResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("GatewayResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + gateway_resource: Union[_models.GatewayResource, IO], + **kwargs: Any + ) -> _models.GatewayResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(gateway_resource, (IO, bytes)): + _content = gateway_resource + else: + _json = self._serialize.body(gateway_resource, "GatewayResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await 
self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("GatewayResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("GatewayResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + gateway_resource: _models.GatewayResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.GatewayResource]: + """Create the default Spring Cloud Gateway or update the existing Spring Cloud Gateway. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param gateway_resource: The gateway for the create or update operation. Required. + :type gateway_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either GatewayResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + gateway_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.GatewayResource]: + """Create the default Spring Cloud Gateway or update the existing Spring Cloud Gateway. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param gateway_resource: The gateway for the create or update operation. Required. + :type gateway_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either GatewayResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + gateway_resource: Union[_models.GatewayResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.GatewayResource]: + """Create the default Spring Cloud Gateway or update the existing Spring Cloud Gateway. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param gateway_resource: The gateway for the create or update operation. Is either a model type + or a IO type. Required. + :type gateway_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayResource or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either GatewayResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + gateway_resource=gateway_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("GatewayResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, gateway_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response 
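+        # The initial DELETE for this long-running operation succeeds on HTTP 200, 202, or 204;
+        # any other status code is mapped through ``error_map`` and raised as HttpResponseError below.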
+ + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, service_name: str, gateway_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Disable the default Spring Cloud Gateway. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}"} # type: ignore + + @distributed_trace_async + async def list_env_secrets( + self, resource_group_name: str, service_name: str, gateway_name: str, **kwargs: Any + ) -> Dict[str, str]: + """List sensitive environment variables of Spring Cloud Gateway. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: dict mapping str to str or the result of cls(response) + :rtype: dict[str, str] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Dict[str, str]] + + request = build_list_env_secrets_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_env_secrets.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("{str}", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_env_secrets.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/listEnvSecrets"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> AsyncIterable["_models.GatewayResource"]: + """Handles requests to list all resources in a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either GatewayResource or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("GatewayResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways"} # type: ignore + + @overload + async def validate_domain( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + validate_payload: _models.CustomDomainValidatePayload, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CustomDomainValidateResult: + """Check the domains are valid as well as not in use. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. 
+ :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param validate_payload: Custom domain payload to be validated. Required. + :type validate_payload: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidatePayload + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomDomainValidateResult or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidateResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def validate_domain( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + validate_payload: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CustomDomainValidateResult: + """Check the domains are valid as well as not in use. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param validate_payload: Custom domain payload to be validated. Required. + :type validate_payload: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomDomainValidateResult or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidateResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def validate_domain( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + validate_payload: Union[_models.CustomDomainValidatePayload, IO], + **kwargs: Any + ) -> _models.CustomDomainValidateResult: + """Check the domains are valid as well as not in use. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param validate_payload: Custom domain payload to be validated. Is either a model type or a IO + type. Required. + :type validate_payload: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidatePayload or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomDomainValidateResult or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidateResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomDomainValidateResult] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(validate_payload, (IO, bytes)): + _content = validate_payload + else: + _json = self._serialize.body(validate_payload, "CustomDomainValidatePayload") + + request = build_validate_domain_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.validate_domain.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("CustomDomainValidateResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + validate_domain.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/validateDomain"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_monitoring_settings_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_monitoring_settings_operations.py new file mode 100644 index 00000000000..34d61a56a72 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_monitoring_settings_operations.py @@ -0,0 +1,568 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._monitoring_settings_operations import ( + build_get_request, + build_update_patch_request, + build_update_put_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class MonitoringSettingsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`monitoring_settings` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> _models.MonitoringSettingResource: + """Get the Monitoring Setting and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: MonitoringSettingResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.MonitoringSettingResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("MonitoringSettingResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/monitoringSettings/default"} # type: ignore + + async def _update_put_initial( + self, + resource_group_name: str, + service_name: str, + monitoring_setting_resource: Union[_models.MonitoringSettingResource, IO], + **kwargs: Any + ) -> _models.MonitoringSettingResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.MonitoringSettingResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(monitoring_setting_resource, (IO, bytes)): + _content = monitoring_setting_resource + else: + _json = self._serialize.body(monitoring_setting_resource, "MonitoringSettingResource") + + request = build_update_put_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_put_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + 
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("MonitoringSettingResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("MonitoringSettingResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_put_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/monitoringSettings/default"} # type: ignore + + @overload + async def begin_update_put( + self, + resource_group_name: str, + service_name: str, + monitoring_setting_resource: _models.MonitoringSettingResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.MonitoringSettingResource]: + """Update the Monitoring Setting. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param monitoring_setting_resource: Parameters for the update operation. Required. + :type monitoring_setting_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either MonitoringSettingResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update_put( + self, + resource_group_name: str, + service_name: str, + monitoring_setting_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.MonitoringSettingResource]: + """Update the Monitoring Setting. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param monitoring_setting_resource: Parameters for the update operation. Required. 
+ :type monitoring_setting_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either MonitoringSettingResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update_put( + self, + resource_group_name: str, + service_name: str, + monitoring_setting_resource: Union[_models.MonitoringSettingResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.MonitoringSettingResource]: + """Update the Monitoring Setting. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param monitoring_setting_resource: Parameters for the update operation. Is either a model type + or a IO type. Required. + :type monitoring_setting_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either MonitoringSettingResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.MonitoringSettingResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_put_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + monitoring_setting_resource=monitoring_setting_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("MonitoringSettingResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update_put.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/monitoringSettings/default"} # type: ignore + + async def _update_patch_initial( + self, + resource_group_name: str, + service_name: str, + monitoring_setting_resource: Union[_models.MonitoringSettingResource, IO], + **kwargs: Any + ) -> _models.MonitoringSettingResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.MonitoringSettingResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(monitoring_setting_resource, (IO, bytes)): + _content = monitoring_setting_resource + else: + _json = self._serialize.body(monitoring_setting_resource, "MonitoringSettingResource") + + request = build_update_patch_request( + resource_group_name=resource_group_name, + 
service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_patch_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("MonitoringSettingResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("MonitoringSettingResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_patch_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/monitoringSettings/default"} # type: ignore + + @overload + async def begin_update_patch( + self, + resource_group_name: str, + service_name: str, + monitoring_setting_resource: _models.MonitoringSettingResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.MonitoringSettingResource]: + """Update the Monitoring Setting. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param monitoring_setting_resource: Parameters for the update operation. Required. + :type monitoring_setting_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either MonitoringSettingResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update_patch( + self, + resource_group_name: str, + service_name: str, + monitoring_setting_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.MonitoringSettingResource]: + """Update the Monitoring Setting. 
+ + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param monitoring_setting_resource: Parameters for the update operation. Required. + :type monitoring_setting_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either MonitoringSettingResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update_patch( + self, + resource_group_name: str, + service_name: str, + monitoring_setting_resource: Union[_models.MonitoringSettingResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.MonitoringSettingResource]: + """Update the Monitoring Setting. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param monitoring_setting_resource: Parameters for the update operation. Is either a model type + or a IO type. Required. + :type monitoring_setting_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either MonitoringSettingResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.MonitoringSettingResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_patch_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + monitoring_setting_resource=monitoring_setting_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("MonitoringSettingResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update_patch.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/monitoringSettings/default"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_operations.py new file mode 100644 index 00000000000..7a5802921a8 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_operations.py @@ -0,0 +1,132 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._operations import build_list_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class Operations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`operations` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list(self, **kwargs: Any) -> AsyncIterable["_models.OperationDetail"]: + """Lists all of the available REST API operations of the Microsoft.AppPlatform provider. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OperationDetail or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.OperationDetail] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.AvailableOperations] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("AvailableOperations", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/providers/Microsoft.AppPlatform/operations"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_patch.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_patch.py new file mode 100644 index 00000000000..f7dd3251033 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. 
+ +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_predefined_accelerators_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_predefined_accelerators_operations.py new file mode 100644 index 00000000000..226fb55e0bc --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_predefined_accelerators_operations.py @@ -0,0 +1,478 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._predefined_accelerators_operations import ( + build_disable_request, + build_enable_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class PredefinedAcceleratorsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`predefined_accelerators` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, application_accelerator_name: str, **kwargs: Any + ) -> AsyncIterable["_models.PredefinedAcceleratorResource"]: + """Handle requests to list all predefined accelerators. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PredefinedAcceleratorResource or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.PredefinedAcceleratorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PredefinedAcceleratorResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("PredefinedAcceleratorResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await 
self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/predefinedAccelerators"} # type: ignore + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + predefined_accelerator_name: str, + **kwargs: Any + ) -> _models.PredefinedAcceleratorResource: + """Get the predefined accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param predefined_accelerator_name: The name of the predefined accelerator. Required. + :type predefined_accelerator_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PredefinedAcceleratorResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.PredefinedAcceleratorResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PredefinedAcceleratorResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + predefined_accelerator_name=predefined_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("PredefinedAcceleratorResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/predefinedAccelerators/{predefinedAcceleratorName}"} # type: ignore + + async def _disable_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + predefined_accelerator_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_disable_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + predefined_accelerator_name=predefined_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._disable_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _disable_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/predefinedAccelerators/{predefinedAcceleratorName}/disable"} # type: ignore + + @distributed_trace_async + async def begin_disable( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + predefined_accelerator_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Disable predefined accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param predefined_accelerator_name: The name of the predefined accelerator. Required. + :type predefined_accelerator_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._disable_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + predefined_accelerator_name=predefined_accelerator_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_disable.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/predefinedAccelerators/{predefinedAcceleratorName}/disable"} # type: ignore + + async def _enable_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + predefined_accelerator_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_enable_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + predefined_accelerator_name=predefined_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._enable_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response 
= await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _enable_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/predefinedAccelerators/{predefinedAcceleratorName}/enable"} # type: ignore + + @distributed_trace_async + async def begin_enable( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + predefined_accelerator_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Enable predefined accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param predefined_accelerator_name: The name of the predefined accelerator. Required. + :type predefined_accelerator_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._enable_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + predefined_accelerator_name=predefined_accelerator_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_enable.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/predefinedAccelerators/{predefinedAcceleratorName}/enable"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_runtime_versions_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_runtime_versions_operations.py new file mode 100644 index 00000000000..552d86fd147 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_runtime_versions_operations.py @@ -0,0 +1,102 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Optional, TypeVar + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._runtime_versions_operations import build_list_runtime_versions_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class RuntimeVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`runtime_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def list_runtime_versions(self, **kwargs: Any) -> _models.AvailableRuntimeVersions: + """Lists all of the available runtime versions supported by Microsoft.AppPlatform provider. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AvailableRuntimeVersions or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.AvailableRuntimeVersions + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.AvailableRuntimeVersions] + + request = build_list_runtime_versions_request( + api_version=api_version, + template_url=self.list_runtime_versions.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("AvailableRuntimeVersions", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_runtime_versions.metadata = {"url": "/providers/Microsoft.AppPlatform/runtimeVersions"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_service_registries_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_service_registries_operations.py new file mode 100644 index 00000000000..9b96c8ab63f --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_service_registries_operations.py @@ -0,0 +1,451 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._service_registries_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class ServiceRegistriesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`service_registries` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, service_name: str, service_registry_name: str, **kwargs: Any + ) -> _models.ServiceRegistryResource: + """Get the Service Registry and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param service_registry_name: The name of Service Registry. Required. 
+ :type service_registry_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ServiceRegistryResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceRegistryResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceRegistryResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + service_registry_name=service_registry_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ServiceRegistryResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/serviceRegistries/{serviceRegistryName}"} # type: ignore + + async def _create_or_update_initial( + self, resource_group_name: str, service_name: str, service_registry_name: str, **kwargs: Any + ) -> _models.ServiceRegistryResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceRegistryResource] + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + service_registry_name=service_registry_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if 
response.status_code == 200: + deserialized = self._deserialize("ServiceRegistryResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ServiceRegistryResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/serviceRegistries/{serviceRegistryName}"} # type: ignore + + @distributed_trace_async + async def begin_create_or_update( + self, resource_group_name: str, service_name: str, service_registry_name: str, **kwargs: Any + ) -> AsyncLROPoller[_models.ServiceRegistryResource]: + """Create the default Service Registry or update the existing Service Registry. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param service_registry_name: The name of Service Registry. Required. + :type service_registry_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either ServiceRegistryResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceRegistryResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceRegistryResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + service_registry_name=service_registry_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ServiceRegistryResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/serviceRegistries/{serviceRegistryName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, service_registry_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + service_registry_name=service_registry_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/serviceRegistries/{serviceRegistryName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, service_name: str, service_registry_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Disable the default Service Registry. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param service_registry_name: The name of Service Registry. Required. + :type service_registry_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + service_registry_name=service_registry_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/serviceRegistries/{serviceRegistryName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> AsyncIterable["_models.ServiceRegistryResource"]: + """Handles requests to list all resources in a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ServiceRegistryResource or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceRegistryResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceRegistryResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ServiceRegistryResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/serviceRegistries"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_services_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_services_operations.py new file mode 100644 index 00000000000..1bb9861931d --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_services_operations.py @@ -0,0 +1,1482 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._services_operations import ( + build_check_name_availability_request, + build_create_or_update_request, + build_delete_request, + build_disable_test_endpoint_request, + build_enable_test_endpoint_request, + build_get_request, + build_list_by_subscription_request, + build_list_request, + build_list_test_keys_request, + build_regenerate_test_key_request, + build_start_request, + build_stop_request, + build_update_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class ServicesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`services` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get(self, resource_group_name: str, service_name: str, **kwargs: Any) -> _models.ServiceResource: + """Get a Service and its properties. 
+ + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ServiceResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ServiceResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}"} # type: ignore + + async def _create_or_update_initial( + self, resource_group_name: str, service_name: str, resource: Union[_models.ServiceResource, IO], **kwargs: Any + ) -> _models.ServiceResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(resource, (IO, bytes)): + _content = resource + else: + _json = self._serialize.body(resource, "ServiceResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + 
request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ServiceResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ServiceResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("ServiceResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + resource: _models.ServiceResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ServiceResource]: + """Create a new Service or update an existing Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param resource: Parameters for the create or update operation. Required. + :type resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ServiceResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ServiceResource]: + """Create a new Service or update an existing Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :param resource: Parameters for the create or update operation. Required. + :type resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ServiceResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, resource_group_name: str, service_name: str, resource: Union[_models.ServiceResource, IO], **kwargs: Any + ) -> AsyncLROPoller[_models.ServiceResource]: + """Create a new Service or update an existing Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param resource: Parameters for the create or update operation. Is either a model type or a IO + type. Required. + :type resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either ServiceResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + resource=resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ServiceResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}"} # type: ignore + + @distributed_trace_async + async def begin_delete(self, resource_group_name: str, service_name: str, **kwargs: Any) -> AsyncLROPoller[None]: + """Operation to delete a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}"} # type: ignore + + async def _update_initial( + self, resource_group_name: str, service_name: str, resource: Union[_models.ServiceResource, IO], **kwargs: Any + ) -> _models.ServiceResource: + error_map = { + 401: ClientAuthenticationError, + 404: 
ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(resource, (IO, bytes)): + _content = resource + else: + _json = self._serialize.body(resource, "ServiceResource") + + request = build_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ServiceResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("ServiceResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}"} # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + service_name: str, + resource: _models.ServiceResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ServiceResource]: + """Operation to update an existing Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param resource: Parameters for the update operation. Required. + :type resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ServiceResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + service_name: str, + resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ServiceResource]: + """Operation to update an existing Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param resource: Parameters for the update operation. Required. + :type resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ServiceResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, resource_group_name: str, service_name: str, resource: Union[_models.ServiceResource, IO], **kwargs: Any + ) -> AsyncLROPoller[_models.ServiceResource]: + """Operation to update an existing Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param resource: Parameters for the update operation. Is either a model type or a IO type. + Required. + :type resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ServiceResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + resource=resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ServiceResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}"} # type: ignore + + @distributed_trace_async + async def list_test_keys(self, resource_group_name: str, service_name: str, **kwargs: Any) -> _models.TestKeys: + """List test keys for a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TestKeys or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.TestKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.TestKeys] + + request = build_list_test_keys_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_test_keys.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("TestKeys", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_test_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/listTestKeys"} # type: ignore + + @overload + async def regenerate_test_key( + self, + resource_group_name: str, + service_name: str, + regenerate_test_key_request: _models.RegenerateTestKeyRequestPayload, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.TestKeys: + """Regenerate a test key for a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param regenerate_test_key_request: Parameters for the operation. Required. + :type regenerate_test_key_request: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.RegenerateTestKeyRequestPayload + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TestKeys or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.TestKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def regenerate_test_key( + self, + resource_group_name: str, + service_name: str, + regenerate_test_key_request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.TestKeys: + """Regenerate a test key for a Service. + + :param resource_group_name: The name of the resource group that contains the resource. 
You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param regenerate_test_key_request: Parameters for the operation. Required. + :type regenerate_test_key_request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TestKeys or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.TestKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def regenerate_test_key( + self, + resource_group_name: str, + service_name: str, + regenerate_test_key_request: Union[_models.RegenerateTestKeyRequestPayload, IO], + **kwargs: Any + ) -> _models.TestKeys: + """Regenerate a test key for a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param regenerate_test_key_request: Parameters for the operation. Is either a model type or a + IO type. Required. + :type regenerate_test_key_request: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.RegenerateTestKeyRequestPayload or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TestKeys or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.TestKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.TestKeys] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(regenerate_test_key_request, (IO, bytes)): + _content = regenerate_test_key_request + else: + _json = self._serialize.body(regenerate_test_key_request, "RegenerateTestKeyRequestPayload") + + request = build_regenerate_test_key_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.regenerate_test_key.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = 
pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("TestKeys", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + regenerate_test_key.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/regenerateTestKey"} # type: ignore + + @distributed_trace_async + async def disable_test_endpoint( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> None: + """Disable test endpoint functionality for a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_disable_test_endpoint_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.disable_test_endpoint.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + disable_test_endpoint.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/disableTestEndpoint"} # type: ignore + + @distributed_trace_async + async def enable_test_endpoint( + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> _models.TestKeys: + """Enable test endpoint functionality for a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TestKeys or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.TestKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.TestKeys] + + request = build_enable_test_endpoint_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.enable_test_endpoint.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("TestKeys", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + enable_test_endpoint.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/enableTestEndpoint"} # type: ignore + + async def _stop_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 304: ResourceNotModifiedError, + 404: lambda response: ResourceNotFoundError(response=response, error_format=ARMErrorFormat), + 409: lambda response: ResourceExistsError(response=response, error_format=ARMErrorFormat), + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_stop_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._stop_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _stop_initial.metadata = 
{"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/stop"} # type: ignore + + @distributed_trace_async + async def begin_stop(self, resource_group_name: str, service_name: str, **kwargs: Any) -> AsyncLROPoller[None]: + """Stop a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._stop_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_stop.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/stop"} # type: ignore + + async def _start_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 304: ResourceNotModifiedError, + 404: lambda response: ResourceNotFoundError(response=response, error_format=ARMErrorFormat), + 409: lambda response: ResourceExistsError(response=response, error_format=ARMErrorFormat), + } + 
error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_start_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._start_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _start_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/start"} # type: ignore + + @distributed_trace_async + async def begin_start(self, resource_group_name: str, service_name: str, **kwargs: Any) -> AsyncLROPoller[None]: + """Start a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._start_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_start.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/start"} # type: ignore + + @overload + async def check_name_availability( + self, + location: str, + availability_parameters: _models.NameAvailabilityParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.NameAvailability: + """Checks that the resource name is valid and is not already in use. + + :param location: the region. Required. + :type location: str + :param availability_parameters: Parameters supplied to the operation. Required. + :type availability_parameters: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.NameAvailabilityParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: NameAvailability or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.NameAvailability + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def check_name_availability( + self, location: str, availability_parameters: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.NameAvailability: + """Checks that the resource name is valid and is not already in use. + + :param location: the region. Required. + :type location: str + :param availability_parameters: Parameters supplied to the operation. Required. + :type availability_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: NameAvailability or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.NameAvailability + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def check_name_availability( + self, location: str, availability_parameters: Union[_models.NameAvailabilityParameters, IO], **kwargs: Any + ) -> _models.NameAvailability: + """Checks that the resource name is valid and is not already in use. + + :param location: the region. Required. + :type location: str + :param availability_parameters: Parameters supplied to the operation. Is either a model type or + a IO type. Required. + :type availability_parameters: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.NameAvailabilityParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: NameAvailability or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.NameAvailability + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.NameAvailability] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(availability_parameters, (IO, bytes)): + _content = availability_parameters + else: + _json = self._serialize.body(availability_parameters, "NameAvailabilityParameters") + + request = build_check_name_availability_request( + location=location, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.check_name_availability.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("NameAvailability", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + check_name_availability.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.AppPlatform/locations/{location}/checkNameAvailability"} # type: ignore + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.ServiceResource"]: + """Handles requests to list all resources in a subscription. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ServiceResource or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceResourceList] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_by_subscription.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ServiceResourceList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list_by_subscription.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.AppPlatform/Spring"} # type: ignore + + @distributed_trace + def list(self, resource_group_name: str, **kwargs: Any) -> AsyncIterable["_models.ServiceResource"]: + """Handles requests to list all resources in a resource group. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ServiceResource or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceResourceList] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ServiceResourceList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_skus_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_skus_operations.py new file mode 100644 index 00000000000..c060b44f7bd --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_skus_operations.py @@ -0,0 +1,133 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._skus_operations import build_list_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class SkusOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`skus` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list(self, **kwargs: Any) -> AsyncIterable["_models.ResourceSku"]: + """Lists all of the available skus of the Microsoft.AppPlatform provider. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ResourceSku or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceSku] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ResourceSkuCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ResourceSkuCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.AppPlatform/skus"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_storages_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_storages_operations.py new file mode 100644 index 00000000000..557cb9a5b4b --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/aio/operations/_storages_operations.py @@ -0,0 +1,565 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._storages_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class StoragesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.aio.AppPlatformManagementClient`'s + :attr:`storages` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, service_name: str, storage_name: str, **kwargs: Any + ) -> _models.StorageResource: + """Get the storage resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param storage_name: The name of the storage resource. Required. 
+ :type storage_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.StorageResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + storage_name=storage_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("StorageResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/storages/{storageName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + storage_name: str, + storage_resource: Union[_models.StorageResource, IO], + **kwargs: Any + ) -> _models.StorageResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(storage_resource, (IO, bytes)): + _content = storage_resource + else: + _json = self._serialize.body(storage_resource, "StorageResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + storage_name=storage_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await 
self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("StorageResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("StorageResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("StorageResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/storages/{storageName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + storage_name: str, + storage_resource: _models.StorageResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageResource]: + """Create or update storage resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param storage_name: The name of the storage resource. Required. + :type storage_name: str + :param storage_resource: Parameters for the create or update operation. Required. + :type storage_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.StorageResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either StorageResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.StorageResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + storage_name: str, + storage_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageResource]: + """Create or update storage resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :param storage_name: The name of the storage resource. Required. + :type storage_name: str + :param storage_resource: Parameters for the create or update operation. Required. + :type storage_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either StorageResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.StorageResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + storage_name: str, + storage_resource: Union[_models.StorageResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageResource]: + """Create or update storage resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param storage_name: The name of the storage resource. Required. + :type storage_name: str + :param storage_resource: Parameters for the create or update operation. Is either a model type + or a IO type. Required. + :type storage_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.StorageResource or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either StorageResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.StorageResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + storage_name=storage_name, + storage_resource=storage_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("StorageResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/storages/{storageName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, storage_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + storage_name=storage_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response 
+ + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/storages/{storageName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, service_name: str, storage_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete the storage resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param storage_name: The name of the storage resource. Required. + :type storage_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + storage_name=storage_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/storages/{storageName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> AsyncIterable["_models.StorageResource"]: + """List all the storages of one Azure Spring Apps resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either StorageResource or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.StorageResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("StorageResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/storages"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/models/__init__.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/models/__init__.py new file mode 100644 index 00000000000..19f10b1eb41 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/models/__init__.py @@ -0,0 +1,543 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._models_py3 import AcceleratorAuthSetting +from ._models_py3 import AcceleratorBasicAuthSetting +from ._models_py3 import AcceleratorGitRepository +from ._models_py3 import AcceleratorPublicSetting +from ._models_py3 import AcceleratorSshSetting +from ._models_py3 import ActiveDeploymentCollection +from ._models_py3 import ApiPortalCustomDomainProperties +from ._models_py3 import ApiPortalCustomDomainResource +from ._models_py3 import ApiPortalCustomDomainResourceCollection +from ._models_py3 import ApiPortalInstance +from ._models_py3 import ApiPortalProperties +from ._models_py3 import ApiPortalResource +from ._models_py3 import ApiPortalResourceCollection +from ._models_py3 import ApiPortalResourceRequests +from ._models_py3 import AppResource +from ._models_py3 import AppResourceCollection +from ._models_py3 import AppResourceProperties +from ._models_py3 import AppVNetAddons +from ._models_py3 import ApplicationAcceleratorComponent +from ._models_py3 import ApplicationAcceleratorInstance +from ._models_py3 import ApplicationAcceleratorProperties +from ._models_py3 import ApplicationAcceleratorResource +from ._models_py3 import ApplicationAcceleratorResourceCollection +from ._models_py3 import ApplicationAcceleratorResourceRequests +from ._models_py3 import ApplicationInsightsAgentVersions +from ._models_py3 import ApplicationLiveViewComponent +from ._models_py3 import ApplicationLiveViewInstance +from ._models_py3 import ApplicationLiveViewProperties +from ._models_py3 import ApplicationLiveViewResource +from ._models_py3 import ApplicationLiveViewResourceCollection +from ._models_py3 import ApplicationLiveViewResourceRequests +from ._models_py3 import AvailableOperations +from ._models_py3 import AvailableRuntimeVersions +from ._models_py3 import AzureFileVolume +from ._models_py3 import BindingResource +from ._models_py3 import BindingResourceCollection +from ._models_py3 import BindingResourceProperties +from ._models_py3 import Build +from ._models_py3 import BuildCollection +from ._models_py3 import BuildProperties +from ._models_py3 import BuildResourceRequests +from ._models_py3 import BuildResult +from ._models_py3 import BuildResultCollection +from ._models_py3 import BuildResultLog +from ._models_py3 import BuildResultProperties +from ._models_py3 import BuildResultUserSourceInfo +from ._models_py3 import BuildService +from ._models_py3 import BuildServiceAgentPoolProperties +from ._models_py3 import BuildServiceAgentPoolResource +from ._models_py3 import 
BuildServiceAgentPoolResourceCollection +from ._models_py3 import BuildServiceAgentPoolSizeProperties +from ._models_py3 import BuildServiceCollection +from ._models_py3 import BuildServiceProperties +from ._models_py3 import BuildServicePropertiesResourceRequests +from ._models_py3 import BuildStageProperties +from ._models_py3 import BuilderProperties +from ._models_py3 import BuilderResource +from ._models_py3 import BuilderResourceCollection +from ._models_py3 import BuildpackBindingLaunchProperties +from ._models_py3 import BuildpackBindingProperties +from ._models_py3 import BuildpackBindingResource +from ._models_py3 import BuildpackBindingResourceCollection +from ._models_py3 import BuildpackProperties +from ._models_py3 import BuildpacksGroupProperties +from ._models_py3 import CertificateProperties +from ._models_py3 import CertificateResource +from ._models_py3 import CertificateResourceCollection +from ._models_py3 import CloudErrorBody +from ._models_py3 import ClusterResourceProperties +from ._models_py3 import ConfigServerGitProperty +from ._models_py3 import ConfigServerProperties +from ._models_py3 import ConfigServerResource +from ._models_py3 import ConfigServerSettings +from ._models_py3 import ConfigServerSettingsErrorRecord +from ._models_py3 import ConfigServerSettingsValidateResult +from ._models_py3 import ConfigurationServiceGitProperty +from ._models_py3 import ConfigurationServiceGitPropertyValidateResult +from ._models_py3 import ConfigurationServiceGitRepository +from ._models_py3 import ConfigurationServiceInstance +from ._models_py3 import ConfigurationServiceProperties +from ._models_py3 import ConfigurationServiceResource +from ._models_py3 import ConfigurationServiceResourceCollection +from ._models_py3 import ConfigurationServiceResourceRequests +from ._models_py3 import ConfigurationServiceSettings +from ._models_py3 import ConfigurationServiceSettingsValidateResult +from ._models_py3 import ContainerProbeSettings +from ._models_py3 import ContentCertificateProperties +from ._models_py3 import CustomContainer +from ._models_py3 import CustomContainerUserSourceInfo +from ._models_py3 import CustomDomainProperties +from ._models_py3 import CustomDomainResource +from ._models_py3 import CustomDomainResourceCollection +from ._models_py3 import CustomDomainValidatePayload +from ._models_py3 import CustomDomainValidateResult +from ._models_py3 import CustomPersistentDiskProperties +from ._models_py3 import CustomPersistentDiskResource +from ._models_py3 import CustomizedAcceleratorProperties +from ._models_py3 import CustomizedAcceleratorResource +from ._models_py3 import CustomizedAcceleratorResourceCollection +from ._models_py3 import CustomizedAcceleratorValidateResult +from ._models_py3 import DeploymentInstance +from ._models_py3 import DeploymentList +from ._models_py3 import DeploymentResource +from ._models_py3 import DeploymentResourceCollection +from ._models_py3 import DeploymentResourceProperties +from ._models_py3 import DeploymentSettings +from ._models_py3 import DevToolPortalFeatureDetail +from ._models_py3 import DevToolPortalFeatureSettings +from ._models_py3 import DevToolPortalInstance +from ._models_py3 import DevToolPortalProperties +from ._models_py3 import DevToolPortalResource +from ._models_py3 import DevToolPortalResourceCollection +from ._models_py3 import DevToolPortalResourceRequests +from ._models_py3 import DevToolPortalSsoProperties +from ._models_py3 import DiagnosticParameters +from ._models_py3 import Error +from 
._models_py3 import ExecAction +from ._models_py3 import GatewayApiMetadataProperties +from ._models_py3 import GatewayApiRoute +from ._models_py3 import GatewayCorsProperties +from ._models_py3 import GatewayCustomDomainProperties +from ._models_py3 import GatewayCustomDomainResource +from ._models_py3 import GatewayCustomDomainResourceCollection +from ._models_py3 import GatewayInstance +from ._models_py3 import GatewayOperatorProperties +from ._models_py3 import GatewayOperatorResourceRequests +from ._models_py3 import GatewayProperties +from ._models_py3 import GatewayPropertiesEnvironmentVariables +from ._models_py3 import GatewayResource +from ._models_py3 import GatewayResourceCollection +from ._models_py3 import GatewayResourceRequests +from ._models_py3 import GatewayRouteConfigOpenApiProperties +from ._models_py3 import GatewayRouteConfigProperties +from ._models_py3 import GatewayRouteConfigResource +from ._models_py3 import GatewayRouteConfigResourceCollection +from ._models_py3 import GitPatternRepository +from ._models_py3 import HTTPGetAction +from ._models_py3 import ImageRegistryCredential +from ._models_py3 import IngressConfig +from ._models_py3 import IngressSettings +from ._models_py3 import IngressSettingsClientAuth +from ._models_py3 import JarUploadedUserSourceInfo +from ._models_py3 import KeyVaultCertificateProperties +from ._models_py3 import LoadedCertificate +from ._models_py3 import LogFileUrlResponse +from ._models_py3 import LogSpecification +from ._models_py3 import ManagedIdentityProperties +from ._models_py3 import MarketplaceResource +from ._models_py3 import MetricDimension +from ._models_py3 import MetricSpecification +from ._models_py3 import MonitoringSettingProperties +from ._models_py3 import MonitoringSettingResource +from ._models_py3 import NameAvailability +from ._models_py3 import NameAvailabilityParameters +from ._models_py3 import NetCoreZipUploadedUserSourceInfo +from ._models_py3 import NetworkProfile +from ._models_py3 import NetworkProfileOutboundIPs +from ._models_py3 import OperationDetail +from ._models_py3 import OperationDisplay +from ._models_py3 import OperationProperties +from ._models_py3 import PersistentDisk +from ._models_py3 import PredefinedAcceleratorProperties +from ._models_py3 import PredefinedAcceleratorResource +from ._models_py3 import PredefinedAcceleratorResourceCollection +from ._models_py3 import Probe +from ._models_py3 import ProbeAction +from ._models_py3 import ProxyResource +from ._models_py3 import RegenerateTestKeyRequestPayload +from ._models_py3 import RemoteDebugging +from ._models_py3 import RemoteDebuggingPayload +from ._models_py3 import RequiredTraffic +from ._models_py3 import Resource +from ._models_py3 import ResourceRequests +from ._models_py3 import ResourceSku +from ._models_py3 import ResourceSkuCapabilities +from ._models_py3 import ResourceSkuCollection +from ._models_py3 import ResourceSkuLocationInfo +from ._models_py3 import ResourceSkuRestrictionInfo +from ._models_py3 import ResourceSkuRestrictions +from ._models_py3 import ResourceSkuZoneDetails +from ._models_py3 import ResourceUploadDefinition +from ._models_py3 import ServiceRegistryInstance +from ._models_py3 import ServiceRegistryProperties +from ._models_py3 import ServiceRegistryResource +from ._models_py3 import ServiceRegistryResourceCollection +from ._models_py3 import ServiceRegistryResourceRequests +from ._models_py3 import ServiceResource +from ._models_py3 import ServiceResourceList +from ._models_py3 import 
ServiceSpecification +from ._models_py3 import ServiceVNetAddons +from ._models_py3 import Sku +from ._models_py3 import SkuCapacity +from ._models_py3 import SourceUploadedUserSourceInfo +from ._models_py3 import SsoProperties +from ._models_py3 import StackProperties +from ._models_py3 import StorageAccount +from ._models_py3 import StorageProperties +from ._models_py3 import StorageResource +from ._models_py3 import StorageResourceCollection +from ._models_py3 import SupportedBuildpackResource +from ._models_py3 import SupportedBuildpackResourceProperties +from ._models_py3 import SupportedBuildpacksCollection +from ._models_py3 import SupportedRuntimeVersion +from ._models_py3 import SupportedStackResource +from ._models_py3 import SupportedStackResourceProperties +from ._models_py3 import SupportedStacksCollection +from ._models_py3 import SystemData +from ._models_py3 import TCPSocketAction +from ._models_py3 import TemporaryDisk +from ._models_py3 import TestKeys +from ._models_py3 import TrackedResource +from ._models_py3 import TriggeredBuildResult +from ._models_py3 import UploadedUserSourceInfo +from ._models_py3 import UserAssignedManagedIdentity +from ._models_py3 import UserSourceInfo +from ._models_py3 import ValidationMessages + +from ._app_platform_management_client_enums import ActionType +from ._app_platform_management_client_enums import ApiPortalProvisioningState +from ._app_platform_management_client_enums import ApmType +from ._app_platform_management_client_enums import AppResourceProvisioningState +from ._app_platform_management_client_enums import ApplicationAcceleratorProvisioningState +from ._app_platform_management_client_enums import ApplicationLiveViewProvisioningState +from ._app_platform_management_client_enums import BackendProtocol +from ._app_platform_management_client_enums import BindingType +from ._app_platform_management_client_enums import BuildProvisioningState +from ._app_platform_management_client_enums import BuildResultProvisioningState +from ._app_platform_management_client_enums import BuildServiceProvisioningState +from ._app_platform_management_client_enums import BuilderProvisioningState +from ._app_platform_management_client_enums import BuildpackBindingProvisioningState +from ._app_platform_management_client_enums import CertificateResourceProvisioningState +from ._app_platform_management_client_enums import ConfigServerState +from ._app_platform_management_client_enums import ConfigurationServiceProvisioningState +from ._app_platform_management_client_enums import CreatedByType +from ._app_platform_management_client_enums import CustomDomainResourceProvisioningState +from ._app_platform_management_client_enums import CustomizedAcceleratorProvisioningState +from ._app_platform_management_client_enums import CustomizedAcceleratorValidateResultState +from ._app_platform_management_client_enums import DeploymentResourceProvisioningState +from ._app_platform_management_client_enums import DeploymentResourceStatus +from ._app_platform_management_client_enums import DevToolPortalFeatureState +from ._app_platform_management_client_enums import DevToolPortalProvisioningState +from ._app_platform_management_client_enums import GatewayProvisioningState +from ._app_platform_management_client_enums import GatewayRouteConfigProtocol +from ._app_platform_management_client_enums import HTTPSchemeType +from ._app_platform_management_client_enums import KPackBuildStageProvisioningState +from ._app_platform_management_client_enums import 
LastModifiedByType +from ._app_platform_management_client_enums import ManagedIdentityType +from ._app_platform_management_client_enums import MonitoringSettingState +from ._app_platform_management_client_enums import PowerState +from ._app_platform_management_client_enums import PredefinedAcceleratorProvisioningState +from ._app_platform_management_client_enums import PredefinedAcceleratorState +from ._app_platform_management_client_enums import ProbeActionType +from ._app_platform_management_client_enums import ProvisioningState +from ._app_platform_management_client_enums import ResourceSkuRestrictionsReasonCode +from ._app_platform_management_client_enums import ResourceSkuRestrictionsType +from ._app_platform_management_client_enums import ServiceRegistryProvisioningState +from ._app_platform_management_client_enums import SessionAffinity +from ._app_platform_management_client_enums import SkuScaleType +from ._app_platform_management_client_enums import StorageType +from ._app_platform_management_client_enums import SupportedRuntimePlatform +from ._app_platform_management_client_enums import SupportedRuntimeValue +from ._app_platform_management_client_enums import TestKeyType +from ._app_platform_management_client_enums import TrafficDirection +from ._app_platform_management_client_enums import Type +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "AcceleratorAuthSetting", + "AcceleratorBasicAuthSetting", + "AcceleratorGitRepository", + "AcceleratorPublicSetting", + "AcceleratorSshSetting", + "ActiveDeploymentCollection", + "ApiPortalCustomDomainProperties", + "ApiPortalCustomDomainResource", + "ApiPortalCustomDomainResourceCollection", + "ApiPortalInstance", + "ApiPortalProperties", + "ApiPortalResource", + "ApiPortalResourceCollection", + "ApiPortalResourceRequests", + "AppResource", + "AppResourceCollection", + "AppResourceProperties", + "AppVNetAddons", + "ApplicationAcceleratorComponent", + "ApplicationAcceleratorInstance", + "ApplicationAcceleratorProperties", + "ApplicationAcceleratorResource", + "ApplicationAcceleratorResourceCollection", + "ApplicationAcceleratorResourceRequests", + "ApplicationInsightsAgentVersions", + "ApplicationLiveViewComponent", + "ApplicationLiveViewInstance", + "ApplicationLiveViewProperties", + "ApplicationLiveViewResource", + "ApplicationLiveViewResourceCollection", + "ApplicationLiveViewResourceRequests", + "AvailableOperations", + "AvailableRuntimeVersions", + "AzureFileVolume", + "BindingResource", + "BindingResourceCollection", + "BindingResourceProperties", + "Build", + "BuildCollection", + "BuildProperties", + "BuildResourceRequests", + "BuildResult", + "BuildResultCollection", + "BuildResultLog", + "BuildResultProperties", + "BuildResultUserSourceInfo", + "BuildService", + "BuildServiceAgentPoolProperties", + "BuildServiceAgentPoolResource", + "BuildServiceAgentPoolResourceCollection", + "BuildServiceAgentPoolSizeProperties", + "BuildServiceCollection", + "BuildServiceProperties", + "BuildServicePropertiesResourceRequests", + "BuildStageProperties", + "BuilderProperties", + "BuilderResource", + "BuilderResourceCollection", + "BuildpackBindingLaunchProperties", + "BuildpackBindingProperties", + "BuildpackBindingResource", + "BuildpackBindingResourceCollection", + "BuildpackProperties", + "BuildpacksGroupProperties", + "CertificateProperties", + "CertificateResource", + "CertificateResourceCollection", + 
"CloudErrorBody", + "ClusterResourceProperties", + "ConfigServerGitProperty", + "ConfigServerProperties", + "ConfigServerResource", + "ConfigServerSettings", + "ConfigServerSettingsErrorRecord", + "ConfigServerSettingsValidateResult", + "ConfigurationServiceGitProperty", + "ConfigurationServiceGitPropertyValidateResult", + "ConfigurationServiceGitRepository", + "ConfigurationServiceInstance", + "ConfigurationServiceProperties", + "ConfigurationServiceResource", + "ConfigurationServiceResourceCollection", + "ConfigurationServiceResourceRequests", + "ConfigurationServiceSettings", + "ConfigurationServiceSettingsValidateResult", + "ContainerProbeSettings", + "ContentCertificateProperties", + "CustomContainer", + "CustomContainerUserSourceInfo", + "CustomDomainProperties", + "CustomDomainResource", + "CustomDomainResourceCollection", + "CustomDomainValidatePayload", + "CustomDomainValidateResult", + "CustomPersistentDiskProperties", + "CustomPersistentDiskResource", + "CustomizedAcceleratorProperties", + "CustomizedAcceleratorResource", + "CustomizedAcceleratorResourceCollection", + "CustomizedAcceleratorValidateResult", + "DeploymentInstance", + "DeploymentList", + "DeploymentResource", + "DeploymentResourceCollection", + "DeploymentResourceProperties", + "DeploymentSettings", + "DevToolPortalFeatureDetail", + "DevToolPortalFeatureSettings", + "DevToolPortalInstance", + "DevToolPortalProperties", + "DevToolPortalResource", + "DevToolPortalResourceCollection", + "DevToolPortalResourceRequests", + "DevToolPortalSsoProperties", + "DiagnosticParameters", + "Error", + "ExecAction", + "GatewayApiMetadataProperties", + "GatewayApiRoute", + "GatewayCorsProperties", + "GatewayCustomDomainProperties", + "GatewayCustomDomainResource", + "GatewayCustomDomainResourceCollection", + "GatewayInstance", + "GatewayOperatorProperties", + "GatewayOperatorResourceRequests", + "GatewayProperties", + "GatewayPropertiesEnvironmentVariables", + "GatewayResource", + "GatewayResourceCollection", + "GatewayResourceRequests", + "GatewayRouteConfigOpenApiProperties", + "GatewayRouteConfigProperties", + "GatewayRouteConfigResource", + "GatewayRouteConfigResourceCollection", + "GitPatternRepository", + "HTTPGetAction", + "ImageRegistryCredential", + "IngressConfig", + "IngressSettings", + "IngressSettingsClientAuth", + "JarUploadedUserSourceInfo", + "KeyVaultCertificateProperties", + "LoadedCertificate", + "LogFileUrlResponse", + "LogSpecification", + "ManagedIdentityProperties", + "MarketplaceResource", + "MetricDimension", + "MetricSpecification", + "MonitoringSettingProperties", + "MonitoringSettingResource", + "NameAvailability", + "NameAvailabilityParameters", + "NetCoreZipUploadedUserSourceInfo", + "NetworkProfile", + "NetworkProfileOutboundIPs", + "OperationDetail", + "OperationDisplay", + "OperationProperties", + "PersistentDisk", + "PredefinedAcceleratorProperties", + "PredefinedAcceleratorResource", + "PredefinedAcceleratorResourceCollection", + "Probe", + "ProbeAction", + "ProxyResource", + "RegenerateTestKeyRequestPayload", + "RemoteDebugging", + "RemoteDebuggingPayload", + "RequiredTraffic", + "Resource", + "ResourceRequests", + "ResourceSku", + "ResourceSkuCapabilities", + "ResourceSkuCollection", + "ResourceSkuLocationInfo", + "ResourceSkuRestrictionInfo", + "ResourceSkuRestrictions", + "ResourceSkuZoneDetails", + "ResourceUploadDefinition", + "ServiceRegistryInstance", + "ServiceRegistryProperties", + "ServiceRegistryResource", + "ServiceRegistryResourceCollection", + "ServiceRegistryResourceRequests", + 
"ServiceResource", + "ServiceResourceList", + "ServiceSpecification", + "ServiceVNetAddons", + "Sku", + "SkuCapacity", + "SourceUploadedUserSourceInfo", + "SsoProperties", + "StackProperties", + "StorageAccount", + "StorageProperties", + "StorageResource", + "StorageResourceCollection", + "SupportedBuildpackResource", + "SupportedBuildpackResourceProperties", + "SupportedBuildpacksCollection", + "SupportedRuntimeVersion", + "SupportedStackResource", + "SupportedStackResourceProperties", + "SupportedStacksCollection", + "SystemData", + "TCPSocketAction", + "TemporaryDisk", + "TestKeys", + "TrackedResource", + "TriggeredBuildResult", + "UploadedUserSourceInfo", + "UserAssignedManagedIdentity", + "UserSourceInfo", + "ValidationMessages", + "ActionType", + "ApiPortalProvisioningState", + "ApmType", + "AppResourceProvisioningState", + "ApplicationAcceleratorProvisioningState", + "ApplicationLiveViewProvisioningState", + "BackendProtocol", + "BindingType", + "BuildProvisioningState", + "BuildResultProvisioningState", + "BuildServiceProvisioningState", + "BuilderProvisioningState", + "BuildpackBindingProvisioningState", + "CertificateResourceProvisioningState", + "ConfigServerState", + "ConfigurationServiceProvisioningState", + "CreatedByType", + "CustomDomainResourceProvisioningState", + "CustomizedAcceleratorProvisioningState", + "CustomizedAcceleratorValidateResultState", + "DeploymentResourceProvisioningState", + "DeploymentResourceStatus", + "DevToolPortalFeatureState", + "DevToolPortalProvisioningState", + "GatewayProvisioningState", + "GatewayRouteConfigProtocol", + "HTTPSchemeType", + "KPackBuildStageProvisioningState", + "LastModifiedByType", + "ManagedIdentityType", + "MonitoringSettingState", + "PowerState", + "PredefinedAcceleratorProvisioningState", + "PredefinedAcceleratorState", + "ProbeActionType", + "ProvisioningState", + "ResourceSkuRestrictionsReasonCode", + "ResourceSkuRestrictionsType", + "ServiceRegistryProvisioningState", + "SessionAffinity", + "SkuScaleType", + "StorageType", + "SupportedRuntimePlatform", + "SupportedRuntimeValue", + "TestKeyType", + "TrafficDirection", + "Type", +] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/models/_app_platform_management_client_enums.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/models/_app_platform_management_client_enums.py new file mode 100644 index 00000000000..9c426cdb471 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/models/_app_platform_management_client_enums.py @@ -0,0 +1,439 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum +from azure.core import CaseInsensitiveEnumMeta + + +class ActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum. Indicates the action type. 
"Internal" refers to actions that are for internal only APIs.""" + + INTERNAL = "Internal" + + +class ApiPortalProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of the API portal.""" + + CREATING = "Creating" + UPDATING = "Updating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + DELETING = "Deleting" + + +class ApmType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of application performance monitoring.""" + + APPLICATION_INSIGHTS = "ApplicationInsights" + APP_DYNAMICS = "AppDynamics" + DYNATRACE = "Dynatrace" + NEW_RELIC = "NewRelic" + ELASTIC_APM = "ElasticAPM" + + +class ApplicationAcceleratorProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of the application accelerator.""" + + CREATING = "Creating" + UPDATING = "Updating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + DELETING = "Deleting" + + +class ApplicationLiveViewProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of the Application Live View.""" + + CREATING = "Creating" + UPDATING = "Updating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + DELETING = "Deleting" + CANCELED = "Canceled" + + +class AppResourceProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Provisioning state of the App.""" + + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CREATING = "Creating" + UPDATING = "Updating" + DELETING = "Deleting" + + +class BackendProtocol(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """How ingress should communicate with this app backend service.""" + + GRPC = "GRPC" + DEFAULT = "Default" + + +class BindingType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Buildpack Binding Type.""" + + APPLICATION_INSIGHTS = "ApplicationInsights" + APACHE_SKY_WALKING = "ApacheSkyWalking" + APP_DYNAMICS = "AppDynamics" + DYNATRACE = "Dynatrace" + NEW_RELIC = "NewRelic" + ELASTIC_APM = "ElasticAPM" + CA_CERTIFICATES = "CACertificates" + + +class BuilderProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Builder provision status.""" + + CREATING = "Creating" + UPDATING = "Updating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + DELETING = "Deleting" + + +class BuildpackBindingProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of the Buildpack Binding.""" + + CREATING = "Creating" + UPDATING = "Updating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + DELETING = "Deleting" + + +class BuildProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Provisioning state of the KPack build result.""" + + CREATING = "Creating" + UPDATING = "Updating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + DELETING = "Deleting" + + +class BuildResultProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Provisioning state of the KPack build result.""" + + QUEUING = "Queuing" + BUILDING = "Building" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + DELETING = "Deleting" + + +class BuildServiceProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Provisioning state of the KPack build result.""" + + CREATING = "Creating" + UPDATING = "Updating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + DELETING = "Deleting" + + +class CertificateResourceProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Provisioning state of the Certificate.""" + + CREATING = "Creating" + UPDATING = "Updating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + DELETING = "Deleting" + + +class ConfigServerState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of the config 
server.""" + + NOT_AVAILABLE = "NotAvailable" + DELETED = "Deleted" + FAILED = "Failed" + SUCCEEDED = "Succeeded" + UPDATING = "Updating" + + +class ConfigurationServiceProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of the Application Configuration Service.""" + + CREATING = "Creating" + UPDATING = "Updating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + DELETING = "Deleting" + + +class CreatedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of identity that created the resource.""" + + USER = "User" + APPLICATION = "Application" + MANAGED_IDENTITY = "ManagedIdentity" + KEY = "Key" + + +class CustomDomainResourceProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Provisioning state of the Domain.""" + + CREATING = "Creating" + UPDATING = "Updating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + DELETING = "Deleting" + + +class CustomizedAcceleratorProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of the customized accelerator.""" + + CREATING = "Creating" + UPDATING = "Updating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + DELETING = "Deleting" + + +class CustomizedAcceleratorValidateResultState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of the customized accelerator validation result.""" + + #: Customized accelerator properties are valid. + VALID = "Valid" + #: Customized accelerator properties are invalid. + INVALID = "Invalid" + + +class DeploymentResourceProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Provisioning state of the Deployment.""" + + CREATING = "Creating" + UPDATING = "Updating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + + +class DeploymentResourceStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Status of the Deployment.""" + + STOPPED = "Stopped" + RUNNING = "Running" + + +class DevToolPortalFeatureState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of the plugin.""" + + #: Enable the plugin in Dev Tool Portal. + ENABLED = "Enabled" + #: Disable the plugin in Dev Tool Portal. + DISABLED = "Disabled" + + +class DevToolPortalProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of the Dev Tool Portal.""" + + CREATING = "Creating" + UPDATING = "Updating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + DELETING = "Deleting" + CANCELED = "Canceled" + + +class GatewayProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of the Spring Cloud Gateway.""" + + CREATING = "Creating" + UPDATING = "Updating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + DELETING = "Deleting" + + +class GatewayRouteConfigProtocol(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Protocol of routed Azure Spring Apps applications.""" + + HTTP = "HTTP" + HTTPS = "HTTPS" + + +class HTTPSchemeType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Scheme to use for connecting to the host. Defaults to HTTP. + + Possible enum values: + + + * ``"HTTP"`` means that the scheme used will be http:// + * ``"HTTPS"`` means that the scheme used will be https://. 
+ """ + + HTTP = "HTTP" + HTTPS = "HTTPS" + + +class KPackBuildStageProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The provisioning state of this build stage resource.""" + + NOT_STARTED = "NotStarted" + RUNNING = "Running" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + + +class LastModifiedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of identity that last modified the resource.""" + + USER = "User" + APPLICATION = "Application" + MANAGED_IDENTITY = "ManagedIdentity" + KEY = "Key" + + +class ManagedIdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the managed identity.""" + + NONE = "None" + SYSTEM_ASSIGNED = "SystemAssigned" + USER_ASSIGNED = "UserAssigned" + SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned" + + +class MonitoringSettingState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of the Monitoring Setting.""" + + NOT_AVAILABLE = "NotAvailable" + FAILED = "Failed" + SUCCEEDED = "Succeeded" + UPDATING = "Updating" + + +class PowerState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Power state of the Service.""" + + RUNNING = "Running" + STOPPED = "Stopped" + + +class PredefinedAcceleratorProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Provisioning state of the predefined accelerator.""" + + CREATING = "Creating" + UPDATING = "Updating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + + +class PredefinedAcceleratorState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of the predefined accelerator.""" + + #: Enable the predefined accelerator. + ENABLED = "Enabled" + #: Disable the predefined accelerator. + DISABLED = "Disabled" + + +class ProbeActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of the action to take to perform the health check.""" + + HTTP_GET_ACTION = "HTTPGetAction" + TCP_SOCKET_ACTION = "TCPSocketAction" + EXEC_ACTION = "ExecAction" + + +class ProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Provisioning state of the Service.""" + + CREATING = "Creating" + UPDATING = "Updating" + STARTING = "Starting" + STOPPING = "Stopping" + DELETING = "Deleting" + DELETED = "Deleted" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + MOVING = "Moving" + MOVED = "Moved" + MOVE_FAILED = "MoveFailed" + + +class ResourceSkuRestrictionsReasonCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Gets the reason for restriction. Possible values include: 'QuotaId', + 'NotAvailableForSubscription'. + """ + + QUOTA_ID = "QuotaId" + NOT_AVAILABLE_FOR_SUBSCRIPTION = "NotAvailableForSubscription" + + +class ResourceSkuRestrictionsType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Gets the type of restrictions. 
Possible values include: 'Location', 'Zone'.""" + + LOCATION = "Location" + ZONE = "Zone" + + +class ServiceRegistryProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of the Service Registry.""" + + CREATING = "Creating" + UPDATING = "Updating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + DELETING = "Deleting" + + +class SessionAffinity(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the affinity, set this to Cookie to enable session affinity.""" + + COOKIE = "Cookie" + NONE = "None" + + +class SkuScaleType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Gets or sets the type of the scale.""" + + NONE = "None" + MANUAL = "Manual" + AUTOMATIC = "Automatic" + + +class StorageType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of the storage.""" + + STORAGE_ACCOUNT = "StorageAccount" + + +class SupportedRuntimePlatform(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The platform of this runtime version (possible values: "Java" or ".NET").""" + + JAVA = "Java" + _NET_CORE = ".NET Core" + + +class SupportedRuntimeValue(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The raw value which could be passed to deployment CRUD operations.""" + + JAVA8 = "Java_8" + JAVA11 = "Java_11" + JAVA17 = "Java_17" + NET_CORE31 = "NetCore_31" + + +class TestKeyType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the test key.""" + + PRIMARY = "Primary" + SECONDARY = "Secondary" + + +class TrafficDirection(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The direction of required traffic.""" + + INBOUND = "Inbound" + OUTBOUND = "Outbound" + + +class Type(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of the underlying resource to mount as a persistent disk.""" + + AZURE_FILE_VOLUME = "AzureFileVolume" diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/models/_models_py3.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/models/_models_py3.py new file mode 100644 index 00000000000..0837290c7aa --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/models/_models_py3.py @@ -0,0 +1,9165 @@ +# coding=utf-8 +# pylint: disable=too-many-lines +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +import datetime +import sys +from typing import Any, Dict, List, Optional, TYPE_CHECKING, Union + +from ... import _serialization + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from .. import models as _models +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object + + +class AcceleratorAuthSetting(_serialization.Model): + """Auth setting payload. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AcceleratorBasicAuthSetting, AcceleratorPublicSetting, AcceleratorSshSetting + + All required parameters must be populated in order to send to Azure. 
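The enum classes above are plain str subclasses whose metaclass makes member lookup case-insensitive, so they compare directly against the raw strings the service returns. A minimal sketch of how they are typically consumed, assuming the vendored package is importable from the extension source tree (the response value below is a placeholder):

from azext_spring.vendored_sdks.appplatform.v2022_11_01_preview.models import (
    ProvisioningState,
)

raw_state = "Succeeded"  # placeholder wire value from a service response

# str-based enum members compare equal to the raw wire value.
assert ProvisioningState.SUCCEEDED == raw_state

# CaseInsensitiveEnumMeta makes lookup by member name tolerant of casing.
assert ProvisioningState["succeeded"] is ProvisioningState.SUCCEEDED

# Terminal-state check, e.g. while polling a long-running operation.
done = raw_state in (
    ProvisioningState.SUCCEEDED,
    ProvisioningState.FAILED,
    ProvisioningState.DELETED,
)
print(done)  # True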
+ + :ivar auth_type: The type of the auth setting. Required. + :vartype auth_type: str + """ + + _validation = { + "auth_type": {"required": True}, + } + + _attribute_map = { + "auth_type": {"key": "authType", "type": "str"}, + } + + _subtype_map = { + "auth_type": { + "BasicAuth": "AcceleratorBasicAuthSetting", + "Public": "AcceleratorPublicSetting", + "SSH": "AcceleratorSshSetting", + } + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.auth_type = None # type: Optional[str] + + +class AcceleratorBasicAuthSetting(AcceleratorAuthSetting): + """Auth setting for basic auth. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_type: The type of the auth setting. Required. + :vartype auth_type: str + :ivar username: Username of git repository basic auth. Required. + :vartype username: str + :ivar password: Password of git repository basic auth. + :vartype password: str + """ + + _validation = { + "auth_type": {"required": True}, + "username": {"required": True}, + } + + _attribute_map = { + "auth_type": {"key": "authType", "type": "str"}, + "username": {"key": "username", "type": "str"}, + "password": {"key": "password", "type": "str"}, + } + + def __init__(self, *, username: str, password: Optional[str] = None, **kwargs): + """ + :keyword username: Username of git repository basic auth. Required. + :paramtype username: str + :keyword password: Password of git repository basic auth. + :paramtype password: str + """ + super().__init__(**kwargs) + self.auth_type = "BasicAuth" # type: str + self.username = username + self.password = password + + +class AcceleratorGitRepository(_serialization.Model): + """AcceleratorGitRepository. + + All required parameters must be populated in order to send to Azure. + + :ivar url: Git repository URL for the accelerator. Required. + :vartype url: str + :ivar interval_in_seconds: Interval for checking for updates to Git or image repository. + :vartype interval_in_seconds: int + :ivar branch: Git repository branch to be used. + :vartype branch: str + :ivar commit: Git repository commit to be used. + :vartype commit: str + :ivar git_tag: Git repository tag to be used. + :vartype git_tag: str + :ivar auth_setting: Properties of the auth setting payload. Required. + :vartype auth_setting: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.AcceleratorAuthSetting + """ + + _validation = { + "url": {"required": True}, + "auth_setting": {"required": True}, + } + + _attribute_map = { + "url": {"key": "url", "type": "str"}, + "interval_in_seconds": {"key": "intervalInSeconds", "type": "int"}, + "branch": {"key": "branch", "type": "str"}, + "commit": {"key": "commit", "type": "str"}, + "git_tag": {"key": "gitTag", "type": "str"}, + "auth_setting": {"key": "authSetting", "type": "AcceleratorAuthSetting"}, + } + + def __init__( + self, + *, + url: str, + auth_setting: "_models.AcceleratorAuthSetting", + interval_in_seconds: Optional[int] = None, + branch: Optional[str] = None, + commit: Optional[str] = None, + git_tag: Optional[str] = None, + **kwargs + ): + """ + :keyword url: Git repository URL for the accelerator. Required. + :paramtype url: str + :keyword interval_in_seconds: Interval for checking for updates to Git or image repository. + :paramtype interval_in_seconds: int + :keyword branch: Git repository branch to be used. + :paramtype branch: str + :keyword commit: Git repository commit to be used. + :paramtype commit: str + :keyword git_tag: Git repository tag to be used. 
+ :paramtype git_tag: str + :keyword auth_setting: Properties of the auth setting payload. Required. + :paramtype auth_setting: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.AcceleratorAuthSetting + """ + super().__init__(**kwargs) + self.url = url + self.interval_in_seconds = interval_in_seconds + self.branch = branch + self.commit = commit + self.git_tag = git_tag + self.auth_setting = auth_setting + + +class AcceleratorPublicSetting(AcceleratorAuthSetting): + """Auth setting for public url. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_type: The type of the auth setting. Required. + :vartype auth_type: str + """ + + _validation = { + "auth_type": {"required": True}, + } + + _attribute_map = { + "auth_type": {"key": "authType", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.auth_type = "Public" # type: str + + +class AcceleratorSshSetting(AcceleratorAuthSetting): + """Auth setting for SSH auth. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_type: The type of the auth setting. Required. + :vartype auth_type: str + :ivar host_key: Public SSH Key of git repository. + :vartype host_key: str + :ivar host_key_algorithm: SSH Key algorithm of git repository. + :vartype host_key_algorithm: str + :ivar private_key: Private SSH Key algorithm of git repository. + :vartype private_key: str + """ + + _validation = { + "auth_type": {"required": True}, + } + + _attribute_map = { + "auth_type": {"key": "authType", "type": "str"}, + "host_key": {"key": "hostKey", "type": "str"}, + "host_key_algorithm": {"key": "hostKeyAlgorithm", "type": "str"}, + "private_key": {"key": "privateKey", "type": "str"}, + } + + def __init__( + self, + *, + host_key: Optional[str] = None, + host_key_algorithm: Optional[str] = None, + private_key: Optional[str] = None, + **kwargs + ): + """ + :keyword host_key: Public SSH Key of git repository. + :paramtype host_key: str + :keyword host_key_algorithm: SSH Key algorithm of git repository. + :paramtype host_key_algorithm: str + :keyword private_key: Private SSH Key algorithm of git repository. + :paramtype private_key: str + """ + super().__init__(**kwargs) + self.auth_type = "SSH" # type: str + self.host_key = host_key + self.host_key_algorithm = host_key_algorithm + self.private_key = private_key + + +class ActiveDeploymentCollection(_serialization.Model): + """Object that includes an array of Deployment resource name and set them as active. + + :ivar active_deployment_names: Collection of Deployment name. + :vartype active_deployment_names: list[str] + """ + + _attribute_map = { + "active_deployment_names": {"key": "activeDeploymentNames", "type": "[str]"}, + } + + def __init__(self, *, active_deployment_names: Optional[List[str]] = None, **kwargs): + """ + :keyword active_deployment_names: Collection of Deployment name. + :paramtype active_deployment_names: list[str] + """ + super().__init__(**kwargs) + self.active_deployment_names = active_deployment_names + + +class ApiPortalCustomDomainProperties(_serialization.Model): + """The properties of custom domain for API portal. + + :ivar thumbprint: The thumbprint of bound certificate. + :vartype thumbprint: str + """ + + _attribute_map = { + "thumbprint": {"key": "thumbprint", "type": "str"}, + } + + def __init__(self, *, thumbprint: Optional[str] = None, **kwargs): + """ + :keyword thumbprint: The thumbprint of bound certificate. 
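The accelerator auth settings follow a discriminated-subtype pattern: AcceleratorAuthSetting registers its known subclasses in _subtype_map, and each subclass pins auth_type to its discriminator value. A sketch of building a Git repository payload with SSH auth, assuming the vendored package is importable; the URL and key material are placeholders, and serialize() is the helper on the vendored _serialization.Model base class:

from azext_spring.vendored_sdks.appplatform.v2022_11_01_preview.models import (
    AcceleratorGitRepository,
    AcceleratorSshSetting,
)

repo = AcceleratorGitRepository(
    url="https://example.com/org/accelerator.git",  # placeholder repository
    branch="main",
    interval_in_seconds=600,
    auth_setting=AcceleratorSshSetting(
        private_key="-----BEGIN OPENSSH PRIVATE KEY-----\n...",  # placeholder key
        host_key_algorithm="ssh-rsa",
    ),
)

# The subclass constructor pins the discriminator, so the wire payload
# identifies itself as an SSH setting.
assert repo.auth_setting.auth_type == "SSH"
body = repo.serialize()  # keys follow the wire names from _attribute_map
print(body["authSetting"]["authType"])  # "SSH"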
+ :paramtype thumbprint: str + """ + super().__init__(**kwargs) + self.thumbprint = thumbprint + + +class Resource(_serialization.Model): + """The core properties of ARM resources. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.system_data = None + + +class ProxyResource(Resource): + """The resource model definition for a ARM proxy resource. It will have everything other than required location and tags. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + + +class ApiPortalCustomDomainResource(ProxyResource): + """Custom domain of the API portal. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: The properties of custom domain for API portal. 
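Resource, ProxyResource and the concrete resources built on them mark id, name, type and system_data as read-only: they are populated when a response is deserialized and dropped again when a request body is serialized. A small sketch of that behaviour, assuming the vendored package is importable (the response payload is invented):

from azext_spring.vendored_sdks.appplatform.v2022_11_01_preview.models import (
    ApiPortalCustomDomainProperties,
    ApiPortalCustomDomainResource,
)

# Deserializing a (made-up) GET response fills in the read-only ARM fields.
resource = ApiPortalCustomDomainResource.deserialize({
    "id": "/subscriptions/xxx/resourceGroups/rg/providers/Microsoft.AppPlatform"
          "/Spring/svc/apiPortals/default/domains/portal.example.com",
    "name": "portal.example.com",
    "type": "Microsoft.AppPlatform/Spring/apiPortals/domains",
    "properties": {"thumbprint": "ABCDEF"},
})
print(resource.name, resource.properties.thumbprint)

# Going the other way, read-only fields are omitted from the request body.
body = ApiPortalCustomDomainResource(
    properties=ApiPortalCustomDomainProperties(thumbprint="ABCDEF"),
).serialize()
print(body)  # {'properties': {'thumbprint': 'ABCDEF'}}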
+ :vartype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalCustomDomainProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ApiPortalCustomDomainProperties"}, + } + + def __init__(self, *, properties: Optional["_models.ApiPortalCustomDomainProperties"] = None, **kwargs): + """ + :keyword properties: The properties of custom domain for API portal. + :paramtype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalCustomDomainProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class ApiPortalCustomDomainResourceCollection(_serialization.Model): + """Object that includes an array of API portal custom domain resources and a possible link for next set. + + :ivar value: Collection of API portal custom domain resources. + :vartype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalCustomDomainResource] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[ApiPortalCustomDomainResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, + *, + value: Optional[List["_models.ApiPortalCustomDomainResource"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword value: Collection of API portal custom domain resources. + :paramtype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalCustomDomainResource] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class ApiPortalInstance(_serialization.Model): + """Collection of instances belong to the API portal. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Name of the API portal instance. + :vartype name: str + :ivar status: Status of the API portal instance. + :vartype status: str + """ + + _validation = { + "name": {"readonly": True}, + "status": {"readonly": True}, + } + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "status": {"key": "status", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.name = None + self.status = None + + +class ApiPortalProperties(_serialization.Model): + """API portal properties payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provisioning_state: State of the API portal. Known values are: "Creating", "Updating", + "Succeeded", "Failed", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalProvisioningState + :ivar public: Indicates whether the API portal exposes endpoint. + :vartype public: bool + :ivar url: URL of the API portal, exposed when 'public' is true. + :vartype url: str + :ivar https_only: Indicate if only https is allowed. 
+ :vartype https_only: bool + :ivar gateway_ids: The array of resource Ids of gateway to integrate with API portal. + :vartype gateway_ids: list[str] + :ivar source_urls: Collection of OpenAPI source URL locations. + :vartype source_urls: list[str] + :ivar sso_properties: Single sign-on related configuration. + :vartype sso_properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SsoProperties + :ivar resource_requests: The requested resource quantity for required CPU and Memory. + :vartype resource_requests: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalResourceRequests + :ivar instances: Collection of instances belong to API portal. + :vartype instances: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalInstance] + """ + + _validation = { + "provisioning_state": {"readonly": True}, + "url": {"readonly": True}, + "resource_requests": {"readonly": True}, + "instances": {"readonly": True}, + } + + _attribute_map = { + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "public": {"key": "public", "type": "bool"}, + "url": {"key": "url", "type": "str"}, + "https_only": {"key": "httpsOnly", "type": "bool"}, + "gateway_ids": {"key": "gatewayIds", "type": "[str]"}, + "source_urls": {"key": "sourceUrls", "type": "[str]"}, + "sso_properties": {"key": "ssoProperties", "type": "SsoProperties"}, + "resource_requests": {"key": "resourceRequests", "type": "ApiPortalResourceRequests"}, + "instances": {"key": "instances", "type": "[ApiPortalInstance]"}, + } + + def __init__( + self, + *, + public: bool = False, + https_only: bool = False, + gateway_ids: Optional[List[str]] = None, + source_urls: Optional[List[str]] = None, + sso_properties: Optional["_models.SsoProperties"] = None, + **kwargs + ): + """ + :keyword public: Indicates whether the API portal exposes endpoint. + :paramtype public: bool + :keyword https_only: Indicate if only https is allowed. + :paramtype https_only: bool + :keyword gateway_ids: The array of resource Ids of gateway to integrate with API portal. + :paramtype gateway_ids: list[str] + :keyword source_urls: Collection of OpenAPI source URL locations. + :paramtype source_urls: list[str] + :keyword sso_properties: Single sign-on related configuration. + :paramtype sso_properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SsoProperties + """ + super().__init__(**kwargs) + self.provisioning_state = None + self.public = public + self.url = None + self.https_only = https_only + self.gateway_ids = gateway_ids + self.source_urls = source_urls + self.sso_properties = sso_properties + self.resource_requests = None + self.instances = None + + +class ApiPortalResource(ProxyResource): + """API portal resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: API portal properties payload. + :vartype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalProperties + :ivar sku: Sku of the API portal resource. 
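ApiPortalProperties is the request payload for the API portal itself: public, https_only, gateway_ids, source_urls and sso_properties are caller-settable, while url, resource_requests and instances are read-only and only ever come back from the service. A hedged sketch with placeholder resource IDs and URLs:

from azext_spring.vendored_sdks.appplatform.v2022_11_01_preview.models import (
    ApiPortalProperties,
)

props = ApiPortalProperties(
    public=True,
    https_only=True,
    gateway_ids=[
        "/subscriptions/xxx/resourceGroups/rg/providers/Microsoft.AppPlatform"
        "/Spring/svc/gateways/default"
    ],
    source_urls=["https://example.com/openapi.json"],
)

# Read-only members stay None locally; the service populates them in responses.
assert props.url is None and props.instances is None
print(props.serialize())  # keys follow the wire names, e.g. httpsOnly / gatewayIds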
+ :vartype sku: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Sku + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ApiPortalProperties"}, + "sku": {"key": "sku", "type": "Sku"}, + } + + def __init__( + self, + *, + properties: Optional["_models.ApiPortalProperties"] = None, + sku: Optional["_models.Sku"] = None, + **kwargs + ): + """ + :keyword properties: API portal properties payload. + :paramtype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalProperties + :keyword sku: Sku of the API portal resource. + :paramtype sku: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Sku + """ + super().__init__(**kwargs) + self.properties = properties + self.sku = sku + + +class ApiPortalResourceCollection(_serialization.Model): + """Object that includes an array of API portal resources and a possible link for next set. + + :ivar value: Collection of API portal resources. + :vartype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalResource] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[ApiPortalResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, *, value: Optional[List["_models.ApiPortalResource"]] = None, next_link: Optional[str] = None, **kwargs + ): + """ + :keyword value: Collection of API portal resources. + :paramtype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalResource] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class ApiPortalResourceRequests(_serialization.Model): + """Resource requests of the API portal. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar cpu: Cpu allocated to each API portal instance. + :vartype cpu: str + :ivar memory: Memory allocated to each API portal instance. + :vartype memory: str + """ + + _validation = { + "cpu": {"readonly": True}, + "memory": {"readonly": True}, + } + + _attribute_map = { + "cpu": {"key": "cpu", "type": "str"}, + "memory": {"key": "memory", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.cpu = None + self.memory = None + + +class ApplicationAcceleratorComponent(_serialization.Model): + """ApplicationAcceleratorComponent. + + Variables are only populated by the server, and will be ignored when sending a request. 
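Every *ResourceCollection model in this file is the same paging envelope: a value list plus an optional next_link. A sketch of walking pages by hand; fetch_page is a hypothetical transport hook standing in for whatever call returns the raw JSON of one page:

from typing import Any, Callable, Dict, Iterator, Optional

from azext_spring.vendored_sdks.appplatform.v2022_11_01_preview.models import (
    ApiPortalResource,
    ApiPortalResourceCollection,
)


def iter_api_portals(
    fetch_page: Callable[[Optional[str]], Dict[str, Any]],  # hypothetical hook
) -> Iterator[ApiPortalResource]:
    """Yield every ApiPortalResource across pages by following next_link."""
    next_link: Optional[str] = None
    while True:
        page = ApiPortalResourceCollection.deserialize(fetch_page(next_link))
        for item in page.value or []:
            yield item
        if not page.next_link:
            break
        next_link = page.next_link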
+ + :ivar name: + :vartype name: str + :ivar resource_requests: + :vartype resource_requests: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorResourceRequests + :ivar instances: + :vartype instances: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorInstance] + """ + + _validation = { + "name": {"readonly": True}, + "instances": {"readonly": True}, + } + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "resource_requests": {"key": "resourceRequests", "type": "ApplicationAcceleratorResourceRequests"}, + "instances": {"key": "instances", "type": "[ApplicationAcceleratorInstance]"}, + } + + def __init__( + self, *, resource_requests: Optional["_models.ApplicationAcceleratorResourceRequests"] = None, **kwargs + ): + """ + :keyword resource_requests: + :paramtype resource_requests: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorResourceRequests + """ + super().__init__(**kwargs) + self.name = None + self.resource_requests = resource_requests + self.instances = None + + +class ApplicationAcceleratorInstance(_serialization.Model): + """ApplicationAcceleratorInstance. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Name of the Application Accelerator instance. + :vartype name: str + :ivar status: Status of the Application Accelerator instance. It can be Pending, Running, + Succeeded, Failed, Unknown. + :vartype status: str + """ + + _validation = { + "name": {"readonly": True}, + "status": {"readonly": True}, + } + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "status": {"key": "status", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.name = None + self.status = None + + +class ApplicationAcceleratorProperties(_serialization.Model): + """Application accelerator properties payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provisioning_state: State of the application accelerator. Known values are: "Creating", + "Updating", "Succeeded", "Failed", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorProvisioningState + :ivar components: Collection of components belong to application accelerator. + :vartype components: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorComponent] + """ + + _validation = { + "provisioning_state": {"readonly": True}, + "components": {"readonly": True}, + } + + _attribute_map = { + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "components": {"key": "components", "type": "[ApplicationAcceleratorComponent]"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.provisioning_state = None + self.components = None + + +class ApplicationAcceleratorResource(ProxyResource): + """Application accelerator resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Application accelerator properties payload. 
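ApplicationAcceleratorProperties exposes a read-only tree of components, each with its own instances whose status is one of Pending, Running, Succeeded, Failed or Unknown. A small helper one might layer on top of a deserialized resource; it is purely illustrative and not part of the vendored SDK:

from collections import Counter
from typing import Dict

from azext_spring.vendored_sdks.appplatform.v2022_11_01_preview.models import (
    ApplicationAcceleratorProperties,
)


def accelerator_instance_summary(props: ApplicationAcceleratorProperties) -> Dict[str, int]:
    """Count accelerator instances by status across all components."""
    counts: Counter = Counter()
    for component in props.components or []:
        for instance in component.instances or []:
            counts[instance.status or "Unknown"] += 1
    return dict(counts)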
+ :vartype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorProperties + :ivar sku: Sku of the application accelerator resource. + :vartype sku: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Sku + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ApplicationAcceleratorProperties"}, + "sku": {"key": "sku", "type": "Sku"}, + } + + def __init__( + self, + *, + properties: Optional["_models.ApplicationAcceleratorProperties"] = None, + sku: Optional["_models.Sku"] = None, + **kwargs + ): + """ + :keyword properties: Application accelerator properties payload. + :paramtype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorProperties + :keyword sku: Sku of the application accelerator resource. + :paramtype sku: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Sku + """ + super().__init__(**kwargs) + self.properties = properties + self.sku = sku + + +class ApplicationAcceleratorResourceCollection(_serialization.Model): + """Object that includes an array of application accelerator resources and a possible link for next set. + + :ivar value: Collection of application accelerator resources. + :vartype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorResource] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[ApplicationAcceleratorResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, + *, + value: Optional[List["_models.ApplicationAcceleratorResource"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword value: Collection of application accelerator resources. + :paramtype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorResource] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class ApplicationAcceleratorResourceRequests(_serialization.Model): + """ApplicationAcceleratorResourceRequests. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar cpu: Cpu allocated to each application accelerator component. 1 core can be represented + by 1 or 1000m. + :vartype cpu: str + :ivar memory: Memory allocated to each application accelerator component. 1 GB can be + represented by 1Gi or 1024Mi. + :vartype memory: str + :ivar instance_count: Instance count of the application accelerator component. 
+ :vartype instance_count: int + """ + + _validation = { + "cpu": {"readonly": True}, + "memory": {"readonly": True}, + "instance_count": {"readonly": True}, + } + + _attribute_map = { + "cpu": {"key": "cpu", "type": "str"}, + "memory": {"key": "memory", "type": "str"}, + "instance_count": {"key": "instanceCount", "type": "int"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.cpu = None + self.memory = None + self.instance_count = None + + +class ApplicationInsightsAgentVersions(_serialization.Model): + """Application Insights agent versions properties payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar java: Indicates the version of application insight java agent. + :vartype java: str + """ + + _validation = { + "java": {"readonly": True}, + } + + _attribute_map = { + "java": {"key": "java", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.java = None + + +class ApplicationLiveViewComponent(_serialization.Model): + """Application Live View properties payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Name of the component. + :vartype name: any + :ivar resource_requests: The requested resource quantity for required CPU and Memory. + :vartype resource_requests: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewResourceRequests + :ivar instances: Collection of instances belong to Application Live View. + :vartype instances: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewInstance] + """ + + _validation = { + "name": {"readonly": True}, + "resource_requests": {"readonly": True}, + "instances": {"readonly": True}, + } + + _attribute_map = { + "name": {"key": "name", "type": "object"}, + "resource_requests": {"key": "resourceRequests", "type": "ApplicationLiveViewResourceRequests"}, + "instances": {"key": "instances", "type": "[ApplicationLiveViewInstance]"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.name = None + self.resource_requests = None + self.instances = None + + +class ApplicationLiveViewInstance(_serialization.Model): + """Collection of instances belong to the Application Live View. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Name of the Application Live View instance. + :vartype name: str + :ivar status: Status of the Application Live View instance. It can be Pending, Running, + Succeeded, Failed, Unknown. + :vartype status: str + """ + + _validation = { + "name": {"readonly": True}, + "status": {"readonly": True}, + } + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "status": {"key": "status", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.name = None + self.status = None + + +class ApplicationLiveViewProperties(_serialization.Model): + """Application Live View properties payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provisioning_state: State of the Application Live View. Known values are: "Creating", + "Updating", "Succeeded", "Failed", "Deleting", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewProvisioningState + :ivar components: Component details of Application Live View. 
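The resource-request models above express CPU either as whole cores ("1") or millicores ("1000m"), and memory in binary units ("1Gi", "1024Mi"). Helpers like the following are sometimes layered on top of those read-only strings; they are illustrative only and not part of the SDK:

def cpu_to_millicores(cpu: str) -> int:
    """Convert '1' / '500m' style CPU quantities to millicores."""
    return int(cpu[:-1]) if cpu.endswith("m") else int(float(cpu) * 1000)


def memory_to_mebibytes(memory: str) -> int:
    """Convert '1Gi' / '1024Mi' style memory quantities to MiB."""
    if memory.endswith("Gi"):
        return int(float(memory[:-2]) * 1024)
    if memory.endswith("Mi"):
        return int(float(memory[:-2]))
    raise ValueError(f"unrecognized memory quantity: {memory}")


assert cpu_to_millicores("500m") == 500 and cpu_to_millicores("1") == 1000
assert memory_to_mebibytes("1Gi") == 1024 and memory_to_mebibytes("1024Mi") == 1024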
+ :vartype components: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewComponent] + """ + + _validation = { + "provisioning_state": {"readonly": True}, + "components": {"readonly": True}, + } + + _attribute_map = { + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "components": {"key": "components", "type": "[ApplicationLiveViewComponent]"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.provisioning_state = None + self.components = None + + +class ApplicationLiveViewResource(ProxyResource): + """Application Live View resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Application Live View properties payload. + :vartype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ApplicationLiveViewProperties"}, + } + + def __init__(self, *, properties: Optional["_models.ApplicationLiveViewProperties"] = None, **kwargs): + """ + :keyword properties: Application Live View properties payload. + :paramtype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class ApplicationLiveViewResourceCollection(_serialization.Model): + """Object that includes an array of Application Live View resources and a possible link for next set. + + :ivar value: Collection of Application Live View resources. + :vartype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewResource] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[ApplicationLiveViewResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, + *, + value: Optional[List["_models.ApplicationLiveViewResource"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword value: Collection of Application Live View resources. + :paramtype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewResource] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class ApplicationLiveViewResourceRequests(_serialization.Model): + """The resource quantity for required CPU and Memory of Application Live View component. + + Variables are only populated by the server, and will be ignored when sending a request. 
+ + :ivar cpu: Cpu quantity allocated to each Application Live View component instance. 1 core can + be represented by 1 or 1000m. + :vartype cpu: str + :ivar memory: Memory quantity allocated to each Application Live View component instance. 1 GB + can be represented by 1Gi or 1024Mi. + :vartype memory: str + :ivar instance_count: Desired instance count of Application Live View component instance. + :vartype instance_count: int + """ + + _validation = { + "cpu": {"readonly": True}, + "memory": {"readonly": True}, + "instance_count": {"readonly": True}, + } + + _attribute_map = { + "cpu": {"key": "cpu", "type": "str"}, + "memory": {"key": "memory", "type": "str"}, + "instance_count": {"key": "instanceCount", "type": "int"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.cpu = None + self.memory = None + self.instance_count = None + + +class AppResource(ProxyResource): + """App resource payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Properties of the App resource. + :vartype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResourceProperties + :ivar identity: The Managed Identity type of the app resource. + :vartype identity: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ManagedIdentityProperties + :ivar location: The GEO location of the application, always the same with its parent resource. + :vartype location: str + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "AppResourceProperties"}, + "identity": {"key": "identity", "type": "ManagedIdentityProperties"}, + "location": {"key": "location", "type": "str"}, + } + + def __init__( + self, + *, + properties: Optional["_models.AppResourceProperties"] = None, + identity: Optional["_models.ManagedIdentityProperties"] = None, + location: Optional[str] = None, + **kwargs + ): + """ + :keyword properties: Properties of the App resource. + :paramtype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResourceProperties + :keyword identity: The Managed Identity type of the app resource. + :paramtype identity: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ManagedIdentityProperties + :keyword location: The GEO location of the application, always the same with its parent + resource. + :paramtype location: str + """ + super().__init__(**kwargs) + self.properties = properties + self.identity = identity + self.location = location + + +class AppResourceCollection(_serialization.Model): + """Object that includes an array of App resources and a possible link for next set. + + :ivar value: Collection of App resources. 
+ :vartype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[AppResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, *, value: Optional[List["_models.AppResource"]] = None, next_link: Optional[str] = None, **kwargs + ): + """ + :keyword value: Collection of App resources. + :paramtype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class AppResourceProperties(_serialization.Model): # pylint: disable=too-many-instance-attributes + """App resource properties payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar public: Indicates whether the App exposes public endpoint. + :vartype public: bool + :ivar url: URL of the App. + :vartype url: str + :ivar addon_configs: Collection of addons. + :vartype addon_configs: dict[str, dict[str, JSON]] + :ivar provisioning_state: Provisioning state of the App. Known values are: "Succeeded", + "Failed", "Creating", "Updating", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResourceProvisioningState + :ivar fqdn: Fully qualified dns Name. + :vartype fqdn: str + :ivar https_only: Indicate if only https is allowed. + :vartype https_only: bool + :ivar temporary_disk: Temporary disk settings. + :vartype temporary_disk: ~azure.mgmt.appplatform.v2022_11_01_preview.models.TemporaryDisk + :ivar persistent_disk: Persistent disk settings. + :vartype persistent_disk: ~azure.mgmt.appplatform.v2022_11_01_preview.models.PersistentDisk + :ivar custom_persistent_disks: List of custom persistent disks. + :vartype custom_persistent_disks: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomPersistentDiskResource] + :ivar enable_end_to_end_tls: Indicate if end to end TLS is enabled. + :vartype enable_end_to_end_tls: bool + :ivar loaded_certificates: Collection of loaded certificates. + :vartype loaded_certificates: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.LoadedCertificate] + :ivar vnet_addons: Additional App settings in vnet injection instance. + :vartype vnet_addons: ~azure.mgmt.appplatform.v2022_11_01_preview.models.AppVNetAddons + :ivar ingress_settings: App ingress settings payload. 
+ :vartype ingress_settings: ~azure.mgmt.appplatform.v2022_11_01_preview.models.IngressSettings + """ + + _validation = { + "url": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "fqdn": {"readonly": True}, + } + + _attribute_map = { + "public": {"key": "public", "type": "bool"}, + "url": {"key": "url", "type": "str"}, + "addon_configs": {"key": "addonConfigs", "type": "{{object}}"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "fqdn": {"key": "fqdn", "type": "str"}, + "https_only": {"key": "httpsOnly", "type": "bool"}, + "temporary_disk": {"key": "temporaryDisk", "type": "TemporaryDisk"}, + "persistent_disk": {"key": "persistentDisk", "type": "PersistentDisk"}, + "custom_persistent_disks": {"key": "customPersistentDisks", "type": "[CustomPersistentDiskResource]"}, + "enable_end_to_end_tls": {"key": "enableEndToEndTLS", "type": "bool"}, + "loaded_certificates": {"key": "loadedCertificates", "type": "[LoadedCertificate]"}, + "vnet_addons": {"key": "vnetAddons", "type": "AppVNetAddons"}, + "ingress_settings": {"key": "ingressSettings", "type": "IngressSettings"}, + } + + def __init__( + self, + *, + public: Optional[bool] = None, + addon_configs: Optional[Dict[str, Dict[str, JSON]]] = None, + https_only: bool = False, + temporary_disk: Optional["_models.TemporaryDisk"] = None, + persistent_disk: Optional["_models.PersistentDisk"] = None, + custom_persistent_disks: Optional[List["_models.CustomPersistentDiskResource"]] = None, + enable_end_to_end_tls: bool = False, + loaded_certificates: Optional[List["_models.LoadedCertificate"]] = None, + vnet_addons: Optional["_models.AppVNetAddons"] = None, + ingress_settings: Optional["_models.IngressSettings"] = None, + **kwargs + ): + """ + :keyword public: Indicates whether the App exposes public endpoint. + :paramtype public: bool + :keyword addon_configs: Collection of addons. + :paramtype addon_configs: dict[str, dict[str, JSON]] + :keyword https_only: Indicate if only https is allowed. + :paramtype https_only: bool + :keyword temporary_disk: Temporary disk settings. + :paramtype temporary_disk: ~azure.mgmt.appplatform.v2022_11_01_preview.models.TemporaryDisk + :keyword persistent_disk: Persistent disk settings. + :paramtype persistent_disk: ~azure.mgmt.appplatform.v2022_11_01_preview.models.PersistentDisk + :keyword custom_persistent_disks: List of custom persistent disks. + :paramtype custom_persistent_disks: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomPersistentDiskResource] + :keyword enable_end_to_end_tls: Indicate if end to end TLS is enabled. + :paramtype enable_end_to_end_tls: bool + :keyword loaded_certificates: Collection of loaded certificates. + :paramtype loaded_certificates: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.LoadedCertificate] + :keyword vnet_addons: Additional App settings in vnet injection instance. + :paramtype vnet_addons: ~azure.mgmt.appplatform.v2022_11_01_preview.models.AppVNetAddons + :keyword ingress_settings: App ingress settings payload. 
+ :paramtype ingress_settings: ~azure.mgmt.appplatform.v2022_11_01_preview.models.IngressSettings + """ + super().__init__(**kwargs) + self.public = public + self.url = None + self.addon_configs = addon_configs + self.provisioning_state = None + self.fqdn = None + self.https_only = https_only + self.temporary_disk = temporary_disk + self.persistent_disk = persistent_disk + self.custom_persistent_disks = custom_persistent_disks + self.enable_end_to_end_tls = enable_end_to_end_tls + self.loaded_certificates = loaded_certificates + self.vnet_addons = vnet_addons + self.ingress_settings = ingress_settings + + +class AppVNetAddons(_serialization.Model): + """Additional App settings in vnet injection instance. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar public_endpoint: Indicates whether the App in vnet injection instance exposes endpoint + which could be accessed from internet. + :vartype public_endpoint: bool + :ivar public_endpoint_url: URL of the App in vnet injection instance which could be accessed + from internet. + :vartype public_endpoint_url: str + """ + + _validation = { + "public_endpoint_url": {"readonly": True}, + } + + _attribute_map = { + "public_endpoint": {"key": "publicEndpoint", "type": "bool"}, + "public_endpoint_url": {"key": "publicEndpointUrl", "type": "str"}, + } + + def __init__(self, *, public_endpoint: bool = False, **kwargs): + """ + :keyword public_endpoint: Indicates whether the App in vnet injection instance exposes endpoint + which could be accessed from internet. + :paramtype public_endpoint: bool + """ + super().__init__(**kwargs) + self.public_endpoint = public_endpoint + self.public_endpoint_url = None + + +class AvailableOperations(_serialization.Model): + """Available operations of the service. + + :ivar value: Collection of available operation details. + :vartype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.OperationDetail] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[OperationDetail]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, *, value: Optional[List["_models.OperationDetail"]] = None, next_link: Optional[str] = None, **kwargs + ): + """ + :keyword value: Collection of available operation details. + :paramtype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.OperationDetail] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class AvailableRuntimeVersions(_serialization.Model): + """AvailableRuntimeVersions. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: A list of all supported runtime versions. + :vartype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.SupportedRuntimeVersion] + """ + + _validation = { + "value": {"readonly": True}, + } + + _attribute_map = { + "value": {"key": "value", "type": "[SupportedRuntimeVersion]"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.value = None + + +class CustomPersistentDiskProperties(_serialization.Model): + """Custom persistent disk resource payload. 
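AppResourceProperties is the writable part of an App: flags such as public, https_only and enable_end_to_end_tls go out in requests, while url, fqdn and provisioning_state only come back from the service. A minimal construction sketch, assuming the vendored package is importable:

from azext_spring.vendored_sdks.appplatform.v2022_11_01_preview.models import (
    AppResource,
    AppResourceProperties,
    AppVNetAddons,
)

app = AppResource(
    properties=AppResourceProperties(
        public=True,
        https_only=True,
        enable_end_to_end_tls=True,
        vnet_addons=AppVNetAddons(public_endpoint=False),
    ),
)

body = app.serialize()  # read-only fields (url, fqdn, provisioningState, ...) are omitted
print(body["properties"]["httpsOnly"], body["properties"]["enableEndToEndTLS"])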
+ + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AzureFileVolume + + All required parameters must be populated in order to send to Azure. + + :ivar type: The type of the underlying resource to mount as a persistent disk. Required. + "AzureFileVolume" + :vartype type: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.Type + :ivar mount_path: The mount path of the persistent disk. Required. + :vartype mount_path: str + :ivar read_only: Indicates whether the persistent disk is a readOnly one. + :vartype read_only: bool + :ivar mount_options: These are the mount options for a persistent disk. + :vartype mount_options: list[str] + """ + + _validation = { + "type": {"required": True}, + "mount_path": {"required": True}, + } + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "mount_path": {"key": "mountPath", "type": "str"}, + "read_only": {"key": "readOnly", "type": "bool"}, + "mount_options": {"key": "mountOptions", "type": "[str]"}, + } + + _subtype_map = {"type": {"AzureFileVolume": "AzureFileVolume"}} + + def __init__( + self, *, mount_path: str, read_only: Optional[bool] = None, mount_options: Optional[List[str]] = None, **kwargs + ): + """ + :keyword mount_path: The mount path of the persistent disk. Required. + :paramtype mount_path: str + :keyword read_only: Indicates whether the persistent disk is a readOnly one. + :paramtype read_only: bool + :keyword mount_options: These are the mount options for a persistent disk. + :paramtype mount_options: list[str] + """ + super().__init__(**kwargs) + self.type = None # type: Optional[str] + self.mount_path = mount_path + self.read_only = read_only + self.mount_options = mount_options + + +class AzureFileVolume(CustomPersistentDiskProperties): + """The properties of the Azure File volume. Azure File shares are mounted as volumes. + + All required parameters must be populated in order to send to Azure. + + :ivar type: The type of the underlying resource to mount as a persistent disk. Required. + "AzureFileVolume" + :vartype type: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.Type + :ivar mount_path: The mount path of the persistent disk. Required. + :vartype mount_path: str + :ivar read_only: Indicates whether the persistent disk is a readOnly one. + :vartype read_only: bool + :ivar mount_options: These are the mount options for a persistent disk. + :vartype mount_options: list[str] + :ivar share_name: The share name of the Azure File share. Required. + :vartype share_name: str + """ + + _validation = { + "type": {"required": True}, + "mount_path": {"required": True}, + "share_name": {"required": True}, + } + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "mount_path": {"key": "mountPath", "type": "str"}, + "read_only": {"key": "readOnly", "type": "bool"}, + "mount_options": {"key": "mountOptions", "type": "[str]"}, + "share_name": {"key": "shareName", "type": "str"}, + } + + def __init__( + self, + *, + mount_path: str, + share_name: str, + read_only: Optional[bool] = None, + mount_options: Optional[List[str]] = None, + **kwargs + ): + """ + :keyword mount_path: The mount path of the persistent disk. Required. + :paramtype mount_path: str + :keyword read_only: Indicates whether the persistent disk is a readOnly one. + :paramtype read_only: bool + :keyword mount_options: These are the mount options for a persistent disk. + :paramtype mount_options: list[str] + :keyword share_name: The share name of the Azure File share. Required. 
+ :paramtype share_name: str + """ + super().__init__(mount_path=mount_path, read_only=read_only, mount_options=mount_options, **kwargs) + self.type = "AzureFileVolume" # type: str + self.share_name = share_name + + +class BindingResource(ProxyResource): + """Binding resource payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Properties of the Binding resource. + :vartype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResourceProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "BindingResourceProperties"}, + } + + def __init__(self, *, properties: Optional["_models.BindingResourceProperties"] = None, **kwargs): + """ + :keyword properties: Properties of the Binding resource. + :paramtype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResourceProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class BindingResourceCollection(_serialization.Model): + """Object that includes an array of Binding resources and a possible link for next set. + + :ivar value: Collection of Binding resources. + :vartype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[BindingResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, *, value: Optional[List["_models.BindingResource"]] = None, next_link: Optional[str] = None, **kwargs + ): + """ + :keyword value: Collection of Binding resources. + :paramtype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class BindingResourceProperties(_serialization.Model): + """Binding resource properties payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar resource_name: The name of the bound resource. + :vartype resource_name: str + :ivar resource_type: The standard Azure resource type of the bound resource. + :vartype resource_type: str + :ivar resource_id: The Azure resource id of the bound resource. + :vartype resource_id: str + :ivar key: The key of the bound resource. + :vartype key: str + :ivar binding_parameters: Binding parameters of the Binding resource. 
+ :vartype binding_parameters: dict[str, JSON] + :ivar generated_properties: The generated Spring Boot property file for this binding. The + secret will be deducted. + :vartype generated_properties: str + :ivar created_at: Creation time of the Binding resource. + :vartype created_at: str + :ivar updated_at: Update time of the Binding resource. + :vartype updated_at: str + """ + + _validation = { + "resource_name": {"readonly": True}, + "resource_type": {"readonly": True}, + "generated_properties": {"readonly": True}, + "created_at": {"readonly": True}, + "updated_at": {"readonly": True}, + } + + _attribute_map = { + "resource_name": {"key": "resourceName", "type": "str"}, + "resource_type": {"key": "resourceType", "type": "str"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "key": {"key": "key", "type": "str"}, + "binding_parameters": {"key": "bindingParameters", "type": "{object}"}, + "generated_properties": {"key": "generatedProperties", "type": "str"}, + "created_at": {"key": "createdAt", "type": "str"}, + "updated_at": {"key": "updatedAt", "type": "str"}, + } + + def __init__( + self, + *, + resource_id: Optional[str] = None, + key: Optional[str] = None, + binding_parameters: Optional[Dict[str, JSON]] = None, + **kwargs + ): + """ + :keyword resource_id: The Azure resource id of the bound resource. + :paramtype resource_id: str + :keyword key: The key of the bound resource. + :paramtype key: str + :keyword binding_parameters: Binding parameters of the Binding resource. + :paramtype binding_parameters: dict[str, JSON] + """ + super().__init__(**kwargs) + self.resource_name = None + self.resource_type = None + self.resource_id = resource_id + self.key = key + self.binding_parameters = binding_parameters + self.generated_properties = None + self.created_at = None + self.updated_at = None + + +class Build(ProxyResource): + """Build resource payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Properties of the build resource. + :vartype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "BuildProperties"}, + } + + def __init__(self, *, properties: Optional["_models.BuildProperties"] = None, **kwargs): + """ + :keyword properties: Properties of the build resource. + :paramtype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class BuildCollection(_serialization.Model): + """Object that includes an array of Build resources and a possible link for next set. + + :ivar value: Collection of Build resources. 
+ :vartype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.Build] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[Build]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__(self, *, value: Optional[List["_models.Build"]] = None, next_link: Optional[str] = None, **kwargs): + """ + :keyword value: Collection of Build resources. + :paramtype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.Build] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class BuilderProperties(_serialization.Model): + """KPack Builder properties payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provisioning_state: Builder provision status. Known values are: "Creating", "Updating", + "Succeeded", "Failed", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuilderProvisioningState + :ivar stack: Builder cluster stack property. + :vartype stack: ~azure.mgmt.appplatform.v2022_11_01_preview.models.StackProperties + :ivar buildpack_groups: Builder buildpack groups. + :vartype buildpack_groups: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpacksGroupProperties] + """ + + _validation = { + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "stack": {"key": "stack", "type": "StackProperties"}, + "buildpack_groups": {"key": "buildpackGroups", "type": "[BuildpacksGroupProperties]"}, + } + + def __init__( + self, + *, + stack: Optional["_models.StackProperties"] = None, + buildpack_groups: Optional[List["_models.BuildpacksGroupProperties"]] = None, + **kwargs + ): + """ + :keyword stack: Builder cluster stack property. + :paramtype stack: ~azure.mgmt.appplatform.v2022_11_01_preview.models.StackProperties + :keyword buildpack_groups: Builder buildpack groups. + :paramtype buildpack_groups: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpacksGroupProperties] + """ + super().__init__(**kwargs) + self.provisioning_state = None + self.stack = stack + self.buildpack_groups = buildpack_groups + + +class BuilderResource(ProxyResource): + """KPack Builder resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Property of the Builder resource. 
+ :vartype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuilderProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "BuilderProperties"}, + } + + def __init__(self, *, properties: Optional["_models.BuilderProperties"] = None, **kwargs): + """ + :keyword properties: Property of the Builder resource. + :paramtype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuilderProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class BuilderResourceCollection(_serialization.Model): + """Object that includes an array of Builder resources and a possible link for next set. + + :ivar value: Collection of Builder resources. + :vartype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuilderResource] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[BuilderResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, *, value: Optional[List["_models.BuilderResource"]] = None, next_link: Optional[str] = None, **kwargs + ): + """ + :keyword value: Collection of Builder resources. + :paramtype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuilderResource] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class BuildpackBindingLaunchProperties(_serialization.Model): + """Buildpack Binding Launch Properties. + + :ivar properties: Non-sensitive properties for launchProperties. + :vartype properties: dict[str, str] + :ivar secrets: Sensitive properties for launchProperties. + :vartype secrets: dict[str, str] + """ + + _attribute_map = { + "properties": {"key": "properties", "type": "{str}"}, + "secrets": {"key": "secrets", "type": "{str}"}, + } + + def __init__( + self, *, properties: Optional[Dict[str, str]] = None, secrets: Optional[Dict[str, str]] = None, **kwargs + ): + """ + :keyword properties: Non-sensitive properties for launchProperties. + :paramtype properties: dict[str, str] + :keyword secrets: Sensitive properties for launchProperties. + :paramtype secrets: dict[str, str] + """ + super().__init__(**kwargs) + self.properties = properties + self.secrets = secrets + + +class BuildpackBindingProperties(_serialization.Model): + """Properties of a buildpack binding. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar binding_type: Buildpack Binding Type. Known values are: "ApplicationInsights", + "ApacheSkyWalking", "AppDynamics", "Dynatrace", "NewRelic", "ElasticAPM", and "CACertificates". + :vartype binding_type: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingType + :ivar provisioning_state: State of the Buildpack Binding. Known values are: "Creating", + "Updating", "Succeeded", "Failed", and "Deleting". 
+ :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackBindingProvisioningState + :ivar launch_properties: The object describes the buildpack binding launch properties. + :vartype launch_properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackBindingLaunchProperties + """ + + _validation = { + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "binding_type": {"key": "bindingType", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "launch_properties": {"key": "launchProperties", "type": "BuildpackBindingLaunchProperties"}, + } + + def __init__( + self, + *, + binding_type: Optional[Union[str, "_models.BindingType"]] = None, + launch_properties: Optional["_models.BuildpackBindingLaunchProperties"] = None, + **kwargs + ): + """ + :keyword binding_type: Buildpack Binding Type. Known values are: "ApplicationInsights", + "ApacheSkyWalking", "AppDynamics", "Dynatrace", "NewRelic", "ElasticAPM", and "CACertificates". + :paramtype binding_type: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingType + :keyword launch_properties: The object describes the buildpack binding launch properties. + :paramtype launch_properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackBindingLaunchProperties + """ + super().__init__(**kwargs) + self.binding_type = binding_type + self.provisioning_state = None + self.launch_properties = launch_properties + + +class BuildpackBindingResource(ProxyResource): + """Buildpack Binding Resource object. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Properties of a buildpack binding. + :vartype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackBindingProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "BuildpackBindingProperties"}, + } + + def __init__(self, *, properties: Optional["_models.BuildpackBindingProperties"] = None, **kwargs): + """ + :keyword properties: Properties of a buildpack binding. + :paramtype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackBindingProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class BuildpackBindingResourceCollection(_serialization.Model): + """Object that includes an array of BuildpackBinding resources and a possible link for next set. + + :ivar value: Collection of BuildpackBinding resources. + :vartype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackBindingResource] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. 
+ :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[BuildpackBindingResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, + *, + value: Optional[List["_models.BuildpackBindingResource"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword value: Collection of BuildpackBinding resources. + :paramtype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackBindingResource] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class BuildpackProperties(_serialization.Model): + """Buildpack properties payload. + + :ivar id: Id of the buildpack. + :vartype id: str + """ + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + } + + def __init__(self, *, id: Optional[str] = None, **kwargs): # pylint: disable=redefined-builtin + """ + :keyword id: Id of the buildpack. + :paramtype id: str + """ + super().__init__(**kwargs) + self.id = id + + +class BuildpacksGroupProperties(_serialization.Model): + """Buildpack group properties of the Builder. + + :ivar name: Buildpack group name. + :vartype name: str + :ivar buildpacks: Buildpacks in the buildpack group. + :vartype buildpacks: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackProperties] + """ + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "buildpacks": {"key": "buildpacks", "type": "[BuildpackProperties]"}, + } + + def __init__( + self, *, name: Optional[str] = None, buildpacks: Optional[List["_models.BuildpackProperties"]] = None, **kwargs + ): + """ + :keyword name: Buildpack group name. + :paramtype name: str + :keyword buildpacks: Buildpacks in the buildpack group. + :paramtype buildpacks: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackProperties] + """ + super().__init__(**kwargs) + self.name = name + self.buildpacks = buildpacks + + +class BuildProperties(_serialization.Model): + """Build resource properties payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar relative_path: The relative path of source code. + :vartype relative_path: str + :ivar builder: The resource id of builder to build the source code. + :vartype builder: str + :ivar agent_pool: The resource id of agent pool. + :vartype agent_pool: str + :ivar provisioning_state: Provisioning state of the KPack build result. Known values are: + "Creating", "Updating", "Succeeded", "Failed", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildProvisioningState + :ivar env: The environment variables for this build. + :vartype env: dict[str, str] + :ivar triggered_build_result: The build result triggered by this build. + :vartype triggered_build_result: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.TriggeredBuildResult + :ivar resource_requests: The customized build resource for this build. 
+ :vartype resource_requests: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildResourceRequests + """ + + _validation = { + "provisioning_state": {"readonly": True}, + "triggered_build_result": {"readonly": True}, + } + + _attribute_map = { + "relative_path": {"key": "relativePath", "type": "str"}, + "builder": {"key": "builder", "type": "str"}, + "agent_pool": {"key": "agentPool", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "env": {"key": "env", "type": "{str}"}, + "triggered_build_result": {"key": "triggeredBuildResult", "type": "TriggeredBuildResult"}, + "resource_requests": {"key": "resourceRequests", "type": "BuildResourceRequests"}, + } + + def __init__( + self, + *, + relative_path: Optional[str] = None, + builder: Optional[str] = None, + agent_pool: Optional[str] = None, + env: Optional[Dict[str, str]] = None, + resource_requests: Optional["_models.BuildResourceRequests"] = None, + **kwargs + ): + """ + :keyword relative_path: The relative path of source code. + :paramtype relative_path: str + :keyword builder: The resource id of builder to build the source code. + :paramtype builder: str + :keyword agent_pool: The resource id of agent pool. + :paramtype agent_pool: str + :keyword env: The environment variables for this build. + :paramtype env: dict[str, str] + :keyword resource_requests: The customized build resource for this build. + :paramtype resource_requests: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildResourceRequests + """ + super().__init__(**kwargs) + self.relative_path = relative_path + self.builder = builder + self.agent_pool = agent_pool + self.provisioning_state = None + self.env = env + self.triggered_build_result = None + self.resource_requests = resource_requests + + +class BuildResourceRequests(_serialization.Model): + """Resource request payload of Build Resource. + + :ivar cpu: Optional Cpu allocated to the build resource. 1 core can be represented by 1 or + 1000m. + The default value is 1, this should not exceed build service agent pool cpu size. + :vartype cpu: str + :ivar memory: Optional Memory allocated to the build resource. 1 GB can be represented by 1Gi + or 1024Mi. + The default value is 2Gi, this should not exceed build service agent pool memory size. + :vartype memory: str + """ + + _attribute_map = { + "cpu": {"key": "cpu", "type": "str"}, + "memory": {"key": "memory", "type": "str"}, + } + + def __init__(self, *, cpu: str = "1", memory: str = "2Gi", **kwargs): + """ + :keyword cpu: Optional Cpu allocated to the build resource. 1 core can be represented by 1 or + 1000m. + The default value is 1, this should not exceed build service agent pool cpu size. + :paramtype cpu: str + :keyword memory: Optional Memory allocated to the build resource. 1 GB can be represented by + 1Gi or 1024Mi. + The default value is 2Gi, this should not exceed build service agent pool memory size. + :paramtype memory: str + """ + super().__init__(**kwargs) + self.cpu = cpu + self.memory = memory + + +class BuildResult(ProxyResource): + """Build result resource payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. 
+ :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Properties of the build result resource. + :vartype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildResultProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "BuildResultProperties"}, + } + + def __init__(self, *, properties: Optional["_models.BuildResultProperties"] = None, **kwargs): + """ + :keyword properties: Properties of the build result resource. + :paramtype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildResultProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class BuildResultCollection(_serialization.Model): + """Object that includes an array of Build result resources and a possible link for next set. + + :ivar value: Collection of Build result resources. + :vartype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildResult] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[BuildResult]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, *, value: Optional[List["_models.BuildResult"]] = None, next_link: Optional[str] = None, **kwargs + ): + """ + :keyword value: Collection of Build result resources. + :paramtype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildResult] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class BuildResultLog(_serialization.Model): + """Build result log resource properties payload. + + :ivar blob_url: The public download URL of this build result log. + :vartype blob_url: str + """ + + _attribute_map = { + "blob_url": {"key": "blobUrl", "type": "str"}, + } + + def __init__(self, *, blob_url: Optional[str] = None, **kwargs): + """ + :keyword blob_url: The public download URL of this build result log. + :paramtype blob_url: str + """ + super().__init__(**kwargs) + self.blob_url = blob_url + + +class BuildResultProperties(_serialization.Model): + """Build result resource properties payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: The name of this build result. + :vartype name: str + :ivar provisioning_state: Provisioning state of the KPack build result. Known values are: + "Queuing", "Building", "Succeeded", "Failed", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildResultProvisioningState + :ivar error: Error when build is failed. + :vartype error: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Error + :ivar build_pod_name: The build pod name which can be used to get the build log streaming. + :vartype build_pod_name: str + :ivar build_stages: All of the build stage (init-container and container) resources in build + pod. 
+ :vartype build_stages: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildStageProperties] + """ + + _validation = { + "provisioning_state": {"readonly": True}, + "build_stages": {"readonly": True}, + } + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "error": {"key": "error", "type": "Error"}, + "build_pod_name": {"key": "buildPodName", "type": "str"}, + "build_stages": {"key": "buildStages", "type": "[BuildStageProperties]"}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + error: Optional["_models.Error"] = None, + build_pod_name: Optional[str] = None, + **kwargs + ): + """ + :keyword name: The name of this build result. + :paramtype name: str + :keyword error: Error when build is failed. + :paramtype error: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Error + :keyword build_pod_name: The build pod name which can be used to get the build log streaming. + :paramtype build_pod_name: str + """ + super().__init__(**kwargs) + self.name = name + self.provisioning_state = None + self.error = error + self.build_pod_name = build_pod_name + self.build_stages = None + + +class UserSourceInfo(_serialization.Model): + """Source information for a deployment. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + BuildResultUserSourceInfo, CustomContainerUserSourceInfo, UploadedUserSourceInfo + + All required parameters must be populated in order to send to Azure. + + :ivar type: Type of the source uploaded. Required. + :vartype type: str + :ivar version: Version of the source. + :vartype version: str + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, + } + + _subtype_map = { + "type": { + "BuildResult": "BuildResultUserSourceInfo", + "Container": "CustomContainerUserSourceInfo", + "UploadedUserSourceInfo": "UploadedUserSourceInfo", + } + } + + def __init__(self, *, version: Optional[str] = None, **kwargs): + """ + :keyword version: Version of the source. + :paramtype version: str + """ + super().__init__(**kwargs) + self.type = None # type: Optional[str] + self.version = version + + +class BuildResultUserSourceInfo(UserSourceInfo): + """Reference to a build result. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Type of the source uploaded. Required. + :vartype type: str + :ivar version: Version of the source. + :vartype version: str + :ivar build_result_id: Resource id of an existing succeeded build result under the same Spring + instance. + :vartype build_result_id: str + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, + "build_result_id": {"key": "buildResultId", "type": "str"}, + } + + def __init__(self, *, version: Optional[str] = None, build_result_id: Optional[str] = None, **kwargs): + """ + :keyword version: Version of the source. + :paramtype version: str + :keyword build_result_id: Resource id of an existing succeeded build result under the same + Spring instance. + :paramtype build_result_id: str + """ + super().__init__(version=version, **kwargs) + self.type = "BuildResult" # type: str + self.build_result_id = build_result_id + + +class BuildService(ProxyResource): + """Build service resource payload. 
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Properties of the build resource. + :vartype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "BuildServiceProperties"}, + } + + def __init__(self, *, properties: Optional["_models.BuildServiceProperties"] = None, **kwargs): + """ + :keyword properties: Properties of the build resource. + :paramtype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class BuildServiceAgentPoolProperties(_serialization.Model): + """Build service agent pool properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provisioning_state: Provisioning state of the build service agent pool. + :vartype provisioning_state: str + :ivar pool_size: build service agent pool size properties. + :vartype pool_size: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceAgentPoolSizeProperties + """ + + _validation = { + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "pool_size": {"key": "poolSize", "type": "BuildServiceAgentPoolSizeProperties"}, + } + + def __init__(self, *, pool_size: Optional["_models.BuildServiceAgentPoolSizeProperties"] = None, **kwargs): + """ + :keyword pool_size: build service agent pool size properties. + :paramtype pool_size: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceAgentPoolSizeProperties + """ + super().__init__(**kwargs) + self.provisioning_state = None + self.pool_size = pool_size + + +class BuildServiceAgentPoolResource(ProxyResource): + """The build service agent pool resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: build service agent pool properties. 
+ :vartype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceAgentPoolProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "BuildServiceAgentPoolProperties"}, + } + + def __init__(self, *, properties: Optional["_models.BuildServiceAgentPoolProperties"] = None, **kwargs): + """ + :keyword properties: build service agent pool properties. + :paramtype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceAgentPoolProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class BuildServiceAgentPoolResourceCollection(_serialization.Model): + """Object that includes an array of build service agent pool resources and a possible link for next set. + + :ivar value: Collection of build service agent pool resource. + :vartype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceAgentPoolResource] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[BuildServiceAgentPoolResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, + *, + value: Optional[List["_models.BuildServiceAgentPoolResource"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword value: Collection of build service agent pool resource. + :paramtype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceAgentPoolResource] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class BuildServiceAgentPoolSizeProperties(_serialization.Model): + """Build service agent pool size properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: The name of build service agent pool size. + :vartype name: str + :ivar cpu: The cpu property of build service agent pool size. + :vartype cpu: str + :ivar memory: The memory property of build service agent pool size. + :vartype memory: str + """ + + _validation = { + "cpu": {"readonly": True}, + "memory": {"readonly": True}, + } + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "cpu": {"key": "cpu", "type": "str"}, + "memory": {"key": "memory", "type": "str"}, + } + + def __init__(self, *, name: Optional[str] = None, **kwargs): + """ + :keyword name: The name of build service agent pool size. + :paramtype name: str + """ + super().__init__(**kwargs) + self.name = name + self.cpu = None + self.memory = None + + +class BuildServiceCollection(_serialization.Model): + """Object that includes an array of Build service resources and a possible link for next set. + + :ivar value: Collection of Build service resources. + :vartype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildService] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. 
+ :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[BuildService]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, *, value: Optional[List["_models.BuildService"]] = None, next_link: Optional[str] = None, **kwargs + ): + """ + :keyword value: Collection of Build service resources. + :paramtype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildService] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class BuildServiceProperties(_serialization.Model): + """Build service resource properties payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar k_pack_version: The installed KPack version in this build service. + :vartype k_pack_version: str + :ivar provisioning_state: Provisioning state of the KPack build result. Known values are: + "Creating", "Updating", "Succeeded", "Failed", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceProvisioningState + :ivar resource_requests: The runtime resource configuration of this build service. + :vartype resource_requests: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServicePropertiesResourceRequests + """ + + _validation = { + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "k_pack_version": {"key": "kPackVersion", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "resource_requests": {"key": "resourceRequests", "type": "BuildServicePropertiesResourceRequests"}, + } + + def __init__( + self, + *, + k_pack_version: Optional[str] = None, + resource_requests: Optional["_models.BuildServicePropertiesResourceRequests"] = None, + **kwargs + ): + """ + :keyword k_pack_version: The installed KPack version in this build service. + :paramtype k_pack_version: str + :keyword resource_requests: The runtime resource configuration of this build service. + :paramtype resource_requests: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServicePropertiesResourceRequests + """ + super().__init__(**kwargs) + self.k_pack_version = k_pack_version + self.provisioning_state = None + self.resource_requests = resource_requests + + +class BuildServicePropertiesResourceRequests(_serialization.Model): + """The runtime resource configuration of this build service. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar cpu: vCPU allocated to the entire build service node pool. + :vartype cpu: str + :ivar memory: Memory allocated to the entire build service node pool. + :vartype memory: str + """ + + _validation = { + "cpu": {"readonly": True}, + "memory": {"readonly": True}, + } + + _attribute_map = { + "cpu": {"key": "cpu", "type": "str"}, + "memory": {"key": "memory", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.cpu = None + self.memory = None + + +class BuildStageProperties(_serialization.Model): + """The build stage (init-container and container) resources in build pod. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: The name of this build stage resource. 
+ :vartype name: str + :ivar status: The provisioning state of this build stage resource. Known values are: + "NotStarted", "Running", "Succeeded", and "Failed". + :vartype status: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.KPackBuildStageProvisioningState + :ivar exit_code: The exit code of this build init container. + :vartype exit_code: str + :ivar reason: The reason of this build init container. + :vartype reason: str + """ + + _validation = { + "name": {"readonly": True}, + "status": {"readonly": True}, + "exit_code": {"readonly": True}, + "reason": {"readonly": True}, + } + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "exit_code": {"key": "exitCode", "type": "str"}, + "reason": {"key": "reason", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.name = None + self.status = None + self.exit_code = None + self.reason = None + + +class CertificateProperties(_serialization.Model): + """Certificate resource payload. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ContentCertificateProperties, KeyVaultCertificateProperties + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: The type of the certificate source. Required. + :vartype type: str + :ivar thumbprint: The thumbprint of certificate. + :vartype thumbprint: str + :ivar issuer: The issuer of certificate. + :vartype issuer: str + :ivar issued_date: The issue date of certificate. + :vartype issued_date: str + :ivar expiration_date: The expiration date of certificate. + :vartype expiration_date: str + :ivar activate_date: The activate date of certificate. + :vartype activate_date: str + :ivar subject_name: The subject name of certificate. + :vartype subject_name: str + :ivar dns_names: The domain list of certificate. + :vartype dns_names: list[str] + :ivar provisioning_state: Provisioning state of the Certificate. Known values are: "Creating", + "Updating", "Succeeded", "Failed", and "Deleting". 
+ :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CertificateResourceProvisioningState + """ + + _validation = { + "type": {"required": True}, + "thumbprint": {"readonly": True}, + "issuer": {"readonly": True}, + "issued_date": {"readonly": True}, + "expiration_date": {"readonly": True}, + "activate_date": {"readonly": True}, + "subject_name": {"readonly": True}, + "dns_names": {"readonly": True}, + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "thumbprint": {"key": "thumbprint", "type": "str"}, + "issuer": {"key": "issuer", "type": "str"}, + "issued_date": {"key": "issuedDate", "type": "str"}, + "expiration_date": {"key": "expirationDate", "type": "str"}, + "activate_date": {"key": "activateDate", "type": "str"}, + "subject_name": {"key": "subjectName", "type": "str"}, + "dns_names": {"key": "dnsNames", "type": "[str]"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + } + + _subtype_map = { + "type": { + "ContentCertificate": "ContentCertificateProperties", + "KeyVaultCertificate": "KeyVaultCertificateProperties", + } + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.type = None # type: Optional[str] + self.thumbprint = None + self.issuer = None + self.issued_date = None + self.expiration_date = None + self.activate_date = None + self.subject_name = None + self.dns_names = None + self.provisioning_state = None + + +class CertificateResource(ProxyResource): + """Certificate resource payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Properties of the certificate resource payload. + :vartype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CertificateProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "CertificateProperties"}, + } + + def __init__(self, *, properties: Optional["_models.CertificateProperties"] = None, **kwargs): + """ + :keyword properties: Properties of the certificate resource payload. + :paramtype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CertificateProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class CertificateResourceCollection(_serialization.Model): + """Collection compose of certificate resources list and a possible link for next page. + + :ivar value: The certificate resources list. + :vartype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.CertificateResource] + :ivar next_link: The link to next page of certificate list. 
+ :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[CertificateResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, *, value: Optional[List["_models.CertificateResource"]] = None, next_link: Optional[str] = None, **kwargs + ): + """ + :keyword value: The certificate resources list. + :paramtype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.CertificateResource] + :keyword next_link: The link to next page of certificate list. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class CloudErrorBody(_serialization.Model): + """An error response from the service. + + :ivar code: An identifier for the error. Codes are invariant and are intended to be consumed + programmatically. + :vartype code: str + :ivar message: A message describing the error, intended to be suitable for display in a user + interface. + :vartype message: str + :ivar target: The target of the particular error. For example, the name of the property in + error. + :vartype target: str + :ivar details: A list of additional details about the error. + :vartype details: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.CloudErrorBody] + """ + + _attribute_map = { + "code": {"key": "code", "type": "str"}, + "message": {"key": "message", "type": "str"}, + "target": {"key": "target", "type": "str"}, + "details": {"key": "details", "type": "[CloudErrorBody]"}, + } + + def __init__( + self, + *, + code: Optional[str] = None, + message: Optional[str] = None, + target: Optional[str] = None, + details: Optional[List["_models.CloudErrorBody"]] = None, + **kwargs + ): + """ + :keyword code: An identifier for the error. Codes are invariant and are intended to be consumed + programmatically. + :paramtype code: str + :keyword message: A message describing the error, intended to be suitable for display in a user + interface. + :paramtype message: str + :keyword target: The target of the particular error. For example, the name of the property in + error. + :paramtype target: str + :keyword details: A list of additional details about the error. + :paramtype details: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.CloudErrorBody] + """ + super().__init__(**kwargs) + self.code = code + self.message = message + self.target = target + self.details = details + + +class ClusterResourceProperties(_serialization.Model): + """Service properties payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provisioning_state: Provisioning state of the Service. Known values are: "Creating", + "Updating", "Starting", "Stopping", "Deleting", "Deleted", "Succeeded", "Failed", "Moving", + "Moved", and "MoveFailed". + :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ProvisioningState + :ivar network_profile: Network profile of the Service. + :vartype network_profile: ~azure.mgmt.appplatform.v2022_11_01_preview.models.NetworkProfile + :ivar vnet_addons: Additional Service settings in vnet injection instance. + :vartype vnet_addons: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceVNetAddons + :ivar version: Version of the Service. + :vartype version: int + :ivar service_id: ServiceInstanceEntity GUID which uniquely identifies a created resource. + :vartype service_id: str + :ivar power_state: Power state of the Service. Known values are: "Running" and "Stopped". 
+ :vartype power_state: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.PowerState + :ivar zone_redundant: + :vartype zone_redundant: bool + :ivar fqdn: Fully qualified dns name of the service instance. + :vartype fqdn: str + :ivar marketplace_resource: Purchasing 3rd party product of the Service resource. + :vartype marketplace_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.MarketplaceResource + """ + + _validation = { + "provisioning_state": {"readonly": True}, + "version": {"readonly": True}, + "service_id": {"readonly": True}, + "power_state": {"readonly": True}, + "fqdn": {"readonly": True}, + } + + _attribute_map = { + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "network_profile": {"key": "networkProfile", "type": "NetworkProfile"}, + "vnet_addons": {"key": "vnetAddons", "type": "ServiceVNetAddons"}, + "version": {"key": "version", "type": "int"}, + "service_id": {"key": "serviceId", "type": "str"}, + "power_state": {"key": "powerState", "type": "str"}, + "zone_redundant": {"key": "zoneRedundant", "type": "bool"}, + "fqdn": {"key": "fqdn", "type": "str"}, + "marketplace_resource": {"key": "marketplaceResource", "type": "MarketplaceResource"}, + } + + def __init__( + self, + *, + network_profile: Optional["_models.NetworkProfile"] = None, + vnet_addons: Optional["_models.ServiceVNetAddons"] = None, + zone_redundant: bool = False, + marketplace_resource: Optional["_models.MarketplaceResource"] = None, + **kwargs + ): + """ + :keyword network_profile: Network profile of the Service. + :paramtype network_profile: ~azure.mgmt.appplatform.v2022_11_01_preview.models.NetworkProfile + :keyword vnet_addons: Additional Service settings in vnet injection instance. + :paramtype vnet_addons: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceVNetAddons + :keyword zone_redundant: + :paramtype zone_redundant: bool + :keyword marketplace_resource: Purchasing 3rd party product of the Service resource. + :paramtype marketplace_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.MarketplaceResource + """ + super().__init__(**kwargs) + self.provisioning_state = None + self.network_profile = network_profile + self.vnet_addons = vnet_addons + self.version = None + self.service_id = None + self.power_state = None + self.zone_redundant = zone_redundant + self.fqdn = None + self.marketplace_resource = marketplace_resource + + +class ConfigServerGitProperty(_serialization.Model): + """Property of git. + + All required parameters must be populated in order to send to Azure. + + :ivar repositories: Repositories of git. + :vartype repositories: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.GitPatternRepository] + :ivar uri: URI of the repository. Required. + :vartype uri: str + :ivar label: Label of the repository. + :vartype label: str + :ivar search_paths: Searching path of the repository. + :vartype search_paths: list[str] + :ivar username: Username of git repository basic auth. + :vartype username: str + :ivar password: Password of git repository basic auth. + :vartype password: str + :ivar host_key: Public sshKey of git repository. + :vartype host_key: str + :ivar host_key_algorithm: SshKey algorithm of git repository. + :vartype host_key_algorithm: str + :ivar private_key: Private sshKey algorithm of git repository. + :vartype private_key: str + :ivar strict_host_key_checking: Strict host key checking or not. 
+ :vartype strict_host_key_checking: bool + """ + + _validation = { + "uri": {"required": True}, + } + + _attribute_map = { + "repositories": {"key": "repositories", "type": "[GitPatternRepository]"}, + "uri": {"key": "uri", "type": "str"}, + "label": {"key": "label", "type": "str"}, + "search_paths": {"key": "searchPaths", "type": "[str]"}, + "username": {"key": "username", "type": "str"}, + "password": {"key": "password", "type": "str"}, + "host_key": {"key": "hostKey", "type": "str"}, + "host_key_algorithm": {"key": "hostKeyAlgorithm", "type": "str"}, + "private_key": {"key": "privateKey", "type": "str"}, + "strict_host_key_checking": {"key": "strictHostKeyChecking", "type": "bool"}, + } + + def __init__( + self, + *, + uri: str, + repositories: Optional[List["_models.GitPatternRepository"]] = None, + label: Optional[str] = None, + search_paths: Optional[List[str]] = None, + username: Optional[str] = None, + password: Optional[str] = None, + host_key: Optional[str] = None, + host_key_algorithm: Optional[str] = None, + private_key: Optional[str] = None, + strict_host_key_checking: Optional[bool] = None, + **kwargs + ): + """ + :keyword repositories: Repositories of git. + :paramtype repositories: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.GitPatternRepository] + :keyword uri: URI of the repository. Required. + :paramtype uri: str + :keyword label: Label of the repository. + :paramtype label: str + :keyword search_paths: Searching path of the repository. + :paramtype search_paths: list[str] + :keyword username: Username of git repository basic auth. + :paramtype username: str + :keyword password: Password of git repository basic auth. + :paramtype password: str + :keyword host_key: Public sshKey of git repository. + :paramtype host_key: str + :keyword host_key_algorithm: SshKey algorithm of git repository. + :paramtype host_key_algorithm: str + :keyword private_key: Private sshKey algorithm of git repository. + :paramtype private_key: str + :keyword strict_host_key_checking: Strict host key checking or not. + :paramtype strict_host_key_checking: bool + """ + super().__init__(**kwargs) + self.repositories = repositories + self.uri = uri + self.label = label + self.search_paths = search_paths + self.username = username + self.password = password + self.host_key = host_key + self.host_key_algorithm = host_key_algorithm + self.private_key = private_key + self.strict_host_key_checking = strict_host_key_checking + + +class ConfigServerProperties(_serialization.Model): + """Config server git properties payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provisioning_state: State of the config server. Known values are: "NotAvailable", + "Deleted", "Failed", "Succeeded", and "Updating". + :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerState + :ivar error: Error when apply config server settings. + :vartype error: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Error + :ivar config_server: Settings of config server. 
+ :vartype config_server: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerSettings + """ + + _validation = { + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "error": {"key": "error", "type": "Error"}, + "config_server": {"key": "configServer", "type": "ConfigServerSettings"}, + } + + def __init__( + self, + *, + error: Optional["_models.Error"] = None, + config_server: Optional["_models.ConfigServerSettings"] = None, + **kwargs + ): + """ + :keyword error: Error when apply config server settings. + :paramtype error: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Error + :keyword config_server: Settings of config server. + :paramtype config_server: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerSettings + """ + super().__init__(**kwargs) + self.provisioning_state = None + self.error = error + self.config_server = config_server + + +class ConfigServerResource(ProxyResource): + """Config Server resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Properties of the Config Server resource. + :vartype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ConfigServerProperties"}, + } + + def __init__(self, *, properties: Optional["_models.ConfigServerProperties"] = None, **kwargs): + """ + :keyword properties: Properties of the Config Server resource. + :paramtype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class ConfigServerSettings(_serialization.Model): + """The settings of config server. + + :ivar git_property: Property of git environment. + :vartype git_property: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerGitProperty + """ + + _attribute_map = { + "git_property": {"key": "gitProperty", "type": "ConfigServerGitProperty"}, + } + + def __init__(self, *, git_property: Optional["_models.ConfigServerGitProperty"] = None, **kwargs): + """ + :keyword git_property: Property of git environment. + :paramtype git_property: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerGitProperty + """ + super().__init__(**kwargs) + self.git_property = git_property + + +class ConfigServerSettingsErrorRecord(_serialization.Model): + """Error record of the config server settings. + + :ivar name: The name of the config server settings error record. + :vartype name: str + :ivar uri: The uri of the config server settings error record. + :vartype uri: str + :ivar messages: The detail error messages of the record. 
+ :vartype messages: list[str] + """ + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + "messages": {"key": "messages", "type": "[str]"}, + } + + def __init__( + self, *, name: Optional[str] = None, uri: Optional[str] = None, messages: Optional[List[str]] = None, **kwargs + ): + """ + :keyword name: The name of the config server settings error record. + :paramtype name: str + :keyword uri: The uri of the config server settings error record. + :paramtype uri: str + :keyword messages: The detail error messages of the record. + :paramtype messages: list[str] + """ + super().__init__(**kwargs) + self.name = name + self.uri = uri + self.messages = messages + + +class ConfigServerSettingsValidateResult(_serialization.Model): + """Validation result for config server settings. + + :ivar is_valid: Indicate if the config server settings are valid. + :vartype is_valid: bool + :ivar details: The detail validation results. + :vartype details: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerSettingsErrorRecord] + """ + + _attribute_map = { + "is_valid": {"key": "isValid", "type": "bool"}, + "details": {"key": "details", "type": "[ConfigServerSettingsErrorRecord]"}, + } + + def __init__( + self, + *, + is_valid: Optional[bool] = None, + details: Optional[List["_models.ConfigServerSettingsErrorRecord"]] = None, + **kwargs + ): + """ + :keyword is_valid: Indicate if the config server settings are valid. + :paramtype is_valid: bool + :keyword details: The detail validation results. + :paramtype details: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerSettingsErrorRecord] + """ + super().__init__(**kwargs) + self.is_valid = is_valid + self.details = details + + +class ConfigurationServiceGitProperty(_serialization.Model): + """Property of git environment. + + :ivar repositories: Repositories of Application Configuration Service git property. + :vartype repositories: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceGitRepository] + """ + + _attribute_map = { + "repositories": {"key": "repositories", "type": "[ConfigurationServiceGitRepository]"}, + } + + def __init__(self, *, repositories: Optional[List["_models.ConfigurationServiceGitRepository"]] = None, **kwargs): + """ + :keyword repositories: Repositories of Application Configuration Service git property. + :paramtype repositories: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceGitRepository] + """ + super().__init__(**kwargs) + self.repositories = repositories + + +class ConfigurationServiceGitPropertyValidateResult(_serialization.Model): + """Validation result for configuration service settings. + + :ivar is_valid: Indicate if the configuration service settings are valid. + :vartype is_valid: bool + :ivar git_repos_validation_result: The detail validation results. + :vartype git_repos_validation_result: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ValidationMessages] + """ + + _attribute_map = { + "is_valid": {"key": "isValid", "type": "bool"}, + "git_repos_validation_result": {"key": "gitReposValidationResult", "type": "[ValidationMessages]"}, + } + + def __init__( + self, + *, + is_valid: Optional[bool] = None, + git_repos_validation_result: Optional[List["_models.ValidationMessages"]] = None, + **kwargs + ): + """ + :keyword is_valid: Indicate if the configuration service settings are valid. 
+ :paramtype is_valid: bool + :keyword git_repos_validation_result: The detail validation results. + :paramtype git_repos_validation_result: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ValidationMessages] + """ + super().__init__(**kwargs) + self.is_valid = is_valid + self.git_repos_validation_result = git_repos_validation_result + + +class ConfigurationServiceGitRepository(_serialization.Model): # pylint: disable=too-many-instance-attributes + """Git repository property payload for Application Configuration Service. + + All required parameters must be populated in order to send to Azure. + + :ivar name: Name of the repository. Required. + :vartype name: str + :ivar patterns: Collection of patterns of the repository. Required. + :vartype patterns: list[str] + :ivar uri: URI of the repository. Required. + :vartype uri: str + :ivar label: Label of the repository. Required. + :vartype label: str + :ivar search_paths: Searching path of the repository. + :vartype search_paths: list[str] + :ivar username: Username of git repository basic auth. + :vartype username: str + :ivar password: Password of git repository basic auth. + :vartype password: str + :ivar host_key: Public sshKey of git repository. + :vartype host_key: str + :ivar host_key_algorithm: SshKey algorithm of git repository. + :vartype host_key_algorithm: str + :ivar private_key: Private sshKey algorithm of git repository. + :vartype private_key: str + :ivar strict_host_key_checking: Strict host key checking or not. + :vartype strict_host_key_checking: bool + """ + + _validation = { + "name": {"required": True}, + "patterns": {"required": True}, + "uri": {"required": True}, + "label": {"required": True}, + } + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "patterns": {"key": "patterns", "type": "[str]"}, + "uri": {"key": "uri", "type": "str"}, + "label": {"key": "label", "type": "str"}, + "search_paths": {"key": "searchPaths", "type": "[str]"}, + "username": {"key": "username", "type": "str"}, + "password": {"key": "password", "type": "str"}, + "host_key": {"key": "hostKey", "type": "str"}, + "host_key_algorithm": {"key": "hostKeyAlgorithm", "type": "str"}, + "private_key": {"key": "privateKey", "type": "str"}, + "strict_host_key_checking": {"key": "strictHostKeyChecking", "type": "bool"}, + } + + def __init__( + self, + *, + name: str, + patterns: List[str], + uri: str, + label: str, + search_paths: Optional[List[str]] = None, + username: Optional[str] = None, + password: Optional[str] = None, + host_key: Optional[str] = None, + host_key_algorithm: Optional[str] = None, + private_key: Optional[str] = None, + strict_host_key_checking: Optional[bool] = None, + **kwargs + ): + """ + :keyword name: Name of the repository. Required. + :paramtype name: str + :keyword patterns: Collection of patterns of the repository. Required. + :paramtype patterns: list[str] + :keyword uri: URI of the repository. Required. + :paramtype uri: str + :keyword label: Label of the repository. Required. + :paramtype label: str + :keyword search_paths: Searching path of the repository. + :paramtype search_paths: list[str] + :keyword username: Username of git repository basic auth. + :paramtype username: str + :keyword password: Password of git repository basic auth. + :paramtype password: str + :keyword host_key: Public sshKey of git repository. + :paramtype host_key: str + :keyword host_key_algorithm: SshKey algorithm of git repository. 
+ :paramtype host_key_algorithm: str + :keyword private_key: Private sshKey algorithm of git repository. + :paramtype private_key: str + :keyword strict_host_key_checking: Strict host key checking or not. + :paramtype strict_host_key_checking: bool + """ + super().__init__(**kwargs) + self.name = name + self.patterns = patterns + self.uri = uri + self.label = label + self.search_paths = search_paths + self.username = username + self.password = password + self.host_key = host_key + self.host_key_algorithm = host_key_algorithm + self.private_key = private_key + self.strict_host_key_checking = strict_host_key_checking + + +class ConfigurationServiceInstance(_serialization.Model): + """Collection of instances belong to the Application Configuration Service. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Name of the Application Configuration Service instance. + :vartype name: str + :ivar status: Status of the Application Configuration Service instance. + :vartype status: str + """ + + _validation = { + "name": {"readonly": True}, + "status": {"readonly": True}, + } + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "status": {"key": "status", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.name = None + self.status = None + + +class ConfigurationServiceProperties(_serialization.Model): + """Application Configuration Service properties payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provisioning_state: State of the Application Configuration Service. Known values are: + "Creating", "Updating", "Succeeded", "Failed", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceProvisioningState + :ivar resource_requests: The requested resource quantity for required CPU and Memory. + :vartype resource_requests: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceResourceRequests + :ivar instances: Collection of instances belong to Application Configuration Service. + :vartype instances: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceInstance] + :ivar settings: The settings of Application Configuration Service. + :vartype settings: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceSettings + """ + + _validation = { + "provisioning_state": {"readonly": True}, + "resource_requests": {"readonly": True}, + "instances": {"readonly": True}, + } + + _attribute_map = { + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "resource_requests": {"key": "resourceRequests", "type": "ConfigurationServiceResourceRequests"}, + "instances": {"key": "instances", "type": "[ConfigurationServiceInstance]"}, + "settings": {"key": "settings", "type": "ConfigurationServiceSettings"}, + } + + def __init__(self, *, settings: Optional["_models.ConfigurationServiceSettings"] = None, **kwargs): + """ + :keyword settings: The settings of Application Configuration Service. + :paramtype settings: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceSettings + """ + super().__init__(**kwargs) + self.provisioning_state = None + self.resource_requests = None + self.instances = None + self.settings = settings + + +class ConfigurationServiceResource(ProxyResource): + """Application Configuration Service resource. 
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Application Configuration Service properties payload. + :vartype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ConfigurationServiceProperties"}, + } + + def __init__(self, *, properties: Optional["_models.ConfigurationServiceProperties"] = None, **kwargs): + """ + :keyword properties: Application Configuration Service properties payload. + :paramtype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class ConfigurationServiceResourceCollection(_serialization.Model): + """Object that includes an array of configuration service resources and a possible link for next set. + + :ivar value: Collection of configuration service resources. + :vartype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceResource] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[ConfigurationServiceResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, + *, + value: Optional[List["_models.ConfigurationServiceResource"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword value: Collection of configuration service resources. + :paramtype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceResource] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class ConfigurationServiceResourceRequests(_serialization.Model): + """Resource request payload of Application Configuration Service. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar cpu: Cpu allocated to each Application Configuration Service instance. + :vartype cpu: str + :ivar memory: Memory allocated to each Application Configuration Service instance. + :vartype memory: str + :ivar instance_count: Instance count of the Application Configuration Service. 
+ :vartype instance_count: int + """ + + _validation = { + "cpu": {"readonly": True}, + "memory": {"readonly": True}, + "instance_count": {"readonly": True}, + } + + _attribute_map = { + "cpu": {"key": "cpu", "type": "str"}, + "memory": {"key": "memory", "type": "str"}, + "instance_count": {"key": "instanceCount", "type": "int"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.cpu = None + self.memory = None + self.instance_count = None + + +class ConfigurationServiceSettings(_serialization.Model): + """The settings of Application Configuration Service. + + :ivar git_property: Property of git environment. + :vartype git_property: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceGitProperty + """ + + _attribute_map = { + "git_property": {"key": "gitProperty", "type": "ConfigurationServiceGitProperty"}, + } + + def __init__(self, *, git_property: Optional["_models.ConfigurationServiceGitProperty"] = None, **kwargs): + """ + :keyword git_property: Property of git environment. + :paramtype git_property: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceGitProperty + """ + super().__init__(**kwargs) + self.git_property = git_property + + +class ConfigurationServiceSettingsValidateResult(_serialization.Model): + """Validation result for configuration service settings. + + :ivar git_property_validation_result: Validation result for configuration service settings. + :vartype git_property_validation_result: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceGitPropertyValidateResult + """ + + _attribute_map = { + "git_property_validation_result": { + "key": "gitPropertyValidationResult", + "type": "ConfigurationServiceGitPropertyValidateResult", + }, + } + + def __init__( + self, + *, + git_property_validation_result: Optional["_models.ConfigurationServiceGitPropertyValidateResult"] = None, + **kwargs + ): + """ + :keyword git_property_validation_result: Validation result for configuration service settings. + :paramtype git_property_validation_result: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceGitPropertyValidateResult + """ + super().__init__(**kwargs) + self.git_property_validation_result = git_property_validation_result + + +class ContainerProbeSettings(_serialization.Model): + """Container liveness and readiness probe settings. + + :ivar disable_probe: Indicates whether disable the liveness and readiness probe. + :vartype disable_probe: bool + """ + + _attribute_map = { + "disable_probe": {"key": "disableProbe", "type": "bool"}, + } + + def __init__(self, *, disable_probe: Optional[bool] = None, **kwargs): + """ + :keyword disable_probe: Indicates whether disable the liveness and readiness probe. + :paramtype disable_probe: bool + """ + super().__init__(**kwargs) + self.disable_probe = disable_probe + + +class ContentCertificateProperties(CertificateProperties): + """Properties of certificate imported from key vault. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: The type of the certificate source. Required. + :vartype type: str + :ivar thumbprint: The thumbprint of certificate. + :vartype thumbprint: str + :ivar issuer: The issuer of certificate. + :vartype issuer: str + :ivar issued_date: The issue date of certificate. + :vartype issued_date: str + :ivar expiration_date: The expiration date of certificate. 
+ :vartype expiration_date: str
+ :ivar activate_date: The activate date of certificate.
+ :vartype activate_date: str
+ :ivar subject_name: The subject name of certificate.
+ :vartype subject_name: str
+ :ivar dns_names: The domain list of certificate.
+ :vartype dns_names: list[str]
+ :ivar provisioning_state: Provisioning state of the Certificate. Known values are: "Creating",
+ "Updating", "Succeeded", "Failed", and "Deleting".
+ :vartype provisioning_state: str or
+ ~azure.mgmt.appplatform.v2022_11_01_preview.models.CertificateResourceProvisioningState
+ :ivar content: The content of uploaded certificate.
+ :vartype content: str
+ """
+
+ _validation = {
+ "type": {"required": True},
+ "thumbprint": {"readonly": True},
+ "issuer": {"readonly": True},
+ "issued_date": {"readonly": True},
+ "expiration_date": {"readonly": True},
+ "activate_date": {"readonly": True},
+ "subject_name": {"readonly": True},
+ "dns_names": {"readonly": True},
+ "provisioning_state": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "type": {"key": "type", "type": "str"},
+ "thumbprint": {"key": "thumbprint", "type": "str"},
+ "issuer": {"key": "issuer", "type": "str"},
+ "issued_date": {"key": "issuedDate", "type": "str"},
+ "expiration_date": {"key": "expirationDate", "type": "str"},
+ "activate_date": {"key": "activateDate", "type": "str"},
+ "subject_name": {"key": "subjectName", "type": "str"},
+ "dns_names": {"key": "dnsNames", "type": "[str]"},
+ "provisioning_state": {"key": "provisioningState", "type": "str"},
+ "content": {"key": "content", "type": "str"},
+ }
+
+ def __init__(self, *, content: Optional[str] = None, **kwargs):
+ """
+ :keyword content: The content of uploaded certificate.
+ :paramtype content: str
+ """
+ super().__init__(**kwargs)
+ self.type = "ContentCertificate" # type: str
+ self.content = content
+
+
+class CustomContainer(_serialization.Model):
+ """Custom container payload.
+
+ :ivar server: The name of the registry that contains the container image.
+ :vartype server: str
+ :ivar container_image: Container image of the custom container. This should be in the form of
+ :code:`<repository>:<tag>` without the server name of the registry.
+ :vartype container_image: str
+ :ivar command: Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is
+ used if this is not provided.
+ :vartype command: list[str]
+ :ivar args: Arguments to the entrypoint. The docker image's CMD is used if this is not
+ provided.
+ :vartype args: list[str]
+ :ivar image_registry_credential: Credential of the image registry.
+ :vartype image_registry_credential:
+ ~azure.mgmt.appplatform.v2022_11_01_preview.models.ImageRegistryCredential
+ :ivar language_framework: Language framework of the container image uploaded.
+ :vartype language_framework: str
+ """
+
+ _attribute_map = {
+ "server": {"key": "server", "type": "str"},
+ "container_image": {"key": "containerImage", "type": "str"},
+ "command": {"key": "command", "type": "[str]"},
+ "args": {"key": "args", "type": "[str]"},
+ "image_registry_credential": {"key": "imageRegistryCredential", "type": "ImageRegistryCredential"},
+ "language_framework": {"key": "languageFramework", "type": "str"},
+ }
+
+ def __init__(
+ self,
+ *,
+ server: Optional[str] = None,
+ container_image: Optional[str] = None,
+ command: Optional[List[str]] = None,
+ args: Optional[List[str]] = None,
+ image_registry_credential: Optional["_models.ImageRegistryCredential"] = None,
+ language_framework: Optional[str] = None,
+ **kwargs
+ ):
+ """
+ :keyword server: The name of the registry that contains the container image.
+ :paramtype server: str
+ :keyword container_image: Container image of the custom container. This should be in the form
+ of :code:`<repository>:<tag>` without the server name of the registry.
+ :paramtype container_image: str
+ :keyword command: Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT
+ is used if this is not provided.
+ :paramtype command: list[str]
+ :keyword args: Arguments to the entrypoint. The docker image's CMD is used if this is not
+ provided.
+ :paramtype args: list[str]
+ :keyword image_registry_credential: Credential of the image registry.
+ :paramtype image_registry_credential:
+ ~azure.mgmt.appplatform.v2022_11_01_preview.models.ImageRegistryCredential
+ :keyword language_framework: Language framework of the container image uploaded.
+ :paramtype language_framework: str
+ """
+ super().__init__(**kwargs)
+ self.server = server
+ self.container_image = container_image
+ self.command = command
+ self.args = args
+ self.image_registry_credential = image_registry_credential
+ self.language_framework = language_framework
+
+
+class CustomContainerUserSourceInfo(UserSourceInfo):
+ """Custom container user source info.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar type: Type of the source uploaded. Required.
+ :vartype type: str
+ :ivar version: Version of the source.
+ :vartype version: str
+ :ivar custom_container: Custom container payload.
+ :vartype custom_container: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomContainer
+ """
+
+ _validation = {
+ "type": {"required": True},
+ }
+
+ _attribute_map = {
+ "type": {"key": "type", "type": "str"},
+ "version": {"key": "version", "type": "str"},
+ "custom_container": {"key": "customContainer", "type": "CustomContainer"},
+ }
+
+ def __init__(
+ self, *, version: Optional[str] = None, custom_container: Optional["_models.CustomContainer"] = None, **kwargs
+ ):
+ """
+ :keyword version: Version of the source.
+ :paramtype version: str
+ :keyword custom_container: Custom container payload.
+ :paramtype custom_container: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomContainer
+ """
+ super().__init__(version=version, **kwargs)
+ self.type = "Container" # type: str
+ self.custom_container = custom_container
+
+
+class CustomDomainProperties(_serialization.Model):
+ """Custom domain of app resource payload.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar thumbprint: The thumbprint of bound certificate.
+ :vartype thumbprint: str
+ :ivar app_name: The app name of domain.
+ :vartype app_name: str
+ :ivar cert_name: The bound certificate name of domain.
+ :vartype cert_name: str + :ivar provisioning_state: Provisioning state of the Domain. Known values are: "Creating", + "Updating", "Succeeded", "Failed", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResourceProvisioningState + """ + + _validation = { + "app_name": {"readonly": True}, + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "thumbprint": {"key": "thumbprint", "type": "str"}, + "app_name": {"key": "appName", "type": "str"}, + "cert_name": {"key": "certName", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + } + + def __init__(self, *, thumbprint: Optional[str] = None, cert_name: Optional[str] = None, **kwargs): + """ + :keyword thumbprint: The thumbprint of bound certificate. + :paramtype thumbprint: str + :keyword cert_name: The bound certificate name of domain. + :paramtype cert_name: str + """ + super().__init__(**kwargs) + self.thumbprint = thumbprint + self.app_name = None + self.cert_name = cert_name + self.provisioning_state = None + + +class CustomDomainResource(ProxyResource): + """Custom domain resource payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Properties of the custom domain resource. + :vartype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "CustomDomainProperties"}, + } + + def __init__(self, *, properties: Optional["_models.CustomDomainProperties"] = None, **kwargs): + """ + :keyword properties: Properties of the custom domain resource. + :paramtype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class CustomDomainResourceCollection(_serialization.Model): + """Collection compose of a custom domain resources list and a possible link for next page. + + :ivar value: The custom domain resources list. + :vartype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource] + :ivar next_link: The link to next page of custom domain list. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[CustomDomainResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, *, value: Optional[List["_models.CustomDomainResource"]] = None, next_link: Optional[str] = None, **kwargs + ): + """ + :keyword value: The custom domain resources list. + :paramtype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource] + :keyword next_link: The link to next page of custom domain list. 
+ :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class CustomDomainValidatePayload(_serialization.Model): + """Custom domain validate payload. + + All required parameters must be populated in order to send to Azure. + + :ivar name: Name to be validated. Required. + :vartype name: str + """ + + _validation = { + "name": {"required": True}, + } + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + } + + def __init__(self, *, name: str, **kwargs): + """ + :keyword name: Name to be validated. Required. + :paramtype name: str + """ + super().__init__(**kwargs) + self.name = name + + +class CustomDomainValidateResult(_serialization.Model): + """Validation result for custom domain. + + :ivar is_valid: Indicates if domain name is valid. + :vartype is_valid: bool + :ivar message: Message of why domain name is invalid. + :vartype message: str + """ + + _attribute_map = { + "is_valid": {"key": "isValid", "type": "bool"}, + "message": {"key": "message", "type": "str"}, + } + + def __init__(self, *, is_valid: Optional[bool] = None, message: Optional[str] = None, **kwargs): + """ + :keyword is_valid: Indicates if domain name is valid. + :paramtype is_valid: bool + :keyword message: Message of why domain name is invalid. + :paramtype message: str + """ + super().__init__(**kwargs) + self.is_valid = is_valid + self.message = message + + +class CustomizedAcceleratorProperties(_serialization.Model): + """Customized accelerator properties payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar provisioning_state: State of the customized accelerator. Known values are: "Creating", + "Updating", "Succeeded", "Failed", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorProvisioningState + :ivar display_name: + :vartype display_name: str + :ivar description: + :vartype description: str + :ivar icon_url: + :vartype icon_url: str + :ivar accelerator_tags: + :vartype accelerator_tags: list[str] + :ivar git_repository: Required. + :vartype git_repository: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.AcceleratorGitRepository + """ + + _validation = { + "provisioning_state": {"readonly": True}, + "git_repository": {"required": True}, + } + + _attribute_map = { + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "icon_url": {"key": "iconUrl", "type": "str"}, + "accelerator_tags": {"key": "acceleratorTags", "type": "[str]"}, + "git_repository": {"key": "gitRepository", "type": "AcceleratorGitRepository"}, + } + + def __init__( + self, + *, + git_repository: "_models.AcceleratorGitRepository", + display_name: Optional[str] = None, + description: Optional[str] = None, + icon_url: Optional[str] = None, + accelerator_tags: Optional[List[str]] = None, + **kwargs + ): + """ + :keyword display_name: + :paramtype display_name: str + :keyword description: + :paramtype description: str + :keyword icon_url: + :paramtype icon_url: str + :keyword accelerator_tags: + :paramtype accelerator_tags: list[str] + :keyword git_repository: Required. 
+ :paramtype git_repository: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.AcceleratorGitRepository + """ + super().__init__(**kwargs) + self.provisioning_state = None + self.display_name = display_name + self.description = description + self.icon_url = icon_url + self.accelerator_tags = accelerator_tags + self.git_repository = git_repository + + +class CustomizedAcceleratorResource(ProxyResource): + """Customized accelerator resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Customized accelerator properties payload. + :vartype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorProperties + :ivar sku: Sku of the customized accelerator resource. + :vartype sku: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Sku + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "CustomizedAcceleratorProperties"}, + "sku": {"key": "sku", "type": "Sku"}, + } + + def __init__( + self, + *, + properties: Optional["_models.CustomizedAcceleratorProperties"] = None, + sku: Optional["_models.Sku"] = None, + **kwargs + ): + """ + :keyword properties: Customized accelerator properties payload. + :paramtype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorProperties + :keyword sku: Sku of the customized accelerator resource. + :paramtype sku: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Sku + """ + super().__init__(**kwargs) + self.properties = properties + self.sku = sku + + +class CustomizedAcceleratorResourceCollection(_serialization.Model): + """CustomizedAcceleratorResourceCollection. + + :ivar value: + :vartype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorResource] + :ivar next_link: + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[CustomizedAcceleratorResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, + *, + value: Optional[List["_models.CustomizedAcceleratorResource"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword value: + :paramtype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorResource] + :keyword next_link: + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class CustomizedAcceleratorValidateResult(_serialization.Model): + """Validation result for customized accelerator properties. + + :ivar state: State of the customized accelerator validation result. Known values are: "Valid" + and "Invalid". + :vartype state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorValidateResultState + :ivar error_message: The detail validation results. 
+ :vartype error_message: str + """ + + _attribute_map = { + "state": {"key": "state", "type": "str"}, + "error_message": {"key": "errorMessage", "type": "str"}, + } + + def __init__( + self, + *, + state: Union[str, "_models.CustomizedAcceleratorValidateResultState"] = "Valid", + error_message: Optional[str] = None, + **kwargs + ): + """ + :keyword state: State of the customized accelerator validation result. Known values are: + "Valid" and "Invalid". + :paramtype state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorValidateResultState + :keyword error_message: The detail validation results. + :paramtype error_message: str + """ + super().__init__(**kwargs) + self.state = state + self.error_message = error_message + + +class CustomPersistentDiskResource(_serialization.Model): + """Custom persistent disk resource payload. + + All required parameters must be populated in order to send to Azure. + + :ivar custom_persistent_disk_properties: Properties of the custom persistent disk resource + payload. + :vartype custom_persistent_disk_properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomPersistentDiskProperties + :ivar storage_id: The resource id of Azure Spring Apps Storage resource. Required. + :vartype storage_id: str + """ + + _validation = { + "storage_id": {"required": True}, + } + + _attribute_map = { + "custom_persistent_disk_properties": { + "key": "customPersistentDiskProperties", + "type": "CustomPersistentDiskProperties", + }, + "storage_id": {"key": "storageId", "type": "str"}, + } + + def __init__( + self, + *, + storage_id: str, + custom_persistent_disk_properties: Optional["_models.CustomPersistentDiskProperties"] = None, + **kwargs + ): + """ + :keyword custom_persistent_disk_properties: Properties of the custom persistent disk resource + payload. + :paramtype custom_persistent_disk_properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomPersistentDiskProperties + :keyword storage_id: The resource id of Azure Spring Apps Storage resource. Required. + :paramtype storage_id: str + """ + super().__init__(**kwargs) + self.custom_persistent_disk_properties = custom_persistent_disk_properties + self.storage_id = storage_id + + +class DeploymentInstance(_serialization.Model): + """Deployment instance payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Name of the deployment instance. + :vartype name: str + :ivar status: Status of the deployment instance. + :vartype status: str + :ivar reason: Failed reason of the deployment instance. + :vartype reason: str + :ivar discovery_status: Discovery status of the deployment instance. + :vartype discovery_status: str + :ivar start_time: Start time of the deployment instance. + :vartype start_time: str + :ivar zone: Availability zone information of the deployment instance. 
+ :vartype zone: str + """ + + _validation = { + "name": {"readonly": True}, + "status": {"readonly": True}, + "reason": {"readonly": True}, + "discovery_status": {"readonly": True}, + "start_time": {"readonly": True}, + "zone": {"readonly": True}, + } + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "reason": {"key": "reason", "type": "str"}, + "discovery_status": {"key": "discoveryStatus", "type": "str"}, + "start_time": {"key": "startTime", "type": "str"}, + "zone": {"key": "zone", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.name = None + self.status = None + self.reason = None + self.discovery_status = None + self.start_time = None + self.zone = None + + +class DeploymentList(_serialization.Model): + """A list of deployments resource ids. + + :ivar deployments: A list of deployment resource ids. + :vartype deployments: list[str] + """ + + _attribute_map = { + "deployments": {"key": "deployments", "type": "[str]"}, + } + + def __init__(self, *, deployments: Optional[List[str]] = None, **kwargs): + """ + :keyword deployments: A list of deployment resource ids. + :paramtype deployments: list[str] + """ + super().__init__(**kwargs) + self.deployments = deployments + + +class DeploymentResource(ProxyResource): + """Deployment resource payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Properties of the Deployment resource. + :vartype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResourceProperties + :ivar sku: Sku of the Deployment resource. + :vartype sku: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Sku + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "DeploymentResourceProperties"}, + "sku": {"key": "sku", "type": "Sku"}, + } + + def __init__( + self, + *, + properties: Optional["_models.DeploymentResourceProperties"] = None, + sku: Optional["_models.Sku"] = None, + **kwargs + ): + """ + :keyword properties: Properties of the Deployment resource. + :paramtype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResourceProperties + :keyword sku: Sku of the Deployment resource. + :paramtype sku: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Sku + """ + super().__init__(**kwargs) + self.properties = properties + self.sku = sku + + +class DeploymentResourceCollection(_serialization.Model): + """Object that includes an array of App resources and a possible link for next set. + + :ivar value: Collection of Deployment resources. + :vartype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource] + :ivar next_link: URL client should use to fetch the next page (per server side paging). 
+ It's null for now, added for future use. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[DeploymentResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, *, value: Optional[List["_models.DeploymentResource"]] = None, next_link: Optional[str] = None, **kwargs + ): + """ + :keyword value: Collection of Deployment resources. + :paramtype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class DeploymentResourceProperties(_serialization.Model): + """Deployment resource properties payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar source: Uploaded source information of the deployment. + :vartype source: ~azure.mgmt.appplatform.v2022_11_01_preview.models.UserSourceInfo + :ivar deployment_settings: Deployment settings of the Deployment. + :vartype deployment_settings: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentSettings + :ivar provisioning_state: Provisioning state of the Deployment. Known values are: "Creating", + "Updating", "Succeeded", and "Failed". + :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResourceProvisioningState + :ivar status: Status of the Deployment. Known values are: "Stopped" and "Running". + :vartype status: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResourceStatus + :ivar active: Indicates whether the Deployment is active. + :vartype active: bool + :ivar instances: Collection of instances belong to the Deployment. + :vartype instances: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentInstance] + """ + + _validation = { + "provisioning_state": {"readonly": True}, + "status": {"readonly": True}, + "instances": {"readonly": True}, + } + + _attribute_map = { + "source": {"key": "source", "type": "UserSourceInfo"}, + "deployment_settings": {"key": "deploymentSettings", "type": "DeploymentSettings"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "active": {"key": "active", "type": "bool"}, + "instances": {"key": "instances", "type": "[DeploymentInstance]"}, + } + + def __init__( + self, + *, + source: Optional["_models.UserSourceInfo"] = None, + deployment_settings: Optional["_models.DeploymentSettings"] = None, + active: Optional[bool] = None, + **kwargs + ): + """ + :keyword source: Uploaded source information of the deployment. + :paramtype source: ~azure.mgmt.appplatform.v2022_11_01_preview.models.UserSourceInfo + :keyword deployment_settings: Deployment settings of the Deployment. + :paramtype deployment_settings: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentSettings + :keyword active: Indicates whether the Deployment is active. + :paramtype active: bool + """ + super().__init__(**kwargs) + self.source = source + self.deployment_settings = deployment_settings + self.provisioning_state = None + self.status = None + self.active = active + self.instances = None + + +class DeploymentSettings(_serialization.Model): + """Deployment settings payload. + + :ivar resource_requests: The requested resource quantity for required CPU and Memory. 
It is
+ recommended to use this field to represent the required CPU and Memory; the old fields cpu
+ and memoryInGB will be deprecated later.
+ :vartype resource_requests: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceRequests
+ :ivar environment_variables: Collection of environment variables.
+ :vartype environment_variables: dict[str, str]
+ :ivar addon_configs: Collection of addons.
+ :vartype addon_configs: dict[str, dict[str, JSON]]
+ :ivar liveness_probe: Periodic probe of App Instance liveness. App Instance will be restarted
+ if the probe fails. More info:
+ https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes.
+ :vartype liveness_probe: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Probe
+ :ivar readiness_probe: Periodic probe of App Instance service readiness. App Instance will be
+ removed from service endpoints if the probe fails. More info:
+ https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes.
+ :vartype readiness_probe: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Probe
+ :ivar startup_probe: StartupProbe indicates that the App Instance has successfully initialized.
+ If specified, no other probes are executed until this completes successfully. If this probe
+ fails, the Pod will be restarted, just as if the livenessProbe failed. This can be used to
+ provide different probe parameters at the beginning of an App Instance's lifecycle, when it
+ might take a long time to load data or warm a cache, than during steady-state operation. This
+ cannot be updated. More info:
+ https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes.
+ :vartype startup_probe: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Probe
+ :ivar termination_grace_period_seconds: Optional duration in seconds the App Instance needs to
+ terminate gracefully. May be decreased in delete request. Value must be non-negative integer.
+ The value zero indicates stop immediately via the kill signal (no opportunity to shut down). If
+ this value is nil, the default grace period will be used instead. The grace period is the
+ duration in seconds after the processes running in the App Instance are sent a termination
+ signal and the time when the processes are forcibly halted with a kill signal. Set this value
+ longer than the expected cleanup time for your process. Defaults to 90 seconds.
+ :vartype termination_grace_period_seconds: int
+ :ivar container_probe_settings: Container liveness and readiness probe settings.
+ :vartype container_probe_settings: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ContainerProbeSettings + """ + + _attribute_map = { + "resource_requests": {"key": "resourceRequests", "type": "ResourceRequests"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "addon_configs": {"key": "addonConfigs", "type": "{{object}}"}, + "liveness_probe": {"key": "livenessProbe", "type": "Probe"}, + "readiness_probe": {"key": "readinessProbe", "type": "Probe"}, + "startup_probe": {"key": "startupProbe", "type": "Probe"}, + "termination_grace_period_seconds": {"key": "terminationGracePeriodSeconds", "type": "int"}, + "container_probe_settings": {"key": "containerProbeSettings", "type": "ContainerProbeSettings"}, + } + + def __init__( + self, + *, + resource_requests: Optional["_models.ResourceRequests"] = None, + environment_variables: Optional[Dict[str, str]] = None, + addon_configs: Optional[Dict[str, Dict[str, JSON]]] = None, + liveness_probe: Optional["_models.Probe"] = None, + readiness_probe: Optional["_models.Probe"] = None, + startup_probe: Optional["_models.Probe"] = None, + termination_grace_period_seconds: int = 90, + container_probe_settings: Optional["_models.ContainerProbeSettings"] = None, + **kwargs + ): + """ + :keyword resource_requests: The requested resource quantity for required CPU and Memory. It is + recommended that using this field to represent the required CPU and Memory, the old field cpu + and memoryInGB will be deprecated later. + :paramtype resource_requests: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceRequests + :keyword environment_variables: Collection of environment variables. + :paramtype environment_variables: dict[str, str] + :keyword addon_configs: Collection of addons. + :paramtype addon_configs: dict[str, dict[str, JSON]] + :keyword liveness_probe: Periodic probe of App Instance liveness. App Instance will be + restarted if the probe fails. More info: + https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes. + :paramtype liveness_probe: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Probe + :keyword readiness_probe: Periodic probe of App Instance service readiness. App Instance will + be removed from service endpoints if the probe fails. More info: + https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes. + :paramtype readiness_probe: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Probe + :keyword startup_probe: StartupProbe indicates that the App Instance has successfully + initialized. If specified, no other probes are executed until this completes successfully. If + this probe fails, the Pod will be restarted, just as if the livenessProbe failed. This can be + used to provide different probe parameters at the beginning of a App Instance's lifecycle, when + it might take a long time to load data or warm a cache, than during steady-state operation. + This cannot be updated. More info: + https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes. + :paramtype startup_probe: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Probe + :keyword termination_grace_period_seconds: Optional duration in seconds the App Instance needs + to terminate gracefully. May be decreased in delete request. Value must be non-negative + integer. The value zero indicates stop immediately via the kill signal (no opportunity to shut + down). If this value is nil, the default grace period will be used instead. 
The grace period is + the duration in seconds after the processes running in the App Instance are sent a termination + signal and the time when the processes are forcibly halted with a kill signal. Set this value + longer than the expected cleanup time for your process. Defaults to 90 seconds. + :paramtype termination_grace_period_seconds: int + :keyword container_probe_settings: Container liveness and readiness probe settings. + :paramtype container_probe_settings: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ContainerProbeSettings + """ + super().__init__(**kwargs) + self.resource_requests = resource_requests + self.environment_variables = environment_variables + self.addon_configs = addon_configs + self.liveness_probe = liveness_probe + self.readiness_probe = readiness_probe + self.startup_probe = startup_probe + self.termination_grace_period_seconds = termination_grace_period_seconds + self.container_probe_settings = container_probe_settings + + +class DevToolPortalFeatureDetail(_serialization.Model): + """Detail settings for Dev Tool Portal feature. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar state: State of the plugin. Known values are: "Enabled" and "Disabled". + :vartype state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalFeatureState + :ivar route: Route path to visit the plugin. + :vartype route: str + """ + + _validation = { + "route": {"readonly": True}, + } + + _attribute_map = { + "state": {"key": "state", "type": "str"}, + "route": {"key": "route", "type": "str"}, + } + + def __init__(self, *, state: Union[str, "_models.DevToolPortalFeatureState"] = "Enabled", **kwargs): + """ + :keyword state: State of the plugin. Known values are: "Enabled" and "Disabled". + :paramtype state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalFeatureState + """ + super().__init__(**kwargs) + self.state = state + self.route = None + + +class DevToolPortalFeatureSettings(_serialization.Model): + """Settings for Dev Tool Portal. + + :ivar application_accelerator: Detail of Accelerator plugin. + :vartype application_accelerator: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalFeatureDetail + :ivar application_live_view: Detail of App Live View plugin. + :vartype application_live_view: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalFeatureDetail + """ + + _attribute_map = { + "application_accelerator": {"key": "applicationAccelerator", "type": "DevToolPortalFeatureDetail"}, + "application_live_view": {"key": "applicationLiveView", "type": "DevToolPortalFeatureDetail"}, + } + + def __init__( + self, + *, + application_accelerator: Optional["_models.DevToolPortalFeatureDetail"] = None, + application_live_view: Optional["_models.DevToolPortalFeatureDetail"] = None, + **kwargs + ): + """ + :keyword application_accelerator: Detail of Accelerator plugin. + :paramtype application_accelerator: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalFeatureDetail + :keyword application_live_view: Detail of App Live View plugin. + :paramtype application_live_view: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalFeatureDetail + """ + super().__init__(**kwargs) + self.application_accelerator = application_accelerator + self.application_live_view = application_live_view + + +class DevToolPortalInstance(_serialization.Model): + """Collection of instances belong to the Dev Tool Portal. 
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Name of the Dev Tool Portal instance. + :vartype name: str + :ivar status: Status of the Dev Tool Portal instance. It can be Pending, Running, Succeeded, + Failed, Unknown. + :vartype status: str + """ + + _validation = { + "name": {"readonly": True}, + "status": {"readonly": True}, + } + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "status": {"key": "status", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.name = None + self.status = None + + +class DevToolPortalProperties(_serialization.Model): + """Dev Tool Portal properties payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provisioning_state: State of the Dev Tool Portal. Known values are: "Creating", + "Updating", "Succeeded", "Failed", "Deleting", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalProvisioningState + :ivar resource_requests: The requested resource quantity for required CPU and Memory. + :vartype resource_requests: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalResourceRequests + :ivar instances: Collection of instances belong to Dev Tool Portal. + :vartype instances: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalInstance] + :ivar public: Indicates whether the resource exposes public endpoint. + :vartype public: bool + :ivar url: URL of the resource, exposed when 'public' is true. + :vartype url: str + :ivar sso_properties: Single sign-on related configuration. + :vartype sso_properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalSsoProperties + :ivar features: Settings for Dev Tool Portal. + :vartype features: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalFeatureSettings + """ + + _validation = { + "provisioning_state": {"readonly": True}, + "resource_requests": {"readonly": True}, + "instances": {"readonly": True}, + "url": {"readonly": True}, + } + + _attribute_map = { + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "resource_requests": {"key": "resourceRequests", "type": "DevToolPortalResourceRequests"}, + "instances": {"key": "instances", "type": "[DevToolPortalInstance]"}, + "public": {"key": "public", "type": "bool"}, + "url": {"key": "url", "type": "str"}, + "sso_properties": {"key": "ssoProperties", "type": "DevToolPortalSsoProperties"}, + "features": {"key": "features", "type": "DevToolPortalFeatureSettings"}, + } + + def __init__( + self, + *, + public: bool = False, + sso_properties: Optional["_models.DevToolPortalSsoProperties"] = None, + features: Optional["_models.DevToolPortalFeatureSettings"] = None, + **kwargs + ): + """ + :keyword public: Indicates whether the resource exposes public endpoint. + :paramtype public: bool + :keyword sso_properties: Single sign-on related configuration. + :paramtype sso_properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalSsoProperties + :keyword features: Settings for Dev Tool Portal. 
+ :paramtype features: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalFeatureSettings + """ + super().__init__(**kwargs) + self.provisioning_state = None + self.resource_requests = None + self.instances = None + self.public = public + self.url = None + self.sso_properties = sso_properties + self.features = features + + +class DevToolPortalResource(ProxyResource): + """Dev Tool Portal resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Dev Tool Portal properties payload. + :vartype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "DevToolPortalProperties"}, + } + + def __init__(self, *, properties: Optional["_models.DevToolPortalProperties"] = None, **kwargs): + """ + :keyword properties: Dev Tool Portal properties payload. + :paramtype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class DevToolPortalResourceCollection(_serialization.Model): + """Object that includes an array of Dev Tool Portal resources and a possible link for next set. + + :ivar value: Collection of Dev Tool Portal resources. + :vartype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalResource] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[DevToolPortalResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, + *, + value: Optional[List["_models.DevToolPortalResource"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword value: Collection of Dev Tool Portal resources. + :paramtype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalResource] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class DevToolPortalResourceRequests(_serialization.Model): + """The resource quantity for required CPU and Memory of Dev Tool Portal. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar cpu: Cpu quantity allocated to each Dev Tool Portal instance. 1 core can be represented + by 1 or 1000m. + :vartype cpu: str + :ivar memory: Memory quantity allocated to each Dev Tool Portal instance. 1 GB can be + represented by 1Gi or 1024Mi. 
+ :vartype memory: str + :ivar instance_count: Desired instance count of Dev Tool Portal. + :vartype instance_count: int + """ + + _validation = { + "cpu": {"readonly": True}, + "memory": {"readonly": True}, + "instance_count": {"readonly": True}, + } + + _attribute_map = { + "cpu": {"key": "cpu", "type": "str"}, + "memory": {"key": "memory", "type": "str"}, + "instance_count": {"key": "instanceCount", "type": "int"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.cpu = None + self.memory = None + self.instance_count = None + + +class DevToolPortalSsoProperties(_serialization.Model): + """Single sign-on related configuration. + + :ivar scopes: It defines the specific actions applications can be allowed to do on a user's + behalf. + :vartype scopes: list[str] + :ivar client_id: The public identifier for the application. + :vartype client_id: str + :ivar client_secret: The secret known only to the application and the authorization server. + :vartype client_secret: str + :ivar metadata_url: The URI of a JSON file with generic OIDC provider configuration. + :vartype metadata_url: str + """ + + _attribute_map = { + "scopes": {"key": "scopes", "type": "[str]"}, + "client_id": {"key": "clientId", "type": "str"}, + "client_secret": {"key": "clientSecret", "type": "str"}, + "metadata_url": {"key": "metadataUrl", "type": "str"}, + } + + def __init__( + self, + *, + scopes: Optional[List[str]] = None, + client_id: Optional[str] = None, + client_secret: Optional[str] = None, + metadata_url: Optional[str] = None, + **kwargs + ): + """ + :keyword scopes: It defines the specific actions applications can be allowed to do on a user's + behalf. + :paramtype scopes: list[str] + :keyword client_id: The public identifier for the application. + :paramtype client_id: str + :keyword client_secret: The secret known only to the application and the authorization server. + :paramtype client_secret: str + :keyword metadata_url: The URI of a JSON file with generic OIDC provider configuration. + :paramtype metadata_url: str + """ + super().__init__(**kwargs) + self.scopes = scopes + self.client_id = client_id + self.client_secret = client_secret + self.metadata_url = metadata_url + + +class DiagnosticParameters(_serialization.Model): + """Diagnostic parameters of diagnostic operations. + + :ivar app_instance: App instance name. + :vartype app_instance: str + :ivar file_path: Your target file path in your own BYOS. + :vartype file_path: str + :ivar duration: Duration of your JFR. 1 min can be represented by 1m or 60s. + :vartype duration: str + """ + + _attribute_map = { + "app_instance": {"key": "appInstance", "type": "str"}, + "file_path": {"key": "filePath", "type": "str"}, + "duration": {"key": "duration", "type": "str"}, + } + + def __init__( + self, + *, + app_instance: Optional[str] = None, + file_path: Optional[str] = None, + duration: Optional[str] = None, + **kwargs + ): + """ + :keyword app_instance: App instance name. + :paramtype app_instance: str + :keyword file_path: Your target file path in your own BYOS. + :paramtype file_path: str + :keyword duration: Duration of your JFR. 1 min can be represented by 1m or 60s. + :paramtype duration: str + """ + super().__init__(**kwargs) + self.app_instance = app_instance + self.file_path = file_path + self.duration = duration + + +class Error(_serialization.Model): + """The error code compose of code and message. + + :ivar code: The code of error. + :vartype code: str + :ivar message: The message of error. 
+ :vartype message: str + """ + + _attribute_map = { + "code": {"key": "code", "type": "str"}, + "message": {"key": "message", "type": "str"}, + } + + def __init__(self, *, code: Optional[str] = None, message: Optional[str] = None, **kwargs): + """ + :keyword code: The code of error. + :paramtype code: str + :keyword message: The message of error. + :paramtype message: str + """ + super().__init__(**kwargs) + self.code = code + self.message = message + + +class ProbeAction(_serialization.Model): + """The action of the probe. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ExecAction, HTTPGetAction, TCPSocketAction + + All required parameters must be populated in order to send to Azure. + + :ivar type: The type of the action to take to perform the health check. Required. Known values + are: "HTTPGetAction", "TCPSocketAction", and "ExecAction". + :vartype type: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.ProbeActionType + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + } + + _subtype_map = { + "type": {"ExecAction": "ExecAction", "HTTPGetAction": "HTTPGetAction", "TCPSocketAction": "TCPSocketAction"} + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.type = None # type: Optional[str] + + +class ExecAction(ProbeAction): + """ExecAction describes a "run in container" action. + + All required parameters must be populated in order to send to Azure. + + :ivar type: The type of the action to take to perform the health check. Required. Known values + are: "HTTPGetAction", "TCPSocketAction", and "ExecAction". + :vartype type: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.ProbeActionType + :ivar command: Command is the command line to execute inside the container, the working + directory for the command is root ('/') in the container's filesystem. The command is not run + inside a shell, so traditional shell instructions ('|', etc) won't work. To use a shell, you + need to explicitly call out to that shell. Exit status of 0 is treated as live/healthy and + non-zero is unhealthy. + :vartype command: list[str] + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "command": {"key": "command", "type": "[str]"}, + } + + def __init__(self, *, command: Optional[List[str]] = None, **kwargs): + """ + :keyword command: Command is the command line to execute inside the container, the working + directory for the command is root ('/') in the container's filesystem. The command is not run + inside a shell, so traditional shell instructions ('|', etc) won't work. To use a shell, you + need to explicitly call out to that shell. Exit status of 0 is treated as live/healthy and + non-zero is unhealthy. + :paramtype command: list[str] + """ + super().__init__(**kwargs) + self.type = "ExecAction" # type: str + self.command = command + + +class GatewayApiMetadataProperties(_serialization.Model): + """API metadata property for Spring Cloud Gateway. + + :ivar title: Title describing the context of the APIs available on the Gateway instance + (default: ``Spring Cloud Gateway for K8S``\ ). + :vartype title: str + :ivar description: Detailed description of the APIs available on the Gateway instance (default: + ``Generated OpenAPI 3 document that describes the API routes configured.``\ ). 
+ :vartype description: str + :ivar documentation: Location of additional documentation for the APIs available on the Gateway + instance. + :vartype documentation: str + :ivar version: Version of APIs available on this Gateway instance (default: ``unspecified``\ ). + :vartype version: str + :ivar server_url: Base URL that API consumers will use to access APIs on the Gateway instance. + :vartype server_url: str + """ + + _attribute_map = { + "title": {"key": "title", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "documentation": {"key": "documentation", "type": "str"}, + "version": {"key": "version", "type": "str"}, + "server_url": {"key": "serverUrl", "type": "str"}, + } + + def __init__( + self, + *, + title: Optional[str] = None, + description: Optional[str] = None, + documentation: Optional[str] = None, + version: Optional[str] = None, + server_url: Optional[str] = None, + **kwargs + ): + """ + :keyword title: Title describing the context of the APIs available on the Gateway instance + (default: ``Spring Cloud Gateway for K8S``\ ). + :paramtype title: str + :keyword description: Detailed description of the APIs available on the Gateway instance + (default: ``Generated OpenAPI 3 document that describes the API routes configured.``\ ). + :paramtype description: str + :keyword documentation: Location of additional documentation for the APIs available on the + Gateway instance. + :paramtype documentation: str + :keyword version: Version of APIs available on this Gateway instance (default: ``unspecified``\ + ). + :paramtype version: str + :keyword server_url: Base URL that API consumers will use to access APIs on the Gateway + instance. + :paramtype server_url: str + """ + super().__init__(**kwargs) + self.title = title + self.description = description + self.documentation = documentation + self.version = version + self.server_url = server_url + + +class GatewayApiRoute(_serialization.Model): + """API route config of the Spring Cloud Gateway. + + :ivar title: A title, will be applied to methods in the generated OpenAPI documentation. + :vartype title: str + :ivar description: A description, will be applied to methods in the generated OpenAPI + documentation. + :vartype description: str + :ivar uri: Full uri, will override ``appName``. + :vartype uri: str + :ivar sso_enabled: Enable sso validation. + :vartype sso_enabled: bool + :ivar token_relay: Pass currently-authenticated user's identity token to application service, + default is 'false'. + :vartype token_relay: bool + :ivar predicates: A number of conditions to evaluate a route for each request. Each predicate + may be evaluated against request headers and parameter values. All of the predicates associated + with a route must evaluate to true for the route to be matched to the request. + :vartype predicates: list[str] + :ivar filters: To modify the request before sending it to the target endpoint, or the received + response. + :vartype filters: list[str] + :ivar order: Route processing order. + :vartype order: int + :ivar tags: Classification tags, will be applied to methods in the generated OpenAPI + documentation. 
+ :vartype tags: list[str] + """ + + _attribute_map = { + "title": {"key": "title", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + "sso_enabled": {"key": "ssoEnabled", "type": "bool"}, + "token_relay": {"key": "tokenRelay", "type": "bool"}, + "predicates": {"key": "predicates", "type": "[str]"}, + "filters": {"key": "filters", "type": "[str]"}, + "order": {"key": "order", "type": "int"}, + "tags": {"key": "tags", "type": "[str]"}, + } + + def __init__( + self, + *, + title: Optional[str] = None, + description: Optional[str] = None, + uri: Optional[str] = None, + sso_enabled: Optional[bool] = None, + token_relay: Optional[bool] = None, + predicates: Optional[List[str]] = None, + filters: Optional[List[str]] = None, + order: Optional[int] = None, + tags: Optional[List[str]] = None, + **kwargs + ): + """ + :keyword title: A title, will be applied to methods in the generated OpenAPI documentation. + :paramtype title: str + :keyword description: A description, will be applied to methods in the generated OpenAPI + documentation. + :paramtype description: str + :keyword uri: Full uri, will override ``appName``. + :paramtype uri: str + :keyword sso_enabled: Enable sso validation. + :paramtype sso_enabled: bool + :keyword token_relay: Pass currently-authenticated user's identity token to application + service, default is 'false'. + :paramtype token_relay: bool + :keyword predicates: A number of conditions to evaluate a route for each request. Each + predicate may be evaluated against request headers and parameter values. All of the predicates + associated with a route must evaluate to true for the route to be matched to the request. + :paramtype predicates: list[str] + :keyword filters: To modify the request before sending it to the target endpoint, or the + received response. + :paramtype filters: list[str] + :keyword order: Route processing order. + :paramtype order: int + :keyword tags: Classification tags, will be applied to methods in the generated OpenAPI + documentation. + :paramtype tags: list[str] + """ + super().__init__(**kwargs) + self.title = title + self.description = description + self.uri = uri + self.sso_enabled = sso_enabled + self.token_relay = token_relay + self.predicates = predicates + self.filters = filters + self.order = order + self.tags = tags + + +class GatewayCorsProperties(_serialization.Model): + """Cross-Origin Resource Sharing property. + + :ivar allowed_origins: Allowed origins to make cross-site requests. The special value ``*`` + allows all domains. + :vartype allowed_origins: list[str] + :ivar allowed_methods: Allowed HTTP methods on cross-site requests. The special value ``*`` + allows all methods. If not set, ``GET`` and ``HEAD`` are allowed by default. + :vartype allowed_methods: list[str] + :ivar allowed_headers: Allowed headers in cross-site requests. The special value ``*`` allows + actual requests to send any header. + :vartype allowed_headers: list[str] + :ivar max_age: How long, in seconds, the response from a pre-flight request can be cached by + clients. + :vartype max_age: int + :ivar allow_credentials: Whether user credentials are supported on cross-site requests. Valid + values: ``true``\ , ``false``. + :vartype allow_credentials: bool + :ivar exposed_headers: HTTP response headers to expose for cross-site requests. 
+ :vartype exposed_headers: list[str] + """ + + _attribute_map = { + "allowed_origins": {"key": "allowedOrigins", "type": "[str]"}, + "allowed_methods": {"key": "allowedMethods", "type": "[str]"}, + "allowed_headers": {"key": "allowedHeaders", "type": "[str]"}, + "max_age": {"key": "maxAge", "type": "int"}, + "allow_credentials": {"key": "allowCredentials", "type": "bool"}, + "exposed_headers": {"key": "exposedHeaders", "type": "[str]"}, + } + + def __init__( + self, + *, + allowed_origins: Optional[List[str]] = None, + allowed_methods: Optional[List[str]] = None, + allowed_headers: Optional[List[str]] = None, + max_age: Optional[int] = None, + allow_credentials: Optional[bool] = None, + exposed_headers: Optional[List[str]] = None, + **kwargs + ): + """ + :keyword allowed_origins: Allowed origins to make cross-site requests. The special value ``*`` + allows all domains. + :paramtype allowed_origins: list[str] + :keyword allowed_methods: Allowed HTTP methods on cross-site requests. The special value ``*`` + allows all methods. If not set, ``GET`` and ``HEAD`` are allowed by default. + :paramtype allowed_methods: list[str] + :keyword allowed_headers: Allowed headers in cross-site requests. The special value ``*`` + allows actual requests to send any header. + :paramtype allowed_headers: list[str] + :keyword max_age: How long, in seconds, the response from a pre-flight request can be cached by + clients. + :paramtype max_age: int + :keyword allow_credentials: Whether user credentials are supported on cross-site requests. + Valid values: ``true``\ , ``false``. + :paramtype allow_credentials: bool + :keyword exposed_headers: HTTP response headers to expose for cross-site requests. + :paramtype exposed_headers: list[str] + """ + super().__init__(**kwargs) + self.allowed_origins = allowed_origins + self.allowed_methods = allowed_methods + self.allowed_headers = allowed_headers + self.max_age = max_age + self.allow_credentials = allow_credentials + self.exposed_headers = exposed_headers + + +class GatewayCustomDomainProperties(_serialization.Model): + """The properties of custom domain for Spring Cloud Gateway. + + :ivar thumbprint: The thumbprint of bound certificate. + :vartype thumbprint: str + """ + + _attribute_map = { + "thumbprint": {"key": "thumbprint", "type": "str"}, + } + + def __init__(self, *, thumbprint: Optional[str] = None, **kwargs): + """ + :keyword thumbprint: The thumbprint of bound certificate. + :paramtype thumbprint: str + """ + super().__init__(**kwargs) + self.thumbprint = thumbprint + + +class GatewayCustomDomainResource(ProxyResource): + """Custom domain of the Spring Cloud Gateway. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: The properties of custom domain for Spring Cloud Gateway. 
+ :vartype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayCustomDomainProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "GatewayCustomDomainProperties"}, + } + + def __init__(self, *, properties: Optional["_models.GatewayCustomDomainProperties"] = None, **kwargs): + """ + :keyword properties: The properties of custom domain for Spring Cloud Gateway. + :paramtype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayCustomDomainProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class GatewayCustomDomainResourceCollection(_serialization.Model): + """Object that includes an array of Spring Cloud Gateway custom domain resources and a possible link for next set. + + :ivar value: Collection of Spring Cloud Gateway custom domain resources. + :vartype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayCustomDomainResource] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[GatewayCustomDomainResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, + *, + value: Optional[List["_models.GatewayCustomDomainResource"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword value: Collection of Spring Cloud Gateway custom domain resources. + :paramtype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayCustomDomainResource] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class GatewayInstance(_serialization.Model): + """Collection of instances belong to the Spring Cloud Gateway. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Name of the Spring Cloud Gateway instance. + :vartype name: str + :ivar status: Status of the Spring Cloud Gateway instance. + :vartype status: str + """ + + _validation = { + "name": {"readonly": True}, + "status": {"readonly": True}, + } + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "status": {"key": "status", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.name = None + self.status = None + + +class GatewayOperatorProperties(_serialization.Model): + """Properties of the Spring Cloud Gateway Operator. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar resource_requests: The requested resource quantity for required CPU and Memory. + :vartype resource_requests: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayOperatorResourceRequests + :ivar instances: Collection of instances belong to Spring Cloud Gateway operator. 
+ :vartype instances: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayInstance] + """ + + _validation = { + "resource_requests": {"readonly": True}, + "instances": {"readonly": True}, + } + + _attribute_map = { + "resource_requests": {"key": "resourceRequests", "type": "GatewayOperatorResourceRequests"}, + "instances": {"key": "instances", "type": "[GatewayInstance]"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.resource_requests = None + self.instances = None + + +class GatewayOperatorResourceRequests(_serialization.Model): + """Properties of the Spring Cloud Gateway Operator. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar cpu: Cpu allocated to each Spring Cloud Gateway Operator instance. + :vartype cpu: str + :ivar memory: Memory allocated to each Spring Cloud Gateway Operator instance. + :vartype memory: str + :ivar instance_count: Instance count of the Spring Cloud Gateway Operator. + :vartype instance_count: int + """ + + _validation = { + "cpu": {"readonly": True}, + "memory": {"readonly": True}, + "instance_count": {"readonly": True}, + } + + _attribute_map = { + "cpu": {"key": "cpu", "type": "str"}, + "memory": {"key": "memory", "type": "str"}, + "instance_count": {"key": "instanceCount", "type": "int"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.cpu = None + self.memory = None + self.instance_count = None + + +class GatewayProperties(_serialization.Model): # pylint: disable=too-many-instance-attributes + """Spring Cloud Gateway properties payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provisioning_state: State of the Spring Cloud Gateway. Known values are: "Creating", + "Updating", "Succeeded", "Failed", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayProvisioningState + :ivar public: Indicates whether the Spring Cloud Gateway exposes endpoint. + :vartype public: bool + :ivar url: URL of the Spring Cloud Gateway, exposed when 'public' is true. + :vartype url: str + :ivar https_only: Indicate if only https is allowed. + :vartype https_only: bool + :ivar sso_properties: Single sign-on related configuration. + :vartype sso_properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SsoProperties + :ivar api_metadata_properties: API metadata property for Spring Cloud Gateway. + :vartype api_metadata_properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayApiMetadataProperties + :ivar cors_properties: Cross-Origin Resource Sharing property. + :vartype cors_properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayCorsProperties + :ivar apm_types: Collection of APM type used in Spring Cloud Gateway. + :vartype apm_types: list[str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApmType] + :ivar environment_variables: Environment variables of Spring Cloud Gateway. + :vartype environment_variables: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayPropertiesEnvironmentVariables + :ivar resource_requests: The requested resource quantity for required CPU and Memory. + :vartype resource_requests: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayResourceRequests + :ivar instances: Collection of instances belong to Spring Cloud Gateway. 
+ :vartype instances: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayInstance] + :ivar operator_properties: Properties of the Spring Cloud Gateway Operator. + :vartype operator_properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayOperatorProperties + """ + + _validation = { + "provisioning_state": {"readonly": True}, + "url": {"readonly": True}, + "apm_types": {"unique": True}, + "instances": {"readonly": True}, + "operator_properties": {"readonly": True}, + } + + _attribute_map = { + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "public": {"key": "public", "type": "bool"}, + "url": {"key": "url", "type": "str"}, + "https_only": {"key": "httpsOnly", "type": "bool"}, + "sso_properties": {"key": "ssoProperties", "type": "SsoProperties"}, + "api_metadata_properties": {"key": "apiMetadataProperties", "type": "GatewayApiMetadataProperties"}, + "cors_properties": {"key": "corsProperties", "type": "GatewayCorsProperties"}, + "apm_types": {"key": "apmTypes", "type": "[str]"}, + "environment_variables": {"key": "environmentVariables", "type": "GatewayPropertiesEnvironmentVariables"}, + "resource_requests": {"key": "resourceRequests", "type": "GatewayResourceRequests"}, + "instances": {"key": "instances", "type": "[GatewayInstance]"}, + "operator_properties": {"key": "operatorProperties", "type": "GatewayOperatorProperties"}, + } + + def __init__( + self, + *, + public: bool = False, + https_only: bool = False, + sso_properties: Optional["_models.SsoProperties"] = None, + api_metadata_properties: Optional["_models.GatewayApiMetadataProperties"] = None, + cors_properties: Optional["_models.GatewayCorsProperties"] = None, + apm_types: Optional[List[Union[str, "_models.ApmType"]]] = None, + environment_variables: Optional["_models.GatewayPropertiesEnvironmentVariables"] = None, + resource_requests: Optional["_models.GatewayResourceRequests"] = None, + **kwargs + ): + """ + :keyword public: Indicates whether the Spring Cloud Gateway exposes endpoint. + :paramtype public: bool + :keyword https_only: Indicate if only https is allowed. + :paramtype https_only: bool + :keyword sso_properties: Single sign-on related configuration. + :paramtype sso_properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SsoProperties + :keyword api_metadata_properties: API metadata property for Spring Cloud Gateway. + :paramtype api_metadata_properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayApiMetadataProperties + :keyword cors_properties: Cross-Origin Resource Sharing property. + :paramtype cors_properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayCorsProperties + :keyword apm_types: Collection of APM type used in Spring Cloud Gateway. + :paramtype apm_types: list[str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApmType] + :keyword environment_variables: Environment variables of Spring Cloud Gateway. + :paramtype environment_variables: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayPropertiesEnvironmentVariables + :keyword resource_requests: The requested resource quantity for required CPU and Memory. 
+ :paramtype resource_requests: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayResourceRequests + """ + super().__init__(**kwargs) + self.provisioning_state = None + self.public = public + self.url = None + self.https_only = https_only + self.sso_properties = sso_properties + self.api_metadata_properties = api_metadata_properties + self.cors_properties = cors_properties + self.apm_types = apm_types + self.environment_variables = environment_variables + self.resource_requests = resource_requests + self.instances = None + self.operator_properties = None + + +class GatewayPropertiesEnvironmentVariables(_serialization.Model): + """Environment variables of Spring Cloud Gateway. + + :ivar properties: Non-sensitive properties. + :vartype properties: dict[str, str] + :ivar secrets: Sensitive properties. + :vartype secrets: dict[str, str] + """ + + _attribute_map = { + "properties": {"key": "properties", "type": "{str}"}, + "secrets": {"key": "secrets", "type": "{str}"}, + } + + def __init__( + self, *, properties: Optional[Dict[str, str]] = None, secrets: Optional[Dict[str, str]] = None, **kwargs + ): + """ + :keyword properties: Non-sensitive properties. + :paramtype properties: dict[str, str] + :keyword secrets: Sensitive properties. + :paramtype secrets: dict[str, str] + """ + super().__init__(**kwargs) + self.properties = properties + self.secrets = secrets + + +class GatewayResource(ProxyResource): + """Spring Cloud Gateway resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Spring Cloud Gateway properties payload. + :vartype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayProperties + :ivar sku: Sku of the Spring Cloud Gateway resource. + :vartype sku: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Sku + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "GatewayProperties"}, + "sku": {"key": "sku", "type": "Sku"}, + } + + def __init__( + self, *, properties: Optional["_models.GatewayProperties"] = None, sku: Optional["_models.Sku"] = None, **kwargs + ): + """ + :keyword properties: Spring Cloud Gateway properties payload. + :paramtype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayProperties + :keyword sku: Sku of the Spring Cloud Gateway resource. + :paramtype sku: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Sku + """ + super().__init__(**kwargs) + self.properties = properties + self.sku = sku + + +class GatewayResourceCollection(_serialization.Model): + """Object that includes an array of gateway resources and a possible link for next set. + + :ivar value: Collection of gateway resources. 
+ :vartype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayResource] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[GatewayResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, *, value: Optional[List["_models.GatewayResource"]] = None, next_link: Optional[str] = None, **kwargs + ): + """ + :keyword value: Collection of gateway resources. + :paramtype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayResource] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class GatewayResourceRequests(_serialization.Model): + """Resource request payload of Spring Cloud Gateway. + + :ivar cpu: Cpu allocated to each Spring Cloud Gateway instance. + :vartype cpu: str + :ivar memory: Memory allocated to each Spring Cloud Gateway instance. + :vartype memory: str + """ + + _attribute_map = { + "cpu": {"key": "cpu", "type": "str"}, + "memory": {"key": "memory", "type": "str"}, + } + + def __init__(self, *, cpu: str = "1", memory: str = "2Gi", **kwargs): + """ + :keyword cpu: Cpu allocated to each Spring Cloud Gateway instance. + :paramtype cpu: str + :keyword memory: Memory allocated to each Spring Cloud Gateway instance. + :paramtype memory: str + """ + super().__init__(**kwargs) + self.cpu = cpu + self.memory = memory + + +class GatewayRouteConfigOpenApiProperties(_serialization.Model): + """OpenAPI properties of Spring Cloud Gateway route config. + + :ivar uri: The URI of OpenAPI specification. + :vartype uri: str + """ + + _attribute_map = { + "uri": {"key": "uri", "type": "str"}, + } + + def __init__(self, *, uri: Optional[str] = None, **kwargs): + """ + :keyword uri: The URI of OpenAPI specification. + :paramtype uri: str + """ + super().__init__(**kwargs) + self.uri = uri + + +class GatewayRouteConfigProperties(_serialization.Model): + """API route config of the Spring Cloud Gateway. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provisioning_state: State of the Spring Cloud Gateway route config. Known values are: + "Creating", "Updating", "Succeeded", "Failed", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayProvisioningState + :ivar app_resource_id: The resource Id of the Azure Spring Apps app, required unless route + defines ``uri``. + :vartype app_resource_id: str + :ivar open_api: OpenAPI properties of Spring Cloud Gateway route config. + :vartype open_api: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigOpenApiProperties + :ivar protocol: Protocol of routed Azure Spring Apps applications. Known values are: "HTTP" and + "HTTPS". + :vartype protocol: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigProtocol + :ivar routes: Array of API routes, each route contains properties such as ``title``\ , ``uri``\ + , ``ssoEnabled``\ , ``predicates``\ , ``filters``. + :vartype routes: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayApiRoute] + :ivar sso_enabled: Enable Single Sign-On in app level. 
+ :vartype sso_enabled: bool + :ivar predicates: A number of conditions to evaluate a route for each request in app level. + Each predicate may be evaluated against request headers and parameter values. All of the + predicates associated with a route must evaluate to true for the route to be matched to the + request. + :vartype predicates: list[str] + :ivar filters: To modify the request before sending it to the target endpoint, or the received + response in app level. + :vartype filters: list[str] + """ + + _validation = { + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "app_resource_id": {"key": "appResourceId", "type": "str"}, + "open_api": {"key": "openApi", "type": "GatewayRouteConfigOpenApiProperties"}, + "protocol": {"key": "protocol", "type": "str"}, + "routes": {"key": "routes", "type": "[GatewayApiRoute]"}, + "sso_enabled": {"key": "ssoEnabled", "type": "bool"}, + "predicates": {"key": "predicates", "type": "[str]"}, + "filters": {"key": "filters", "type": "[str]"}, + } + + def __init__( + self, + *, + app_resource_id: Optional[str] = None, + open_api: Optional["_models.GatewayRouteConfigOpenApiProperties"] = None, + protocol: Union[str, "_models.GatewayRouteConfigProtocol"] = "HTTP", + routes: Optional[List["_models.GatewayApiRoute"]] = None, + sso_enabled: Optional[bool] = None, + predicates: Optional[List[str]] = None, + filters: Optional[List[str]] = None, + **kwargs + ): + """ + :keyword app_resource_id: The resource Id of the Azure Spring Apps app, required unless route + defines ``uri``. + :paramtype app_resource_id: str + :keyword open_api: OpenAPI properties of Spring Cloud Gateway route config. + :paramtype open_api: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigOpenApiProperties + :keyword protocol: Protocol of routed Azure Spring Apps applications. Known values are: "HTTP" + and "HTTPS". + :paramtype protocol: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigProtocol + :keyword routes: Array of API routes, each route contains properties such as ``title``\ , + ``uri``\ , ``ssoEnabled``\ , ``predicates``\ , ``filters``. + :paramtype routes: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayApiRoute] + :keyword sso_enabled: Enable Single Sign-On in app level. + :paramtype sso_enabled: bool + :keyword predicates: A number of conditions to evaluate a route for each request in app level. + Each predicate may be evaluated against request headers and parameter values. All of the + predicates associated with a route must evaluate to true for the route to be matched to the + request. + :paramtype predicates: list[str] + :keyword filters: To modify the request before sending it to the target endpoint, or the + received response in app level. + :paramtype filters: list[str] + """ + super().__init__(**kwargs) + self.provisioning_state = None + self.app_resource_id = app_resource_id + self.open_api = open_api + self.protocol = protocol + self.routes = routes + self.sso_enabled = sso_enabled + self.predicates = predicates + self.filters = filters + + +class GatewayRouteConfigResource(ProxyResource): + """Spring Cloud Gateway route config resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. 
+ :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: API route config of the Spring Cloud Gateway. + :vartype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "GatewayRouteConfigProperties"}, + } + + def __init__(self, *, properties: Optional["_models.GatewayRouteConfigProperties"] = None, **kwargs): + """ + :keyword properties: API route config of the Spring Cloud Gateway. + :paramtype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class GatewayRouteConfigResourceCollection(_serialization.Model): + """Object that includes an array of Spring Cloud Gateway route config resources and a possible link for next set. + + :ivar value: Collection of Spring Cloud Gateway route config resources. + :vartype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigResource] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[GatewayRouteConfigResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, + *, + value: Optional[List["_models.GatewayRouteConfigResource"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword value: Collection of Spring Cloud Gateway route config resources. + :paramtype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigResource] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class GitPatternRepository(_serialization.Model): # pylint: disable=too-many-instance-attributes + """Git repository property payload for config server. + + All required parameters must be populated in order to send to Azure. + + :ivar name: Name of the repository. Required. + :vartype name: str + :ivar pattern: Collection of pattern of the repository. + :vartype pattern: list[str] + :ivar uri: URI of the repository. Required. + :vartype uri: str + :ivar label: Label of the repository. + :vartype label: str + :ivar search_paths: Searching path of the repository. + :vartype search_paths: list[str] + :ivar username: Username of git repository basic auth. + :vartype username: str + :ivar password: Password of git repository basic auth. + :vartype password: str + :ivar host_key: Public sshKey of git repository. + :vartype host_key: str + :ivar host_key_algorithm: SshKey algorithm of git repository. + :vartype host_key_algorithm: str + :ivar private_key: Private sshKey algorithm of git repository. + :vartype private_key: str + :ivar strict_host_key_checking: Strict host key checking or not. 
+ :vartype strict_host_key_checking: bool + """ + + _validation = { + "name": {"required": True}, + "uri": {"required": True}, + } + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "pattern": {"key": "pattern", "type": "[str]"}, + "uri": {"key": "uri", "type": "str"}, + "label": {"key": "label", "type": "str"}, + "search_paths": {"key": "searchPaths", "type": "[str]"}, + "username": {"key": "username", "type": "str"}, + "password": {"key": "password", "type": "str"}, + "host_key": {"key": "hostKey", "type": "str"}, + "host_key_algorithm": {"key": "hostKeyAlgorithm", "type": "str"}, + "private_key": {"key": "privateKey", "type": "str"}, + "strict_host_key_checking": {"key": "strictHostKeyChecking", "type": "bool"}, + } + + def __init__( + self, + *, + name: str, + uri: str, + pattern: Optional[List[str]] = None, + label: Optional[str] = None, + search_paths: Optional[List[str]] = None, + username: Optional[str] = None, + password: Optional[str] = None, + host_key: Optional[str] = None, + host_key_algorithm: Optional[str] = None, + private_key: Optional[str] = None, + strict_host_key_checking: Optional[bool] = None, + **kwargs + ): + """ + :keyword name: Name of the repository. Required. + :paramtype name: str + :keyword pattern: Collection of pattern of the repository. + :paramtype pattern: list[str] + :keyword uri: URI of the repository. Required. + :paramtype uri: str + :keyword label: Label of the repository. + :paramtype label: str + :keyword search_paths: Searching path of the repository. + :paramtype search_paths: list[str] + :keyword username: Username of git repository basic auth. + :paramtype username: str + :keyword password: Password of git repository basic auth. + :paramtype password: str + :keyword host_key: Public sshKey of git repository. + :paramtype host_key: str + :keyword host_key_algorithm: SshKey algorithm of git repository. + :paramtype host_key_algorithm: str + :keyword private_key: Private sshKey algorithm of git repository. + :paramtype private_key: str + :keyword strict_host_key_checking: Strict host key checking or not. + :paramtype strict_host_key_checking: bool + """ + super().__init__(**kwargs) + self.name = name + self.pattern = pattern + self.uri = uri + self.label = label + self.search_paths = search_paths + self.username = username + self.password = password + self.host_key = host_key + self.host_key_algorithm = host_key_algorithm + self.private_key = private_key + self.strict_host_key_checking = strict_host_key_checking + + +class HTTPGetAction(ProbeAction): + """HTTPGetAction describes an action based on HTTP Get requests. + + All required parameters must be populated in order to send to Azure. + + :ivar type: The type of the action to take to perform the health check. Required. Known values + are: "HTTPGetAction", "TCPSocketAction", and "ExecAction". + :vartype type: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.ProbeActionType + :ivar path: Path to access on the HTTP server. + :vartype path: str + :ivar scheme: Scheme to use for connecting to the host. Defaults to HTTP. + + Possible enum values: + + + * ``"HTTP"`` means that the scheme used will be http:// + * ``"HTTPS"`` means that the scheme used will be https://. Known values are: "HTTP" and + "HTTPS". 
+ :vartype scheme: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.HTTPSchemeType + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "path": {"key": "path", "type": "str"}, + "scheme": {"key": "scheme", "type": "str"}, + } + + def __init__( + self, *, path: Optional[str] = None, scheme: Optional[Union[str, "_models.HTTPSchemeType"]] = None, **kwargs + ): + """ + :keyword path: Path to access on the HTTP server. + :paramtype path: str + :keyword scheme: Scheme to use for connecting to the host. Defaults to HTTP. + + Possible enum values: + + + * ``"HTTP"`` means that the scheme used will be http:// + * ``"HTTPS"`` means that the scheme used will be https://. Known values are: "HTTP" and + "HTTPS". + :paramtype scheme: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.HTTPSchemeType + """ + super().__init__(**kwargs) + self.type = "HTTPGetAction" # type: str + self.path = path + self.scheme = scheme + + +class ImageRegistryCredential(_serialization.Model): + """Credential of the image registry. + + :ivar username: The username of the image registry credential. + :vartype username: str + :ivar password: The password of the image registry credential. + :vartype password: str + """ + + _attribute_map = { + "username": {"key": "username", "type": "str"}, + "password": {"key": "password", "type": "str"}, + } + + def __init__(self, *, username: Optional[str] = None, password: Optional[str] = None, **kwargs): + """ + :keyword username: The username of the image registry credential. + :paramtype username: str + :keyword password: The password of the image registry credential. + :paramtype password: str + """ + super().__init__(**kwargs) + self.username = username + self.password = password + + +class IngressConfig(_serialization.Model): + """Ingress configuration payload for Azure Spring Apps resource. + + :ivar read_timeout_in_seconds: Ingress read time out in seconds. + :vartype read_timeout_in_seconds: int + """ + + _attribute_map = { + "read_timeout_in_seconds": {"key": "readTimeoutInSeconds", "type": "int"}, + } + + def __init__(self, *, read_timeout_in_seconds: Optional[int] = None, **kwargs): + """ + :keyword read_timeout_in_seconds: Ingress read time out in seconds. + :paramtype read_timeout_in_seconds: int + """ + super().__init__(**kwargs) + self.read_timeout_in_seconds = read_timeout_in_seconds + + +class IngressSettings(_serialization.Model): + """App ingress settings payload. + + :ivar read_timeout_in_seconds: Ingress read time out in seconds. + :vartype read_timeout_in_seconds: int + :ivar send_timeout_in_seconds: Ingress send time out in seconds. + :vartype send_timeout_in_seconds: int + :ivar session_affinity: Type of the affinity, set this to Cookie to enable session affinity. + Known values are: "Cookie" and "None". + :vartype session_affinity: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.SessionAffinity + :ivar session_cookie_max_age: Time in seconds until the cookie expires. + :vartype session_cookie_max_age: int + :ivar backend_protocol: How ingress should communicate with this app backend service. Known + values are: "GRPC" and "Default". + :vartype backend_protocol: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BackendProtocol + :ivar client_auth: Client-Certification Authentication. 
+ :vartype client_auth: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.IngressSettingsClientAuth + """ + + _attribute_map = { + "read_timeout_in_seconds": {"key": "readTimeoutInSeconds", "type": "int"}, + "send_timeout_in_seconds": {"key": "sendTimeoutInSeconds", "type": "int"}, + "session_affinity": {"key": "sessionAffinity", "type": "str"}, + "session_cookie_max_age": {"key": "sessionCookieMaxAge", "type": "int"}, + "backend_protocol": {"key": "backendProtocol", "type": "str"}, + "client_auth": {"key": "clientAuth", "type": "IngressSettingsClientAuth"}, + } + + def __init__( + self, + *, + read_timeout_in_seconds: Optional[int] = None, + send_timeout_in_seconds: Optional[int] = None, + session_affinity: Optional[Union[str, "_models.SessionAffinity"]] = None, + session_cookie_max_age: Optional[int] = None, + backend_protocol: Optional[Union[str, "_models.BackendProtocol"]] = None, + client_auth: Optional["_models.IngressSettingsClientAuth"] = None, + **kwargs + ): + """ + :keyword read_timeout_in_seconds: Ingress read time out in seconds. + :paramtype read_timeout_in_seconds: int + :keyword send_timeout_in_seconds: Ingress send time out in seconds. + :paramtype send_timeout_in_seconds: int + :keyword session_affinity: Type of the affinity, set this to Cookie to enable session affinity. + Known values are: "Cookie" and "None". + :paramtype session_affinity: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.SessionAffinity + :keyword session_cookie_max_age: Time in seconds until the cookie expires. + :paramtype session_cookie_max_age: int + :keyword backend_protocol: How ingress should communicate with this app backend service. Known + values are: "GRPC" and "Default". + :paramtype backend_protocol: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BackendProtocol + :keyword client_auth: Client-Certification Authentication. + :paramtype client_auth: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.IngressSettingsClientAuth + """ + super().__init__(**kwargs) + self.read_timeout_in_seconds = read_timeout_in_seconds + self.send_timeout_in_seconds = send_timeout_in_seconds + self.session_affinity = session_affinity + self.session_cookie_max_age = session_cookie_max_age + self.backend_protocol = backend_protocol + self.client_auth = client_auth + + +class IngressSettingsClientAuth(_serialization.Model): + """Client-Certification Authentication. + + :ivar certificates: Collection of certificate resource id. + :vartype certificates: list[str] + """ + + _attribute_map = { + "certificates": {"key": "certificates", "type": "[str]"}, + } + + def __init__(self, *, certificates: Optional[List[str]] = None, **kwargs): + """ + :keyword certificates: Collection of certificate resource id. + :paramtype certificates: list[str] + """ + super().__init__(**kwargs) + self.certificates = certificates + + +class UploadedUserSourceInfo(UserSourceInfo): + """Source with uploaded location. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + JarUploadedUserSourceInfo, NetCoreZipUploadedUserSourceInfo, SourceUploadedUserSourceInfo + + All required parameters must be populated in order to send to Azure. + + :ivar type: Type of the source uploaded. Required. + :vartype type: str + :ivar version: Version of the source. + :vartype version: str + :ivar relative_path: Relative path of the storage which stores the source. 
+ :vartype relative_path: str + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, + "relative_path": {"key": "relativePath", "type": "str"}, + } + + _subtype_map = { + "type": { + "Jar": "JarUploadedUserSourceInfo", + "NetCoreZip": "NetCoreZipUploadedUserSourceInfo", + "Source": "SourceUploadedUserSourceInfo", + } + } + + def __init__(self, *, version: Optional[str] = None, relative_path: Optional[str] = None, **kwargs): + """ + :keyword version: Version of the source. + :paramtype version: str + :keyword relative_path: Relative path of the storage which stores the source. + :paramtype relative_path: str + """ + super().__init__(version=version, **kwargs) + self.type = "UploadedUserSourceInfo" # type: str + self.relative_path = relative_path + + +class JarUploadedUserSourceInfo(UploadedUserSourceInfo): + """Uploaded Jar binary for a deployment. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Type of the source uploaded. Required. + :vartype type: str + :ivar version: Version of the source. + :vartype version: str + :ivar relative_path: Relative path of the storage which stores the source. + :vartype relative_path: str + :ivar runtime_version: Runtime version of the Jar file. + :vartype runtime_version: str + :ivar jvm_options: JVM parameter. + :vartype jvm_options: str + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, + "relative_path": {"key": "relativePath", "type": "str"}, + "runtime_version": {"key": "runtimeVersion", "type": "str"}, + "jvm_options": {"key": "jvmOptions", "type": "str"}, + } + + def __init__( + self, + *, + version: Optional[str] = None, + relative_path: Optional[str] = None, + runtime_version: Optional[str] = None, + jvm_options: Optional[str] = None, + **kwargs + ): + """ + :keyword version: Version of the source. + :paramtype version: str + :keyword relative_path: Relative path of the storage which stores the source. + :paramtype relative_path: str + :keyword runtime_version: Runtime version of the Jar file. + :paramtype runtime_version: str + :keyword jvm_options: JVM parameter. + :paramtype jvm_options: str + """ + super().__init__(version=version, relative_path=relative_path, **kwargs) + self.type = "Jar" # type: str + self.runtime_version = runtime_version + self.jvm_options = jvm_options + + +class KeyVaultCertificateProperties(CertificateProperties): # pylint: disable=too-many-instance-attributes + """Properties of certificate imported from key vault. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: The type of the certificate source. Required. + :vartype type: str + :ivar thumbprint: The thumbprint of certificate. + :vartype thumbprint: str + :ivar issuer: The issuer of certificate. + :vartype issuer: str + :ivar issued_date: The issue date of certificate. + :vartype issued_date: str + :ivar expiration_date: The expiration date of certificate. + :vartype expiration_date: str + :ivar activate_date: The activate date of certificate. + :vartype activate_date: str + :ivar subject_name: The subject name of certificate. + :vartype subject_name: str + :ivar dns_names: The domain list of certificate. 
+ :vartype dns_names: list[str] + :ivar provisioning_state: Provisioning state of the Certificate. Known values are: "Creating", + "Updating", "Succeeded", "Failed", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CertificateResourceProvisioningState + :ivar vault_uri: The vault uri of user key vault. Required. + :vartype vault_uri: str + :ivar key_vault_cert_name: The certificate name of key vault. Required. + :vartype key_vault_cert_name: str + :ivar cert_version: The certificate version of key vault. + :vartype cert_version: str + :ivar exclude_private_key: Optional. If set to true, it will not import private key from key + vault. + :vartype exclude_private_key: bool + """ + + _validation = { + "type": {"required": True}, + "thumbprint": {"readonly": True}, + "issuer": {"readonly": True}, + "issued_date": {"readonly": True}, + "expiration_date": {"readonly": True}, + "activate_date": {"readonly": True}, + "subject_name": {"readonly": True}, + "dns_names": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "vault_uri": {"required": True}, + "key_vault_cert_name": {"required": True}, + } + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "thumbprint": {"key": "thumbprint", "type": "str"}, + "issuer": {"key": "issuer", "type": "str"}, + "issued_date": {"key": "issuedDate", "type": "str"}, + "expiration_date": {"key": "expirationDate", "type": "str"}, + "activate_date": {"key": "activateDate", "type": "str"}, + "subject_name": {"key": "subjectName", "type": "str"}, + "dns_names": {"key": "dnsNames", "type": "[str]"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "vault_uri": {"key": "vaultUri", "type": "str"}, + "key_vault_cert_name": {"key": "keyVaultCertName", "type": "str"}, + "cert_version": {"key": "certVersion", "type": "str"}, + "exclude_private_key": {"key": "excludePrivateKey", "type": "bool"}, + } + + def __init__( + self, + *, + vault_uri: str, + key_vault_cert_name: str, + cert_version: Optional[str] = None, + exclude_private_key: bool = False, + **kwargs + ): + """ + :keyword vault_uri: The vault uri of user key vault. Required. + :paramtype vault_uri: str + :keyword key_vault_cert_name: The certificate name of key vault. Required. + :paramtype key_vault_cert_name: str + :keyword cert_version: The certificate version of key vault. + :paramtype cert_version: str + :keyword exclude_private_key: Optional. If set to true, it will not import private key from key + vault. + :paramtype exclude_private_key: bool + """ + super().__init__(**kwargs) + self.type = "KeyVaultCertificate" # type: str + self.vault_uri = vault_uri + self.key_vault_cert_name = key_vault_cert_name + self.cert_version = cert_version + self.exclude_private_key = exclude_private_key + + +class LoadedCertificate(_serialization.Model): + """Loaded certificate payload. + + All required parameters must be populated in order to send to Azure. + + :ivar resource_id: Resource Id of loaded certificate. Required. + :vartype resource_id: str + :ivar load_trust_store: Indicate whether the certificate will be loaded into default trust + store, only work for Java runtime. 
+ :vartype load_trust_store: bool + """ + + _validation = { + "resource_id": {"required": True}, + } + + _attribute_map = { + "resource_id": {"key": "resourceId", "type": "str"}, + "load_trust_store": {"key": "loadTrustStore", "type": "bool"}, + } + + def __init__(self, *, resource_id: str, load_trust_store: bool = False, **kwargs): + """ + :keyword resource_id: Resource Id of loaded certificate. Required. + :paramtype resource_id: str + :keyword load_trust_store: Indicate whether the certificate will be loaded into default trust + store, only work for Java runtime. + :paramtype load_trust_store: bool + """ + super().__init__(**kwargs) + self.resource_id = resource_id + self.load_trust_store = load_trust_store + + +class LogFileUrlResponse(_serialization.Model): + """Log file URL payload. + + All required parameters must be populated in order to send to Azure. + + :ivar url: URL of the log file. Required. + :vartype url: str + """ + + _validation = { + "url": {"required": True}, + } + + _attribute_map = { + "url": {"key": "url", "type": "str"}, + } + + def __init__(self, *, url: str, **kwargs): + """ + :keyword url: URL of the log file. Required. + :paramtype url: str + """ + super().__init__(**kwargs) + self.url = url + + +class LogSpecification(_serialization.Model): + """Specifications of the Log for Azure Monitoring. + + :ivar name: Name of the log. + :vartype name: str + :ivar display_name: Localized friendly display name of the log. + :vartype display_name: str + :ivar blob_duration: Blob duration of the log. + :vartype blob_duration: str + """ + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "blob_duration": {"key": "blobDuration", "type": "str"}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + display_name: Optional[str] = None, + blob_duration: Optional[str] = None, + **kwargs + ): + """ + :keyword name: Name of the log. + :paramtype name: str + :keyword display_name: Localized friendly display name of the log. + :paramtype display_name: str + :keyword blob_duration: Blob duration of the log. + :paramtype blob_duration: str + """ + super().__init__(**kwargs) + self.name = name + self.display_name = display_name + self.blob_duration = blob_duration + + +class ManagedIdentityProperties(_serialization.Model): + """Managed identity properties retrieved from ARM request headers. + + :ivar type: Type of the managed identity. Known values are: "None", "SystemAssigned", + "UserAssigned", and "SystemAssigned,UserAssigned". + :vartype type: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.ManagedIdentityType + :ivar principal_id: Principal Id of system-assigned managed identity. + :vartype principal_id: str + :ivar tenant_id: Tenant Id of system-assigned managed identity. + :vartype tenant_id: str + :ivar user_assigned_identities: Properties of user-assigned managed identities. 
+ :vartype user_assigned_identities: dict[str, + ~azure.mgmt.appplatform.v2022_11_01_preview.models.UserAssignedManagedIdentity] + """ + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "principal_id": {"key": "principalId", "type": "str"}, + "tenant_id": {"key": "tenantId", "type": "str"}, + "user_assigned_identities": {"key": "userAssignedIdentities", "type": "{UserAssignedManagedIdentity}"}, + } + + def __init__( + self, + *, + type: Optional[Union[str, "_models.ManagedIdentityType"]] = None, + principal_id: Optional[str] = None, + tenant_id: Optional[str] = None, + user_assigned_identities: Optional[Dict[str, "_models.UserAssignedManagedIdentity"]] = None, + **kwargs + ): + """ + :keyword type: Type of the managed identity. Known values are: "None", "SystemAssigned", + "UserAssigned", and "SystemAssigned,UserAssigned". + :paramtype type: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.ManagedIdentityType + :keyword principal_id: Principal Id of system-assigned managed identity. + :paramtype principal_id: str + :keyword tenant_id: Tenant Id of system-assigned managed identity. + :paramtype tenant_id: str + :keyword user_assigned_identities: Properties of user-assigned managed identities. + :paramtype user_assigned_identities: dict[str, + ~azure.mgmt.appplatform.v2022_11_01_preview.models.UserAssignedManagedIdentity] + """ + super().__init__(**kwargs) + self.type = type + self.principal_id = principal_id + self.tenant_id = tenant_id + self.user_assigned_identities = user_assigned_identities + + +class MarketplaceResource(_serialization.Model): + """Purchasing 3rd Party product for one Azure Spring Apps instance. + + :ivar plan: The plan id of the 3rd Party Artifact that is being procured. + :vartype plan: str + :ivar publisher: The publisher id of the 3rd Party Artifact that is being bought. + :vartype publisher: str + :ivar product: The 3rd Party artifact that is being procured. + :vartype product: str + """ + + _attribute_map = { + "plan": {"key": "plan", "type": "str"}, + "publisher": {"key": "publisher", "type": "str"}, + "product": {"key": "product", "type": "str"}, + } + + def __init__( + self, *, plan: Optional[str] = None, publisher: Optional[str] = None, product: Optional[str] = None, **kwargs + ): + """ + :keyword plan: The plan id of the 3rd Party Artifact that is being procured. + :paramtype plan: str + :keyword publisher: The publisher id of the 3rd Party Artifact that is being bought. + :paramtype publisher: str + :keyword product: The 3rd Party artifact that is being procured. + :paramtype product: str + """ + super().__init__(**kwargs) + self.plan = plan + self.publisher = publisher + self.product = product + + +class MetricDimension(_serialization.Model): + """Specifications of the Dimension of metrics. + + :ivar name: Name of the dimension. + :vartype name: str + :ivar display_name: Localized friendly display name of the dimension. + :vartype display_name: str + :ivar to_be_exported_for_shoebox: Whether this dimension should be included for the Shoebox + export scenario. + :vartype to_be_exported_for_shoebox: bool + """ + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "to_be_exported_for_shoebox": {"key": "toBeExportedForShoebox", "type": "bool"}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + display_name: Optional[str] = None, + to_be_exported_for_shoebox: Optional[bool] = None, + **kwargs + ): + """ + :keyword name: Name of the dimension. 
+ :paramtype name: str + :keyword display_name: Localized friendly display name of the dimension. + :paramtype display_name: str + :keyword to_be_exported_for_shoebox: Whether this dimension should be included for the Shoebox + export scenario. + :paramtype to_be_exported_for_shoebox: bool + """ + super().__init__(**kwargs) + self.name = name + self.display_name = display_name + self.to_be_exported_for_shoebox = to_be_exported_for_shoebox + + +class MetricSpecification(_serialization.Model): # pylint: disable=too-many-instance-attributes + """Specifications of the Metrics for Azure Monitoring. + + :ivar name: Name of the metric. + :vartype name: str + :ivar display_name: Localized friendly display name of the metric. + :vartype display_name: str + :ivar display_description: Localized friendly description of the metric. + :vartype display_description: str + :ivar unit: Unit that makes sense for the metric. + :vartype unit: str + :ivar category: Name of the metric category that the metric belongs to. A metric can only + belong to a single category. + :vartype category: str + :ivar aggregation_type: Only provide one value for this field. Valid values: Average, Minimum, + Maximum, Total, Count. + :vartype aggregation_type: str + :ivar supported_aggregation_types: Supported aggregation types. + :vartype supported_aggregation_types: list[str] + :ivar supported_time_grain_types: Supported time grain types. + :vartype supported_time_grain_types: list[str] + :ivar fill_gap_with_zero: Optional. If set to true, then zero will be returned for time + duration where no metric is emitted/published. + :vartype fill_gap_with_zero: bool + :ivar dimensions: Dimensions of the metric. + :vartype dimensions: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.MetricDimension] + :ivar source_mdm_namespace: Name of the MDM namespace. Optional. + :vartype source_mdm_namespace: str + """ + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "display_description": {"key": "displayDescription", "type": "str"}, + "unit": {"key": "unit", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "aggregation_type": {"key": "aggregationType", "type": "str"}, + "supported_aggregation_types": {"key": "supportedAggregationTypes", "type": "[str]"}, + "supported_time_grain_types": {"key": "supportedTimeGrainTypes", "type": "[str]"}, + "fill_gap_with_zero": {"key": "fillGapWithZero", "type": "bool"}, + "dimensions": {"key": "dimensions", "type": "[MetricDimension]"}, + "source_mdm_namespace": {"key": "sourceMdmNamespace", "type": "str"}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + display_name: Optional[str] = None, + display_description: Optional[str] = None, + unit: Optional[str] = None, + category: Optional[str] = None, + aggregation_type: Optional[str] = None, + supported_aggregation_types: Optional[List[str]] = None, + supported_time_grain_types: Optional[List[str]] = None, + fill_gap_with_zero: Optional[bool] = None, + dimensions: Optional[List["_models.MetricDimension"]] = None, + source_mdm_namespace: Optional[str] = None, + **kwargs + ): + """ + :keyword name: Name of the metric. + :paramtype name: str + :keyword display_name: Localized friendly display name of the metric. + :paramtype display_name: str + :keyword display_description: Localized friendly description of the metric. + :paramtype display_description: str + :keyword unit: Unit that makes sense for the metric. 
+ :paramtype unit: str + :keyword category: Name of the metric category that the metric belongs to. A metric can only + belong to a single category. + :paramtype category: str + :keyword aggregation_type: Only provide one value for this field. Valid values: Average, + Minimum, Maximum, Total, Count. + :paramtype aggregation_type: str + :keyword supported_aggregation_types: Supported aggregation types. + :paramtype supported_aggregation_types: list[str] + :keyword supported_time_grain_types: Supported time grain types. + :paramtype supported_time_grain_types: list[str] + :keyword fill_gap_with_zero: Optional. If set to true, then zero will be returned for time + duration where no metric is emitted/published. + :paramtype fill_gap_with_zero: bool + :keyword dimensions: Dimensions of the metric. + :paramtype dimensions: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.MetricDimension] + :keyword source_mdm_namespace: Name of the MDM namespace. Optional. + :paramtype source_mdm_namespace: str + """ + super().__init__(**kwargs) + self.name = name + self.display_name = display_name + self.display_description = display_description + self.unit = unit + self.category = category + self.aggregation_type = aggregation_type + self.supported_aggregation_types = supported_aggregation_types + self.supported_time_grain_types = supported_time_grain_types + self.fill_gap_with_zero = fill_gap_with_zero + self.dimensions = dimensions + self.source_mdm_namespace = source_mdm_namespace + + +class MonitoringSettingProperties(_serialization.Model): + """Monitoring Setting properties payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provisioning_state: State of the Monitoring Setting. Known values are: "NotAvailable", + "Failed", "Succeeded", and "Updating". + :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingState + :ivar error: Error when apply Monitoring Setting changes. + :vartype error: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Error + :ivar trace_enabled: Indicates whether enable the trace functionality, which will be deprecated + since api version 2020-11-01-preview. Please leverage appInsightsInstrumentationKey to indicate + if monitoringSettings enabled or not. + :vartype trace_enabled: bool + :ivar app_insights_instrumentation_key: Target application insight instrumentation key, null or + whitespace include empty will disable monitoringSettings. + :vartype app_insights_instrumentation_key: str + :ivar app_insights_sampling_rate: Indicates the sampling rate of application insight agent, + should be in range [0.0, 100.0]. + :vartype app_insights_sampling_rate: float + :ivar app_insights_agent_versions: Indicates the versions of application insight agent. 
+ :vartype app_insights_agent_versions: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationInsightsAgentVersions + """ + + _validation = { + "provisioning_state": {"readonly": True}, + "app_insights_sampling_rate": {"maximum": 100, "minimum": 0}, + } + + _attribute_map = { + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "error": {"key": "error", "type": "Error"}, + "trace_enabled": {"key": "traceEnabled", "type": "bool"}, + "app_insights_instrumentation_key": {"key": "appInsightsInstrumentationKey", "type": "str"}, + "app_insights_sampling_rate": {"key": "appInsightsSamplingRate", "type": "float"}, + "app_insights_agent_versions": {"key": "appInsightsAgentVersions", "type": "ApplicationInsightsAgentVersions"}, + } + + def __init__( + self, + *, + error: Optional["_models.Error"] = None, + trace_enabled: Optional[bool] = None, + app_insights_instrumentation_key: Optional[str] = None, + app_insights_sampling_rate: Optional[float] = None, + app_insights_agent_versions: Optional["_models.ApplicationInsightsAgentVersions"] = None, + **kwargs + ): + """ + :keyword error: Error when apply Monitoring Setting changes. + :paramtype error: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Error + :keyword trace_enabled: Indicates whether enable the trace functionality, which will be + deprecated since api version 2020-11-01-preview. Please leverage appInsightsInstrumentationKey + to indicate if monitoringSettings enabled or not. + :paramtype trace_enabled: bool + :keyword app_insights_instrumentation_key: Target application insight instrumentation key, null + or whitespace include empty will disable monitoringSettings. + :paramtype app_insights_instrumentation_key: str + :keyword app_insights_sampling_rate: Indicates the sampling rate of application insight agent, + should be in range [0.0, 100.0]. + :paramtype app_insights_sampling_rate: float + :keyword app_insights_agent_versions: Indicates the versions of application insight agent. + :paramtype app_insights_agent_versions: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationInsightsAgentVersions + """ + super().__init__(**kwargs) + self.provisioning_state = None + self.error = error + self.trace_enabled = trace_enabled + self.app_insights_instrumentation_key = app_insights_instrumentation_key + self.app_insights_sampling_rate = app_insights_sampling_rate + self.app_insights_agent_versions = app_insights_agent_versions + + +class MonitoringSettingResource(ProxyResource): + """Monitoring Setting resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Properties of the Monitoring Setting resource. 
+    :vartype properties:
+     ~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingProperties
+    """
+
+    _validation = {
+        "id": {"readonly": True},
+        "name": {"readonly": True},
+        "type": {"readonly": True},
+        "system_data": {"readonly": True},
+    }
+
+    _attribute_map = {
+        "id": {"key": "id", "type": "str"},
+        "name": {"key": "name", "type": "str"},
+        "type": {"key": "type", "type": "str"},
+        "system_data": {"key": "systemData", "type": "SystemData"},
+        "properties": {"key": "properties", "type": "MonitoringSettingProperties"},
+    }
+
+    def __init__(self, *, properties: Optional["_models.MonitoringSettingProperties"] = None, **kwargs):
+        """
+        :keyword properties: Properties of the Monitoring Setting resource.
+        :paramtype properties:
+         ~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingProperties
+        """
+        super().__init__(**kwargs)
+        self.properties = properties
+
+
+class NameAvailability(_serialization.Model):
+    """Name availability result payload.
+
+    :ivar name_available: Indicates whether the name is available.
+    :vartype name_available: bool
+    :ivar reason: Reason why the name is not available.
+    :vartype reason: str
+    :ivar message: Message why the name is not available.
+    :vartype message: str
+    """
+
+    _attribute_map = {
+        "name_available": {"key": "nameAvailable", "type": "bool"},
+        "reason": {"key": "reason", "type": "str"},
+        "message": {"key": "message", "type": "str"},
+    }
+
+    def __init__(
+        self,
+        *,
+        name_available: Optional[bool] = None,
+        reason: Optional[str] = None,
+        message: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword name_available: Indicates whether the name is available.
+        :paramtype name_available: bool
+        :keyword reason: Reason why the name is not available.
+        :paramtype reason: str
+        :keyword message: Message why the name is not available.
+        :paramtype message: str
+        """
+        super().__init__(**kwargs)
+        self.name_available = name_available
+        self.reason = reason
+        self.message = message
+
+
+class NameAvailabilityParameters(_serialization.Model):
+    """Name availability parameters payload.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar type: Type of the resource to check name availability. Required.
+    :vartype type: str
+    :ivar name: Name to be checked. Required.
+    :vartype name: str
+    """
+
+    _validation = {
+        "type": {"required": True},
+        "name": {"required": True},
+    }
+
+    _attribute_map = {
+        "type": {"key": "type", "type": "str"},
+        "name": {"key": "name", "type": "str"},
+    }
+
+    def __init__(self, *, type: str, name: str, **kwargs):
+        """
+        :keyword type: Type of the resource to check name availability. Required.
+        :paramtype type: str
+        :keyword name: Name to be checked. Required.
+        :paramtype name: str
+        """
+        super().__init__(**kwargs)
+        self.type = type
+        self.name = name
+
+
+class NetCoreZipUploadedUserSourceInfo(UploadedUserSourceInfo):
+    """Uploaded .Net Core binary for a deployment.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar type: Type of the source uploaded. Required.
+    :vartype type: str
+    :ivar version: Version of the source.
+    :vartype version: str
+    :ivar relative_path: Relative path of the storage which stores the source.
+    :vartype relative_path: str
+    :ivar net_core_main_entry_path: The path to the .NET executable relative to zip root.
+    :vartype net_core_main_entry_path: str
+    :ivar runtime_version: Runtime version of the .Net file.
+ :vartype runtime_version: str + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, + "relative_path": {"key": "relativePath", "type": "str"}, + "net_core_main_entry_path": {"key": "netCoreMainEntryPath", "type": "str"}, + "runtime_version": {"key": "runtimeVersion", "type": "str"}, + } + + def __init__( + self, + *, + version: Optional[str] = None, + relative_path: Optional[str] = None, + net_core_main_entry_path: Optional[str] = None, + runtime_version: Optional[str] = None, + **kwargs + ): + """ + :keyword version: Version of the source. + :paramtype version: str + :keyword relative_path: Relative path of the storage which stores the source. + :paramtype relative_path: str + :keyword net_core_main_entry_path: The path to the .NET executable relative to zip root. + :paramtype net_core_main_entry_path: str + :keyword runtime_version: Runtime version of the .Net file. + :paramtype runtime_version: str + """ + super().__init__(version=version, relative_path=relative_path, **kwargs) + self.type = "NetCoreZip" # type: str + self.net_core_main_entry_path = net_core_main_entry_path + self.runtime_version = runtime_version + + +class NetworkProfile(_serialization.Model): + """Service network profile payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar service_runtime_subnet_id: Fully qualified resource Id of the subnet to host Azure Spring + Apps Service Runtime. + :vartype service_runtime_subnet_id: str + :ivar app_subnet_id: Fully qualified resource Id of the subnet to host customer apps in Azure + Spring Apps. + :vartype app_subnet_id: str + :ivar service_cidr: Azure Spring Apps service reserved CIDR. + :vartype service_cidr: str + :ivar service_runtime_network_resource_group: Name of the resource group containing network + resources of Azure Spring Apps Service Runtime. + :vartype service_runtime_network_resource_group: str + :ivar app_network_resource_group: Name of the resource group containing network resources for + customer apps in Azure Spring Apps. + :vartype app_network_resource_group: str + :ivar outbound_i_ps: Desired outbound IP resources for Azure Spring Apps resource. + :vartype outbound_i_ps: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.NetworkProfileOutboundIPs + :ivar required_traffics: Required inbound or outbound traffics for Azure Spring Apps resource. + :vartype required_traffics: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.RequiredTraffic] + :ivar ingress_config: Ingress configuration payload for Azure Spring Apps resource. + :vartype ingress_config: ~azure.mgmt.appplatform.v2022_11_01_preview.models.IngressConfig + :ivar outbound_type: The egress traffic type of Azure Spring Apps VNet instances. 
+ :vartype outbound_type: str + """ + + _validation = { + "outbound_i_ps": {"readonly": True}, + "required_traffics": {"readonly": True}, + } + + _attribute_map = { + "service_runtime_subnet_id": {"key": "serviceRuntimeSubnetId", "type": "str"}, + "app_subnet_id": {"key": "appSubnetId", "type": "str"}, + "service_cidr": {"key": "serviceCidr", "type": "str"}, + "service_runtime_network_resource_group": {"key": "serviceRuntimeNetworkResourceGroup", "type": "str"}, + "app_network_resource_group": {"key": "appNetworkResourceGroup", "type": "str"}, + "outbound_i_ps": {"key": "outboundIPs", "type": "NetworkProfileOutboundIPs"}, + "required_traffics": {"key": "requiredTraffics", "type": "[RequiredTraffic]"}, + "ingress_config": {"key": "ingressConfig", "type": "IngressConfig"}, + "outbound_type": {"key": "outboundType", "type": "str"}, + } + + def __init__( + self, + *, + service_runtime_subnet_id: Optional[str] = None, + app_subnet_id: Optional[str] = None, + service_cidr: Optional[str] = None, + service_runtime_network_resource_group: Optional[str] = None, + app_network_resource_group: Optional[str] = None, + ingress_config: Optional["_models.IngressConfig"] = None, + outbound_type: Optional[str] = None, + **kwargs + ): + """ + :keyword service_runtime_subnet_id: Fully qualified resource Id of the subnet to host Azure + Spring Apps Service Runtime. + :paramtype service_runtime_subnet_id: str + :keyword app_subnet_id: Fully qualified resource Id of the subnet to host customer apps in + Azure Spring Apps. + :paramtype app_subnet_id: str + :keyword service_cidr: Azure Spring Apps service reserved CIDR. + :paramtype service_cidr: str + :keyword service_runtime_network_resource_group: Name of the resource group containing network + resources of Azure Spring Apps Service Runtime. + :paramtype service_runtime_network_resource_group: str + :keyword app_network_resource_group: Name of the resource group containing network resources + for customer apps in Azure Spring Apps. + :paramtype app_network_resource_group: str + :keyword ingress_config: Ingress configuration payload for Azure Spring Apps resource. + :paramtype ingress_config: ~azure.mgmt.appplatform.v2022_11_01_preview.models.IngressConfig + :keyword outbound_type: The egress traffic type of Azure Spring Apps VNet instances. + :paramtype outbound_type: str + """ + super().__init__(**kwargs) + self.service_runtime_subnet_id = service_runtime_subnet_id + self.app_subnet_id = app_subnet_id + self.service_cidr = service_cidr + self.service_runtime_network_resource_group = service_runtime_network_resource_group + self.app_network_resource_group = app_network_resource_group + self.outbound_i_ps = None + self.required_traffics = None + self.ingress_config = ingress_config + self.outbound_type = outbound_type + + +class NetworkProfileOutboundIPs(_serialization.Model): + """Desired outbound IP resources for Azure Spring Apps resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar public_i_ps: A list of public IP addresses. + :vartype public_i_ps: list[str] + """ + + _validation = { + "public_i_ps": {"readonly": True}, + } + + _attribute_map = { + "public_i_ps": {"key": "publicIPs", "type": "[str]"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.public_i_ps = None + + +class OperationDetail(_serialization.Model): + """Operation detail payload. + + Variables are only populated by the server, and will be ignored when sending a request. 
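+
+    A minimal consumption sketch, assuming ``client`` is an ``AppPlatformManagementClient``
+    whose ``operations.list()`` call yields ``OperationDetail`` instances::
+
+        for op in client.operations.list():
+            # ``display`` is optional, so guard before dereferencing it.
+            if op.display is not None:
+                print(op.name, "-", op.display.description)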
+ + :ivar name: Name of the operation. + :vartype name: str + :ivar is_data_action: Indicates whether the operation is a data action. + :vartype is_data_action: bool + :ivar display: Display of the operation. + :vartype display: ~azure.mgmt.appplatform.v2022_11_01_preview.models.OperationDisplay + :ivar action_type: Enum. Indicates the action type. "Internal" refers to actions that are for + internal only APIs. "Internal" + :vartype action_type: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.ActionType + :ivar origin: Origin of the operation. + :vartype origin: str + :ivar properties: Properties of the operation. + :vartype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.OperationProperties + """ + + _validation = { + "action_type": {"readonly": True}, + } + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "is_data_action": {"key": "isDataAction", "type": "bool"}, + "display": {"key": "display", "type": "OperationDisplay"}, + "action_type": {"key": "actionType", "type": "str"}, + "origin": {"key": "origin", "type": "str"}, + "properties": {"key": "properties", "type": "OperationProperties"}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + is_data_action: Optional[bool] = None, + display: Optional["_models.OperationDisplay"] = None, + origin: Optional[str] = None, + properties: Optional["_models.OperationProperties"] = None, + **kwargs + ): + """ + :keyword name: Name of the operation. + :paramtype name: str + :keyword is_data_action: Indicates whether the operation is a data action. + :paramtype is_data_action: bool + :keyword display: Display of the operation. + :paramtype display: ~azure.mgmt.appplatform.v2022_11_01_preview.models.OperationDisplay + :keyword origin: Origin of the operation. + :paramtype origin: str + :keyword properties: Properties of the operation. + :paramtype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.OperationProperties + """ + super().__init__(**kwargs) + self.name = name + self.is_data_action = is_data_action + self.display = display + self.action_type = None + self.origin = origin + self.properties = properties + + +class OperationDisplay(_serialization.Model): + """Operation display payload. + + :ivar provider: Resource provider of the operation. + :vartype provider: str + :ivar resource: Resource of the operation. + :vartype resource: str + :ivar operation: Localized friendly name for the operation. + :vartype operation: str + :ivar description: Localized friendly description for the operation. + :vartype description: str + """ + + _attribute_map = { + "provider": {"key": "provider", "type": "str"}, + "resource": {"key": "resource", "type": "str"}, + "operation": {"key": "operation", "type": "str"}, + "description": {"key": "description", "type": "str"}, + } + + def __init__( + self, + *, + provider: Optional[str] = None, + resource: Optional[str] = None, + operation: Optional[str] = None, + description: Optional[str] = None, + **kwargs + ): + """ + :keyword provider: Resource provider of the operation. + :paramtype provider: str + :keyword resource: Resource of the operation. + :paramtype resource: str + :keyword operation: Localized friendly name for the operation. + :paramtype operation: str + :keyword description: Localized friendly description for the operation. 
+ :paramtype description: str + """ + super().__init__(**kwargs) + self.provider = provider + self.resource = resource + self.operation = operation + self.description = description + + +class OperationProperties(_serialization.Model): + """Extra Operation properties. + + :ivar service_specification: Service specifications of the operation. + :vartype service_specification: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceSpecification + """ + + _attribute_map = { + "service_specification": {"key": "serviceSpecification", "type": "ServiceSpecification"}, + } + + def __init__(self, *, service_specification: Optional["_models.ServiceSpecification"] = None, **kwargs): + """ + :keyword service_specification: Service specifications of the operation. + :paramtype service_specification: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceSpecification + """ + super().__init__(**kwargs) + self.service_specification = service_specification + + +class PersistentDisk(_serialization.Model): + """Persistent disk payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar size_in_gb: Size of the persistent disk in GB. + :vartype size_in_gb: int + :ivar used_in_gb: Size of the used persistent disk in GB. + :vartype used_in_gb: int + :ivar mount_path: Mount path of the persistent disk. + :vartype mount_path: str + """ + + _validation = { + "size_in_gb": {"maximum": 50, "minimum": 0}, + "used_in_gb": {"readonly": True, "maximum": 50, "minimum": 0}, + } + + _attribute_map = { + "size_in_gb": {"key": "sizeInGB", "type": "int"}, + "used_in_gb": {"key": "usedInGB", "type": "int"}, + "mount_path": {"key": "mountPath", "type": "str"}, + } + + def __init__(self, *, size_in_gb: Optional[int] = None, mount_path: Optional[str] = None, **kwargs): + """ + :keyword size_in_gb: Size of the persistent disk in GB. + :paramtype size_in_gb: int + :keyword mount_path: Mount path of the persistent disk. + :paramtype mount_path: str + """ + super().__init__(**kwargs) + self.size_in_gb = size_in_gb + self.used_in_gb = None + self.mount_path = mount_path + + +class PredefinedAcceleratorProperties(_serialization.Model): + """Predefined accelerator properties payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provisioning_state: Provisioning state of the predefined accelerator. Known values are: + "Creating", "Updating", "Succeeded", and "Failed". + :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.PredefinedAcceleratorProvisioningState + :ivar display_name: + :vartype display_name: str + :ivar description: + :vartype description: str + :ivar icon_url: + :vartype icon_url: str + :ivar accelerator_tags: + :vartype accelerator_tags: list[str] + :ivar state: State of the predefined accelerator. Known values are: "Enabled" and "Disabled". 
+ :vartype state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.PredefinedAcceleratorState + """ + + _validation = { + "provisioning_state": {"readonly": True}, + "display_name": {"readonly": True}, + "description": {"readonly": True}, + "icon_url": {"readonly": True}, + "accelerator_tags": {"readonly": True}, + } + + _attribute_map = { + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "icon_url": {"key": "iconUrl", "type": "str"}, + "accelerator_tags": {"key": "acceleratorTags", "type": "[str]"}, + "state": {"key": "state", "type": "str"}, + } + + def __init__(self, *, state: Union[str, "_models.PredefinedAcceleratorState"] = "Enabled", **kwargs): + """ + :keyword state: State of the predefined accelerator. Known values are: "Enabled" and + "Disabled". + :paramtype state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.PredefinedAcceleratorState + """ + super().__init__(**kwargs) + self.provisioning_state = None + self.display_name = None + self.description = None + self.icon_url = None + self.accelerator_tags = None + self.state = state + + +class PredefinedAcceleratorResource(ProxyResource): + """Predefined accelerator resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Predefined accelerator properties payload. + :vartype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.PredefinedAcceleratorProperties + :ivar sku: Sku of the predefined accelerator resource. + :vartype sku: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Sku + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "PredefinedAcceleratorProperties"}, + "sku": {"key": "sku", "type": "Sku"}, + } + + def __init__( + self, + *, + properties: Optional["_models.PredefinedAcceleratorProperties"] = None, + sku: Optional["_models.Sku"] = None, + **kwargs + ): + """ + :keyword properties: Predefined accelerator properties payload. + :paramtype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.PredefinedAcceleratorProperties + :keyword sku: Sku of the predefined accelerator resource. + :paramtype sku: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Sku + """ + super().__init__(**kwargs) + self.properties = properties + self.sku = sku + + +class PredefinedAcceleratorResourceCollection(_serialization.Model): + """PredefinedAcceleratorResourceCollection. 
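+
+    A minimal consumption sketch, assuming ``page`` holds a deserialized instance of this
+    collection model::
+
+        for accelerator in page.value or []:
+            # ``properties`` is optional; guard before reading its state.
+            if accelerator.properties is not None:
+                print(accelerator.name, accelerator.properties.state)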
+ + :ivar value: + :vartype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.PredefinedAcceleratorResource] + :ivar next_link: + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[PredefinedAcceleratorResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, + *, + value: Optional[List["_models.PredefinedAcceleratorResource"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword value: + :paramtype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.PredefinedAcceleratorResource] + :keyword next_link: + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class Probe(_serialization.Model): + """Probe describes a health check to be performed against an App Instance to determine whether it is alive or ready to receive traffic. + + All required parameters must be populated in order to send to Azure. + + :ivar probe_action: The action of the probe. + :vartype probe_action: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ProbeAction + :ivar disable_probe: Indicate whether the probe is disabled. Required. + :vartype disable_probe: bool + :ivar initial_delay_seconds: Number of seconds after the App Instance has started before probes + are initiated. More info: + https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes. + :vartype initial_delay_seconds: int + :ivar period_seconds: How often (in seconds) to perform the probe. Minimum value is 1. + :vartype period_seconds: int + :ivar timeout_seconds: Number of seconds after which the probe times out. Minimum value is 1. + :vartype timeout_seconds: int + :ivar failure_threshold: Minimum consecutive failures for the probe to be considered failed + after having succeeded. Minimum value is 1. + :vartype failure_threshold: int + :ivar success_threshold: Minimum consecutive successes for the probe to be considered + successful after having failed. Must be 1 for liveness and startup. Minimum value is 1. + :vartype success_threshold: int + """ + + _validation = { + "disable_probe": {"required": True}, + } + + _attribute_map = { + "probe_action": {"key": "probeAction", "type": "ProbeAction"}, + "disable_probe": {"key": "disableProbe", "type": "bool"}, + "initial_delay_seconds": {"key": "initialDelaySeconds", "type": "int"}, + "period_seconds": {"key": "periodSeconds", "type": "int"}, + "timeout_seconds": {"key": "timeoutSeconds", "type": "int"}, + "failure_threshold": {"key": "failureThreshold", "type": "int"}, + "success_threshold": {"key": "successThreshold", "type": "int"}, + } + + def __init__( + self, + *, + disable_probe: bool = False, + probe_action: Optional["_models.ProbeAction"] = None, + initial_delay_seconds: Optional[int] = None, + period_seconds: Optional[int] = None, + timeout_seconds: Optional[int] = None, + failure_threshold: Optional[int] = None, + success_threshold: Optional[int] = None, + **kwargs + ): + """ + :keyword probe_action: The action of the probe. + :paramtype probe_action: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ProbeAction + :keyword disable_probe: Indicate whether the probe is disabled. Required. + :paramtype disable_probe: bool + :keyword initial_delay_seconds: Number of seconds after the App Instance has started before + probes are initiated. More info: + https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes. 
+ :paramtype initial_delay_seconds: int + :keyword period_seconds: How often (in seconds) to perform the probe. Minimum value is 1. + :paramtype period_seconds: int + :keyword timeout_seconds: Number of seconds after which the probe times out. Minimum value is + 1. + :paramtype timeout_seconds: int + :keyword failure_threshold: Minimum consecutive failures for the probe to be considered failed + after having succeeded. Minimum value is 1. + :paramtype failure_threshold: int + :keyword success_threshold: Minimum consecutive successes for the probe to be considered + successful after having failed. Must be 1 for liveness and startup. Minimum value is 1. + :paramtype success_threshold: int + """ + super().__init__(**kwargs) + self.probe_action = probe_action + self.disable_probe = disable_probe + self.initial_delay_seconds = initial_delay_seconds + self.period_seconds = period_seconds + self.timeout_seconds = timeout_seconds + self.failure_threshold = failure_threshold + self.success_threshold = success_threshold + + +class RegenerateTestKeyRequestPayload(_serialization.Model): + """Regenerate test key request payload. + + All required parameters must be populated in order to send to Azure. + + :ivar key_type: Type of the test key. Required. Known values are: "Primary" and "Secondary". + :vartype key_type: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.TestKeyType + """ + + _validation = { + "key_type": {"required": True}, + } + + _attribute_map = { + "key_type": {"key": "keyType", "type": "str"}, + } + + def __init__(self, *, key_type: Union[str, "_models.TestKeyType"], **kwargs): + """ + :keyword key_type: Type of the test key. Required. Known values are: "Primary" and "Secondary". + :paramtype key_type: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.TestKeyType + """ + super().__init__(**kwargs) + self.key_type = key_type + + +class RemoteDebugging(_serialization.Model): + """Remote debugging config. + + :ivar port: Application debugging port. + :vartype port: int + :ivar enabled: Indicate if remote debugging is enabled. + :vartype enabled: bool + """ + + _attribute_map = { + "port": {"key": "port", "type": "int"}, + "enabled": {"key": "enabled", "type": "bool"}, + } + + def __init__(self, *, port: Optional[int] = None, enabled: Optional[bool] = None, **kwargs): + """ + :keyword port: Application debugging port. + :paramtype port: int + :keyword enabled: Indicate if remote debugging is enabled. + :paramtype enabled: bool + """ + super().__init__(**kwargs) + self.port = port + self.enabled = enabled + + +class RemoteDebuggingPayload(_serialization.Model): + """Remote debugging payload. + + :ivar port: Application debugging port. + :vartype port: int + """ + + _attribute_map = { + "port": {"key": "port", "type": "int"}, + } + + def __init__(self, *, port: Optional[int] = None, **kwargs): + """ + :keyword port: Application debugging port. + :paramtype port: int + """ + super().__init__(**kwargs) + self.port = port + + +class RequiredTraffic(_serialization.Model): + """Required inbound or outbound traffic for Azure Spring Apps resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar protocol: The protocol of required traffic. + :vartype protocol: str + :ivar port: The port of required traffic. + :vartype port: int + :ivar ips: The ip list of required traffic. + :vartype ips: list[str] + :ivar fqdns: The FQDN list of required traffic. + :vartype fqdns: list[str] + :ivar direction: The direction of required traffic. 
Known values are: "Inbound" and "Outbound". + :vartype direction: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.TrafficDirection + """ + + _validation = { + "protocol": {"readonly": True}, + "port": {"readonly": True}, + "ips": {"readonly": True}, + "fqdns": {"readonly": True}, + "direction": {"readonly": True}, + } + + _attribute_map = { + "protocol": {"key": "protocol", "type": "str"}, + "port": {"key": "port", "type": "int"}, + "ips": {"key": "ips", "type": "[str]"}, + "fqdns": {"key": "fqdns", "type": "[str]"}, + "direction": {"key": "direction", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.protocol = None + self.port = None + self.ips = None + self.fqdns = None + self.direction = None + + +class ResourceRequests(_serialization.Model): + """Deployment resource request payload. + + :ivar cpu: Required CPU. 1 core can be represented by 1 or 1000m. This should be 500m or 1 for + Basic tier, and {500m, 1, 2, 3, 4} for Standard tier. + :vartype cpu: str + :ivar memory: Required memory. 1 GB can be represented by 1Gi or 1024Mi. This should be {512Mi, + 1Gi, 2Gi} for Basic tier, and {512Mi, 1Gi, 2Gi, ..., 8Gi} for Standard tier. + :vartype memory: str + """ + + _attribute_map = { + "cpu": {"key": "cpu", "type": "str"}, + "memory": {"key": "memory", "type": "str"}, + } + + def __init__(self, *, cpu: Optional[str] = None, memory: Optional[str] = None, **kwargs): + """ + :keyword cpu: Required CPU. 1 core can be represented by 1 or 1000m. This should be 500m or 1 + for Basic tier, and {500m, 1, 2, 3, 4} for Standard tier. + :paramtype cpu: str + :keyword memory: Required memory. 1 GB can be represented by 1Gi or 1024Mi. This should be + {512Mi, 1Gi, 2Gi} for Basic tier, and {512Mi, 1Gi, 2Gi, ..., 8Gi} for Standard tier. + :paramtype memory: str + """ + super().__init__(**kwargs) + self.cpu = cpu + self.memory = memory + + +class ResourceSku(_serialization.Model): + """Describes an available Azure Spring Apps SKU. + + :ivar resource_type: Gets the type of resource the SKU applies to. + :vartype resource_type: str + :ivar name: Gets the name of SKU. + :vartype name: str + :ivar tier: Gets the tier of SKU. + :vartype tier: str + :ivar capacity: Gets the capacity of SKU. + :vartype capacity: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SkuCapacity + :ivar locations: Gets the set of locations that the SKU is available. + :vartype locations: list[str] + :ivar location_info: Gets a list of locations and availability zones in those locations where + the SKU is available. + :vartype location_info: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceSkuLocationInfo] + :ivar restrictions: Gets the restrictions because of which SKU cannot be used. This is + empty if there are no restrictions. 
+ :vartype restrictions: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceSkuRestrictions] + """ + + _attribute_map = { + "resource_type": {"key": "resourceType", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "tier": {"key": "tier", "type": "str"}, + "capacity": {"key": "capacity", "type": "SkuCapacity"}, + "locations": {"key": "locations", "type": "[str]"}, + "location_info": {"key": "locationInfo", "type": "[ResourceSkuLocationInfo]"}, + "restrictions": {"key": "restrictions", "type": "[ResourceSkuRestrictions]"}, + } + + def __init__( + self, + *, + resource_type: Optional[str] = None, + name: Optional[str] = None, + tier: Optional[str] = None, + capacity: Optional["_models.SkuCapacity"] = None, + locations: Optional[List[str]] = None, + location_info: Optional[List["_models.ResourceSkuLocationInfo"]] = None, + restrictions: Optional[List["_models.ResourceSkuRestrictions"]] = None, + **kwargs + ): + """ + :keyword resource_type: Gets the type of resource the SKU applies to. + :paramtype resource_type: str + :keyword name: Gets the name of SKU. + :paramtype name: str + :keyword tier: Gets the tier of SKU. + :paramtype tier: str + :keyword capacity: Gets the capacity of SKU. + :paramtype capacity: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SkuCapacity + :keyword locations: Gets the set of locations that the SKU is available. + :paramtype locations: list[str] + :keyword location_info: Gets a list of locations and availability zones in those locations + where the SKU is available. + :paramtype location_info: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceSkuLocationInfo] + :keyword restrictions: Gets the restrictions because of which SKU cannot be used. This is + empty if there are no restrictions. + :paramtype restrictions: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceSkuRestrictions] + """ + super().__init__(**kwargs) + self.resource_type = resource_type + self.name = name + self.tier = tier + self.capacity = capacity + self.locations = locations + self.location_info = location_info + self.restrictions = restrictions + + +class ResourceSkuCapabilities(_serialization.Model): + """ResourceSkuCapabilities. + + :ivar name: Gets an invariant to describe the feature. + :vartype name: str + :ivar value: Gets an invariant if the feature is measured by quantity. + :vartype value: str + """ + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "value": {"key": "value", "type": "str"}, + } + + def __init__(self, *, name: Optional[str] = None, value: Optional[str] = None, **kwargs): + """ + :keyword name: Gets an invariant to describe the feature. + :paramtype name: str + :keyword value: Gets an invariant if the feature is measured by quantity. + :paramtype value: str + """ + super().__init__(**kwargs) + self.name = name + self.value = value + + +class ResourceSkuCollection(_serialization.Model): + """Object that includes an array of Azure Spring Apps SKU and a possible link for next set. + + :ivar value: Collection of resource SKU. + :vartype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceSku] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. 
+ :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[ResourceSku]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, *, value: Optional[List["_models.ResourceSku"]] = None, next_link: Optional[str] = None, **kwargs + ): + """ + :keyword value: Collection of resource SKU. + :paramtype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceSku] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class ResourceSkuLocationInfo(_serialization.Model): + """Locations and availability zones where the SKU is available. + + :ivar location: Gets location of the SKU. + :vartype location: str + :ivar zones: Gets list of availability zones where the SKU is supported. + :vartype zones: list[str] + :ivar zone_details: Gets details of capabilities available to a SKU in specific zones. + :vartype zone_details: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceSkuZoneDetails] + """ + + _attribute_map = { + "location": {"key": "location", "type": "str"}, + "zones": {"key": "zones", "type": "[str]"}, + "zone_details": {"key": "zoneDetails", "type": "[ResourceSkuZoneDetails]"}, + } + + def __init__( + self, + *, + location: Optional[str] = None, + zones: Optional[List[str]] = None, + zone_details: Optional[List["_models.ResourceSkuZoneDetails"]] = None, + **kwargs + ): + """ + :keyword location: Gets location of the SKU. + :paramtype location: str + :keyword zones: Gets list of availability zones where the SKU is supported. + :paramtype zones: list[str] + :keyword zone_details: Gets details of capabilities available to a SKU in specific zones. + :paramtype zone_details: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceSkuZoneDetails] + """ + super().__init__(**kwargs) + self.location = location + self.zones = zones + self.zone_details = zone_details + + +class ResourceSkuRestrictionInfo(_serialization.Model): + """Information about the restriction where the SKU cannot be used. + + :ivar locations: Gets locations where the SKU is restricted. + :vartype locations: list[str] + :ivar zones: Gets list of availability zones where the SKU is restricted. + :vartype zones: list[str] + """ + + _attribute_map = { + "locations": {"key": "locations", "type": "[str]"}, + "zones": {"key": "zones", "type": "[str]"}, + } + + def __init__(self, *, locations: Optional[List[str]] = None, zones: Optional[List[str]] = None, **kwargs): + """ + :keyword locations: Gets locations where the SKU is restricted. + :paramtype locations: list[str] + :keyword zones: Gets list of availability zones where the SKU is restricted. + :paramtype zones: list[str] + """ + super().__init__(**kwargs) + self.locations = locations + self.zones = zones + + +class ResourceSkuRestrictions(_serialization.Model): + """Restrictions where the SKU cannot be used. + + :ivar type: Gets the type of restrictions. Possible values include: 'Location', 'Zone'. Known + values are: "Location" and "Zone". + :vartype type: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceSkuRestrictionsType + :ivar values: Gets the value of restrictions. If the restriction type is set to + location. This would be different locations where the SKU is restricted. 
+ :vartype values: list[str] + :ivar restriction_info: Gets the information about the restriction where the SKU cannot be + used. + :vartype restriction_info: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceSkuRestrictionInfo + :ivar reason_code: Gets the reason for restriction. Possible values include: 'QuotaId', + 'NotAvailableForSubscription'. Known values are: "QuotaId" and "NotAvailableForSubscription". + :vartype reason_code: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceSkuRestrictionsReasonCode + """ + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "values": {"key": "values", "type": "[str]"}, + "restriction_info": {"key": "restrictionInfo", "type": "ResourceSkuRestrictionInfo"}, + "reason_code": {"key": "reasonCode", "type": "str"}, + } + + def __init__( + self, + *, + type: Optional[Union[str, "_models.ResourceSkuRestrictionsType"]] = None, + values: Optional[List[str]] = None, + restriction_info: Optional["_models.ResourceSkuRestrictionInfo"] = None, + reason_code: Optional[Union[str, "_models.ResourceSkuRestrictionsReasonCode"]] = None, + **kwargs + ): + """ + :keyword type: Gets the type of restrictions. Possible values include: 'Location', 'Zone'. + Known values are: "Location" and "Zone". + :paramtype type: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceSkuRestrictionsType + :keyword values: Gets the value of restrictions. If the restriction type is set to + location. This would be different locations where the SKU is restricted. + :paramtype values: list[str] + :keyword restriction_info: Gets the information about the restriction where the SKU cannot be + used. + :paramtype restriction_info: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceSkuRestrictionInfo + :keyword reason_code: Gets the reason for restriction. Possible values include: 'QuotaId', + 'NotAvailableForSubscription'. Known values are: "QuotaId" and "NotAvailableForSubscription". + :paramtype reason_code: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceSkuRestrictionsReasonCode + """ + super().__init__(**kwargs) + self.type = type + self.values = values + self.restriction_info = restriction_info + self.reason_code = reason_code + + +class ResourceSkuZoneDetails(_serialization.Model): + """Details of capabilities available to a SKU in specific zones. + + :ivar name: Gets the set of zones that the SKU is available in with the + specified capabilities. + :vartype name: list[str] + :ivar capabilities: Gets a list of capabilities that are available for the SKU in the + specified list of zones. + :vartype capabilities: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceSkuCapabilities] + """ + + _attribute_map = { + "name": {"key": "name", "type": "[str]"}, + "capabilities": {"key": "capabilities", "type": "[ResourceSkuCapabilities]"}, + } + + def __init__( + self, + *, + name: Optional[List[str]] = None, + capabilities: Optional[List["_models.ResourceSkuCapabilities"]] = None, + **kwargs + ): + """ + :keyword name: Gets the set of zones that the SKU is available in with the + specified capabilities. + :paramtype name: list[str] + :keyword capabilities: Gets a list of capabilities that are available for the SKU in the + specified list of zones. 
+ :paramtype capabilities: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceSkuCapabilities] + """ + super().__init__(**kwargs) + self.name = name + self.capabilities = capabilities + + +class ResourceUploadDefinition(_serialization.Model): + """Resource upload definition payload. + + :ivar relative_path: Source relative path. + :vartype relative_path: str + :ivar upload_url: Upload URL. + :vartype upload_url: str + """ + + _attribute_map = { + "relative_path": {"key": "relativePath", "type": "str"}, + "upload_url": {"key": "uploadUrl", "type": "str"}, + } + + def __init__(self, *, relative_path: Optional[str] = None, upload_url: Optional[str] = None, **kwargs): + """ + :keyword relative_path: Source relative path. + :paramtype relative_path: str + :keyword upload_url: Upload URL. + :paramtype upload_url: str + """ + super().__init__(**kwargs) + self.relative_path = relative_path + self.upload_url = upload_url + + +class ServiceRegistryInstance(_serialization.Model): + """Collection of instances belong to the Service Registry. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Name of the Service Registry instance. + :vartype name: str + :ivar status: Status of the Service Registry instance. + :vartype status: str + """ + + _validation = { + "name": {"readonly": True}, + "status": {"readonly": True}, + } + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "status": {"key": "status", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.name = None + self.status = None + + +class ServiceRegistryProperties(_serialization.Model): + """Service Registry properties payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provisioning_state: State of the Service Registry. Known values are: "Creating", + "Updating", "Succeeded", "Failed", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceRegistryProvisioningState + :ivar resource_requests: The requested resource quantity for required CPU and Memory. + :vartype resource_requests: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceRegistryResourceRequests + :ivar instances: Collection of instances belong to Service Registry. + :vartype instances: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceRegistryInstance] + """ + + _validation = { + "provisioning_state": {"readonly": True}, + "resource_requests": {"readonly": True}, + "instances": {"readonly": True}, + } + + _attribute_map = { + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "resource_requests": {"key": "resourceRequests", "type": "ServiceRegistryResourceRequests"}, + "instances": {"key": "instances", "type": "[ServiceRegistryInstance]"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.provisioning_state = None + self.resource_requests = None + self.instances = None + + +class ServiceRegistryResource(ProxyResource): + """Service Registry resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. 
+ :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Service Registry properties payload. + :vartype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceRegistryProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ServiceRegistryProperties"}, + } + + def __init__(self, *, properties: Optional["_models.ServiceRegistryProperties"] = None, **kwargs): + """ + :keyword properties: Service Registry properties payload. + :paramtype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceRegistryProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class ServiceRegistryResourceCollection(_serialization.Model): + """Object that includes an array of Service Registry resources and a possible link for next set. + + :ivar value: Collection of Service Registry resources. + :vartype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceRegistryResource] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[ServiceRegistryResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, + *, + value: Optional[List["_models.ServiceRegistryResource"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword value: Collection of Service Registry resources. + :paramtype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceRegistryResource] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class ServiceRegistryResourceRequests(_serialization.Model): + """Resource request payload of Service Registry. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar cpu: Cpu allocated to each Service Registry instance. + :vartype cpu: str + :ivar memory: Memory allocated to each Service Registry instance. + :vartype memory: str + :ivar instance_count: Instance count of the Service Registry. + :vartype instance_count: int + """ + + _validation = { + "cpu": {"readonly": True}, + "memory": {"readonly": True}, + "instance_count": {"readonly": True}, + } + + _attribute_map = { + "cpu": {"key": "cpu", "type": "str"}, + "memory": {"key": "memory", "type": "str"}, + "instance_count": {"key": "instanceCount", "type": "int"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.cpu = None + self.memory = None + self.instance_count = None + + +class TrackedResource(Resource): + """The resource model definition for a ARM tracked top level resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. 
+ :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar location: The GEO location of the resource. + :vartype location: str + :ivar tags: Tags of the service which is a list of key value pairs that describe the resource. + :vartype tags: dict[str, str] + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "location": {"key": "location", "type": "str"}, + "tags": {"key": "tags", "type": "{str}"}, + } + + def __init__(self, *, location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, **kwargs): + """ + :keyword location: The GEO location of the resource. + :paramtype location: str + :keyword tags: Tags of the service which is a list of key value pairs that describe the + resource. + :paramtype tags: dict[str, str] + """ + super().__init__(**kwargs) + self.location = location + self.tags = tags + + +class ServiceResource(TrackedResource): + """Service resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar location: The GEO location of the resource. + :vartype location: str + :ivar tags: Tags of the service which is a list of key value pairs that describe the resource. + :vartype tags: dict[str, str] + :ivar properties: Properties of the Service resource. + :vartype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ClusterResourceProperties + :ivar sku: Sku of the Service resource. + :vartype sku: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Sku + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "location": {"key": "location", "type": "str"}, + "tags": {"key": "tags", "type": "{str}"}, + "properties": {"key": "properties", "type": "ClusterResourceProperties"}, + "sku": {"key": "sku", "type": "Sku"}, + } + + def __init__( + self, + *, + location: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + properties: Optional["_models.ClusterResourceProperties"] = None, + sku: Optional["_models.Sku"] = None, + **kwargs + ): + """ + :keyword location: The GEO location of the resource. + :paramtype location: str + :keyword tags: Tags of the service which is a list of key value pairs that describe the + resource. + :paramtype tags: dict[str, str] + :keyword properties: Properties of the Service resource. + :paramtype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ClusterResourceProperties + :keyword sku: Sku of the Service resource. 
+ :paramtype sku: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Sku + """ + super().__init__(location=location, tags=tags, **kwargs) + self.properties = properties + self.sku = sku + + +class ServiceResourceList(_serialization.Model): + """Object that includes an array of Service resources and a possible link for next set. + + :ivar value: Collection of Service resources. + :vartype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[ServiceResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, *, value: Optional[List["_models.ServiceResource"]] = None, next_link: Optional[str] = None, **kwargs + ): + """ + :keyword value: Collection of Service resources. + :paramtype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class ServiceSpecification(_serialization.Model): + """Service specification payload. + + :ivar log_specifications: Specifications of the Log for Azure Monitoring. + :vartype log_specifications: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.LogSpecification] + :ivar metric_specifications: Specifications of the Metrics for Azure Monitoring. + :vartype metric_specifications: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.MetricSpecification] + """ + + _attribute_map = { + "log_specifications": {"key": "logSpecifications", "type": "[LogSpecification]"}, + "metric_specifications": {"key": "metricSpecifications", "type": "[MetricSpecification]"}, + } + + def __init__( + self, + *, + log_specifications: Optional[List["_models.LogSpecification"]] = None, + metric_specifications: Optional[List["_models.MetricSpecification"]] = None, + **kwargs + ): + """ + :keyword log_specifications: Specifications of the Log for Azure Monitoring. + :paramtype log_specifications: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.LogSpecification] + :keyword metric_specifications: Specifications of the Metrics for Azure Monitoring. + :paramtype metric_specifications: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.MetricSpecification] + """ + super().__init__(**kwargs) + self.log_specifications = log_specifications + self.metric_specifications = metric_specifications + + +class ServiceVNetAddons(_serialization.Model): + """Additional Service settings in vnet injection instance. + + :ivar log_stream_public_endpoint: Indicates whether the log stream in vnet injection instance + could be accessed from internet. + :vartype log_stream_public_endpoint: bool + """ + + _attribute_map = { + "log_stream_public_endpoint": {"key": "logStreamPublicEndpoint", "type": "bool"}, + } + + def __init__(self, *, log_stream_public_endpoint: bool = False, **kwargs): + """ + :keyword log_stream_public_endpoint: Indicates whether the log stream in vnet injection + instance could be accessed from internet. + :paramtype log_stream_public_endpoint: bool + """ + super().__init__(**kwargs) + self.log_stream_public_endpoint = log_stream_public_endpoint + + +class Sku(_serialization.Model): + """Sku of Azure Spring Apps. 
+ + :ivar name: Name of the Sku. + :vartype name: str + :ivar tier: Tier of the Sku. + :vartype tier: str + :ivar capacity: Current capacity of the target resource. + :vartype capacity: int + """ + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "tier": {"key": "tier", "type": "str"}, + "capacity": {"key": "capacity", "type": "int"}, + } + + def __init__(self, *, name: str = "S0", tier: str = "Standard", capacity: Optional[int] = None, **kwargs): + """ + :keyword name: Name of the Sku. + :paramtype name: str + :keyword tier: Tier of the Sku. + :paramtype tier: str + :keyword capacity: Current capacity of the target resource. + :paramtype capacity: int + """ + super().__init__(**kwargs) + self.name = name + self.tier = tier + self.capacity = capacity + + +class SkuCapacity(_serialization.Model): + """The SKU capacity. + + All required parameters must be populated in order to send to Azure. + + :ivar minimum: Gets or sets the minimum. Required. + :vartype minimum: int + :ivar maximum: Gets or sets the maximum. + :vartype maximum: int + :ivar default: Gets or sets the default. + :vartype default: int + :ivar scale_type: Gets or sets the type of the scale. Known values are: "None", "Manual", and + "Automatic". + :vartype scale_type: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.SkuScaleType + """ + + _validation = { + "minimum": {"required": True}, + } + + _attribute_map = { + "minimum": {"key": "minimum", "type": "int"}, + "maximum": {"key": "maximum", "type": "int"}, + "default": {"key": "default", "type": "int"}, + "scale_type": {"key": "scaleType", "type": "str"}, + } + + def __init__( + self, + *, + minimum: int, + maximum: Optional[int] = None, + default: Optional[int] = None, + scale_type: Optional[Union[str, "_models.SkuScaleType"]] = None, + **kwargs + ): + """ + :keyword minimum: Gets or sets the minimum. Required. + :paramtype minimum: int + :keyword maximum: Gets or sets the maximum. + :paramtype maximum: int + :keyword default: Gets or sets the default. + :paramtype default: int + :keyword scale_type: Gets or sets the type of the scale. Known values are: "None", "Manual", + and "Automatic". + :paramtype scale_type: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.SkuScaleType + """ + super().__init__(**kwargs) + self.minimum = minimum + self.maximum = maximum + self.default = default + self.scale_type = scale_type + + +class SourceUploadedUserSourceInfo(UploadedUserSourceInfo): + """Uploaded Java source code binary for a deployment. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Type of the source uploaded. Required. + :vartype type: str + :ivar version: Version of the source. + :vartype version: str + :ivar relative_path: Relative path of the storage which stores the source. + :vartype relative_path: str + :ivar artifact_selector: Selector for the artifact to be used for the deployment for + multi-module projects. This should be + the relative path to the target module/project. + :vartype artifact_selector: str + :ivar runtime_version: Runtime version of the source file. 
+ :vartype runtime_version: str + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, + "relative_path": {"key": "relativePath", "type": "str"}, + "artifact_selector": {"key": "artifactSelector", "type": "str"}, + "runtime_version": {"key": "runtimeVersion", "type": "str"}, + } + + def __init__( + self, + *, + version: Optional[str] = None, + relative_path: Optional[str] = None, + artifact_selector: Optional[str] = None, + runtime_version: Optional[str] = None, + **kwargs + ): + """ + :keyword version: Version of the source. + :paramtype version: str + :keyword relative_path: Relative path of the storage which stores the source. + :paramtype relative_path: str + :keyword artifact_selector: Selector for the artifact to be used for the deployment for + multi-module projects. This should be + the relative path to the target module/project. + :paramtype artifact_selector: str + :keyword runtime_version: Runtime version of the source file. + :paramtype runtime_version: str + """ + super().__init__(version=version, relative_path=relative_path, **kwargs) + self.type = "Source" # type: str + self.artifact_selector = artifact_selector + self.runtime_version = runtime_version + + +class SsoProperties(_serialization.Model): + """Single sign-on related configuration. + + :ivar scope: It defines the specific actions applications can be allowed to do on a user's + behalf. + :vartype scope: list[str] + :ivar client_id: The public identifier for the application. + :vartype client_id: str + :ivar client_secret: The secret known only to the application and the authorization server. + :vartype client_secret: str + :ivar issuer_uri: The URI of Issuer Identifier. + :vartype issuer_uri: str + """ + + _attribute_map = { + "scope": {"key": "scope", "type": "[str]"}, + "client_id": {"key": "clientId", "type": "str"}, + "client_secret": {"key": "clientSecret", "type": "str"}, + "issuer_uri": {"key": "issuerUri", "type": "str"}, + } + + def __init__( + self, + *, + scope: Optional[List[str]] = None, + client_id: Optional[str] = None, + client_secret: Optional[str] = None, + issuer_uri: Optional[str] = None, + **kwargs + ): + """ + :keyword scope: It defines the specific actions applications can be allowed to do on a user's + behalf. + :paramtype scope: list[str] + :keyword client_id: The public identifier for the application. + :paramtype client_id: str + :keyword client_secret: The secret known only to the application and the authorization server. + :paramtype client_secret: str + :keyword issuer_uri: The URI of Issuer Identifier. + :paramtype issuer_uri: str + """ + super().__init__(**kwargs) + self.scope = scope + self.client_id = client_id + self.client_secret = client_secret + self.issuer_uri = issuer_uri + + +class StackProperties(_serialization.Model): + """KPack ClusterStack properties payload. + + :ivar id: Id of the ClusterStack. + :vartype id: str + :ivar version: Version of the ClusterStack. + :vartype version: str + """ + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "version": {"key": "version", "type": "str"}, + } + + def __init__( + self, *, id: Optional[str] = None, version: Optional[str] = None, **kwargs # pylint: disable=redefined-builtin + ): + """ + :keyword id: Id of the ClusterStack. + :paramtype id: str + :keyword version: Version of the ClusterStack. 
+ :paramtype version: str + """ + super().__init__(**kwargs) + self.id = id + self.version = version + + +class StorageProperties(_serialization.Model): + """Storage resource payload. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + StorageAccount + + All required parameters must be populated in order to send to Azure. + + :ivar storage_type: The type of the storage. Required. "StorageAccount" + :vartype storage_type: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.StorageType + """ + + _validation = { + "storage_type": {"required": True}, + } + + _attribute_map = { + "storage_type": {"key": "storageType", "type": "str"}, + } + + _subtype_map = {"storage_type": {"StorageAccount": "StorageAccount"}} + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.storage_type = None # type: Optional[str] + + +class StorageAccount(StorageProperties): + """storage resource of type Azure Storage Account. + + All required parameters must be populated in order to send to Azure. + + :ivar storage_type: The type of the storage. Required. "StorageAccount" + :vartype storage_type: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.StorageType + :ivar account_name: The account name of the Azure Storage Account. Required. + :vartype account_name: str + :ivar account_key: The account key of the Azure Storage Account. Required. + :vartype account_key: str + """ + + _validation = { + "storage_type": {"required": True}, + "account_name": {"required": True}, + "account_key": {"required": True}, + } + + _attribute_map = { + "storage_type": {"key": "storageType", "type": "str"}, + "account_name": {"key": "accountName", "type": "str"}, + "account_key": {"key": "accountKey", "type": "str"}, + } + + def __init__(self, *, account_name: str, account_key: str, **kwargs): + """ + :keyword account_name: The account name of the Azure Storage Account. Required. + :paramtype account_name: str + :keyword account_key: The account key of the Azure Storage Account. Required. + :paramtype account_key: str + """ + super().__init__(**kwargs) + self.storage_type = "StorageAccount" # type: str + self.account_name = account_name + self.account_key = account_key + + +class StorageResource(ProxyResource): + """Storage resource payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Properties of the storage resource payload. + :vartype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.StorageProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "StorageProperties"}, + } + + def __init__(self, *, properties: Optional["_models.StorageProperties"] = None, **kwargs): + """ + :keyword properties: Properties of the storage resource payload. 
+ :paramtype properties: ~azure.mgmt.appplatform.v2022_11_01_preview.models.StorageProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class StorageResourceCollection(_serialization.Model): + """Collection compose of storage resources list and a possible link for next page. + + :ivar value: The storage resources list. + :vartype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.StorageResource] + :ivar next_link: The link to next page of storage list. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[StorageResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, *, value: Optional[List["_models.StorageResource"]] = None, next_link: Optional[str] = None, **kwargs + ): + """ + :keyword value: The storage resources list. + :paramtype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.StorageResource] + :keyword next_link: The link to next page of storage list. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class SupportedBuildpackResource(ProxyResource): + """Supported buildpack resource payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Supported buildpack resource properties. + :vartype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.SupportedBuildpackResourceProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "SupportedBuildpackResourceProperties"}, + } + + def __init__(self, *, properties: Optional["_models.SupportedBuildpackResourceProperties"] = None, **kwargs): + """ + :keyword properties: Supported buildpack resource properties. + :paramtype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.SupportedBuildpackResourceProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class SupportedBuildpackResourceProperties(_serialization.Model): + """Supported buildpack resource properties. + + :ivar buildpack_id: The id of supported buildpack. + :vartype buildpack_id: str + """ + + _attribute_map = { + "buildpack_id": {"key": "buildpackId", "type": "str"}, + } + + def __init__(self, *, buildpack_id: Optional[str] = None, **kwargs): + """ + :keyword buildpack_id: The id of supported buildpack. + :paramtype buildpack_id: str + """ + super().__init__(**kwargs) + self.buildpack_id = buildpack_id + + +class SupportedBuildpacksCollection(_serialization.Model): + """Object that includes an array of supported buildpacks resources and a possible link for next set. + + :ivar value: Collection of supported buildpacks resources. 
+ :vartype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.SupportedBuildpackResource] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[SupportedBuildpackResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, + *, + value: Optional[List["_models.SupportedBuildpackResource"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword value: Collection of supported buildpacks resources. + :paramtype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.SupportedBuildpackResource] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class SupportedRuntimeVersion(_serialization.Model): + """Supported deployment runtime version descriptor. + + :ivar value: The raw value which could be passed to deployment CRUD operations. Known values + are: "Java_8", "Java_11", "Java_17", and "NetCore_31". + :vartype value: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.SupportedRuntimeValue + :ivar platform: The platform of this runtime version (possible values: "Java" or ".NET"). Known + values are: "Java" and ".NET Core". + :vartype platform: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.SupportedRuntimePlatform + :ivar version: The detailed version (major.minor) of the platform. + :vartype version: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "str"}, + "platform": {"key": "platform", "type": "str"}, + "version": {"key": "version", "type": "str"}, + } + + def __init__( + self, + *, + value: Optional[Union[str, "_models.SupportedRuntimeValue"]] = None, + platform: Optional[Union[str, "_models.SupportedRuntimePlatform"]] = None, + version: Optional[str] = None, + **kwargs + ): + """ + :keyword value: The raw value which could be passed to deployment CRUD operations. Known values + are: "Java_8", "Java_11", "Java_17", and "NetCore_31". + :paramtype value: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.SupportedRuntimeValue + :keyword platform: The platform of this runtime version (possible values: "Java" or ".NET"). + Known values are: "Java" and ".NET Core". + :paramtype platform: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.SupportedRuntimePlatform + :keyword version: The detailed version (major.minor) of the platform. + :paramtype version: str + """ + super().__init__(**kwargs) + self.value = value + self.platform = platform + self.version = version + + +class SupportedStackResource(ProxyResource): + """Supported stack resource payload. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SystemData + :ivar properties: Supported stack resource properties. 
+ :vartype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.SupportedStackResourceProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "SupportedStackResourceProperties"}, + } + + def __init__(self, *, properties: Optional["_models.SupportedStackResourceProperties"] = None, **kwargs): + """ + :keyword properties: Supported stack resource properties. + :paramtype properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.SupportedStackResourceProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class SupportedStackResourceProperties(_serialization.Model): + """Supported stack resource properties. + + :ivar stack_id: The id of supported stack. + :vartype stack_id: str + :ivar version: The version of supported stack. + :vartype version: str + """ + + _attribute_map = { + "stack_id": {"key": "stackId", "type": "str"}, + "version": {"key": "version", "type": "str"}, + } + + def __init__(self, *, stack_id: Optional[str] = None, version: Optional[str] = None, **kwargs): + """ + :keyword stack_id: The id of supported stack. + :paramtype stack_id: str + :keyword version: The version of supported stack. + :paramtype version: str + """ + super().__init__(**kwargs) + self.stack_id = stack_id + self.version = version + + +class SupportedStacksCollection(_serialization.Model): + """Object that includes an array of supported stacks resources and a possible link for next set. + + :ivar value: Collection of supported stacks resources. + :vartype value: list[~azure.mgmt.appplatform.v2022_11_01_preview.models.SupportedStackResource] + :ivar next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[SupportedStackResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, + *, + value: Optional[List["_models.SupportedStackResource"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword value: Collection of supported stacks resources. + :paramtype value: + list[~azure.mgmt.appplatform.v2022_11_01_preview.models.SupportedStackResource] + :keyword next_link: URL client should use to fetch the next page (per server side paging). + It's null for now, added for future use. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class SystemData(_serialization.Model): + """Metadata pertaining to creation and last modification of the resource. + + :ivar created_by: The identity that created the resource. + :vartype created_by: str + :ivar created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", and "Key". + :vartype created_by_type: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CreatedByType + :ivar created_at: The timestamp of resource creation (UTC). + :vartype created_at: ~datetime.datetime + :ivar last_modified_by: The identity that last modified the resource. 
+ :vartype last_modified_by: str + :ivar last_modified_by_type: The type of identity that last modified the resource. Known values + are: "User", "Application", "ManagedIdentity", and "Key". + :vartype last_modified_by_type: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.LastModifiedByType + :ivar last_modified_at: The timestamp of resource modification (UTC). + :vartype last_modified_at: ~datetime.datetime + """ + + _attribute_map = { + "created_by": {"key": "createdBy", "type": "str"}, + "created_by_type": {"key": "createdByType", "type": "str"}, + "created_at": {"key": "createdAt", "type": "iso-8601"}, + "last_modified_by": {"key": "lastModifiedBy", "type": "str"}, + "last_modified_by_type": {"key": "lastModifiedByType", "type": "str"}, + "last_modified_at": {"key": "lastModifiedAt", "type": "iso-8601"}, + } + + def __init__( + self, + *, + created_by: Optional[str] = None, + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, + created_at: Optional[datetime.datetime] = None, + last_modified_by: Optional[str] = None, + last_modified_by_type: Optional[Union[str, "_models.LastModifiedByType"]] = None, + last_modified_at: Optional[datetime.datetime] = None, + **kwargs + ): + """ + :keyword created_by: The identity that created the resource. + :paramtype created_by: str + :keyword created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", and "Key". + :paramtype created_by_type: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CreatedByType + :keyword created_at: The timestamp of resource creation (UTC). + :paramtype created_at: ~datetime.datetime + :keyword last_modified_by: The identity that last modified the resource. + :paramtype last_modified_by: str + :keyword last_modified_by_type: The type of identity that last modified the resource. Known + values are: "User", "Application", "ManagedIdentity", and "Key". + :paramtype last_modified_by_type: str or + ~azure.mgmt.appplatform.v2022_11_01_preview.models.LastModifiedByType + :keyword last_modified_at: The timestamp of resource modification (UTC). + :paramtype last_modified_at: ~datetime.datetime + """ + super().__init__(**kwargs) + self.created_by = created_by + self.created_by_type = created_by_type + self.created_at = created_at + self.last_modified_by = last_modified_by + self.last_modified_by_type = last_modified_by_type + self.last_modified_at = last_modified_at + + +class TCPSocketAction(ProbeAction): + """TCPSocketAction describes an action based on opening a socket. + + All required parameters must be populated in order to send to Azure. + + :ivar type: The type of the action to take to perform the health check. Required. Known values + are: "HTTPGetAction", "TCPSocketAction", and "ExecAction". + :vartype type: str or ~azure.mgmt.appplatform.v2022_11_01_preview.models.ProbeActionType + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.type = "TCPSocketAction" # type: str + + +class TemporaryDisk(_serialization.Model): + """Temporary disk payload. + + :ivar size_in_gb: Size of the temporary disk in GB. + :vartype size_in_gb: int + :ivar mount_path: Mount path of the temporary disk. 
+ :vartype mount_path: str + """ + + _validation = { + "size_in_gb": {"maximum": 5, "minimum": 0}, + } + + _attribute_map = { + "size_in_gb": {"key": "sizeInGB", "type": "int"}, + "mount_path": {"key": "mountPath", "type": "str"}, + } + + def __init__(self, *, size_in_gb: Optional[int] = None, mount_path: str = "/tmp", **kwargs): + """ + :keyword size_in_gb: Size of the temporary disk in GB. + :paramtype size_in_gb: int + :keyword mount_path: Mount path of the temporary disk. + :paramtype mount_path: str + """ + super().__init__(**kwargs) + self.size_in_gb = size_in_gb + self.mount_path = mount_path + + +class TestKeys(_serialization.Model): + """Test keys payload. + + :ivar primary_key: Primary key. + :vartype primary_key: str + :ivar secondary_key: Secondary key. + :vartype secondary_key: str + :ivar primary_test_endpoint: Primary test endpoint. + :vartype primary_test_endpoint: str + :ivar secondary_test_endpoint: Secondary test endpoint. + :vartype secondary_test_endpoint: str + :ivar enabled: Indicates whether the test endpoint feature enabled or not. + :vartype enabled: bool + """ + + _attribute_map = { + "primary_key": {"key": "primaryKey", "type": "str"}, + "secondary_key": {"key": "secondaryKey", "type": "str"}, + "primary_test_endpoint": {"key": "primaryTestEndpoint", "type": "str"}, + "secondary_test_endpoint": {"key": "secondaryTestEndpoint", "type": "str"}, + "enabled": {"key": "enabled", "type": "bool"}, + } + + def __init__( + self, + *, + primary_key: Optional[str] = None, + secondary_key: Optional[str] = None, + primary_test_endpoint: Optional[str] = None, + secondary_test_endpoint: Optional[str] = None, + enabled: Optional[bool] = None, + **kwargs + ): + """ + :keyword primary_key: Primary key. + :paramtype primary_key: str + :keyword secondary_key: Secondary key. + :paramtype secondary_key: str + :keyword primary_test_endpoint: Primary test endpoint. + :paramtype primary_test_endpoint: str + :keyword secondary_test_endpoint: Secondary test endpoint. + :paramtype secondary_test_endpoint: str + :keyword enabled: Indicates whether the test endpoint feature enabled or not. + :paramtype enabled: bool + """ + super().__init__(**kwargs) + self.primary_key = primary_key + self.secondary_key = secondary_key + self.primary_test_endpoint = primary_test_endpoint + self.secondary_test_endpoint = secondary_test_endpoint + self.enabled = enabled + + +class TriggeredBuildResult(_serialization.Model): + """The build result triggered by a build. + + :ivar id: The unique build id of this build result. + :vartype id: str + """ + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + } + + def __init__(self, *, id: Optional[str] = None, **kwargs): # pylint: disable=redefined-builtin + """ + :keyword id: The unique build id of this build result. + :paramtype id: str + """ + super().__init__(**kwargs) + self.id = id + + +class UserAssignedManagedIdentity(_serialization.Model): + """The details of the user-assigned managed identity assigned to an App. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: Principal Id of user-assigned managed identity. + :vartype principal_id: str + :ivar client_id: Client Id of user-assigned managed identity. 
+ :vartype client_id: str + """ + + _validation = { + "principal_id": {"readonly": True}, + "client_id": {"readonly": True}, + } + + _attribute_map = { + "principal_id": {"key": "principalId", "type": "str"}, + "client_id": {"key": "clientId", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.principal_id = None + self.client_id = None + + +class ValidationMessages(_serialization.Model): + """Validate messages of the configuration service git repositories. + + :ivar name: The name of the configuration service git repository. + :vartype name: str + :ivar messages: Detailed validation messages. + :vartype messages: list[str] + """ + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "messages": {"key": "messages", "type": "[str]"}, + } + + def __init__(self, *, name: Optional[str] = None, messages: Optional[List[str]] = None, **kwargs): + """ + :keyword name: The name of the configuration service git repository. + :paramtype name: str + :keyword messages: Detailed validation messages. + :paramtype messages: list[str] + """ + super().__init__(**kwargs) + self.name = name + self.messages = messages diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/models/_patch.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/models/_patch.py new file mode 100644 index 00000000000..f7dd3251033 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/models/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/__init__.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/__init__.py new file mode 100644 index 00000000000..3d933300029 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/__init__.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from ._services_operations import ServicesOperations +from ._config_servers_operations import ConfigServersOperations +from ._configuration_services_operations import ConfigurationServicesOperations +from ._service_registries_operations import ServiceRegistriesOperations +from ._application_live_views_operations import ApplicationLiveViewsOperations +from ._application_live_view_operations import ApplicationLiveViewOperations +from ._dev_tool_portals_operations import DevToolPortalsOperations +from ._dev_tool_portal_operations import DevToolPortalOperations +from ._build_service_operations import BuildServiceOperations +from ._buildpack_binding_operations import BuildpackBindingOperations +from ._build_service_builder_operations import BuildServiceBuilderOperations +from ._build_service_agent_pool_operations import BuildServiceAgentPoolOperations +from ._monitoring_settings_operations import MonitoringSettingsOperations +from ._apps_operations import AppsOperations +from ._bindings_operations import BindingsOperations +from ._storages_operations import StoragesOperations +from ._certificates_operations import CertificatesOperations +from ._custom_domains_operations import CustomDomainsOperations +from ._deployments_operations import DeploymentsOperations +from ._operations import Operations +from ._runtime_versions_operations import RuntimeVersionsOperations +from ._skus_operations import SkusOperations +from ._gateways_operations import GatewaysOperations +from ._gateway_route_configs_operations import GatewayRouteConfigsOperations +from ._gateway_custom_domains_operations import GatewayCustomDomainsOperations +from ._api_portals_operations import ApiPortalsOperations +from ._api_portal_custom_domains_operations import ApiPortalCustomDomainsOperations +from ._application_accelerators_operations import ApplicationAcceleratorsOperations +from ._customized_accelerators_operations import CustomizedAcceleratorsOperations +from ._predefined_accelerators_operations import PredefinedAcceleratorsOperations + +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "ServicesOperations", + "ConfigServersOperations", + "ConfigurationServicesOperations", + "ServiceRegistriesOperations", + "ApplicationLiveViewsOperations", + "ApplicationLiveViewOperations", + "DevToolPortalsOperations", + "DevToolPortalOperations", + "BuildServiceOperations", + "BuildpackBindingOperations", + "BuildServiceBuilderOperations", + "BuildServiceAgentPoolOperations", + "MonitoringSettingsOperations", + "AppsOperations", + "BindingsOperations", + "StoragesOperations", + "CertificatesOperations", + "CustomDomainsOperations", + "DeploymentsOperations", + "Operations", + "RuntimeVersionsOperations", + "SkusOperations", + "GatewaysOperations", + "GatewayRouteConfigsOperations", + "GatewayCustomDomainsOperations", + "ApiPortalsOperations", + "ApiPortalCustomDomainsOperations", + "ApplicationAcceleratorsOperations", + "CustomizedAcceleratorsOperations", + "PredefinedAcceleratorsOperations", +] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_api_portal_custom_domains_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_api_portal_custom_domains_operations.py new 
file mode 100644 index 00000000000..7294760ab4a --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_api_portal_custom_domains_operations.py @@ -0,0 +1,734 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, + service_name: str, + api_portal_name: str, + domain_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}/domains/{domainName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "apiPortalName": _SERIALIZER.url("api_portal_name", api_portal_name, "str"), + "domainName": _SERIALIZER.url("domain_name", domain_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, + service_name: str, + api_portal_name: str, + domain_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}/domains/{domainName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "apiPortalName": _SERIALIZER.url("api_portal_name", api_portal_name, "str"), + "domainName": _SERIALIZER.url("domain_name", domain_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, + service_name: str, + api_portal_name: str, + domain_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}/domains/{domainName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "apiPortalName": _SERIALIZER.url("api_portal_name", api_portal_name, "str"), + "domainName": _SERIALIZER.url("domain_name", domain_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_request( + resource_group_name: str, service_name: str, api_portal_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + 
"template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}/domains", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "apiPortalName": _SERIALIZER.url("api_portal_name", api_portal_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class ApiPortalCustomDomainsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`api_portal_custom_domains` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, service_name: str, api_portal_name: str, domain_name: str, **kwargs: Any + ) -> _models.ApiPortalCustomDomainResource: + """Get the API portal custom domain. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :param domain_name: The name of the API portal custom domain. Required. 
+ :type domain_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ApiPortalCustomDomainResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalCustomDomainResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApiPortalCustomDomainResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + domain_name=domain_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ApiPortalCustomDomainResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}/domains/{domainName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + domain_name: str, + api_portal_custom_domain_resource: Union[_models.ApiPortalCustomDomainResource, IO], + **kwargs: Any + ) -> _models.ApiPortalCustomDomainResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApiPortalCustomDomainResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(api_portal_custom_domain_resource, (IO, bytes)): + _content = api_portal_custom_domain_resource + else: + _json = self._serialize.body(api_portal_custom_domain_resource, "ApiPortalCustomDomainResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + domain_name=domain_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, 
+ json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ApiPortalCustomDomainResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ApiPortalCustomDomainResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}/domains/{domainName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + domain_name: str, + api_portal_custom_domain_resource: _models.ApiPortalCustomDomainResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ApiPortalCustomDomainResource]: + """Create or update the API portal custom domain. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :param domain_name: The name of the API portal custom domain. Required. + :type domain_name: str + :param api_portal_custom_domain_resource: The API portal custom domain for the create or update + operation. Required. + :type api_portal_custom_domain_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalCustomDomainResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ApiPortalCustomDomainResource or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalCustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + domain_name: str, + api_portal_custom_domain_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ApiPortalCustomDomainResource]: + """Create or update the API portal custom domain. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :param domain_name: The name of the API portal custom domain. Required. + :type domain_name: str + :param api_portal_custom_domain_resource: The API portal custom domain for the create or update + operation. Required. + :type api_portal_custom_domain_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ApiPortalCustomDomainResource or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalCustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + domain_name: str, + api_portal_custom_domain_resource: Union[_models.ApiPortalCustomDomainResource, IO], + **kwargs: Any + ) -> LROPoller[_models.ApiPortalCustomDomainResource]: + """Create or update the API portal custom domain. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :param domain_name: The name of the API portal custom domain. Required. + :type domain_name: str + :param api_portal_custom_domain_resource: The API portal custom domain for the create or update + operation. Is either a model type or a IO type. Required. 
+ :type api_portal_custom_domain_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalCustomDomainResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ApiPortalCustomDomainResource or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalCustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApiPortalCustomDomainResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + domain_name=domain_name, + api_portal_custom_domain_resource=api_portal_custom_domain_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ApiPortalCustomDomainResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}/domains/{domainName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, api_portal_name: str, domain_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, 
+ 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + domain_name=domain_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}/domains/{domainName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, service_name: str, api_portal_name: str, domain_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete the API portal custom domain. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :param domain_name: The name of the API portal custom domain. Required. + :type domain_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + domain_name=domain_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}/domains/{domainName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, api_portal_name: str, **kwargs: Any + ) -> Iterable["_models.ApiPortalCustomDomainResource"]: + """Handle requests to list all API portal custom domains. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. 
+ :type api_portal_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ApiPortalCustomDomainResource or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalCustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApiPortalCustomDomainResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ApiPortalCustomDomainResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}/domains"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_api_portals_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_api_portals_operations.py new file mode 100644 index 00000000000..54f3692372c --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_api_portals_operations.py @@ -0,0 +1,871 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, service_name: str, api_portal_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "apiPortalName": _SERIALIZER.url("api_portal_name", api_portal_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, service_name: str, api_portal_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url 
= kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "apiPortalName": _SERIALIZER.url("api_portal_name", api_portal_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, service_name: str, api_portal_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "apiPortalName": _SERIALIZER.url("api_portal_name", api_portal_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_request(resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct 
headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_validate_domain_request( + resource_group_name: str, service_name: str, api_portal_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}/validateDomain", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "apiPortalName": _SERIALIZER.url("api_portal_name", api_portal_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class ApiPortalsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`api_portals` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, service_name: str, api_portal_name: str, **kwargs: Any + ) -> _models.ApiPortalResource: + """Get the API portal and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. 
+ :type api_portal_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ApiPortalResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApiPortalResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ApiPortalResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + api_portal_resource: Union[_models.ApiPortalResource, IO], + **kwargs: Any + ) -> _models.ApiPortalResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApiPortalResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(api_portal_resource, (IO, bytes)): + _content = api_portal_resource + else: + _json = self._serialize.body(api_portal_resource, "ApiPortalResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: 
ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ApiPortalResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ApiPortalResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + api_portal_resource: _models.ApiPortalResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ApiPortalResource]: + """Create the default API portal or update the existing API portal. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :param api_portal_resource: The API portal for the create or update operation. Required. + :type api_portal_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ApiPortalResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + api_portal_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ApiPortalResource]: + """Create the default API portal or update the existing API portal. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :param api_portal_resource: The API portal for the create or update operation. Required. + :type api_portal_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ApiPortalResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + api_portal_resource: Union[_models.ApiPortalResource, IO], + **kwargs: Any + ) -> LROPoller[_models.ApiPortalResource]: + """Create the default API portal or update the existing API portal. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :param api_portal_resource: The API portal for the create or update operation. Is either a + model type or a IO type. Required. + :type api_portal_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalResource + or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ApiPortalResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApiPortalResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + api_portal_resource=api_portal_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ApiPortalResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, api_portal_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in 
[200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, service_name: str, api_portal_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete the default API portal. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}"} # type: ignore + + @distributed_trace + def list(self, 
resource_group_name: str, service_name: str, **kwargs: Any) -> Iterable["_models.ApiPortalResource"]: + """Handles requests to list all resources in a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ApiPortalResource or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApiPortalResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApiPortalResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ApiPortalResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals"} # type: ignore + + @overload + def validate_domain( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + validate_payload: _models.CustomDomainValidatePayload, + *, 
+ content_type: str = "application/json", + **kwargs: Any + ) -> _models.CustomDomainValidateResult: + """Check the domains are valid as well as not in use. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :param validate_payload: Custom domain payload to be validated. Required. + :type validate_payload: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidatePayload + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomDomainValidateResult or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidateResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def validate_domain( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + validate_payload: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CustomDomainValidateResult: + """Check the domains are valid as well as not in use. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :param validate_payload: Custom domain payload to be validated. Required. + :type validate_payload: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomDomainValidateResult or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidateResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def validate_domain( + self, + resource_group_name: str, + service_name: str, + api_portal_name: str, + validate_payload: Union[_models.CustomDomainValidatePayload, IO], + **kwargs: Any + ) -> _models.CustomDomainValidateResult: + """Check the domains are valid as well as not in use. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param api_portal_name: The name of API portal. Required. + :type api_portal_name: str + :param validate_payload: Custom domain payload to be validated. Is either a model type or a IO + type. Required. + :type validate_payload: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidatePayload or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomDomainValidateResult or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidateResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomDomainValidateResult] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(validate_payload, (IO, bytes)): + _content = validate_payload + else: + _json = self._serialize.body(validate_payload, "CustomDomainValidatePayload") + + request = build_validate_domain_request( + resource_group_name=resource_group_name, + service_name=service_name, + api_portal_name=api_portal_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.validate_domain.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("CustomDomainValidateResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + validate_domain.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apiPortals/{apiPortalName}/validateDomain"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_application_accelerators_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_application_accelerators_operations.py new file mode 100644 index 00000000000..6f1733e4a46 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_application_accelerators_operations.py @@ -0,0 +1,697 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
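For orientation, a minimal sketch of how the validate_domain operation implemented above might be invoked through the vendored client. The import path, the api_portals attribute name (inferred from the ApiPortalsOperations class), the DefaultAzureCredential usage, and the payload's name field are illustrative assumptions rather than part of the generated code:

from azure.identity import DefaultAzureCredential
from azext_spring.vendored_sdks.appplatform.v2022_11_01_preview import (
    AppPlatformManagementClient,
    models,
)

# Placeholder identifiers; replace with real values.
client = AppPlatformManagementClient(DefaultAzureCredential(), "<subscription-id>")

# Ask the service whether a custom domain can be bound to the API portal.
result = client.api_portals.validate_domain(
    resource_group_name="my-rg",
    service_name="my-spring-service",
    api_portal_name="default",
    validate_payload=models.CustomDomainValidatePayload(name="api.contoso.com"),
)
print(result.as_dict())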
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request(resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, service_name: str, application_accelerator_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "applicationAcceleratorName": _SERIALIZER.url( + 
"application_accelerator_name", application_accelerator_name, "str" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, service_name: str, application_accelerator_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "applicationAcceleratorName": _SERIALIZER.url( + "application_accelerator_name", application_accelerator_name, "str" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, service_name: str, application_accelerator_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "applicationAcceleratorName": _SERIALIZER.url( + "application_accelerator_name", application_accelerator_name, "str" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return 
HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +class ApplicationAcceleratorsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`application_accelerators` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> Iterable["_models.ApplicationAcceleratorResource"]: + """Handle requests to list all application accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ApplicationAcceleratorResource or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApplicationAcceleratorResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ApplicationAcceleratorResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or 
None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, service_name: str, application_accelerator_name: str, **kwargs: Any + ) -> _models.ApplicationAcceleratorResource: + """Get the application accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ApplicationAcceleratorResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApplicationAcceleratorResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ApplicationAcceleratorResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + 
application_accelerator_name: str, + application_accelerator_resource: Union[_models.ApplicationAcceleratorResource, IO], + **kwargs: Any + ) -> _models.ApplicationAcceleratorResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApplicationAcceleratorResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(application_accelerator_resource, (IO, bytes)): + _content = application_accelerator_resource + else: + _json = self._serialize.body(application_accelerator_resource, "ApplicationAcceleratorResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ApplicationAcceleratorResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ApplicationAcceleratorResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + application_accelerator_resource: _models.ApplicationAcceleratorResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ApplicationAcceleratorResource]: + """Create or update the application accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param application_accelerator_resource: The application accelerator for the create or update + operation. 
Required. + :type application_accelerator_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ApplicationAcceleratorResource or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + application_accelerator_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ApplicationAcceleratorResource]: + """Create or update the application accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param application_accelerator_resource: The application accelerator for the create or update + operation. Required. + :type application_accelerator_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ApplicationAcceleratorResource or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + application_accelerator_resource: Union[_models.ApplicationAcceleratorResource, IO], + **kwargs: Any + ) -> LROPoller[_models.ApplicationAcceleratorResource]: + """Create or update the application accelerator. 
+ + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param application_accelerator_resource: The application accelerator for the create or update + operation. Is either a model type or a IO type. Required. + :type application_accelerator_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ApplicationAcceleratorResource or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationAcceleratorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApplicationAcceleratorResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + application_accelerator_resource=application_accelerator_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ApplicationAcceleratorResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return 
LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, application_accelerator_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, service_name: str, application_accelerator_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete the application accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_application_live_view_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_application_live_view_operations.py new file mode 100644 index 00000000000..2a73c4e24c7 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_application_live_view_operations.py @@ -0,0 +1,200 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
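A corresponding sketch for the ApplicationAcceleratorsOperations group defined above, exposed on the client as application_accelerators per its class docstring. The empty resource payload, resource names, and client construction are placeholders; begin_create_or_update and begin_delete return LROPoller objects, so .result() blocks until the long-running operation completes:

from azure.identity import DefaultAzureCredential
from azext_spring.vendored_sdks.appplatform.v2022_11_01_preview import (
    AppPlatformManagementClient,
    models,
)

client = AppPlatformManagementClient(DefaultAzureCredential(), "<subscription-id>")

# Create (or update) an application accelerator and wait for the LRO to finish.
accelerator = client.application_accelerators.begin_create_or_update(
    resource_group_name="my-rg",
    service_name="my-spring-service",
    application_accelerator_name="default",
    application_accelerator_resource=models.ApplicationAcceleratorResource(),
).result()

# Enumerate the accelerators in the service; list() returns an ItemPaged iterator.
for item in client.application_accelerators.list("my-rg", "my-spring-service"):
    print(item.name)

# Delete it again, waiting for completion.
client.application_accelerators.begin_delete("my-rg", "my-spring-service", "default").result()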
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Optional, TypeVar, Union, cast + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_delete_request( + resource_group_name: str, service_name: str, application_live_view_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationLiveViews/{applicationLiveViewName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "applicationLiveViewName": _SERIALIZER.url("application_live_view_name", application_live_view_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +class ApplicationLiveViewOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`application_live_view` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, application_live_view_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_live_view_name=application_live_view_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationLiveViews/{applicationLiveViewName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, service_name: str, application_live_view_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Disable the default Application Live View. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_live_view_name: The name of Application Live View. Required. + :type application_live_view_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + application_live_view_name=application_live_view_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationLiveViews/{applicationLiveViewName}"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_application_live_views_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_application_live_views_operations.py new file mode 100644 index 00000000000..f085460fb5b --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_application_live_views_operations.py @@ -0,0 +1,545 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
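A short sketch for the single-operation ApplicationLiveViewOperations group above, exposed on the client as application_live_view per its class docstring; begin_delete returns an LROPoller[None]. The resource names and client construction are placeholders:

from azure.identity import DefaultAzureCredential
from azext_spring.vendored_sdks.appplatform.v2022_11_01_preview import AppPlatformManagementClient

client = AppPlatformManagementClient(DefaultAzureCredential(), "<subscription-id>")

# Disable Application Live View; the poller completes once the deletion finishes.
poller = client.application_live_view.begin_delete(
    resource_group_name="my-rg",
    service_name="my-spring-service",
    application_live_view_name="default",
)
poller.result()  # returns None on success, raises HttpResponseError on failure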
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request(resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationLiveViews", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, service_name: str, application_live_view_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationLiveViews/{applicationLiveViewName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "applicationLiveViewName": _SERIALIZER.url("application_live_view_name", 
application_live_view_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, service_name: str, application_live_view_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationLiveViews/{applicationLiveViewName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "applicationLiveViewName": _SERIALIZER.url("application_live_view_name", application_live_view_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class ApplicationLiveViewsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`application_live_views` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> Iterable["_models.ApplicationLiveViewResource"]: + """Handles requests to list all resources in a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ApplicationLiveViewResource or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApplicationLiveViewResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ApplicationLiveViewResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationLiveViews"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, service_name: str, application_live_view_name: str, **kwargs: Any + ) -> _models.ApplicationLiveViewResource: + """Get the Application Live and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :param application_live_view_name: The name of Application Live View. Required. + :type application_live_view_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ApplicationLiveViewResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApplicationLiveViewResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_live_view_name=application_live_view_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ApplicationLiveViewResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationLiveViews/{applicationLiveViewName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + application_live_view_name: str, + application_live_view_resource: Union[_models.ApplicationLiveViewResource, IO], + **kwargs: Any + ) -> _models.ApplicationLiveViewResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApplicationLiveViewResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(application_live_view_resource, (IO, bytes)): + _content = application_live_view_resource + else: + _json = self._serialize.body(application_live_view_resource, "ApplicationLiveViewResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_live_view_name=application_live_view_name, + 
subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ApplicationLiveViewResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ApplicationLiveViewResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationLiveViews/{applicationLiveViewName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + application_live_view_name: str, + application_live_view_resource: _models.ApplicationLiveViewResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ApplicationLiveViewResource]: + """Create the default Application Live View or update the existing Application Live View. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_live_view_name: The name of Application Live View. Required. + :type application_live_view_name: str + :param application_live_view_resource: Parameters for the update operation. Required. + :type application_live_view_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ApplicationLiveViewResource or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + application_live_view_name: str, + application_live_view_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ApplicationLiveViewResource]: + """Create the default Application Live View or update the existing Application Live View. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_live_view_name: The name of Application Live View. Required. + :type application_live_view_name: str + :param application_live_view_resource: Parameters for the update operation. Required. + :type application_live_view_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ApplicationLiveViewResource or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + application_live_view_name: str, + application_live_view_resource: Union[_models.ApplicationLiveViewResource, IO], + **kwargs: Any + ) -> LROPoller[_models.ApplicationLiveViewResource]: + """Create the default Application Live View or update the existing Application Live View. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_live_view_name: The name of Application Live View. Required. + :type application_live_view_name: str + :param application_live_view_resource: Parameters for the update operation. Is either a model + type or a IO type. Required. + :type application_live_view_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
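A minimal sketch of driving this long-running operation from a configured management client; the `application_live_views` attribute name, the "default" resource name, and the empty request body are illustrative assumptions rather than values taken from this change:

    from azure.mgmt.appplatform.v2022_11_01_preview import models

    # Hypothetical client and attribute names; begin_create_or_update returns an LROPoller.
    poller = client.application_live_views.begin_create_or_update(
        resource_group_name="my-rg",
        service_name="my-spring-service",
        application_live_view_name="default",  # assumed resource name
        application_live_view_resource=models.ApplicationLiveViewResource(),
    )
    live_view = poller.result()  # blocks until the 200/201 LRO completes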
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ApplicationLiveViewResource or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ApplicationLiveViewResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ApplicationLiveViewResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + application_live_view_name=application_live_view_name, + application_live_view_resource=application_live_view_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ApplicationLiveViewResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationLiveViews/{applicationLiveViewName}"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_apps_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_apps_operations.py new file mode 100644 index 00000000000..af710799dbd --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_apps_operations.py @@ -0,0 +1,1527 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft 
Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, + service_name: str, + app_name: str, + subscription_id: str, + *, + sync_status: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if sync_status is not None: + _params["syncStatus"] = _SERIALIZER.query("sync_status", sync_status, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, service_name: str, app_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct 
URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, service_name: str, app_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, service_name: str, app_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + } + + _url = _format_url_section(_url, 
**path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_request(resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_resource_upload_url_request( + resource_group_name: str, service_name: str, app_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/getResourceUploadUrl", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_set_active_deployments_request( + resource_group_name: str, service_name: str, app_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", 
_headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/setActiveDeployments", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_validate_domain_request( + resource_group_name: str, service_name: str, app_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/validateDomain", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class AppsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`apps` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, + resource_group_name: str, + service_name: str, + app_name: str, + sync_status: Optional[str] = None, + **kwargs: Any + ) -> _models.AppResource: + """Get an App and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param sync_status: Indicates whether sync status. Default value is None. + :type sync_status: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AppResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.AppResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + subscription_id=self._config.subscription_id, + sync_status=sync_status, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("AppResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + app_name: str, + app_resource: Union[_models.AppResource, IO], + **kwargs: Any + ) -> _models.AppResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AppResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(app_resource, (IO, bytes)): + _content = app_resource + else: + _json = self._serialize.body(app_resource, "AppResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("AppResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("AppResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("AppResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + app_resource: _models.AppResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.AppResource]: + """Create a new App or update an exiting App. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param app_resource: Parameters for the create or update operation. Required. + :type app_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
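For the model overload documented here, a hedged sketch of issuing a create and waiting on the returned poller; the `client` variable and the AppResourceProperties fields shown are assumptions, not values taken from this diff:

    from azure.mgmt.appplatform.v2022_11_01_preview import models

    app_poller = client.apps.begin_create_or_update(
        resource_group_name="my-rg",
        service_name="my-spring-service",
        app_name="my-app",
        app_resource=models.AppResource(
            properties=models.AppResourceProperties(public=False, https_only=True)  # illustrative fields
        ),
    )
    created_app = app_poller.result()  # polls via ARMPolling until the 200/201/202 operation completes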
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either AppResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + app_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.AppResource]: + """Create a new App or update an exiting App. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param app_resource: Parameters for the create or update operation. Required. + :type app_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either AppResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + app_resource: Union[_models.AppResource, IO], + **kwargs: Any + ) -> LROPoller[_models.AppResource]: + """Create a new App or update an exiting App. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param app_resource: Parameters for the create or update operation. Is either a model type or a + IO type. Required. + :type app_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either AppResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AppResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + app_resource=app_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("AppResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, app_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, service_name: str, app_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Operation to delete an App. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
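A short sketch of the delete poller described above, including the saved-state pattern the `continuation_token` keyword supports (assumes the same hypothetical `client`):

    delete_poller = client.apps.begin_delete("my-rg", "my-spring-service", "my-app")
    token = delete_poller.continuation_token()  # persist this to resume from a saved state later
    delete_poller.result()                      # block until the 200/202/204 delete finishes

A later process could resume by passing `continuation_token=token` back into `begin_delete` instead of waiting inline.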
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}"} # type: ignore + + def _update_initial( + self, + resource_group_name: str, + service_name: str, + app_name: str, + app_resource: Union[_models.AppResource, IO], + **kwargs: Any + ) -> _models.AppResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AppResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(app_resource, (IO, bytes)): + _content = app_resource + else: + _json = self._serialize.body(app_resource, "AppResource") + + request = build_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if 
response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("AppResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("AppResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}"} # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + app_resource: _models.AppResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.AppResource]: + """Operation to update an exiting App. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param app_resource: Parameters for the update operation. Required. + :type app_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either AppResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + app_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.AppResource]: + """Operation to update an exiting App. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param app_resource: Parameters for the update operation. Required. + :type app_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either AppResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + app_resource: Union[_models.AppResource, IO], + **kwargs: Any + ) -> LROPoller[_models.AppResource]: + """Operation to update an exiting App. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param app_resource: Parameters for the update operation. Is either a model type or a IO type. + Required. + :type app_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either AppResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AppResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + app_resource=app_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("AppResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}"} # type: ignore + + @distributed_trace + def list(self, resource_group_name: str, service_name: str, **kwargs: Any) -> Iterable["_models.AppResource"]: + """Handles requests to list all resources in a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
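Because this returns an `azure.core.paging.ItemPaged`, callers simply iterate and the next-link handling implemented below runs transparently; a one-line sketch with the same assumed `client`:

    app_names = [app.name for app in client.apps.list("my-rg", "my-spring-service")]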
+ :type service_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either AppResource or the result of cls(response)
+ :rtype:
+ ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str
+ cls = kwargs.pop("cls", None) # type: ClsType[_models.AppResourceCollection]
+
+ error_map = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ def prepare_request(next_link=None):
+ if not next_link:
+
+ request = build_list_request(
+ resource_group_name=resource_group_name,
+ service_name=service_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ template_url=self.list.metadata["url"],
+ headers=_headers,
+ params=_params,
+ )
+ request = _convert_request(request)
+ request.url = self._client.format_url(request.url) # type: ignore
+
+ else:
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ request = _convert_request(request)
+ request.url = self._client.format_url(request.url) # type: ignore
+ request.method = "GET"
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize("AppResourceCollection", pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
+ request, stream=False, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(get_next, extract_data)
+
+ list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps"} # type: ignore
+
+ @distributed_trace
+ def get_resource_upload_url(
+ self, resource_group_name: str, service_name: str, app_name: str, **kwargs: Any
+ ) -> _models.ResourceUploadDefinition:
+ """Get a resource upload URL for an App, which may be artifacts or source archive.
+
+ :param resource_group_name: The name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param service_name: The name of the Service resource. Required.
+ :type service_name: str
+ :param app_name: The name of the App resource. Required.
+ :type app_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ResourceUploadDefinition or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceUploadDefinition + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ResourceUploadDefinition] + + request = build_get_resource_upload_url_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_resource_upload_url.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ResourceUploadDefinition", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_resource_upload_url.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/getResourceUploadUrl"} # type: ignore + + def _set_active_deployments_initial( + self, + resource_group_name: str, + service_name: str, + app_name: str, + active_deployment_collection: Union[_models.ActiveDeploymentCollection, IO], + **kwargs: Any + ) -> _models.AppResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AppResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(active_deployment_collection, (IO, bytes)): + _content = active_deployment_collection + else: + _json = self._serialize.body(active_deployment_collection, "ActiveDeploymentCollection") + + request = build_set_active_deployments_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._set_active_deployments_initial.metadata["url"], + headers=_headers, + 
params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("AppResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("AppResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _set_active_deployments_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/setActiveDeployments"} # type: ignore + + @overload + def begin_set_active_deployments( + self, + resource_group_name: str, + service_name: str, + app_name: str, + active_deployment_collection: _models.ActiveDeploymentCollection, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.AppResource]: + """Set existing Deployment under the app as active. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param active_deployment_collection: A list of Deployment name to be active. Required. + :type active_deployment_collection: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ActiveDeploymentCollection + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either AppResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_set_active_deployments( + self, + resource_group_name: str, + service_name: str, + app_name: str, + active_deployment_collection: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.AppResource]: + """Set existing Deployment under the app as active. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. 
+ :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param active_deployment_collection: A list of Deployment name to be active. Required. + :type active_deployment_collection: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either AppResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_set_active_deployments( + self, + resource_group_name: str, + service_name: str, + app_name: str, + active_deployment_collection: Union[_models.ActiveDeploymentCollection, IO], + **kwargs: Any + ) -> LROPoller[_models.AppResource]: + """Set existing Deployment under the app as active. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param active_deployment_collection: A list of Deployment name to be active. Is either a model + type or a IO type. Required. + :type active_deployment_collection: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ActiveDeploymentCollection or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
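(Editorial example, not part of the generated patch.) A minimal sketch of driving this long-running operation, assuming the vendored package mirrors the public azure-mgmt-appplatform layout (the client exposes an ``apps`` attribute and re-exports ``AppPlatformManagementClient``) and that ``ActiveDeploymentCollection`` takes an ``active_deployment_names`` list; all names and IDs below are placeholders:

    from azure.identity import DefaultAzureCredential
    from azext_spring.vendored_sdks.appplatform.v2022_11_01_preview import AppPlatformManagementClient
    from azext_spring.vendored_sdks.appplatform.v2022_11_01_preview import models

    client = AppPlatformManagementClient(DefaultAzureCredential(), "<subscription-id>")

    # Mark the "default" deployment as the active one and wait for the service to finish.
    poller = client.apps.begin_set_active_deployments(
        resource_group_name="my-rg",
        service_name="my-spring-service",
        app_name="my-app",
        active_deployment_collection=models.ActiveDeploymentCollection(
            active_deployment_names=["default"]  # assumed field name, as in the public SDK models
        ),
    )
    app = poller.result()  # AppResource once the LRO completes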
+ :return: An instance of LROPoller that returns either AppResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.AppResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AppResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._set_active_deployments_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + active_deployment_collection=active_deployment_collection, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("AppResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_set_active_deployments.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/setActiveDeployments"} # type: ignore + + @overload + def validate_domain( + self, + resource_group_name: str, + service_name: str, + app_name: str, + validate_payload: _models.CustomDomainValidatePayload, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CustomDomainValidateResult: + """Check the resource name is valid as well as not in use. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param validate_payload: Custom domain payload to be validated. Required. + :type validate_payload: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidatePayload + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomDomainValidateResult or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidateResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def validate_domain( + self, + resource_group_name: str, + service_name: str, + app_name: str, + validate_payload: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CustomDomainValidateResult: + """Check the resource name is valid as well as not in use. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param validate_payload: Custom domain payload to be validated. Required. + :type validate_payload: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomDomainValidateResult or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidateResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def validate_domain( + self, + resource_group_name: str, + service_name: str, + app_name: str, + validate_payload: Union[_models.CustomDomainValidatePayload, IO], + **kwargs: Any + ) -> _models.CustomDomainValidateResult: + """Check the resource name is valid as well as not in use. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param validate_payload: Custom domain payload to be validated. Is either a model type or a IO + type. Required. + :type validate_payload: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidatePayload or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomDomainValidateResult or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidateResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomDomainValidateResult] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(validate_payload, (IO, bytes)): + _content = validate_payload + else: + _json = self._serialize.body(validate_payload, "CustomDomainValidatePayload") + + request = build_validate_domain_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.validate_domain.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("CustomDomainValidateResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + validate_domain.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/validateDomain"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_bindings_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_bindings_operations.py new file mode 100644 index 00000000000..a694c0c5a9a --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_bindings_operations.py @@ -0,0 +1,1002 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, service_name: str, app_name: str, binding_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/bindings/{bindingName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "bindingName": _SERIALIZER.url("binding_name", binding_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, service_name: str, app_name: str, binding_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/bindings/{bindingName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": 
_SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "bindingName": _SERIALIZER.url("binding_name", binding_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, service_name: str, app_name: str, binding_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/bindings/{bindingName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "bindingName": _SERIALIZER.url("binding_name", binding_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, service_name: str, app_name: str, binding_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/bindings/{bindingName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "bindingName": _SERIALIZER.url("binding_name", binding_name, "str"), + } + + _url = 
_format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_request( + resource_group_name: str, service_name: str, app_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/bindings", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class BindingsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`bindings` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, service_name: str, app_name: str, binding_name: str, **kwargs: Any + ) -> _models.BindingResource: + """Get a Binding and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param binding_name: The name of the Binding resource. Required. 
+ :type binding_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BindingResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BindingResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + binding_name=binding_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("BindingResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/bindings/{bindingName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + app_name: str, + binding_name: str, + binding_resource: Union[_models.BindingResource, IO], + **kwargs: Any + ) -> _models.BindingResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BindingResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(binding_resource, (IO, bytes)): + _content = binding_resource + else: + _json = self._serialize.body(binding_resource, "BindingResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + binding_name=binding_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("BindingResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("BindingResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("BindingResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/bindings/{bindingName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + binding_name: str, + binding_resource: _models.BindingResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BindingResource]: + """Create a new Binding or update an exiting Binding. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param binding_name: The name of the Binding resource. Required. + :type binding_name: str + :param binding_resource: Parameters for the create or update operation. Required. + :type binding_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BindingResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + binding_name: str, + binding_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BindingResource]: + """Create a new Binding or update an exiting Binding. + + :param resource_group_name: The name of the resource group that contains the resource. 
You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param binding_name: The name of the Binding resource. Required. + :type binding_name: str + :param binding_resource: Parameters for the create or update operation. Required. + :type binding_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BindingResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + binding_name: str, + binding_resource: Union[_models.BindingResource, IO], + **kwargs: Any + ) -> LROPoller[_models.BindingResource]: + """Create a new Binding or update an exiting Binding. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param binding_name: The name of the Binding resource. Required. + :type binding_name: str + :param binding_resource: Parameters for the create or update operation. Is either a model type + or a IO type. Required. + :type binding_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
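(Editorial example, not part of the generated patch.) A hedged sketch of creating a binding with this poller; the ``BindingResourceProperties`` field names (``resource_id``, ``key``, ``binding_parameters``) are assumed from the public azure-mgmt-appplatform models, and the resource IDs and keys are placeholders:

    from azure.identity import DefaultAzureCredential
    from azext_spring.vendored_sdks.appplatform.v2022_11_01_preview import AppPlatformManagementClient
    from azext_spring.vendored_sdks.appplatform.v2022_11_01_preview import models

    client = AppPlatformManagementClient(DefaultAzureCredential(), "<subscription-id>")

    # Bind a backing Cosmos DB account to the app and block until the binding is provisioned.
    binding = models.BindingResource(
        properties=models.BindingResourceProperties(
            resource_id="/subscriptions/<sub>/resourceGroups/my-rg/providers/"
                        "Microsoft.DocumentDB/databaseAccounts/my-cosmos",
            key="<primary-key>",
            binding_parameters={"databaseName": "db", "apiType": "sql"},
        )
    )
    poller = client.bindings.begin_create_or_update(
        resource_group_name="my-rg",
        service_name="my-spring-service",
        app_name="my-app",
        binding_name="my-binding",
        binding_resource=binding,
    )
    created = poller.result()  # BindingResource once the LRO completes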
+ :return: An instance of LROPoller that returns either BindingResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BindingResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + binding_name=binding_name, + binding_resource=binding_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("BindingResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/bindings/{bindingName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, app_name: str, binding_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + binding_name=binding_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = 
pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/bindings/{bindingName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, service_name: str, app_name: str, binding_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Operation to delete a Binding. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param binding_name: The name of the Binding resource. Required. + :type binding_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
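(Editorial example, not part of the generated patch.) A short sketch of deleting a binding and then enumerating what remains with the paged ``list`` operation defined later in this file; client construction and all names follow the earlier sketches and are placeholders:

    from azure.identity import DefaultAzureCredential
    from azext_spring.vendored_sdks.appplatform.v2022_11_01_preview import AppPlatformManagementClient

    client = AppPlatformManagementClient(DefaultAzureCredential(), "<subscription-id>")

    # Delete the binding; the poller resolves to None once the service reports completion.
    client.bindings.begin_delete(
        resource_group_name="my-rg",
        service_name="my-spring-service",
        app_name="my-app",
        binding_name="my-binding",
    ).result()

    # The remaining bindings on the app can then be paged through lazily.
    for binding in client.bindings.list("my-rg", "my-spring-service", "my-app"):
        print(binding.name)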
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + binding_name=binding_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/bindings/{bindingName}"} # type: ignore + + def _update_initial( + self, + resource_group_name: str, + service_name: str, + app_name: str, + binding_name: str, + binding_resource: Union[_models.BindingResource, IO], + **kwargs: Any + ) -> _models.BindingResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BindingResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(binding_resource, (IO, bytes)): + _content = binding_resource + else: + _json = self._serialize.body(binding_resource, "BindingResource") + + request = build_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + binding_name=binding_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: 
ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("BindingResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("BindingResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/bindings/{bindingName}"} # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + binding_name: str, + binding_resource: _models.BindingResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BindingResource]: + """Operation to update an exiting Binding. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param binding_name: The name of the Binding resource. Required. + :type binding_name: str + :param binding_resource: Parameters for the update operation. Required. + :type binding_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BindingResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + binding_name: str, + binding_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BindingResource]: + """Operation to update an exiting Binding. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. 
+ :type app_name: str + :param binding_name: The name of the Binding resource. Required. + :type binding_name: str + :param binding_resource: Parameters for the update operation. Required. + :type binding_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BindingResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + binding_name: str, + binding_resource: Union[_models.BindingResource, IO], + **kwargs: Any + ) -> LROPoller[_models.BindingResource]: + """Operation to update an exiting Binding. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param binding_name: The name of the Binding resource. Required. + :type binding_name: str + :param binding_resource: Parameters for the update operation. Is either a model type or a IO + type. Required. + :type binding_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
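(Editorial example, not part of the generated patch.) For a partial update, this PATCH-based poller accepts a sparsely populated ``BindingResource``; the ``key`` property name is assumed from the public azure-mgmt-appplatform models, and all names are placeholders:

    from azure.identity import DefaultAzureCredential
    from azext_spring.vendored_sdks.appplatform.v2022_11_01_preview import AppPlatformManagementClient
    from azext_spring.vendored_sdks.appplatform.v2022_11_01_preview import models

    client = AppPlatformManagementClient(DefaultAzureCredential(), "<subscription-id>")

    # Rotate only the credential used by an existing binding, leaving other properties untouched.
    poller = client.bindings.begin_update(
        resource_group_name="my-rg",
        service_name="my-spring-service",
        app_name="my-app",
        binding_name="my-binding",
        binding_resource=models.BindingResource(
            properties=models.BindingResourceProperties(key="<rotated-key>")  # assumed field name
        ),
    )
    updated = poller.result()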
+ :return: An instance of LROPoller that returns either BindingResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BindingResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + binding_name=binding_name, + binding_resource=binding_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("BindingResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/bindings/{bindingName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, app_name: str, **kwargs: Any + ) -> Iterable["_models.BindingResource"]: + """Handles requests to list all resources in an App. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. 
+ :type app_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BindingResource or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.BindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BindingResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("BindingResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/bindings"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_build_service_agent_pool_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_build_service_agent_pool_operations.py new file mode 100644 index 00000000000..88eb04912d5 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_build_service_agent_pool_operations.py @@ -0,0 +1,578 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# 
Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, service_name: str, build_service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/agentPools", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, + service_name: str, + build_service_name: str, + agent_pool_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/agentPools/{agentPoolName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + "agentPoolName": _SERIALIZER.url("agent_pool_name", agent_pool_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_put_request( + resource_group_name: str, + service_name: str, + build_service_name: str, + agent_pool_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/agentPools/{agentPoolName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + "agentPoolName": _SERIALIZER.url("agent_pool_name", agent_pool_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class BuildServiceAgentPoolOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`build_service_agent_pool` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, build_service_name: str, **kwargs: Any + ) -> Iterable["_models.BuildServiceAgentPoolResource"]: + """List build service agent pool. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BuildServiceAgentPoolResource or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceAgentPoolResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildServiceAgentPoolResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("BuildServiceAgentPoolResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + 
response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/agentPools"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, service_name: str, build_service_name: str, agent_pool_name: str, **kwargs: Any + ) -> _models.BuildServiceAgentPoolResource: + """Get build service agent pool. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param agent_pool_name: The name of the build service agent pool resource. Required. + :type agent_pool_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BuildServiceAgentPoolResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceAgentPoolResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildServiceAgentPoolResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + agent_pool_name=agent_pool_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("BuildServiceAgentPoolResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/agentPools/{agentPoolName}"} # type: ignore + + def _update_put_initial( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + agent_pool_name: str, + agent_pool_resource: Union[_models.BuildServiceAgentPoolResource, IO], + 
**kwargs: Any + ) -> _models.BuildServiceAgentPoolResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildServiceAgentPoolResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(agent_pool_resource, (IO, bytes)): + _content = agent_pool_resource + else: + _json = self._serialize.body(agent_pool_resource, "BuildServiceAgentPoolResource") + + request = build_update_put_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + agent_pool_name=agent_pool_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_put_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("BuildServiceAgentPoolResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("BuildServiceAgentPoolResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_put_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/agentPools/{agentPoolName}"} # type: ignore + + @overload + def begin_update_put( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + agent_pool_name: str, + agent_pool_resource: _models.BuildServiceAgentPoolResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BuildServiceAgentPoolResource]: + """Create or update build service agent pool. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param agent_pool_name: The name of the build service agent pool resource. Required. + :type agent_pool_name: str + :param agent_pool_resource: Parameters for the update operation. Required. 
+ :type agent_pool_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceAgentPoolResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BuildServiceAgentPoolResource or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceAgentPoolResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update_put( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + agent_pool_name: str, + agent_pool_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BuildServiceAgentPoolResource]: + """Create or update build service agent pool. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param agent_pool_name: The name of the build service agent pool resource. Required. + :type agent_pool_name: str + :param agent_pool_resource: Parameters for the update operation. Required. + :type agent_pool_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BuildServiceAgentPoolResource or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceAgentPoolResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update_put( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + agent_pool_name: str, + agent_pool_resource: Union[_models.BuildServiceAgentPoolResource, IO], + **kwargs: Any + ) -> LROPoller[_models.BuildServiceAgentPoolResource]: + """Create or update build service agent pool. 
+ + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param agent_pool_name: The name of the build service agent pool resource. Required. + :type agent_pool_name: str + :param agent_pool_resource: Parameters for the update operation. Is either a model type or a IO + type. Required. + :type agent_pool_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceAgentPoolResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BuildServiceAgentPoolResource or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildServiceAgentPoolResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildServiceAgentPoolResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_put_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + agent_pool_name=agent_pool_name, + agent_pool_resource=agent_pool_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("BuildServiceAgentPoolResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + 
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update_put.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/agentPools/{agentPoolName}"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_build_service_builder_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_build_service_builder_operations.py new file mode 100644 index 00000000000..f505375b166 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_build_service_builder_operations.py @@ -0,0 +1,835 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. 
import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + "builderName": _SERIALIZER.url("builder_name", builder_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + "builderName": _SERIALIZER.url("builder_name", builder_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return 
HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + "builderName": _SERIALIZER.url("builder_name", builder_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_request( + resource_group_name: str, service_name: str, build_service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_deployments_request( + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # 
Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}/listUsingDeployments", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + "builderName": _SERIALIZER.url("builder_name", builder_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class BuildServiceBuilderOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`build_service_builder` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, service_name: str, build_service_name: str, builder_name: str, **kwargs: Any + ) -> _models.BuilderResource: + """Get a KPack builder. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. 
+ :type builder_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BuilderResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuilderResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuilderResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("BuilderResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + builder_resource: Union[_models.BuilderResource, IO], + **kwargs: Any + ) -> _models.BuilderResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuilderResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(builder_resource, (IO, bytes)): + _content = builder_resource + else: + _json = self._serialize.body(builder_resource, "BuilderResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("BuilderResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("BuilderResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + builder_resource: _models.BuilderResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BuilderResource]: + """Create or update a KPack builder. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. + :type builder_name: str + :param builder_resource: The target builder for the create or update operation. Required. + :type builder_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuilderResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BuilderResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuilderResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + builder_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BuilderResource]: + """Create or update a KPack builder. + + :param resource_group_name: The name of the resource group that contains the resource. 
You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. + :type builder_name: str + :param builder_resource: The target builder for the create or update operation. Required. + :type builder_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BuilderResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuilderResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + builder_resource: Union[_models.BuilderResource, IO], + **kwargs: Any + ) -> LROPoller[_models.BuilderResource]: + """Create or update a KPack builder. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. + :type builder_name: str + :param builder_resource: The target builder for the create or update operation. Is either a + model type or a IO type. Required. + :type builder_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuilderResource or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either BuilderResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuilderResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuilderResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + builder_resource=builder_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("BuilderResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, build_service_name: str, builder_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, 
stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, service_name: str, build_service_name: str, builder_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete a KPack builder. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. + :type builder_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, build_service_name: str, **kwargs: Any + ) -> Iterable["_models.BuilderResource"]: + """List KPack builders result. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. 
+ :type build_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BuilderResource or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuilderResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuilderResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("BuilderResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders"} # type: ignore + + @distributed_trace + def list_deployments( + self, resource_group_name: str, service_name: str, build_service_name: str, builder_name: str, **kwargs: Any + ) -> _models.DeploymentList: + """List deployments that are using the builder. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. + :type builder_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DeploymentList or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentList + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DeploymentList] + + request = build_list_deployments_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_deployments.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("DeploymentList", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_deployments.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}/listUsingDeployments"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_build_service_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_build_service_operations.py new file mode 100644 index 00000000000..bcca21dbdf6 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_build_service_operations.py @@ -0,0 +1,1567 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_build_services_request( + resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_build_service_request( + resource_group_name: str, service_name: str, build_service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_builds_request( + resource_group_name: str, service_name: str, build_service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builds", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_build_request( + resource_group_name: str, + service_name: str, + build_service_name: str, + build_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builds/{buildName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + "buildName": _SERIALIZER.url("build_name", build_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_build_request( + resource_group_name: str, + service_name: str, + build_service_name: str, + build_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = 
kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builds/{buildName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + "buildName": _SERIALIZER.url("build_name", build_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_build_results_request( + resource_group_name: str, + service_name: str, + build_service_name: str, + build_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builds/{buildName}/results", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + "buildName": _SERIALIZER.url("build_name", build_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_build_result_request( + resource_group_name: str, + service_name: str, + build_service_name: str, + build_name: str, + build_result_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builds/{buildName}/results/{buildResultName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + "buildName": _SERIALIZER.url("build_name", build_name, "str"), + "buildResultName": _SERIALIZER.url("build_result_name", build_result_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_build_result_log_request( + resource_group_name: str, + service_name: str, + build_service_name: str, + build_name: str, + build_result_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builds/{buildName}/results/{buildResultName}/getLogFileUrl", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + "buildName": _SERIALIZER.url("build_name", build_name, "str"), + "buildResultName": _SERIALIZER.url("build_result_name", build_result_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_resource_upload_url_request( + resource_group_name: str, service_name: str, build_service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/getResourceUploadUrl", + ) # pylint: disable=line-too-long + 
path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_supported_buildpacks_request( + resource_group_name: str, service_name: str, build_service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/supportedBuildpacks", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_supported_buildpack_request( + resource_group_name: str, + service_name: str, + build_service_name: str, + buildpack_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/supportedBuildpacks/{buildpackName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + "buildpackName": _SERIALIZER.url("buildpack_name", buildpack_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", 
api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_supported_stacks_request( + resource_group_name: str, service_name: str, build_service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/supportedStacks", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_supported_stack_request( + resource_group_name: str, + service_name: str, + build_service_name: str, + stack_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/supportedStacks/{stackName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + "stackName": _SERIALIZER.url("stack_name", stack_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class BuildServiceOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`build_service` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list_build_services( + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> Iterable["_models.BuildService"]: + """List build services resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BuildService or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildServiceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_build_services_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_build_services.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("BuildServiceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, 
error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list_build_services.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices"} # type: ignore + + @distributed_trace + def get_build_service( + self, resource_group_name: str, service_name: str, build_service_name: str, **kwargs: Any + ) -> _models.BuildService: + """Get a build service resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BuildService or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildService + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildService] + + request = build_get_build_service_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_build_service.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("BuildService", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_build_service.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}"} # type: ignore + + @distributed_trace + def list_builds( + self, resource_group_name: str, service_name: str, build_service_name: str, **kwargs: Any + ) -> Iterable["_models.Build"]: + """List KPack builds. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. 
+ :type build_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either Build or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.Build] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_builds_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_builds.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("BuildCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list_builds.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builds"} # type: ignore + + @distributed_trace + def get_build( + self, resource_group_name: str, service_name: str, build_service_name: str, build_name: str, **kwargs: Any + ) -> _models.Build: + """Get a KPack build. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. 
Required. + :type build_service_name: str + :param build_name: The name of the build resource. Required. + :type build_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Build or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Build + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.Build] + + request = build_get_build_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + build_name=build_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_build.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("Build", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_build.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builds/{buildName}"} # type: ignore + + @overload + def create_or_update_build( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + build_name: str, + build: _models.Build, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Build: + """Create or update a KPack build. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param build_name: The name of the build resource. Required. + :type build_name: str + :param build: Parameters for the create or update operation. Required. + :type build: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Build + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Build or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Build + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_build( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + build_name: str, + build: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Build: + """Create or update a KPack build. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param build_name: The name of the build resource. Required. + :type build_name: str + :param build: Parameters for the create or update operation. Required. + :type build: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Build or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Build + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update_build( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + build_name: str, + build: Union[_models.Build, IO], + **kwargs: Any + ) -> _models.Build: + """Create or update a KPack build. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param build_name: The name of the build resource. Required. + :type build_name: str + :param build: Parameters for the create or update operation. Is either a model type or a IO + type. Required. + :type build: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Build or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Build or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.Build + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Build] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(build, (IO, bytes)): + _content = build + else: + _json = self._serialize.body(build, "Build") + + request = build_create_or_update_build_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + build_name=build_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update_build.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("Build", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("Build", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update_build.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builds/{buildName}"} # type: ignore + + @distributed_trace + def list_build_results( + self, resource_group_name: str, service_name: str, build_service_name: str, build_name: str, **kwargs: Any + ) -> Iterable["_models.BuildResult"]: + """List KPack build results. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param build_name: The name of the build resource. Required. 
+ :type build_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BuildResult or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildResultCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_build_results_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + build_name=build_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_build_results.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("BuildResultCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list_build_results.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builds/{buildName}/results"} # type: ignore + + @distributed_trace + def get_build_result( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + build_name: str, + build_result_name: str, + **kwargs: Any + ) -> _models.BuildResult: + """Get a KPack build result. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. 
+ :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param build_name: The name of the build resource. Required. + :type build_name: str + :param build_result_name: The name of the build result resource. Required. + :type build_result_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BuildResult or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildResult] + + request = build_get_build_result_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + build_name=build_name, + build_result_name=build_result_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_build_result.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("BuildResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_build_result.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builds/{buildName}/results/{buildResultName}"} # type: ignore + + @distributed_trace + def get_build_result_log( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + build_name: str, + build_result_name: str, + **kwargs: Any + ) -> _models.BuildResultLog: + """Get a KPack build result log download URL. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param build_name: The name of the build resource. Required. + :type build_name: str + :param build_result_name: The name of the build result resource. Required. 
+ :type build_result_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BuildResultLog or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildResultLog + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildResultLog] + + request = build_get_build_result_log_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + build_name=build_name, + build_result_name=build_result_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_build_result_log.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("BuildResultLog", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_build_result_log.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builds/{buildName}/results/{buildResultName}/getLogFileUrl"} # type: ignore + + @distributed_trace + def get_resource_upload_url( + self, resource_group_name: str, service_name: str, build_service_name: str, **kwargs: Any + ) -> _models.ResourceUploadDefinition: + """Get an resource upload URL for build service, which may be artifacts or source archive. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. 
+ :type build_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ResourceUploadDefinition or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceUploadDefinition + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ResourceUploadDefinition] + + request = build_get_resource_upload_url_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_resource_upload_url.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ResourceUploadDefinition", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_resource_upload_url.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/getResourceUploadUrl"} # type: ignore + + @distributed_trace + def list_supported_buildpacks( + self, resource_group_name: str, service_name: str, build_service_name: str, **kwargs: Any + ) -> _models.SupportedBuildpacksCollection: + """Get all supported buildpacks. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. 
+ :type build_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SupportedBuildpacksCollection or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SupportedBuildpacksCollection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.SupportedBuildpacksCollection] + + request = build_list_supported_buildpacks_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_supported_buildpacks.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("SupportedBuildpacksCollection", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_supported_buildpacks.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/supportedBuildpacks"} # type: ignore + + @distributed_trace + def get_supported_buildpack( + self, resource_group_name: str, service_name: str, build_service_name: str, buildpack_name: str, **kwargs: Any + ) -> _models.SupportedBuildpackResource: + """Get the supported buildpack resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param buildpack_name: The name of the buildpack resource. Required. 
+ :type buildpack_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SupportedBuildpackResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SupportedBuildpackResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.SupportedBuildpackResource] + + request = build_get_supported_buildpack_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + buildpack_name=buildpack_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_supported_buildpack.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("SupportedBuildpackResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_supported_buildpack.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/supportedBuildpacks/{buildpackName}"} # type: ignore + + @distributed_trace + def list_supported_stacks( + self, resource_group_name: str, service_name: str, build_service_name: str, **kwargs: Any + ) -> _models.SupportedStacksCollection: + """Get all supported stacks. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. 
+ :type build_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SupportedStacksCollection or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SupportedStacksCollection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.SupportedStacksCollection] + + request = build_list_supported_stacks_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_supported_stacks.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("SupportedStacksCollection", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_supported_stacks.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/supportedStacks"} # type: ignore + + @distributed_trace + def get_supported_stack( + self, resource_group_name: str, service_name: str, build_service_name: str, stack_name: str, **kwargs: Any + ) -> _models.SupportedStackResource: + """Get the supported stack resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param stack_name: The name of the stack resource. Required. 
+ :type stack_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SupportedStackResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.SupportedStackResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.SupportedStackResource] + + request = build_get_supported_stack_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + stack_name=stack_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_supported_stack.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("SupportedStackResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_supported_stack.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/supportedStacks/{stackName}"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_buildpack_binding_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_buildpack_binding_operations.py new file mode 100644 index 00000000000..fa960c96a5d --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_buildpack_binding_operations.py @@ -0,0 +1,786 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + buildpack_binding_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}/buildpackBindings/{buildpackBindingName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + "builderName": _SERIALIZER.url("builder_name", builder_name, "str"), + "buildpackBindingName": _SERIALIZER.url("buildpack_binding_name", buildpack_binding_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + buildpack_binding_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url 
= kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}/buildpackBindings/{buildpackBindingName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + "builderName": _SERIALIZER.url("builder_name", builder_name, "str"), + "buildpackBindingName": _SERIALIZER.url("buildpack_binding_name", buildpack_binding_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + buildpack_binding_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}/buildpackBindings/{buildpackBindingName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + "builderName": _SERIALIZER.url("builder_name", builder_name, "str"), + "buildpackBindingName": _SERIALIZER.url("buildpack_binding_name", buildpack_binding_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_request( + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}/buildpackBindings", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"), + "builderName": _SERIALIZER.url("builder_name", builder_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class BuildpackBindingOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`buildpack_binding` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + buildpack_binding_name: str, + **kwargs: Any + ) -> _models.BuildpackBindingResource: + """Get a buildpack binding by name. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. + :type builder_name: str + :param buildpack_binding_name: The name of the Buildpack Binding Name. Required. 
+ :type buildpack_binding_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BuildpackBindingResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackBindingResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildpackBindingResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + buildpack_binding_name=buildpack_binding_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("BuildpackBindingResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}/buildpackBindings/{buildpackBindingName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + buildpack_binding_name: str, + buildpack_binding: Union[_models.BuildpackBindingResource, IO], + **kwargs: Any + ) -> _models.BuildpackBindingResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildpackBindingResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(buildpack_binding, (IO, bytes)): + _content = buildpack_binding + else: + _json = self._serialize.body(buildpack_binding, "BuildpackBindingResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + 
buildpack_binding_name=buildpack_binding_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("BuildpackBindingResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("BuildpackBindingResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}/buildpackBindings/{buildpackBindingName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + buildpack_binding_name: str, + buildpack_binding: _models.BuildpackBindingResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BuildpackBindingResource]: + """Create or update a buildpack binding. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. + :type builder_name: str + :param buildpack_binding_name: The name of the Buildpack Binding Name. Required. + :type buildpack_binding_name: str + :param buildpack_binding: The target buildpack binding for the create or update operation. + Required. + :type buildpack_binding: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackBindingResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either BuildpackBindingResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackBindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + buildpack_binding_name: str, + buildpack_binding: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BuildpackBindingResource]: + """Create or update a buildpack binding. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. + :type builder_name: str + :param buildpack_binding_name: The name of the Buildpack Binding Name. Required. + :type buildpack_binding_name: str + :param buildpack_binding: The target buildpack binding for the create or update operation. + Required. + :type buildpack_binding: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BuildpackBindingResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackBindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + buildpack_binding_name: str, + buildpack_binding: Union[_models.BuildpackBindingResource, IO], + **kwargs: Any + ) -> LROPoller[_models.BuildpackBindingResource]: + """Create or update a buildpack binding. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. + :type builder_name: str + :param buildpack_binding_name: The name of the Buildpack Binding Name. Required. + :type buildpack_binding_name: str + :param buildpack_binding: The target buildpack binding for the create or update operation. 
Is + either a model type or a IO type. Required. + :type buildpack_binding: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackBindingResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BuildpackBindingResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackBindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildpackBindingResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + buildpack_binding_name=buildpack_binding_name, + buildpack_binding=buildpack_binding, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("BuildpackBindingResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}/buildpackBindings/{buildpackBindingName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + buildpack_binding_name: str, + 
**kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + buildpack_binding_name=buildpack_binding_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}/buildpackBindings/{buildpackBindingName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, + resource_group_name: str, + service_name: str, + build_service_name: str, + builder_name: str, + buildpack_binding_name: str, + **kwargs: Any + ) -> LROPoller[None]: + """Operation to delete a Buildpack Binding. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. + :type builder_name: str + :param buildpack_binding_name: The name of the Buildpack Binding Name. Required. + :type buildpack_binding_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + buildpack_binding_name=buildpack_binding_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}/buildpackBindings/{buildpackBindingName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, build_service_name: str, builder_name: str, **kwargs: Any + ) -> Iterable["_models.BuildpackBindingResource"]: + """Handles requests to list all buildpack bindings in a builder. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param build_service_name: The name of the build service resource. Required. + :type build_service_name: str + :param builder_name: The name of the builder resource. Required. 
+ :type builder_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BuildpackBindingResource or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.BuildpackBindingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildpackBindingResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + build_service_name=build_service_name, + builder_name=builder_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("BuildpackBindingResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}/buildpackBindings"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_certificates_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_certificates_operations.py new file mode 100644 index 00000000000..f302a70b246 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_certificates_operations.py @@ -0,0 +1,691 @@ +# pylint: 
disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, service_name: str, certificate_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/certificates/{certificateName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "certificateName": _SERIALIZER.url("certificate_name", certificate_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, service_name: str, certificate_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = 
_headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/certificates/{certificateName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "certificateName": _SERIALIZER.url("certificate_name", certificate_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, service_name: str, certificate_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/certificates/{certificateName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "certificateName": _SERIALIZER.url("certificate_name", certificate_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_request(resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/certificates", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + 
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class CertificatesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`certificates` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, service_name: str, certificate_name: str, **kwargs: Any + ) -> _models.CertificateResource: + """Get the certificate resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param certificate_name: The name of the certificate resource. Required. + :type certificate_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CertificateResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CertificateResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CertificateResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + certificate_name=certificate_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("CertificateResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/certificates/{certificateName}"} # type: ignore + + def _create_or_update_initial( + self, + 
resource_group_name: str, + service_name: str, + certificate_name: str, + certificate_resource: Union[_models.CertificateResource, IO], + **kwargs: Any + ) -> _models.CertificateResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CertificateResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(certificate_resource, (IO, bytes)): + _content = certificate_resource + else: + _json = self._serialize.body(certificate_resource, "CertificateResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + certificate_name=certificate_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("CertificateResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("CertificateResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("CertificateResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/certificates/{certificateName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + certificate_name: str, + certificate_resource: _models.CertificateResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.CertificateResource]: + """Create or update certificate resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param certificate_name: The name of the certificate resource. Required. + :type certificate_name: str + :param certificate_resource: Parameters for the create or update operation. Required. 
+ :type certificate_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CertificateResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either CertificateResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CertificateResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + certificate_name: str, + certificate_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.CertificateResource]: + """Create or update certificate resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param certificate_name: The name of the certificate resource. Required. + :type certificate_name: str + :param certificate_resource: Parameters for the create or update operation. Required. + :type certificate_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either CertificateResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CertificateResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + certificate_name: str, + certificate_resource: Union[_models.CertificateResource, IO], + **kwargs: Any + ) -> LROPoller[_models.CertificateResource]: + """Create or update certificate resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. 
+ :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param certificate_name: The name of the certificate resource. Required. + :type certificate_name: str + :param certificate_resource: Parameters for the create or update operation. Is either a model + type or a IO type. Required. + :type certificate_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CertificateResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either CertificateResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CertificateResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CertificateResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + certificate_name=certificate_name, + certificate_resource=certificate_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("CertificateResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/certificates/{certificateName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements 
+ self, resource_group_name: str, service_name: str, certificate_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + certificate_name=certificate_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/certificates/{certificateName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, service_name: str, certificate_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete the certificate resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param certificate_name: The name of the certificate resource. Required. + :type certificate_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + certificate_name=certificate_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/certificates/{certificateName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> Iterable["_models.CertificateResource"]: + """List all the certificates of one user. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+        :type service_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either CertificateResource or the result of cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.CertificateResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview"))  # type: str
+        cls = kwargs.pop("cls", None)  # type: ClsType[_models.CertificateResourceCollection]
+
+        error_map = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        def prepare_request(next_link=None):
+            if not next_link:
+
+                request = build_list_request(
+                    resource_group_name=resource_group_name,
+                    service_name=service_name,
+                    subscription_id=self._config.subscription_id,
+                    api_version=api_version,
+                    template_url=self.list.metadata["url"],
+                    headers=_headers,
+                    params=_params,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)  # type: ignore
+
+            else:
+                # make call to next link with the client's api-version
+                _parsed_next_link = urllib.parse.urlparse(next_link)
+                _next_request_params = case_insensitive_dict(
+                    {
+                        key: [urllib.parse.quote(v) for v in value]
+                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+                    }
+                )
+                _next_request_params["api-version"] = self._config.api_version
+                request = HttpRequest(
+                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)  # type: ignore
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize("CertificateResourceCollection", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+                request, stream=False, **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(get_next, extract_data)
+
+    list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/certificates"}  # type: ignore
diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_config_servers_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_config_servers_operations.py
new file mode 100644
index 00000000000..2b98576414f
--- /dev/null
+++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_config_servers_operations.py
@@ -0,0 +1,923 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation.
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request(resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configServers/default", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_put_request( + resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configServers/default", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + 
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_patch_request( + resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configServers/default", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_validate_request( + resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configServers/validate", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = 
_SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class ConfigServersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`config_servers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, resource_group_name: str, service_name: str, **kwargs: Any) -> _models.ConfigServerResource: + """Get the config server and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ConfigServerResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigServerResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ConfigServerResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configServers/default"} # type: ignore + + def _update_put_initial( + self, + resource_group_name: str, + service_name: str, + config_server_resource: Union[_models.ConfigServerResource, IO], + **kwargs: Any + ) -> _models.ConfigServerResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: 
ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigServerResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(config_server_resource, (IO, bytes)): + _content = config_server_resource + else: + _json = self._serialize.body(config_server_resource, "ConfigServerResource") + + request = build_update_put_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_put_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ConfigServerResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("ConfigServerResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_put_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configServers/default"} # type: ignore + + @overload + def begin_update_put( + self, + resource_group_name: str, + service_name: str, + config_server_resource: _models.ConfigServerResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ConfigServerResource]: + """Update the config server. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param config_server_resource: Parameters for the update operation. Required. + :type config_server_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ConfigServerResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update_put( + self, + resource_group_name: str, + service_name: str, + config_server_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ConfigServerResource]: + """Update the config server. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param config_server_resource: Parameters for the update operation. Required. + :type config_server_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ConfigServerResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update_put( + self, + resource_group_name: str, + service_name: str, + config_server_resource: Union[_models.ConfigServerResource, IO], + **kwargs: Any + ) -> LROPoller[_models.ConfigServerResource]: + """Update the config server. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param config_server_resource: Parameters for the update operation. Is either a model type or a + IO type. Required. + :type config_server_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ConfigServerResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigServerResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_put_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + config_server_resource=config_server_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ConfigServerResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update_put.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configServers/default"} # type: ignore + + def _update_patch_initial( + self, + resource_group_name: str, + service_name: str, + config_server_resource: Union[_models.ConfigServerResource, IO], + **kwargs: Any + ) -> _models.ConfigServerResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigServerResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(config_server_resource, (IO, bytes)): + _content = config_server_resource + else: + _json = self._serialize.body(config_server_resource, "ConfigServerResource") + + request = 
build_update_patch_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_patch_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ConfigServerResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("ConfigServerResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_patch_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configServers/default"} # type: ignore + + @overload + def begin_update_patch( + self, + resource_group_name: str, + service_name: str, + config_server_resource: _models.ConfigServerResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ConfigServerResource]: + """Update the config server. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param config_server_resource: Parameters for the update operation. Required. + :type config_server_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ConfigServerResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update_patch( + self, + resource_group_name: str, + service_name: str, + config_server_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ConfigServerResource]: + """Update the config server. + + :param resource_group_name: The name of the resource group that contains the resource. 
You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param config_server_resource: Parameters for the update operation. Required. + :type config_server_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ConfigServerResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update_patch( + self, + resource_group_name: str, + service_name: str, + config_server_resource: Union[_models.ConfigServerResource, IO], + **kwargs: Any + ) -> LROPoller[_models.ConfigServerResource]: + """Update the config server. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param config_server_resource: Parameters for the update operation. Is either a model type or a + IO type. Required. + :type config_server_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ConfigServerResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigServerResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_patch_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + config_server_resource=config_server_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ConfigServerResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update_patch.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configServers/default"} # type: ignore + + def _validate_initial( + self, + resource_group_name: str, + service_name: str, + config_server_settings: Union[_models.ConfigServerSettings, IO], + **kwargs: Any + ) -> _models.ConfigServerSettingsValidateResult: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigServerSettingsValidateResult] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(config_server_settings, (IO, bytes)): + _content = config_server_settings + else: + _json = self._serialize.body(config_server_settings, "ConfigServerSettings") + + request = build_validate_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + 
content_type=content_type, + json=_json, + content=_content, + template_url=self._validate_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ConfigServerSettingsValidateResult", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("ConfigServerSettingsValidateResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _validate_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configServers/validate"} # type: ignore + + @overload + def begin_validate( + self, + resource_group_name: str, + service_name: str, + config_server_settings: _models.ConfigServerSettings, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ConfigServerSettingsValidateResult]: + """Check if the config server settings are valid. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param config_server_settings: Config server settings to be validated. Required. + :type config_server_settings: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerSettings + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ConfigServerSettingsValidateResult or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerSettingsValidateResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_validate( + self, + resource_group_name: str, + service_name: str, + config_server_settings: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ConfigServerSettingsValidateResult]: + """Check if the config server settings are valid. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. 
Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param config_server_settings: Config server settings to be validated. Required. + :type config_server_settings: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ConfigServerSettingsValidateResult or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerSettingsValidateResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_validate( + self, + resource_group_name: str, + service_name: str, + config_server_settings: Union[_models.ConfigServerSettings, IO], + **kwargs: Any + ) -> LROPoller[_models.ConfigServerSettingsValidateResult]: + """Check if the config server settings are valid. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param config_server_settings: Config server settings to be validated. Is either a model type + or a IO type. Required. + :type config_server_settings: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerSettings or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+        :return: An instance of LROPoller that returns either ConfigServerSettingsValidateResult or the
+         result of cls(response)
+        :rtype:
+         ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigServerSettingsValidateResult]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview"))  # type: str
+        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
+        cls = kwargs.pop("cls", None)  # type: ClsType[_models.ConfigServerSettingsValidateResult]
+        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
+        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
+        if cont_token is None:
+            raw_result = self._validate_initial(  # type: ignore
+                resource_group_name=resource_group_name,
+                service_name=service_name,
+                config_server_settings=config_server_settings,
+                api_version=api_version,
+                content_type=content_type,
+                cls=lambda x, y, z: x,
+                headers=_headers,
+                params=_params,
+                **kwargs
+            )
+        kwargs.pop("error_map", None)
+
+        def get_long_running_output(pipeline_response):
+            deserialized = self._deserialize("ConfigServerSettingsValidateResult", pipeline_response)
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        if polling is True:
+            polling_method = cast(
+                PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs)
+            )  # type: PollingMethod
+        elif polling is False:
+            polling_method = cast(PollingMethod, NoPolling())
+        else:
+            polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output,
+            )
+        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+
+    begin_validate.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configServers/validate"}  # type: ignore
diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_configuration_services_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_configuration_services_operations.py
new file mode 100644
index 00000000000..1dbe8a9c8e4
--- /dev/null
+++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_configuration_services_operations.py
@@ -0,0 +1,965 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, service_name: str, configuration_service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configurationServices/{configurationServiceName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "configurationServiceName": _SERIALIZER.url("configuration_service_name", configuration_service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, service_name: str, configuration_service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configurationServices/{configurationServiceName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": 
_SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "configurationServiceName": _SERIALIZER.url("configuration_service_name", configuration_service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, service_name: str, configuration_service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configurationServices/{configurationServiceName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "configurationServiceName": _SERIALIZER.url("configuration_service_name", configuration_service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_request(resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configurationServices", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, 
**kwargs) + + +def build_validate_request( + resource_group_name: str, service_name: str, configuration_service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configurationServices/{configurationServiceName}/validate", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "configurationServiceName": _SERIALIZER.url("configuration_service_name", configuration_service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class ConfigurationServicesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`configuration_services` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, service_name: str, configuration_service_name: str, **kwargs: Any + ) -> _models.ConfigurationServiceResource: + """Get the Application Configuration Service and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param configuration_service_name: The name of Application Configuration Service. Required. 
+ :type configuration_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ConfigurationServiceResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigurationServiceResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + configuration_service_name=configuration_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ConfigurationServiceResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configurationServices/{configurationServiceName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + configuration_service_name: str, + configuration_service_resource: Union[_models.ConfigurationServiceResource, IO], + **kwargs: Any + ) -> _models.ConfigurationServiceResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigurationServiceResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(configuration_service_resource, (IO, bytes)): + _content = configuration_service_resource + else: + _json = self._serialize.body(configuration_service_resource, "ConfigurationServiceResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + configuration_service_name=configuration_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + 
content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ConfigurationServiceResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ConfigurationServiceResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configurationServices/{configurationServiceName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + configuration_service_name: str, + configuration_service_resource: _models.ConfigurationServiceResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ConfigurationServiceResource]: + """Create the default Application Configuration Service or update the existing Application + Configuration Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param configuration_service_name: The name of Application Configuration Service. Required. + :type configuration_service_name: str + :param configuration_service_resource: Parameters for the update operation. Required. + :type configuration_service_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ConfigurationServiceResource or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + configuration_service_name: str, + configuration_service_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ConfigurationServiceResource]: + """Create the default Application Configuration Service or update the existing Application + Configuration Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param configuration_service_name: The name of Application Configuration Service. Required. + :type configuration_service_name: str + :param configuration_service_resource: Parameters for the update operation. Required. + :type configuration_service_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ConfigurationServiceResource or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + configuration_service_name: str, + configuration_service_resource: Union[_models.ConfigurationServiceResource, IO], + **kwargs: Any + ) -> LROPoller[_models.ConfigurationServiceResource]: + """Create the default Application Configuration Service or update the existing Application + Configuration Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param configuration_service_name: The name of Application Configuration Service. Required. + :type configuration_service_name: str + :param configuration_service_resource: Parameters for the update operation. Is either a model + type or a IO type. Required. + :type configuration_service_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. 
+ Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ConfigurationServiceResource or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigurationServiceResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + configuration_service_name=configuration_service_name, + configuration_service_resource=configuration_service_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ConfigurationServiceResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configurationServices/{configurationServiceName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, configuration_service_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = 
kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + configuration_service_name=configuration_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configurationServices/{configurationServiceName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, service_name: str, configuration_service_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Disable the default Application Configuration Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param configuration_service_name: The name of Application Configuration Service. Required. + :type configuration_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + configuration_service_name=configuration_service_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configurationServices/{configurationServiceName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> Iterable["_models.ConfigurationServiceResource"]: + """Handles requests to list all resources in a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ConfigurationServiceResource or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigurationServiceResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ConfigurationServiceResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configurationServices"} # type: ignore + + def _validate_initial( + self, + resource_group_name: str, + service_name: str, + configuration_service_name: str, + settings: Union[_models.ConfigurationServiceSettings, IO], + **kwargs: Any + ) -> _models.ConfigurationServiceSettingsValidateResult: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigurationServiceSettingsValidateResult] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(settings, (IO, bytes)): + _content = settings + else: + _json = self._serialize.body(settings, "ConfigurationServiceSettings") + + request = build_validate_request( + resource_group_name=resource_group_name, + service_name=service_name, + configuration_service_name=configuration_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._validate_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ConfigurationServiceSettingsValidateResult", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("ConfigurationServiceSettingsValidateResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _validate_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configurationServices/{configurationServiceName}/validate"} # type: ignore + + @overload + def begin_validate( + self, + resource_group_name: str, + service_name: str, + configuration_service_name: str, + settings: _models.ConfigurationServiceSettings, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ConfigurationServiceSettingsValidateResult]: + """Check if the Application Configuration Service settings are valid. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param configuration_service_name: The name of Application Configuration Service. Required. + :type configuration_service_name: str + :param settings: Application Configuration Service settings to be validated. Required. + :type settings: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceSettings + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either + ConfigurationServiceSettingsValidateResult or the result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceSettingsValidateResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_validate( + self, + resource_group_name: str, + service_name: str, + configuration_service_name: str, + settings: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ConfigurationServiceSettingsValidateResult]: + """Check if the Application Configuration Service settings are valid. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param configuration_service_name: The name of Application Configuration Service. Required. + :type configuration_service_name: str + :param settings: Application Configuration Service settings to be validated. Required. + :type settings: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either + ConfigurationServiceSettingsValidateResult or the result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceSettingsValidateResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_validate( + self, + resource_group_name: str, + service_name: str, + configuration_service_name: str, + settings: Union[_models.ConfigurationServiceSettings, IO], + **kwargs: Any + ) -> LROPoller[_models.ConfigurationServiceSettingsValidateResult]: + """Check if the Application Configuration Service settings are valid. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param configuration_service_name: The name of Application Configuration Service. Required. + :type configuration_service_name: str + :param settings: Application Configuration Service settings to be validated. Is either a model + type or a IO type. Required. 
+ :type settings: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceSettings + or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either + ConfigurationServiceSettingsValidateResult or the result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ConfigurationServiceSettingsValidateResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ConfigurationServiceSettingsValidateResult] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._validate_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + configuration_service_name=configuration_service_name, + settings=settings, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ConfigurationServiceSettingsValidateResult", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_validate.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/configurationServices/{configurationServiceName}/validate"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_custom_domains_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_custom_domains_operations.py new file mode 100644 index 
00000000000..645f0315466 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_custom_domains_operations.py @@ -0,0 +1,1003 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, service_name: str, app_name: str, domain_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/domains/{domainName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "domainName": _SERIALIZER.url("domain_name", domain_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, service_name: str, app_name: str, domain_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/domains/{domainName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "domainName": _SERIALIZER.url("domain_name", domain_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, service_name: str, app_name: str, domain_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/domains/{domainName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "domainName": _SERIALIZER.url("domain_name", domain_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, service_name: str, app_name: str, domain_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/domains/{domainName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "domainName": _SERIALIZER.url("domain_name", domain_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_request( + resource_group_name: str, service_name: str, app_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/domains", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class CustomDomainsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`custom_domains` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, service_name: str, app_name: str, domain_name: str, **kwargs: Any + ) -> _models.CustomDomainResource: + """Get the custom domain of one lifecycle application. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. 
+ :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param domain_name: The name of the custom domain resource. Required. + :type domain_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomDomainResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomDomainResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + domain_name=domain_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("CustomDomainResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/domains/{domainName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + app_name: str, + domain_name: str, + domain_resource: Union[_models.CustomDomainResource, IO], + **kwargs: Any + ) -> _models.CustomDomainResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomDomainResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(domain_resource, (IO, bytes)): + _content = domain_resource + else: + _json = self._serialize.body(domain_resource, "CustomDomainResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + domain_name=domain_name, + 
subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("CustomDomainResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("CustomDomainResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("CustomDomainResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/domains/{domainName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + domain_name: str, + domain_resource: _models.CustomDomainResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.CustomDomainResource]: + """Create or update custom domain of one lifecycle application. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param domain_name: The name of the custom domain resource. Required. + :type domain_name: str + :param domain_resource: Parameters for the create or update operation. Required. + :type domain_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either CustomDomainResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + domain_name: str, + domain_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.CustomDomainResource]: + """Create or update custom domain of one lifecycle application. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param domain_name: The name of the custom domain resource. Required. + :type domain_name: str + :param domain_resource: Parameters for the create or update operation. Required. + :type domain_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either CustomDomainResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + domain_name: str, + domain_resource: Union[_models.CustomDomainResource, IO], + **kwargs: Any + ) -> LROPoller[_models.CustomDomainResource]: + """Create or update custom domain of one lifecycle application. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param domain_name: The name of the custom domain resource. Required. + :type domain_name: str + :param domain_resource: Parameters for the create or update operation. Is either a model type + or a IO type. Required. + :type domain_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource + or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either CustomDomainResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomDomainResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + domain_name=domain_name, + domain_resource=domain_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("CustomDomainResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/domains/{domainName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, app_name: str, domain_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: 
ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + domain_name=domain_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/domains/{domainName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, service_name: str, app_name: str, domain_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete the custom domain of one lifecycle application. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param domain_name: The name of the custom domain resource. Required. + :type domain_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + domain_name=domain_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/domains/{domainName}"} # type: ignore + + def _update_initial( + self, + resource_group_name: str, + service_name: str, + app_name: str, + domain_name: str, + domain_resource: Union[_models.CustomDomainResource, IO], + **kwargs: Any + ) -> _models.CustomDomainResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomDomainResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(domain_resource, (IO, bytes)): + _content = domain_resource + else: + _json = self._serialize.body(domain_resource, "CustomDomainResource") + + request = build_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + domain_name=domain_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = 
self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("CustomDomainResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("CustomDomainResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/domains/{domainName}"} # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + domain_name: str, + domain_resource: _models.CustomDomainResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.CustomDomainResource]: + """Update custom domain of one lifecycle application. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param domain_name: The name of the custom domain resource. Required. + :type domain_name: str + :param domain_resource: Parameters for the create or update operation. Required. + :type domain_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either CustomDomainResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + domain_name: str, + domain_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.CustomDomainResource]: + """Update custom domain of one lifecycle application. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param domain_name: The name of the custom domain resource. Required. + :type domain_name: str + :param domain_resource: Parameters for the create or update operation. Required. + :type domain_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either CustomDomainResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + service_name: str, + app_name: str, + domain_name: str, + domain_resource: Union[_models.CustomDomainResource, IO], + **kwargs: Any + ) -> LROPoller[_models.CustomDomainResource]: + """Update custom domain of one lifecycle application. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param domain_name: The name of the custom domain resource. Required. + :type domain_name: str + :param domain_resource: Parameters for the create or update operation. Is either a model type + or a IO type. Required. + :type domain_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource + or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either CustomDomainResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomDomainResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + domain_name=domain_name, + domain_resource=domain_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("CustomDomainResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/domains/{domainName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, app_name: str, **kwargs: Any + ) -> Iterable["_models.CustomDomainResource"]: + """List the custom domains of one lifecycle application. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. 
+ :type app_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either CustomDomainResource or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomDomainResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("CustomDomainResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/domains"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_customized_accelerators_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_customized_accelerators_operations.py new file mode 100644 index 00000000000..b6c2f0fce4d --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_customized_accelerators_operations.py @@ -0,0 +1,966 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, service_name: str, application_accelerator_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/customizedAccelerators", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "applicationAcceleratorName": _SERIALIZER.url( + "application_accelerator_name", application_accelerator_name, "str" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = 
_headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/customizedAccelerators/{customizedAcceleratorName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "applicationAcceleratorName": _SERIALIZER.url( + "application_accelerator_name", application_accelerator_name, "str" + ), + "customizedAcceleratorName": _SERIALIZER.url("customized_accelerator_name", customized_accelerator_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/customizedAccelerators/{customizedAcceleratorName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "applicationAcceleratorName": _SERIALIZER.url( + "application_accelerator_name", application_accelerator_name, "str" + ), + "customizedAcceleratorName": _SERIALIZER.url("customized_accelerator_name", customized_accelerator_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", 
_params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/customizedAccelerators/{customizedAcceleratorName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "applicationAcceleratorName": _SERIALIZER.url( + "application_accelerator_name", application_accelerator_name, "str" + ), + "customizedAcceleratorName": _SERIALIZER.url("customized_accelerator_name", customized_accelerator_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_validate_request( + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/customizedAccelerators/{customizedAcceleratorName}/validate", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "applicationAcceleratorName": _SERIALIZER.url( + "application_accelerator_name", application_accelerator_name, "str" + ), + "customizedAcceleratorName": _SERIALIZER.url("customized_accelerator_name", customized_accelerator_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class CustomizedAcceleratorsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`customized_accelerators` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, application_accelerator_name: str, **kwargs: Any + ) -> Iterable["_models.CustomizedAcceleratorResource"]: + """Handle requests to list all customized accelerators. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either CustomizedAcceleratorResource or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomizedAcceleratorResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("CustomizedAcceleratorResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # 
pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/customizedAccelerators"} # type: ignore + + @distributed_trace + def get( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + **kwargs: Any + ) -> _models.CustomizedAcceleratorResource: + """Get the customized accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param customized_accelerator_name: The name of the customized accelerator. Required. + :type customized_accelerator_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomizedAcceleratorResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomizedAcceleratorResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + customized_accelerator_name=customized_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("CustomizedAcceleratorResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/customizedAccelerators/{customizedAcceleratorName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + customized_accelerator_resource: Union[_models.CustomizedAcceleratorResource, IO], + **kwargs: Any + ) -> _models.CustomizedAcceleratorResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomizedAcceleratorResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(customized_accelerator_resource, (IO, bytes)): + _content = customized_accelerator_resource + else: + _json = self._serialize.body(customized_accelerator_resource, "CustomizedAcceleratorResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + customized_accelerator_name=customized_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("CustomizedAcceleratorResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("CustomizedAcceleratorResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/customizedAccelerators/{customizedAcceleratorName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + customized_accelerator_resource: _models.CustomizedAcceleratorResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.CustomizedAcceleratorResource]: + """Create or update the customized accelerator. 
+ + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param customized_accelerator_name: The name of the customized accelerator. Required. + :type customized_accelerator_name: str + :param customized_accelerator_resource: The customized accelerator for the create or update + operation. Required. + :type customized_accelerator_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either CustomizedAcceleratorResource or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + customized_accelerator_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.CustomizedAcceleratorResource]: + """Create or update the customized accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param customized_accelerator_name: The name of the customized accelerator. Required. + :type customized_accelerator_name: str + :param customized_accelerator_resource: The customized accelerator for the create or update + operation. Required. + :type customized_accelerator_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either CustomizedAcceleratorResource or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + customized_accelerator_resource: Union[_models.CustomizedAcceleratorResource, IO], + **kwargs: Any + ) -> LROPoller[_models.CustomizedAcceleratorResource]: + """Create or update the customized accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param customized_accelerator_name: The name of the customized accelerator. Required. + :type customized_accelerator_name: str + :param customized_accelerator_resource: The customized accelerator for the create or update + operation. Is either a model type or a IO type. Required. + :type customized_accelerator_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either CustomizedAcceleratorResource or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomizedAcceleratorResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + customized_accelerator_name=customized_accelerator_name, + customized_accelerator_resource=customized_accelerator_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("CustomizedAcceleratorResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/customizedAccelerators/{customizedAcceleratorName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + customized_accelerator_name=customized_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + 
template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/customizedAccelerators/{customizedAcceleratorName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + **kwargs: Any + ) -> LROPoller[None]: + """Delete the customized accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param customized_accelerator_name: The name of the customized accelerator. Required. + :type customized_accelerator_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + customized_accelerator_name=customized_accelerator_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/customizedAccelerators/{customizedAcceleratorName}"} # type: ignore + + @overload + def validate( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + properties: _models.CustomizedAcceleratorProperties, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> Optional[_models.CustomizedAcceleratorValidateResult]: + """Check the customized accelerator are valid. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param customized_accelerator_name: The name of the customized accelerator. Required. + :type customized_accelerator_name: str + :param properties: Customized accelerator properties to be validated. Required. + :type properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorProperties + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomizedAcceleratorValidateResult or None or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorValidateResult + or None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def validate( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + properties: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> Optional[_models.CustomizedAcceleratorValidateResult]: + """Check the customized accelerator are valid. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param customized_accelerator_name: The name of the customized accelerator. Required. + :type customized_accelerator_name: str + :param properties: Customized accelerator properties to be validated. Required. + :type properties: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomizedAcceleratorValidateResult or None or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorValidateResult + or None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def validate( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + customized_accelerator_name: str, + properties: Union[_models.CustomizedAcceleratorProperties, IO], + **kwargs: Any + ) -> Optional[_models.CustomizedAcceleratorValidateResult]: + """Check the customized accelerator are valid. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param customized_accelerator_name: The name of the customized accelerator. Required. + :type customized_accelerator_name: str + :param properties: Customized accelerator properties to be validated. Is either a model type or + a IO type. Required. + :type properties: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorProperties or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomizedAcceleratorValidateResult or None or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomizedAcceleratorValidateResult + or None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.CustomizedAcceleratorValidateResult]] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(properties, (IO, bytes)): + _content = properties + else: + _json = self._serialize.body(properties, "CustomizedAcceleratorProperties") + + request = build_validate_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + customized_accelerator_name=customized_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.validate.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize("CustomizedAcceleratorValidateResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + validate.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/customizedAccelerators/{customizedAcceleratorName}/validate"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_deployments_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_deployments_operations.py new file mode 100644 index 00000000000..959fd6dc816 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_deployments_operations.py @@ -0,0 +1,3125 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, List, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + 
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_request( + resource_group_name: str, + service_name: str, + app_name: str, + subscription_id: str, + *, + version: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if version is not None: + _params["version"] = [_SERIALIZER.query("version", q, "str") if q is not None else "" for q in version] + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_for_cluster_request( + resource_group_name: str, + service_name: str, + subscription_id: str, + *, + version: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/deployments", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if version is not None: + _params["version"] = 
[_SERIALIZER.query("version", q, "str") if q is not None else "" for q in version] + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_start_request( + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/start", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_stop_request( + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/stop", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_restart_request( + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/restart", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_enable_remote_debugging_request( + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/enableRemoteDebugging", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_disable_remote_debugging_request( + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + 
"template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/disableRemoteDebugging", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_remote_debugging_config_request( + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/getRemoteDebuggingConfig", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_log_file_url_request( + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/getLogFileUrl", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": 
_SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_generate_heap_dump_request( + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/generateHeapDump", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_generate_thread_dump_request( + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/generateThreadDump", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "deploymentName": 
_SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_start_jfr_request( + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/startJFR", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "appName": _SERIALIZER.url("app_name", app_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class DeploymentsOperations: # pylint: disable=too-many-public-methods + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`deployments` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> _models.DeploymentResource: + """Get a Deployment and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DeploymentResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DeploymentResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("DeploymentResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + deployment_resource: Union[_models.DeploymentResource, IO], + **kwargs: Any + ) -> _models.DeploymentResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DeploymentResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(deployment_resource, (IO, bytes)): + _content = deployment_resource + else: + _json = self._serialize.body(deployment_resource, "DeploymentResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + 
content_type=content_type,
+ json=_json,
+ content=_content,
+ template_url=self._create_or_update_initial.metadata["url"],
+ headers=_headers,
+ params=_params,
+ )
+ request = _convert_request(request)
+ request.url = self._client.format_url(request.url) # type: ignore
+
+ pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
+ request, stream=False, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ if response.status_code == 200:
+ deserialized = self._deserialize("DeploymentResource", pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize("DeploymentResource", pipeline_response)
+
+ if response.status_code == 202:
+ deserialized = self._deserialize("DeploymentResource", pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+
+ _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}"} # type: ignore
+
+ @overload
+ def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ service_name: str,
+ app_name: str,
+ deployment_name: str,
+ deployment_resource: _models.DeploymentResource,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.DeploymentResource]:
+ """Create a new Deployment or update an existing Deployment.
+
+ :param resource_group_name: The name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param service_name: The name of the Service resource. Required.
+ :type service_name: str
+ :param app_name: The name of the App resource. Required.
+ :type app_name: str
+ :param deployment_name: The name of the Deployment resource. Required.
+ :type deployment_name: str
+ :param deployment_resource: Parameters for the create or update operation. Required.
+ :type deployment_resource:
+ ~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+ operation to not poll, or pass in your own initialized polling object for a personal polling
+ strategy.
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+ Retry-After header is present.
+ :return: An instance of LROPoller that returns either DeploymentResource or the result of
+ cls(response)
+ :rtype:
+ ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ service_name: str,
+ app_name: str,
+ deployment_name: str,
+ deployment_resource: IO,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.DeploymentResource]:
+ """Create a new Deployment or update an existing Deployment.
+
+ :param resource_group_name: The name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param service_name: The name of the Service resource. Required.
+ :type service_name: str
+ :param app_name: The name of the App resource. Required.
+ :type app_name: str
+ :param deployment_name: The name of the Deployment resource. Required.
+ :type deployment_name: str
+ :param deployment_resource: Parameters for the create or update operation. Required.
+ :type deployment_resource: IO
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+ operation to not poll, or pass in your own initialized polling object for a personal polling
+ strategy.
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+ Retry-After header is present.
+ :return: An instance of LROPoller that returns either DeploymentResource or the result of
+ cls(response)
+ :rtype:
+ ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace
+ def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ service_name: str,
+ app_name: str,
+ deployment_name: str,
+ deployment_resource: Union[_models.DeploymentResource, IO],
+ **kwargs: Any
+ ) -> LROPoller[_models.DeploymentResource]:
+ """Create a new Deployment or update an existing Deployment.
+
+ :param resource_group_name: The name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param service_name: The name of the Service resource. Required.
+ :type service_name: str
+ :param app_name: The name of the App resource. Required.
+ :type app_name: str
+ :param deployment_name: The name of the Deployment resource. Required.
+ :type deployment_name: str
+ :param deployment_resource: Parameters for the create or update operation. Is either a model
+ type or an IO type. Required.
+ :type deployment_resource:
+ ~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource or IO
+ :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+ Default value is None.
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either DeploymentResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DeploymentResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + deployment_resource=deployment_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("DeploymentResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = 
kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Operation to delete a Deployment. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}"} # type: ignore + + def _update_initial( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + deployment_resource: Union[_models.DeploymentResource, IO], + **kwargs: Any + ) -> _models.DeploymentResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DeploymentResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(deployment_resource, (IO, bytes)): + _content = deployment_resource + else: + _json = self._serialize.body(deployment_resource, "DeploymentResource") + + request = build_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + 
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
+ request, stream=False, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ if response.status_code == 200:
+ deserialized = self._deserialize("DeploymentResource", pipeline_response)
+
+ if response.status_code == 202:
+ deserialized = self._deserialize("DeploymentResource", pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+
+ _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}"} # type: ignore
+
+ @overload
+ def begin_update(
+ self,
+ resource_group_name: str,
+ service_name: str,
+ app_name: str,
+ deployment_name: str,
+ deployment_resource: _models.DeploymentResource,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.DeploymentResource]:
+ """Operation to update an existing Deployment.
+
+ :param resource_group_name: The name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param service_name: The name of the Service resource. Required.
+ :type service_name: str
+ :param app_name: The name of the App resource. Required.
+ :type app_name: str
+ :param deployment_name: The name of the Deployment resource. Required.
+ :type deployment_name: str
+ :param deployment_resource: Parameters for the update operation. Required.
+ :type deployment_resource:
+ ~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+ operation to not poll, or pass in your own initialized polling object for a personal polling
+ strategy.
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+ Retry-After header is present.
+ :return: An instance of LROPoller that returns either DeploymentResource or the result of
+ cls(response)
+ :rtype:
+ ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ def begin_update(
+ self,
+ resource_group_name: str,
+ service_name: str,
+ app_name: str,
+ deployment_name: str,
+ deployment_resource: IO,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.DeploymentResource]:
+ """Operation to update an existing Deployment.
+
+ :param resource_group_name: The name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param service_name: The name of the Service resource. Required.
+ :type service_name: str
+ :param app_name: The name of the App resource. Required.
+ :type app_name: str
+ :param deployment_name: The name of the Deployment resource. Required.
+ :type deployment_name: str
+ :param deployment_resource: Parameters for the update operation. Required.
+ :type deployment_resource: IO
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+ operation to not poll, or pass in your own initialized polling object for a personal polling
+ strategy.
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+ Retry-After header is present.
+ :return: An instance of LROPoller that returns either DeploymentResource or the result of
+ cls(response)
+ :rtype:
+ ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace
+ def begin_update(
+ self,
+ resource_group_name: str,
+ service_name: str,
+ app_name: str,
+ deployment_name: str,
+ deployment_resource: Union[_models.DeploymentResource, IO],
+ **kwargs: Any
+ ) -> LROPoller[_models.DeploymentResource]:
+ """Operation to update an existing Deployment.
+
+ :param resource_group_name: The name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param service_name: The name of the Service resource. Required.
+ :type service_name: str
+ :param app_name: The name of the App resource. Required.
+ :type app_name: str
+ :param deployment_name: The name of the Deployment resource. Required.
+ :type deployment_name: str
+ :param deployment_resource: Parameters for the update operation. Is either a model type or an IO
+ type. Required.
+ :type deployment_resource:
+ ~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource or IO
+ :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+ Default value is None.
+ :paramtype content_type: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+ operation to not poll, or pass in your own initialized polling object for a personal polling
+ strategy.
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+ Retry-After header is present.
+ :return: An instance of LROPoller that returns either DeploymentResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DeploymentResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + deployment_resource=deployment_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("DeploymentResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}"} # type: ignore + + @distributed_trace + def list( + self, + resource_group_name: str, + service_name: str, + app_name: str, + version: Optional[List[str]] = None, + **kwargs: Any + ) -> Iterable["_models.DeploymentResource"]: + """Handles requests to list all resources in an App. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param version: Version of the deployments to be listed. Default value is None. 
+ :type version: list[str] + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DeploymentResource or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DeploymentResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + subscription_id=self._config.subscription_id, + version=version, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("DeploymentResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments"} # type: ignore + + @distributed_trace + def list_for_cluster( + self, resource_group_name: str, service_name: str, version: Optional[List[str]] = None, **kwargs: Any + ) -> Iterable["_models.DeploymentResource"]: + """List deployments for a certain service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :param version: Version of the deployments to be listed. Default value is None. + :type version: list[str] + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DeploymentResource or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.DeploymentResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DeploymentResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_for_cluster_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + version=version, + api_version=api_version, + template_url=self.list_for_cluster.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("DeploymentResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list_for_cluster.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/deployments"} # type: ignore + + def _start_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_start_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._start_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _start_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/start"} # type: ignore + + @distributed_trace + def begin_start( + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Start the deployment. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
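For the start operation there is no payload to read back; begin_start yields an LROPoller[None], so a caller only awaits completion. A short sketch under the same assumptions as the earlier one (client from azure-mgmt-appplatform, placeholder names):

from azure.identity import DefaultAzureCredential
from azure.mgmt.appplatform.v2022_11_01_preview import AppPlatformManagementClient

client = AppPlatformManagementClient(DefaultAzureCredential(), "<subscription-id>")

# Kick off the start operation and block until the service reports completion.
poller = client.deployments.begin_start(
    "<resource-group>", "<service-name>", "<app-name>", "default"
)
poller.wait()  # .result() would also work; the poller simply yields None
print("start finished with status:", poller.status())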
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._start_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_start.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/start"} # type: ignore + + def _stop_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_stop_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._stop_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _stop_initial.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/stop"} # type: ignore + + @distributed_trace + def begin_stop( + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Stop the deployment. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._stop_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_stop.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/stop"} # type: ignore + + def _restart_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, 
**kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_restart_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._restart_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _restart_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/restart"} # type: ignore + + @distributed_trace + def begin_restart( + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Restart the deployment. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
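The restart call has the same shape; the sketch below also exercises the polling keywords documented above: a custom polling_interval for the blocking case, and polling=False when the caller only needs the request to be accepted (placeholder names):

from azure.identity import DefaultAzureCredential
from azure.mgmt.appplatform.v2022_11_01_preview import AppPlatformManagementClient

client = AppPlatformManagementClient(DefaultAzureCredential(), "<subscription-id>")

# Blocking restart, polling the long-running operation every 5 seconds.
client.deployments.begin_restart(
    "<resource-group>", "<service-name>", "<app-name>", "default",
    polling_interval=5,
).result()

# Fire-and-forget restart: return as soon as the initial request is accepted.
client.deployments.begin_restart(
    "<resource-group>", "<service-name>", "<app-name>", "default",
    polling=False,
)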
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._restart_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_restart.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/restart"} # type: ignore + + def _enable_remote_debugging_initial( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + remote_debugging_payload: Optional[Union[_models.RemoteDebuggingPayload, IO]] = None, + **kwargs: Any + ) -> _models.RemoteDebugging: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.RemoteDebugging] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(remote_debugging_payload, (IO, bytes)): + _content = remote_debugging_payload + else: + if remote_debugging_payload is not None: + _json = self._serialize.body(remote_debugging_payload, "RemoteDebuggingPayload") + else: + _json = None + + request = build_enable_remote_debugging_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + 
template_url=self._enable_remote_debugging_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("RemoteDebugging", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("RemoteDebugging", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _enable_remote_debugging_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/enableRemoteDebugging"} # type: ignore + + @overload + def begin_enable_remote_debugging( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + remote_debugging_payload: Optional[_models.RemoteDebuggingPayload] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.RemoteDebugging]: + """Enable remote debugging. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param remote_debugging_payload: Parameters for enable remote debugging. Default value is None. + :type remote_debugging_payload: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.RemoteDebuggingPayload + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
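A sketch of enabling remote debugging. The remote_debugging_payload argument is optional and omitted here so the service-side default applies; the enabled and port attributes read from the result are assumptions about the RemoteDebugging model's fields (placeholder names):

from azure.identity import DefaultAzureCredential
from azure.mgmt.appplatform.v2022_11_01_preview import AppPlatformManagementClient

client = AppPlatformManagementClient(DefaultAzureCredential(), "<subscription-id>")

# Enable remote debugging with the default payload and wait for the resulting config.
remote_debugging = client.deployments.begin_enable_remote_debugging(
    "<resource-group>", "<service-name>", "<app-name>", "default"
).result()
print(remote_debugging.enabled, remote_debugging.port)  # assumed RemoteDebugging fields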
+ :return: An instance of LROPoller that returns either RemoteDebugging or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.RemoteDebugging] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_enable_remote_debugging( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + remote_debugging_payload: Optional[IO] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.RemoteDebugging]: + """Enable remote debugging. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param remote_debugging_payload: Parameters for enable remote debugging. Default value is None. + :type remote_debugging_payload: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either RemoteDebugging or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.RemoteDebugging] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_enable_remote_debugging( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + remote_debugging_payload: Optional[Union[_models.RemoteDebuggingPayload, IO]] = None, + **kwargs: Any + ) -> LROPoller[_models.RemoteDebugging]: + """Enable remote debugging. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param remote_debugging_payload: Parameters for enable remote debugging. Is either a model type + or a IO type. Default value is None. + :type remote_debugging_payload: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.RemoteDebuggingPayload or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either RemoteDebugging or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.RemoteDebugging] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.RemoteDebugging] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._enable_remote_debugging_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + remote_debugging_payload=remote_debugging_payload, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("RemoteDebugging", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_enable_remote_debugging.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/enableRemoteDebugging"} # type: ignore + + def _disable_remote_debugging_initial( + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> _models.RemoteDebugging: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + 
cls = kwargs.pop("cls", None) # type: ClsType[_models.RemoteDebugging] + + request = build_disable_remote_debugging_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._disable_remote_debugging_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("RemoteDebugging", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("RemoteDebugging", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _disable_remote_debugging_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/disableRemoteDebugging"} # type: ignore + + @distributed_trace + def begin_disable_remote_debugging( + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> LROPoller[_models.RemoteDebugging]: + """Disable remote debugging. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either RemoteDebugging or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.RemoteDebugging] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.RemoteDebugging] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._disable_remote_debugging_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("RemoteDebugging", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_disable_remote_debugging.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/disableRemoteDebugging"} # type: ignore + + @distributed_trace + def get_remote_debugging_config( + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> _models.RemoteDebugging: + """Get remote debugging config. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. 
+ :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: RemoteDebugging or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.RemoteDebugging + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.RemoteDebugging] + + request = build_get_remote_debugging_config_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_remote_debugging_config.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("RemoteDebugging", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_remote_debugging_config.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/getRemoteDebuggingConfig"} # type: ignore + + @distributed_trace + def get_log_file_url( + self, resource_group_name: str, service_name: str, app_name: str, deployment_name: str, **kwargs: Any + ) -> Optional[_models.LogFileUrlResponse]: + """Get deployment log file URL. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. 
+ :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LogFileUrlResponse or None or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.LogFileUrlResponse or None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.LogFileUrlResponse]] + + request = build_get_log_file_url_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_log_file_url.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize("LogFileUrlResponse", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_log_file_url.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/getLogFileUrl"} # type: ignore + + def _generate_heap_dump_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: Union[_models.DiagnosticParameters, IO], + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(diagnostic_parameters, (IO, bytes)): + _content = diagnostic_parameters + else: + _json = self._serialize.body(diagnostic_parameters, "DiagnosticParameters") + + request = build_generate_heap_dump_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + 
content_type=content_type, + json=_json, + content=_content, + template_url=self._generate_heap_dump_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _generate_heap_dump_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/generateHeapDump"} # type: ignore + + @overload + def begin_generate_heap_dump( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: _models.DiagnosticParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Generate Heap Dump. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param diagnostic_parameters: Parameters for the diagnostic operation. Required. + :type diagnostic_parameters: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DiagnosticParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_generate_heap_dump( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Generate Heap Dump. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. 
+ :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param diagnostic_parameters: Parameters for the diagnostic operation. Required. + :type diagnostic_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_generate_heap_dump( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: Union[_models.DiagnosticParameters, IO], + **kwargs: Any + ) -> LROPoller[None]: + """Generate Heap Dump. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param diagnostic_parameters: Parameters for the diagnostic operation. Is either a model type + or a IO type. Required. + :type diagnostic_parameters: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DiagnosticParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
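A heap-dump sketch. The app_instance and file_path fields on DiagnosticParameters are assumptions about that model's shape, and the target path is typically a persistent-storage mount on the app (placeholder names):

from azure.identity import DefaultAzureCredential
from azure.mgmt.appplatform.v2022_11_01_preview import AppPlatformManagementClient, models

client = AppPlatformManagementClient(DefaultAzureCredential(), "<subscription-id>")

# Ask one running instance to write a heap dump to a mounted storage path.
params = models.DiagnosticParameters(
    app_instance="<app-instance-name>",      # assumed field: which instance to dump
    file_path="/persistent-storage/dumps",   # assumed field: where to write the file
)
client.deployments.begin_generate_heap_dump(
    "<resource-group>", "<service-name>", "<app-name>", "default", params
).result()  # the poller yields None; completion means the dump request was processed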
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._generate_heap_dump_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + diagnostic_parameters=diagnostic_parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_generate_heap_dump.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/generateHeapDump"} # type: ignore + + def _generate_thread_dump_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: Union[_models.DiagnosticParameters, IO], + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(diagnostic_parameters, (IO, bytes)): + _content = diagnostic_parameters + else: + _json = self._serialize.body(diagnostic_parameters, "DiagnosticParameters") + + request = build_generate_thread_dump_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + 
api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._generate_thread_dump_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _generate_thread_dump_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/generateThreadDump"} # type: ignore + + @overload + def begin_generate_thread_dump( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: _models.DiagnosticParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Generate Thread Dump. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param diagnostic_parameters: Parameters for the diagnostic operation. Required. + :type diagnostic_parameters: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DiagnosticParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_generate_thread_dump( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Generate Thread Dump. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param diagnostic_parameters: Parameters for the diagnostic operation. Required. + :type diagnostic_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_generate_thread_dump( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: Union[_models.DiagnosticParameters, IO], + **kwargs: Any + ) -> LROPoller[None]: + """Generate Thread Dump. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param diagnostic_parameters: Parameters for the diagnostic operation. Is either a model type + or a IO type. Required. + :type diagnostic_parameters: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DiagnosticParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._generate_thread_dump_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + diagnostic_parameters=diagnostic_parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_generate_thread_dump.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/generateThreadDump"} # type: ignore + + def _start_jfr_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: Union[_models.DiagnosticParameters, IO], + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(diagnostic_parameters, (IO, bytes)): + _content = diagnostic_parameters + else: + _json = self._serialize.body(diagnostic_parameters, "DiagnosticParameters") + + request = build_start_jfr_request( + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + 
content_type=content_type, + json=_json, + content=_content, + template_url=self._start_jfr_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _start_jfr_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/startJFR"} # type: ignore + + @overload + def begin_start_jfr( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: _models.DiagnosticParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Start JFR. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param diagnostic_parameters: Parameters for the diagnostic operation. Required. + :type diagnostic_parameters: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DiagnosticParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_start_jfr( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Start JFR. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. 
+ :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param diagnostic_parameters: Parameters for the diagnostic operation. Required. + :type diagnostic_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_start_jfr( + self, + resource_group_name: str, + service_name: str, + app_name: str, + deployment_name: str, + diagnostic_parameters: Union[_models.DiagnosticParameters, IO], + **kwargs: Any + ) -> LROPoller[None]: + """Start JFR. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param app_name: The name of the App resource. Required. + :type app_name: str + :param deployment_name: The name of the Deployment resource. Required. + :type deployment_name: str + :param diagnostic_parameters: Parameters for the diagnostic operation. Is either a model type + or a IO type. Required. + :type diagnostic_parameters: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DiagnosticParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._start_jfr_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + app_name=app_name, + deployment_name=deployment_name, + diagnostic_parameters=diagnostic_parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_start_jfr.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/startJFR"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_dev_tool_portal_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_dev_tool_portal_operations.py new file mode 100644 index 00000000000..894f9b9efd0 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_dev_tool_portal_operations.py @@ -0,0 +1,200 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Optional, TypeVar, Union, cast + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_delete_request( + resource_group_name: str, service_name: str, dev_tool_portal_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/DevToolPortals/{devToolPortalName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "devToolPortalName": _SERIALIZER.url("dev_tool_portal_name", dev_tool_portal_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +class DevToolPortalOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`dev_tool_portal` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, dev_tool_portal_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + dev_tool_portal_name=dev_tool_portal_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/DevToolPortals/{devToolPortalName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, service_name: str, dev_tool_portal_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Disable the default Dev Tool Portal. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param dev_tool_portal_name: The name of Dev Tool Portal. Required. + :type dev_tool_portal_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + dev_tool_portal_name=dev_tool_portal_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/DevToolPortals/{devToolPortalName}"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_dev_tool_portals_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_dev_tool_portals_operations.py new file mode 100644 index 00000000000..8f1a3b8040a --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_dev_tool_portals_operations.py @@ -0,0 +1,545 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request(resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/DevToolPortals", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, service_name: str, dev_tool_portal_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/DevToolPortals/{devToolPortalName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "devToolPortalName": _SERIALIZER.url("dev_tool_portal_name", dev_tool_portal_name, "str"), + } + + _url 
= _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, service_name: str, dev_tool_portal_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/DevToolPortals/{devToolPortalName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "devToolPortalName": _SERIALIZER.url("dev_tool_portal_name", dev_tool_portal_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class DevToolPortalsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`dev_tool_portals` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> Iterable["_models.DevToolPortalResource"]: + """Handles requests to list all resources in a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DevToolPortalResource or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DevToolPortalResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("DevToolPortalResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/DevToolPortals"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, service_name: str, dev_tool_portal_name: str, **kwargs: Any + ) -> _models.DevToolPortalResource: + """Get the Application Live and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param dev_tool_portal_name: The name of Dev Tool Portal. Required. 
+ :type dev_tool_portal_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DevToolPortalResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DevToolPortalResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + dev_tool_portal_name=dev_tool_portal_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("DevToolPortalResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/DevToolPortals/{devToolPortalName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + dev_tool_portal_name: str, + dev_tool_portal_resource: Union[_models.DevToolPortalResource, IO], + **kwargs: Any + ) -> _models.DevToolPortalResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DevToolPortalResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(dev_tool_portal_resource, (IO, bytes)): + _content = dev_tool_portal_resource + else: + _json = self._serialize.body(dev_tool_portal_resource, "DevToolPortalResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + dev_tool_portal_name=dev_tool_portal_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("DevToolPortalResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("DevToolPortalResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/DevToolPortals/{devToolPortalName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + dev_tool_portal_name: str, + dev_tool_portal_resource: _models.DevToolPortalResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.DevToolPortalResource]: + """Create the default Dev Tool Portal or update the existing Dev Tool Portal. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param dev_tool_portal_name: The name of Dev Tool Portal. Required. + :type dev_tool_portal_name: str + :param dev_tool_portal_resource: Parameters for the create or update operation. Required. + :type dev_tool_portal_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either DevToolPortalResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + dev_tool_portal_name: str, + dev_tool_portal_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.DevToolPortalResource]: + """Create the default Dev Tool Portal or update the existing Dev Tool Portal. + + :param resource_group_name: The name of the resource group that contains the resource. 
You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param dev_tool_portal_name: The name of Dev Tool Portal. Required. + :type dev_tool_portal_name: str + :param dev_tool_portal_resource: Parameters for the create or update operation. Required. + :type dev_tool_portal_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either DevToolPortalResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + dev_tool_portal_name: str, + dev_tool_portal_resource: Union[_models.DevToolPortalResource, IO], + **kwargs: Any + ) -> LROPoller[_models.DevToolPortalResource]: + """Create the default Dev Tool Portal or update the existing Dev Tool Portal. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param dev_tool_portal_name: The name of Dev Tool Portal. Required. + :type dev_tool_portal_name: str + :param dev_tool_portal_resource: Parameters for the create or update operation. Is either a + model type or a IO type. Required. + :type dev_tool_portal_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either DevToolPortalResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.DevToolPortalResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DevToolPortalResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + dev_tool_portal_name=dev_tool_portal_name, + dev_tool_portal_resource=dev_tool_portal_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("DevToolPortalResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/DevToolPortals/{devToolPortalName}"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_gateway_custom_domains_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_gateway_custom_domains_operations.py new file mode 100644 index 00000000000..79728ea3566 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_gateway_custom_domains_operations.py @@ -0,0 +1,734 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, + service_name: str, + gateway_name: str, + domain_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/domains/{domainName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "gatewayName": _SERIALIZER.url("gateway_name", gateway_name, "str"), + "domainName": _SERIALIZER.url("domain_name", domain_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, + service_name: str, + gateway_name: str, + domain_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/domains/{domainName}", + ) # pylint: disable=line-too-long + 
path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "gatewayName": _SERIALIZER.url("gateway_name", gateway_name, "str"), + "domainName": _SERIALIZER.url("domain_name", domain_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, + service_name: str, + gateway_name: str, + domain_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/domains/{domainName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "gatewayName": _SERIALIZER.url("gateway_name", gateway_name, "str"), + "domainName": _SERIALIZER.url("domain_name", domain_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_request( + resource_group_name: str, service_name: str, gateway_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/domains", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "gatewayName": _SERIALIZER.url("gateway_name", gateway_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class GatewayCustomDomainsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`gateway_custom_domains` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, service_name: str, gateway_name: str, domain_name: str, **kwargs: Any + ) -> _models.GatewayCustomDomainResource: + """Get the Spring Cloud Gateway custom domain. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param domain_name: The name of the Spring Cloud Gateway custom domain. Required. + :type domain_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GatewayCustomDomainResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayCustomDomainResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayCustomDomainResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + domain_name=domain_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("GatewayCustomDomainResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/domains/{domainName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + domain_name: str, + gateway_custom_domain_resource: Union[_models.GatewayCustomDomainResource, IO], + **kwargs: Any + ) -> _models.GatewayCustomDomainResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayCustomDomainResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(gateway_custom_domain_resource, (IO, bytes)): + _content = gateway_custom_domain_resource + else: + _json = self._serialize.body(gateway_custom_domain_resource, "GatewayCustomDomainResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + domain_name=domain_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("GatewayCustomDomainResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("GatewayCustomDomainResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/domains/{domainName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + domain_name: str, + gateway_custom_domain_resource: _models.GatewayCustomDomainResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.GatewayCustomDomainResource]: + """Create or update the Spring Cloud Gateway custom domain. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param domain_name: The name of the Spring Cloud Gateway custom domain. Required. + :type domain_name: str + :param gateway_custom_domain_resource: The gateway custom domain resource for the create or + update operation. Required. + :type gateway_custom_domain_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayCustomDomainResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either GatewayCustomDomainResource or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayCustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + domain_name: str, + gateway_custom_domain_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.GatewayCustomDomainResource]: + """Create or update the Spring Cloud Gateway custom domain. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param domain_name: The name of the Spring Cloud Gateway custom domain. Required. + :type domain_name: str + :param gateway_custom_domain_resource: The gateway custom domain resource for the create or + update operation. Required. + :type gateway_custom_domain_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either GatewayCustomDomainResource or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayCustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + domain_name: str, + gateway_custom_domain_resource: Union[_models.GatewayCustomDomainResource, IO], + **kwargs: Any + ) -> LROPoller[_models.GatewayCustomDomainResource]: + """Create or update the Spring Cloud Gateway custom domain. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param domain_name: The name of the Spring Cloud Gateway custom domain. Required. + :type domain_name: str + :param gateway_custom_domain_resource: The gateway custom domain resource for the create or + update operation. Is either a model type or a IO type. Required. + :type gateway_custom_domain_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayCustomDomainResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either GatewayCustomDomainResource or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayCustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayCustomDomainResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + domain_name=domain_name, + gateway_custom_domain_resource=gateway_custom_domain_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("GatewayCustomDomainResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/domains/{domainName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, gateway_name: str, domain_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + domain_name=domain_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: 
disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/domains/{domainName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, service_name: str, gateway_name: str, domain_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete the Spring Cloud Gateway custom domain. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param domain_name: The name of the Spring Cloud Gateway custom domain. Required. + :type domain_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + domain_name=domain_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/domains/{domainName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, gateway_name: str, **kwargs: Any + ) -> Iterable["_models.GatewayCustomDomainResource"]: + """Handle requests to list all Spring Cloud Gateway custom domains. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. 
+ :type gateway_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either GatewayCustomDomainResource or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayCustomDomainResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayCustomDomainResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("GatewayCustomDomainResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/domains"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_gateway_route_configs_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_gateway_route_configs_operations.py new file mode 100644 index 00000000000..62e607f6d88 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_gateway_route_configs_operations.py @@ -0,0 +1,737 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, + service_name: str, + gateway_name: str, + route_config_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/routeConfigs/{routeConfigName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "gatewayName": _SERIALIZER.url("gateway_name", gateway_name, "str"), + "routeConfigName": _SERIALIZER.url("route_config_name", route_config_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, + service_name: str, + gateway_name: str, + route_config_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = 
kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/routeConfigs/{routeConfigName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "gatewayName": _SERIALIZER.url("gateway_name", gateway_name, "str"), + "routeConfigName": _SERIALIZER.url("route_config_name", route_config_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, + service_name: str, + gateway_name: str, + route_config_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/routeConfigs/{routeConfigName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "gatewayName": _SERIALIZER.url("gateway_name", gateway_name, "str"), + "routeConfigName": _SERIALIZER.url("route_config_name", route_config_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_request( + resource_group_name: str, service_name: str, gateway_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/routeConfigs", + ) # pylint: disable=line-too-long + 
path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "gatewayName": _SERIALIZER.url("gateway_name", gateway_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class GatewayRouteConfigsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`gateway_route_configs` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, service_name: str, gateway_name: str, route_config_name: str, **kwargs: Any + ) -> _models.GatewayRouteConfigResource: + """Get the Spring Cloud Gateway route configs. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param route_config_name: The name of the Spring Cloud Gateway route config. Required. 
+ :type route_config_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GatewayRouteConfigResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayRouteConfigResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + route_config_name=route_config_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("GatewayRouteConfigResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/routeConfigs/{routeConfigName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + route_config_name: str, + gateway_route_config_resource: Union[_models.GatewayRouteConfigResource, IO], + **kwargs: Any + ) -> _models.GatewayRouteConfigResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayRouteConfigResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(gateway_route_config_resource, (IO, bytes)): + _content = gateway_route_config_resource + else: + _json = self._serialize.body(gateway_route_config_resource, "GatewayRouteConfigResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + route_config_name=route_config_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + 
json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("GatewayRouteConfigResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("GatewayRouteConfigResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/routeConfigs/{routeConfigName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + route_config_name: str, + gateway_route_config_resource: _models.GatewayRouteConfigResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.GatewayRouteConfigResource]: + """Create the default Spring Cloud Gateway route configs or update the existing Spring Cloud + Gateway route configs. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param route_config_name: The name of the Spring Cloud Gateway route config. Required. + :type route_config_name: str + :param gateway_route_config_resource: The Spring Cloud Gateway route config for the create or + update operation. Required. + :type gateway_route_config_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either GatewayRouteConfigResource or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + route_config_name: str, + gateway_route_config_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.GatewayRouteConfigResource]: + """Create the default Spring Cloud Gateway route configs or update the existing Spring Cloud + Gateway route configs. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param route_config_name: The name of the Spring Cloud Gateway route config. Required. + :type route_config_name: str + :param gateway_route_config_resource: The Spring Cloud Gateway route config for the create or + update operation. Required. + :type gateway_route_config_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either GatewayRouteConfigResource or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + route_config_name: str, + gateway_route_config_resource: Union[_models.GatewayRouteConfigResource, IO], + **kwargs: Any + ) -> LROPoller[_models.GatewayRouteConfigResource]: + """Create the default Spring Cloud Gateway route configs or update the existing Spring Cloud + Gateway route configs. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param route_config_name: The name of the Spring Cloud Gateway route config. Required. + :type route_config_name: str + :param gateway_route_config_resource: The Spring Cloud Gateway route config for the create or + update operation. Is either a model type or a IO type. Required. 
+ :type gateway_route_config_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either GatewayRouteConfigResource or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayRouteConfigResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + route_config_name=route_config_name, + gateway_route_config_resource=gateway_route_config_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("GatewayRouteConfigResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/routeConfigs/{routeConfigName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, gateway_name: str, route_config_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: 
ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + route_config_name=route_config_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/routeConfigs/{routeConfigName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, service_name: str, gateway_name: str, route_config_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete the Spring Cloud Gateway route config. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param route_config_name: The name of the Spring Cloud Gateway route config. Required. + :type route_config_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + route_config_name=route_config_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/routeConfigs/{routeConfigName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, gateway_name: str, **kwargs: Any + ) -> Iterable["_models.GatewayRouteConfigResource"]: + """Handle requests to list all Spring Cloud Gateway route configs. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. 
+ :type gateway_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either GatewayRouteConfigResource or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayRouteConfigResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayRouteConfigResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("GatewayRouteConfigResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/routeConfigs"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_gateways_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_gateways_operations.py new file mode 100644 index 00000000000..3e58eb91f3d --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_gateways_operations.py @@ -0,0 +1,967 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, service_name: str, gateway_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "gatewayName": _SERIALIZER.url("gateway_name", gateway_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, service_name: str, gateway_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + 
"template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "gatewayName": _SERIALIZER.url("gateway_name", gateway_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, service_name: str, gateway_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "gatewayName": _SERIALIZER.url("gateway_name", gateway_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_env_secrets_request( + resource_group_name: str, service_name: str, gateway_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/listEnvSecrets", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "gatewayName": _SERIALIZER.url("gateway_name", gateway_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + 
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_request(resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_validate_domain_request( + resource_group_name: str, service_name: str, gateway_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/validateDomain", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "gatewayName": _SERIALIZER.url("gateway_name", gateway_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class GatewaysOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`gateways` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, service_name: str, gateway_name: str, **kwargs: Any + ) -> _models.GatewayResource: + """Get the Spring Cloud Gateway and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GatewayResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("GatewayResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + gateway_resource: Union[_models.GatewayResource, IO], + **kwargs: Any + ) -> _models.GatewayResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", 
"2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(gateway_resource, (IO, bytes)): + _content = gateway_resource + else: + _json = self._serialize.body(gateway_resource, "GatewayResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("GatewayResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("GatewayResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + gateway_resource: _models.GatewayResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.GatewayResource]: + """Create the default Spring Cloud Gateway or update the existing Spring Cloud Gateway. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param gateway_resource: The gateway for the create or update operation. Required. + :type gateway_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either GatewayResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + gateway_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.GatewayResource]: + """Create the default Spring Cloud Gateway or update the existing Spring Cloud Gateway. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param gateway_resource: The gateway for the create or update operation. Required. + :type gateway_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either GatewayResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + gateway_resource: Union[_models.GatewayResource, IO], + **kwargs: Any + ) -> LROPoller[_models.GatewayResource]: + """Create the default Spring Cloud Gateway or update the existing Spring Cloud Gateway. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param gateway_resource: The gateway for the create or update operation. Is either a model type + or a IO type. Required. + :type gateway_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayResource or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either GatewayResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + gateway_resource=gateway_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("GatewayResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, gateway_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, service_name: str, gateway_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Disable the default Spring Cloud Gateway. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}"} # type: ignore + + @distributed_trace + def list_env_secrets( + self, resource_group_name: str, service_name: str, gateway_name: str, **kwargs: Any + ) -> Dict[str, str]: + """List sensitive environment variables of Spring Cloud Gateway. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. 
+ :type gateway_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: dict mapping str to str or the result of cls(response) + :rtype: dict[str, str] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Dict[str, str]] + + request = build_list_env_secrets_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_env_secrets.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("{str}", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_env_secrets.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/listEnvSecrets"} # type: ignore + + @distributed_trace + def list(self, resource_group_name: str, service_name: str, **kwargs: Any) -> Iterable["_models.GatewayResource"]: + """Handles requests to list all resources in a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either GatewayResource or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.GatewayResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.GatewayResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("GatewayResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways"} # type: ignore + + @overload + def validate_domain( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + validate_payload: _models.CustomDomainValidatePayload, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CustomDomainValidateResult: + """Check the domains are valid as well as not in use. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param validate_payload: Custom domain payload to be validated. Required. + :type validate_payload: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidatePayload + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomDomainValidateResult or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidateResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def validate_domain( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + validate_payload: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CustomDomainValidateResult: + """Check the domains are valid as well as not in use. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param validate_payload: Custom domain payload to be validated. Required. + :type validate_payload: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomDomainValidateResult or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidateResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def validate_domain( + self, + resource_group_name: str, + service_name: str, + gateway_name: str, + validate_payload: Union[_models.CustomDomainValidatePayload, IO], + **kwargs: Any + ) -> _models.CustomDomainValidateResult: + """Check the domains are valid as well as not in use. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param gateway_name: The name of Spring Cloud Gateway. Required. + :type gateway_name: str + :param validate_payload: Custom domain payload to be validated. Is either a model type or a IO + type. Required. + :type validate_payload: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidatePayload or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CustomDomainValidateResult or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.CustomDomainValidateResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CustomDomainValidateResult] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(validate_payload, (IO, bytes)): + _content = validate_payload + else: + _json = self._serialize.body(validate_payload, "CustomDomainValidatePayload") + + request = build_validate_domain_request( + resource_group_name=resource_group_name, + service_name=service_name, + gateway_name=gateway_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.validate_domain.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("CustomDomainValidateResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + validate_domain.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/gateways/{gatewayName}/validateDomain"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_monitoring_settings_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_monitoring_settings_operations.py new file mode 100644 index 00000000000..903950f5cf7 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_monitoring_settings_operations.py @@ -0,0 +1,662 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request(resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/monitoringSettings/default", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_put_request( + resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/monitoringSettings/default", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters 
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_patch_request( + resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/monitoringSettings/default", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +class MonitoringSettingsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`monitoring_settings` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, resource_group_name: str, service_name: str, **kwargs: Any) -> _models.MonitoringSettingResource: + """Get the Monitoring Setting and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: MonitoringSettingResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.MonitoringSettingResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("MonitoringSettingResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/monitoringSettings/default"} # type: ignore + + def _update_put_initial( + self, + resource_group_name: str, + service_name: str, + monitoring_setting_resource: Union[_models.MonitoringSettingResource, IO], + **kwargs: Any + ) -> _models.MonitoringSettingResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.MonitoringSettingResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(monitoring_setting_resource, (IO, bytes)): + _content = monitoring_setting_resource + else: + _json = self._serialize.body(monitoring_setting_resource, "MonitoringSettingResource") + + request = build_update_put_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_put_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + 
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("MonitoringSettingResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("MonitoringSettingResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_put_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/monitoringSettings/default"} # type: ignore + + @overload + def begin_update_put( + self, + resource_group_name: str, + service_name: str, + monitoring_setting_resource: _models.MonitoringSettingResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.MonitoringSettingResource]: + """Update the Monitoring Setting. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param monitoring_setting_resource: Parameters for the update operation. Required. + :type monitoring_setting_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either MonitoringSettingResource or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update_put( + self, + resource_group_name: str, + service_name: str, + monitoring_setting_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.MonitoringSettingResource]: + """Update the Monitoring Setting. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param monitoring_setting_resource: Parameters for the update operation. Required. + :type monitoring_setting_resource: IO + :keyword content_type: Body Parameter content-type. 
Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either MonitoringSettingResource or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update_put( + self, + resource_group_name: str, + service_name: str, + monitoring_setting_resource: Union[_models.MonitoringSettingResource, IO], + **kwargs: Any + ) -> LROPoller[_models.MonitoringSettingResource]: + """Update the Monitoring Setting. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param monitoring_setting_resource: Parameters for the update operation. Is either a model type + or a IO type. Required. + :type monitoring_setting_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either MonitoringSettingResource or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.MonitoringSettingResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_put_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + monitoring_setting_resource=monitoring_setting_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("MonitoringSettingResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update_put.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/monitoringSettings/default"} # type: ignore + + def _update_patch_initial( + self, + resource_group_name: str, + service_name: str, + monitoring_setting_resource: Union[_models.MonitoringSettingResource, IO], + **kwargs: Any + ) -> _models.MonitoringSettingResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.MonitoringSettingResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(monitoring_setting_resource, (IO, bytes)): + _content = monitoring_setting_resource + else: + _json = self._serialize.body(monitoring_setting_resource, "MonitoringSettingResource") + + request = build_update_patch_request( + resource_group_name=resource_group_name, + service_name=service_name, + 
subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_patch_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("MonitoringSettingResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("MonitoringSettingResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_patch_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/monitoringSettings/default"} # type: ignore + + @overload + def begin_update_patch( + self, + resource_group_name: str, + service_name: str, + monitoring_setting_resource: _models.MonitoringSettingResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.MonitoringSettingResource]: + """Update the Monitoring Setting. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param monitoring_setting_resource: Parameters for the update operation. Required. + :type monitoring_setting_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either MonitoringSettingResource or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update_patch( + self, + resource_group_name: str, + service_name: str, + monitoring_setting_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.MonitoringSettingResource]: + """Update the Monitoring Setting. + + :param resource_group_name: The name of the resource group that contains the resource. 
You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param monitoring_setting_resource: Parameters for the update operation. Required. + :type monitoring_setting_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either MonitoringSettingResource or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update_patch( + self, + resource_group_name: str, + service_name: str, + monitoring_setting_resource: Union[_models.MonitoringSettingResource, IO], + **kwargs: Any + ) -> LROPoller[_models.MonitoringSettingResource]: + """Update the Monitoring Setting. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param monitoring_setting_resource: Parameters for the update operation. Is either a model type + or a IO type. Required. + :type monitoring_setting_resource: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either MonitoringSettingResource or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.MonitoringSettingResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.MonitoringSettingResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_patch_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + monitoring_setting_resource=monitoring_setting_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("MonitoringSettingResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update_patch.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/monitoringSettings/default"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_operations.py new file mode 100644 index 00000000000..252cafdc3c8 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_operations.py @@ -0,0 +1,154 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request(**kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/providers/Microsoft.AppPlatform/operations") + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class Operations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`operations` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list(self, **kwargs: Any) -> Iterable["_models.OperationDetail"]: + """Lists all of the available REST API operations of the Microsoft.AppPlatform provider. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OperationDetail or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.OperationDetail] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.AvailableOperations] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("AvailableOperations", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/providers/Microsoft.AppPlatform/operations"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_patch.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_patch.py new file mode 100644 index 00000000000..f7dd3251033 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_predefined_accelerators_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_predefined_accelerators_operations.py new file mode 100644 index 00000000000..0c9416e3eb9 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_predefined_accelerators_operations.py @@ -0,0 +1,629 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. 
import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, service_name: str, application_accelerator_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/predefinedAccelerators", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "applicationAcceleratorName": _SERIALIZER.url( + "application_accelerator_name", application_accelerator_name, "str" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + predefined_accelerator_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/predefinedAccelerators/{predefinedAcceleratorName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "applicationAcceleratorName": _SERIALIZER.url( + "application_accelerator_name", application_accelerator_name, "str" + ), + "predefinedAcceleratorName": _SERIALIZER.url("predefined_accelerator_name", predefined_accelerator_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_disable_request( + 
resource_group_name: str, + service_name: str, + application_accelerator_name: str, + predefined_accelerator_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/predefinedAccelerators/{predefinedAcceleratorName}/disable", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "applicationAcceleratorName": _SERIALIZER.url( + "application_accelerator_name", application_accelerator_name, "str" + ), + "predefinedAcceleratorName": _SERIALIZER.url("predefined_accelerator_name", predefined_accelerator_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_enable_request( + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + predefined_accelerator_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/predefinedAccelerators/{predefinedAcceleratorName}/enable", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "applicationAcceleratorName": _SERIALIZER.url( + "application_accelerator_name", application_accelerator_name, "str" + ), + "predefinedAcceleratorName": _SERIALIZER.url("predefined_accelerator_name", predefined_accelerator_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class PredefinedAcceleratorsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. 
+ + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`predefined_accelerators` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, application_accelerator_name: str, **kwargs: Any + ) -> Iterable["_models.PredefinedAcceleratorResource"]: + """Handle requests to list all predefined accelerators. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PredefinedAcceleratorResource or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.PredefinedAcceleratorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PredefinedAcceleratorResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("PredefinedAcceleratorResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return 
deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/predefinedAccelerators"} # type: ignore + + @distributed_trace + def get( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + predefined_accelerator_name: str, + **kwargs: Any + ) -> _models.PredefinedAcceleratorResource: + """Get the predefined accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param predefined_accelerator_name: The name of the predefined accelerator. Required. + :type predefined_accelerator_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PredefinedAcceleratorResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.PredefinedAcceleratorResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PredefinedAcceleratorResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + predefined_accelerator_name=predefined_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("PredefinedAcceleratorResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/predefinedAccelerators/{predefinedAcceleratorName}"} # type: ignore + + def _disable_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + predefined_accelerator_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_disable_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + predefined_accelerator_name=predefined_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._disable_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _disable_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/predefinedAccelerators/{predefinedAcceleratorName}/disable"} # type: ignore + + @distributed_trace + def begin_disable( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + predefined_accelerator_name: str, + **kwargs: Any + ) -> LROPoller[None]: + """Disable predefined accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param predefined_accelerator_name: The name of the predefined accelerator. Required. + :type predefined_accelerator_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._disable_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + predefined_accelerator_name=predefined_accelerator_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_disable.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/predefinedAccelerators/{predefinedAcceleratorName}/disable"} # type: ignore + + def _enable_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + predefined_accelerator_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_enable_request( + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + predefined_accelerator_name=predefined_accelerator_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._enable_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: 
disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _enable_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/predefinedAccelerators/{predefinedAcceleratorName}/enable"} # type: ignore + + @distributed_trace + def begin_enable( + self, + resource_group_name: str, + service_name: str, + application_accelerator_name: str, + predefined_accelerator_name: str, + **kwargs: Any + ) -> LROPoller[None]: + """Enable predefined accelerator. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param application_accelerator_name: The name of the application accelerator. Required. + :type application_accelerator_name: str + :param predefined_accelerator_name: The name of the predefined accelerator. Required. + :type predefined_accelerator_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._enable_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + application_accelerator_name=application_accelerator_name, + predefined_accelerator_name=predefined_accelerator_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_enable.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/applicationAccelerators/{applicationAcceleratorName}/predefinedAccelerators/{predefinedAcceleratorName}/enable"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_runtime_versions_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_runtime_versions_operations.py new file mode 100644 index 00000000000..d43df4682d3 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_runtime_versions_operations.py @@ -0,0 +1,124 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Optional, TypeVar + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_runtime_versions_request(**kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/providers/Microsoft.AppPlatform/runtimeVersions") + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class RuntimeVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`runtime_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list_runtime_versions(self, **kwargs: Any) -> _models.AvailableRuntimeVersions: + """Lists all of the available runtime versions supported by Microsoft.AppPlatform provider. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AvailableRuntimeVersions or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.AvailableRuntimeVersions + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.AvailableRuntimeVersions] + + request = build_list_runtime_versions_request( + api_version=api_version, + template_url=self.list_runtime_versions.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("AvailableRuntimeVersions", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_runtime_versions.metadata = {"url": "/providers/Microsoft.AppPlatform/runtimeVersions"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_service_registries_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_service_registries_operations.py new file mode 100644 index 00000000000..887cbd0952b --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_service_registries_operations.py @@ -0,0 +1,573 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. 
import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, service_name: str, service_registry_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/serviceRegistries/{serviceRegistryName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "serviceRegistryName": _SERIALIZER.url("service_registry_name", service_registry_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, service_name: str, service_registry_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/serviceRegistries/{serviceRegistryName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "serviceRegistryName": _SERIALIZER.url("service_registry_name", service_registry_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, service_name: str, service_registry_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", 
_params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/serviceRegistries/{serviceRegistryName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "serviceRegistryName": _SERIALIZER.url("service_registry_name", service_registry_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_request(resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/serviceRegistries", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class ServiceRegistriesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`service_registries` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, service_name: str, service_registry_name: str, **kwargs: Any + ) -> _models.ServiceRegistryResource: + """Get the Service Registry and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. 
+ :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param service_registry_name: The name of Service Registry. Required. + :type service_registry_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ServiceRegistryResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceRegistryResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceRegistryResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + service_registry_name=service_registry_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ServiceRegistryResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/serviceRegistries/{serviceRegistryName}"} # type: ignore + + def _create_or_update_initial( + self, resource_group_name: str, service_name: str, service_registry_name: str, **kwargs: Any + ) -> _models.ServiceRegistryResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceRegistryResource] + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + service_registry_name=service_registry_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in 
[200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ServiceRegistryResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ServiceRegistryResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/serviceRegistries/{serviceRegistryName}"} # type: ignore + + @distributed_trace + def begin_create_or_update( + self, resource_group_name: str, service_name: str, service_registry_name: str, **kwargs: Any + ) -> LROPoller[_models.ServiceRegistryResource]: + """Create the default Service Registry or update the existing Service Registry. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param service_registry_name: The name of Service Registry. Required. + :type service_registry_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ServiceRegistryResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceRegistryResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceRegistryResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + service_registry_name=service_registry_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ServiceRegistryResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/serviceRegistries/{serviceRegistryName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, service_registry_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + service_registry_name=service_registry_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/serviceRegistries/{serviceRegistryName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, service_name: str, service_registry_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Disable the default Service Registry. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param service_registry_name: The name of Service Registry. Required. + :type service_registry_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + service_registry_name=service_registry_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/serviceRegistries/{serviceRegistryName}"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, service_name: str, **kwargs: 
Any + ) -> Iterable["_models.ServiceRegistryResource"]: + """Handles requests to list all resources in a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ServiceRegistryResource or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceRegistryResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceRegistryResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ServiceRegistryResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/serviceRegistries"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_services_operations.py 
b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_services_operations.py new file mode 100644 index 00000000000..5084d8b383d --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_services_operations.py @@ -0,0 +1,1866 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request(resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", 
"2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + 
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_test_keys_request( + resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/listTestKeys", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_regenerate_test_key_request( + resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/regenerateTestKey", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_disable_test_endpoint_request( + resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", 
"2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/disableTestEndpoint", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_enable_test_endpoint_request( + resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/enableTestEndpoint", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_stop_request(resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/stop", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def 
build_start_request( + resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/start", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_check_name_availability_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/providers/Microsoft.AppPlatform/locations/{location}/checkNameAvailability", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "location": _SERIALIZER.url("location", location, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.AppPlatform/Spring") + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return 
HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_request(resource_group_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class ServicesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`services` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, resource_group_name: str, service_name: str, **kwargs: Any) -> _models.ServiceResource: + """Get a Service and its properties. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ServiceResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ServiceResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}"} # type: ignore + + def _create_or_update_initial( + self, resource_group_name: str, service_name: str, resource: Union[_models.ServiceResource, IO], **kwargs: Any + ) -> _models.ServiceResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(resource, (IO, bytes)): + _content = resource + else: + _json = self._serialize.body(resource, "ServiceResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + 
+ if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ServiceResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ServiceResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("ServiceResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + resource: _models.ServiceResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ServiceResource]: + """Create a new Service or update an exiting Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param resource: Parameters for the create or update operation. Required. + :type resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ServiceResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ServiceResource]: + """Create a new Service or update an exiting Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param resource: Parameters for the create or update operation. Required. + :type resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ServiceResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, resource_group_name: str, service_name: str, resource: Union[_models.ServiceResource, IO], **kwargs: Any + ) -> LROPoller[_models.ServiceResource]: + """Create a new Service or update an exiting Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param resource: Parameters for the create or update operation. Is either a model type or a IO + type. Required. + :type resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ServiceResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + resource=resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ServiceResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, 
error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}"} # type: ignore + + @distributed_trace + def begin_delete(self, resource_group_name: str, service_name: str, **kwargs: Any) -> LROPoller[None]: + """Operation to delete a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}"} # type: ignore + + def _update_initial( + self, resource_group_name: str, service_name: str, resource: Union[_models.ServiceResource, IO], **kwargs: Any + ) -> _models.ServiceResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(resource, (IO, bytes)): + _content = resource + else: + _json = self._serialize.body(resource, "ServiceResource") + + request = build_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ServiceResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("ServiceResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}"} # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + service_name: str, + resource: _models.ServiceResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ServiceResource]: + """Operation to update an exiting Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param resource: Parameters for the update operation. Required. + :type resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ServiceResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + service_name: str, + resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ServiceResource]: + """Operation to update an exiting Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param resource: Parameters for the update operation. Required. + :type resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ServiceResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, resource_group_name: str, service_name: str, resource: Union[_models.ServiceResource, IO], **kwargs: Any + ) -> LROPoller[_models.ServiceResource]: + """Operation to update an exiting Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param resource: Parameters for the update operation. Is either a model type or a IO type. + Required. + :type resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ServiceResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + resource=resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ServiceResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}"} # type: ignore + + @distributed_trace + def list_test_keys(self, resource_group_name: str, service_name: str, **kwargs: Any) -> _models.TestKeys: + """List test keys for a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TestKeys or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.TestKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.TestKeys] + + request = build_list_test_keys_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_test_keys.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("TestKeys", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_test_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/listTestKeys"} # type: ignore + + @overload + def regenerate_test_key( + self, + resource_group_name: str, + service_name: str, + regenerate_test_key_request: _models.RegenerateTestKeyRequestPayload, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.TestKeys: + """Regenerate a test key for a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param regenerate_test_key_request: Parameters for the operation. Required. + :type regenerate_test_key_request: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.RegenerateTestKeyRequestPayload + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TestKeys or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.TestKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def regenerate_test_key( + self, + resource_group_name: str, + service_name: str, + regenerate_test_key_request: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.TestKeys: + """Regenerate a test key for a Service. + + :param resource_group_name: The name of the resource group that contains the resource. 
You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param regenerate_test_key_request: Parameters for the operation. Required. + :type regenerate_test_key_request: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TestKeys or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.TestKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def regenerate_test_key( + self, + resource_group_name: str, + service_name: str, + regenerate_test_key_request: Union[_models.RegenerateTestKeyRequestPayload, IO], + **kwargs: Any + ) -> _models.TestKeys: + """Regenerate a test key for a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param regenerate_test_key_request: Parameters for the operation. Is either a model type or a + IO type. Required. + :type regenerate_test_key_request: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.RegenerateTestKeyRequestPayload or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TestKeys or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.TestKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.TestKeys] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(regenerate_test_key_request, (IO, bytes)): + _content = regenerate_test_key_request + else: + _json = self._serialize.body(regenerate_test_key_request, "RegenerateTestKeyRequestPayload") + + request = build_regenerate_test_key_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.regenerate_test_key.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = 
pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("TestKeys", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + regenerate_test_key.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/regenerateTestKey"} # type: ignore + + @distributed_trace + def disable_test_endpoint( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> None: + """Disable test endpoint functionality for a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_disable_test_endpoint_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.disable_test_endpoint.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + disable_test_endpoint.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/disableTestEndpoint"} # type: ignore + + @distributed_trace + def enable_test_endpoint(self, resource_group_name: str, service_name: str, **kwargs: Any) -> _models.TestKeys: + """Enable test endpoint functionality for a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TestKeys or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.TestKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.TestKeys] + + request = build_enable_test_endpoint_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.enable_test_endpoint.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("TestKeys", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + enable_test_endpoint.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/enableTestEndpoint"} # type: ignore + + def _stop_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 304: ResourceNotModifiedError, + 404: lambda response: ResourceNotFoundError(response=response, error_format=ARMErrorFormat), + 409: lambda response: ResourceExistsError(response=response, error_format=ARMErrorFormat), + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_stop_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._stop_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _stop_initial.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/stop"} # type: ignore + + @distributed_trace + def begin_stop(self, resource_group_name: str, service_name: str, **kwargs: Any) -> LROPoller[None]: + """Stop a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._stop_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_stop.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/stop"} # type: ignore + + def _start_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 304: ResourceNotModifiedError, + 404: lambda response: ResourceNotFoundError(response=response, error_format=ARMErrorFormat), + 409: lambda response: ResourceExistsError(response=response, error_format=ARMErrorFormat), + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_start_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._start_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _start_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/start"} # type: ignore + + @distributed_trace + def begin_start(self, resource_group_name: str, service_name: str, **kwargs: Any) -> LROPoller[None]: + """Start a Service. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._start_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_start.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/start"} # type: ignore + + @overload + def check_name_availability( + self, + location: str, + availability_parameters: _models.NameAvailabilityParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.NameAvailability: + """Checks that the resource name is valid and is not already in use. + + :param location: the region. Required. + :type location: str + :param availability_parameters: Parameters supplied to the operation. Required. + :type availability_parameters: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.NameAvailabilityParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: NameAvailability or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.NameAvailability + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def check_name_availability( + self, location: str, availability_parameters: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.NameAvailability: + """Checks that the resource name is valid and is not already in use. + + :param location: the region. Required. + :type location: str + :param availability_parameters: Parameters supplied to the operation. Required. + :type availability_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: NameAvailability or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.NameAvailability + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def check_name_availability( + self, location: str, availability_parameters: Union[_models.NameAvailabilityParameters, IO], **kwargs: Any + ) -> _models.NameAvailability: + """Checks that the resource name is valid and is not already in use. + + :param location: the region. Required. + :type location: str + :param availability_parameters: Parameters supplied to the operation. Is either a model type or + a IO type. Required. + :type availability_parameters: + ~azure.mgmt.appplatform.v2022_11_01_preview.models.NameAvailabilityParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: NameAvailability or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.NameAvailability + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.NameAvailability] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(availability_parameters, (IO, bytes)): + _content = availability_parameters + else: + _json = self._serialize.body(availability_parameters, "NameAvailabilityParameters") + + request = build_check_name_availability_request( + location=location, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.check_name_availability.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("NameAvailability", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + check_name_availability.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.AppPlatform/locations/{location}/checkNameAvailability"} # type: ignore + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.ServiceResource"]: + """Handles requests to list all resources in a subscription. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ServiceResource or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceResourceList] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_by_subscription.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ServiceResourceList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list_by_subscription.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.AppPlatform/Spring"} # type: ignore + + @distributed_trace + def list(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.ServiceResource"]: + """Handles requests to list all resources in a resource group. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ServiceResource or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.ServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ServiceResourceList] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ServiceResourceList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_skus_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_skus_operations.py new file mode 100644 index 00000000000..6012e646d1d --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_skus_operations.py @@ -0,0 +1,160 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.AppPlatform/skus") + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class SkusOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`skus` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list(self, **kwargs: Any) -> Iterable["_models.ResourceSku"]: + """Lists all of the available skus of the Microsoft.AppPlatform provider. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ResourceSku or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.ResourceSku] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ResourceSkuCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ResourceSkuCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.AppPlatform/skus"} # type: ignore diff --git a/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_storages_operations.py b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_storages_operations.py new file mode 100644 index 00000000000..622cbde5ac6 --- /dev/null +++ b/src/spring/azext_spring/vendored_sdks/appplatform/v2022_11_01_preview/operations/_storages_operations.py @@ -0,0 +1,688 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from ..._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, service_name: str, storage_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/storages/{storageName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "storageName": _SERIALIZER.url("storage_name", storage_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, service_name: str, storage_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/storages/{storageName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", 
subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "storageName": _SERIALIZER.url("storage_name", storage_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, service_name: str, storage_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/storages/{storageName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + "storageName": _SERIALIZER.url("storage_name", storage_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_request(resource_group_name: str, service_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/storages", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "serviceName": _SERIALIZER.url("service_name", service_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class StoragesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. 
+ + Instead, you should access the following operations through + :class:`~azure.mgmt.appplatform.v2022_11_01_preview.AppPlatformManagementClient`'s + :attr:`storages` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, service_name: str, storage_name: str, **kwargs: Any + ) -> _models.StorageResource: + """Get the storage resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param storage_name: The name of the storage resource. Required. + :type storage_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageResource or the result of cls(response) + :rtype: ~azure.mgmt.appplatform.v2022_11_01_preview.models.StorageResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageResource] + + request = build_get_request( + resource_group_name=resource_group_name, + service_name=service_name, + storage_name=storage_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("StorageResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/storages/{storageName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + service_name: str, + storage_name: str, + storage_resource: Union[_models.StorageResource, IO], + **kwargs: Any + ) -> _models.StorageResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(storage_resource, (IO, bytes)): + _content = storage_resource + else: + _json = self._serialize.body(storage_resource, "StorageResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + service_name=service_name, + storage_name=storage_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("StorageResource", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("StorageResource", pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize("StorageResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/storages/{storageName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + storage_name: str, + storage_resource: _models.StorageResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageResource]: + """Create or update storage resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param storage_name: The name of the storage resource. Required. + :type storage_name: str + :param storage_resource: Parameters for the create or update operation. Required. + :type storage_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.StorageResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either StorageResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.StorageResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + storage_name: str, + storage_resource: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageResource]: + """Create or update storage resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param storage_name: The name of the storage resource. Required. + :type storage_name: str + :param storage_resource: Parameters for the create or update operation. Required. + :type storage_resource: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either StorageResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.StorageResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + service_name: str, + storage_name: str, + storage_resource: Union[_models.StorageResource, IO], + **kwargs: Any + ) -> LROPoller[_models.StorageResource]: + """Create or update storage resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param storage_name: The name of the storage resource. Required. + :type storage_name: str + :param storage_resource: Parameters for the create or update operation. Is either a model type + or a IO type. Required. + :type storage_resource: ~azure.mgmt.appplatform.v2022_11_01_preview.models.StorageResource or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either StorageResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_11_01_preview.models.StorageResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + storage_name=storage_name, + storage_resource=storage_resource, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("StorageResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/storages/{storageName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, service_name: str, storage_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + 
resource_group_name=resource_group_name, + service_name=service_name, + storage_name=storage_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/storages/{storageName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, service_name: str, storage_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete the storage resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. + :type service_name: str + :param storage_name: The name of the storage resource. Required. + :type storage_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + service_name=service_name, + storage_name=storage_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/storages/{storageName}"} # type: ignore + + @distributed_trace + def list(self, resource_group_name: str, service_name: str, **kwargs: Any) -> Iterable["_models.StorageResource"]: + """List all the storages of one Azure Spring Apps resource. + + :param resource_group_name: The name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param service_name: The name of the Service resource. Required. 
+ :type service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either StorageResource or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_11_01_preview.models.StorageResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-11-01-preview")) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageResourceCollection] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + service_name=service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("StorageResourceCollection", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/storages"} # type: ignore From 49586d4914b16176b89ab79220e364a55246de69 Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Tue, 1 Nov 2022 06:28:28 +0000 Subject: [PATCH 55/85] [Release] Update index.json for extension [ dynatrace ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=12498&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/0509f8518b4edc6bf24eadf1d670020d409ed60b --- src/index.json | 45 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/src/index.json b/src/index.json index 
d7bf3364691..48e83d7cdc9 100644 --- a/src/index.json +++ b/src/index.json @@ -23013,6 +23013,51 @@ "sha256Digest": "1c4bb8216e509c2f08fa75c45930ec377768326f30cb9ab125842aa9352c6e2e" } ], + "dynatrace": [ + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/dynatrace-0.1.0-py3-none-any.whl", + "filename": "dynatrace-0.1.0-py3-none-any.whl", + "metadata": { + "azext.isPreview": true, + "azext.minCliCoreVersion": "2.41.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/dynatrace" + } + } + }, + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "dynatrace", + "summary": "Microsoft Azure Command-Line Tools Dynatrace Extension.", + "version": "0.1.0" + }, + "sha256Digest": "112a7e423461d1b6f7c385fe8b73b4f2b850e2570c35a54a4bbcc2e87afec661" + } + ], "edgeorder": [ { "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/edgeorder-0.1.0-py3-none-any.whl", From 979eede8cd20964ccfcdd7569f520ba1b7c8dbfb Mon Sep 17 00:00:00 2001 From: Yishi Wang Date: Wed, 2 Nov 2022 09:33:28 +0800 Subject: [PATCH 56/85] [storage-blob-preview] `az storage blob filter`: Add `--container-name` to support filter blobs in specific container (#5481) * `az storage blob filter`: Add `--container-name` to support filter blobs in specific container * rerun tests --- src/storage-blob-preview/HISTORY.rst | 4 + .../azext_storage_blob_preview/__init__.py | 2 +- .../azext_storage_blob_preview/_help.py | 4 - .../azext_storage_blob_preview/_params.py | 2 + .../azext_storage_blob_preview/commands.py | 2 +- .../operations/blob.py | 6 + .../test_storage_blob_tags_scenario.yaml | 529 ++- .../test_storage_blob_versioning.yaml | 346 +- .../recordings/test_storage_blob_vlm.yaml | 85 +- ...test_storage_container_list_scenarios.yaml | 242 +- ...t_storage_container_soft_delete_oauth.yaml | 159 +- ...orage_container_soft_delete_scenarios.yaml | 159 +- .../test_storage_blob_preview_scenario.py | 6 + .../v2021_04_10/__init__.py | 239 + .../v2021_04_10/_blob_client.py | 4003 +++++++++++++++++ .../v2021_04_10/_blob_service_client.py | 740 +++ .../v2021_04_10/_container_client.py | 1601 +++++++ .../v2021_04_10/_deserialize.py | 174 + .../v2021_04_10/_download.py | 637 +++ .../v2021_04_10/_generated/__init__.py | 16 + .../_generated/_azure_blob_storage.py | 106 + .../v2021_04_10/_generated/_configuration.py | 58 + .../v2021_04_10/_generated/aio/__init__.py | 10 + .../_generated/aio/_azure_blob_storage.py | 96 + .../_generated/aio/_configuration.py | 52 + .../_generated/aio/operations/__init__.py | 23 + .../aio/operations/_append_blob_operations.py | 726 +++ .../aio/operations/_blob_operations.py | 3008 +++++++++++++ .../aio/operations/_block_blob_operations.py | 1138 +++++ .../aio/operations/_container_operations.py | 1748 +++++++ .../aio/operations/_page_blob_operations.py | 1424 ++++++ 
.../aio/operations/_service_operations.py | 698 +++ .../v2021_04_10/_generated/models/__init__.py | 219 + .../models/_azure_blob_storage_enums.py | 346 ++ .../v2021_04_10/_generated/models/_models.py | 1995 ++++++++ .../_generated/models/_models_py3.py | 2265 ++++++++++ .../_generated/operations/__init__.py | 23 + .../operations/_append_blob_operations.py | 734 +++ .../_generated/operations/_blob_operations.py | 3036 +++++++++++++ .../operations/_block_blob_operations.py | 1148 +++++ .../operations/_container_operations.py | 1770 ++++++++ .../operations/_page_blob_operations.py | 1437 ++++++ .../operations/_service_operations.py | 710 +++ .../azure_storage_blob/v2021_04_10/_lease.py | 331 ++ .../v2021_04_10/_list_blobs_helper.py | 244 + .../azure_storage_blob/v2021_04_10/_models.py | 1259 ++++++ .../v2021_04_10/_quick_query_helper.py | 195 + .../v2021_04_10/_serialize.py | 215 + .../v2021_04_10/_shared/__init__.py | 56 + .../v2021_04_10/_shared/authentication.py | 178 + .../v2021_04_10/_shared/avro/__init__.py | 5 + .../v2021_04_10/_shared/avro/avro_io.py | 464 ++ .../v2021_04_10/_shared/avro/avro_io_async.py | 448 ++ .../v2021_04_10/_shared/avro/datafile.py | 266 ++ .../_shared/avro/datafile_async.py | 215 + .../v2021_04_10/_shared/avro/schema.py | 1221 +++++ .../v2021_04_10/_shared/base_client.py | 463 ++ .../v2021_04_10/_shared/base_client_async.py | 191 + .../v2021_04_10/_shared/constants.py | 28 + .../v2021_04_10/_shared/encryption.py | 542 +++ .../v2021_04_10/_shared/models.py | 480 ++ .../v2021_04_10/_shared/parser.py | 20 + .../v2021_04_10/_shared/policies.py | 657 +++ .../v2021_04_10/_shared/policies_async.py | 253 ++ .../v2021_04_10/_shared/request_handlers.py | 278 ++ .../v2021_04_10/_shared/response_handlers.py | 195 + .../_shared/shared_access_signature.py | 230 + .../v2021_04_10/_shared/uploads.py | 603 +++ .../v2021_04_10/_shared/uploads_async.py | 395 ++ .../v2021_04_10/_shared_access_signature.py | 609 +++ .../v2021_04_10/_upload_helpers.py | 306 ++ .../v2021_04_10/_version.py | 7 + src/storage-blob-preview/setup.py | 2 +- 73 files changed, 41341 insertions(+), 741 deletions(-) create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/__init__.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_blob_client.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_blob_service_client.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_container_client.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_deserialize.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_download.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/__init__.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/_azure_blob_storage.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/_configuration.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/__init__.py create mode 100644 
src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/_azure_blob_storage.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/_configuration.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/__init__.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_append_blob_operations.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_blob_operations.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_block_blob_operations.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_container_operations.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_page_blob_operations.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_service_operations.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/models/__init__.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/models/_azure_blob_storage_enums.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/models/_models.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/models/_models_py3.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/__init__.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_append_blob_operations.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_blob_operations.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_block_blob_operations.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_container_operations.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_page_blob_operations.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_service_operations.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_lease.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_list_blobs_helper.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_models.py 
create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_quick_query_helper.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_serialize.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/__init__.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/authentication.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/__init__.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/avro_io.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/avro_io_async.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/datafile.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/datafile_async.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/schema.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/base_client.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/base_client_async.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/constants.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/encryption.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/models.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/parser.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/policies.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/policies_async.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/request_handlers.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/response_handlers.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/shared_access_signature.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/uploads.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/uploads_async.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared_access_signature.py create mode 100644 src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_upload_helpers.py create mode 100644 
src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_version.py diff --git a/src/storage-blob-preview/HISTORY.rst b/src/storage-blob-preview/HISTORY.rst index c75222a6a1d..50ed7e84387 100644 --- a/src/storage-blob-preview/HISTORY.rst +++ b/src/storage-blob-preview/HISTORY.rst @@ -2,6 +2,10 @@ Release History =============== +0.6.2 +++++++ +* `az storage blob filter`: Add `--container-name` to support filter blobs in specific container + 0.6.1 ++++++ * `az storage blob immutability-policy set/delete`: Extend/Lock/Unlock/Delete blob's immutability policy diff --git a/src/storage-blob-preview/azext_storage_blob_preview/__init__.py b/src/storage-blob-preview/azext_storage_blob_preview/__init__.py index 8707dbdcebb..9217d8af0fe 100644 --- a/src/storage-blob-preview/azext_storage_blob_preview/__init__.py +++ b/src/storage-blob-preview/azext_storage_blob_preview/__init__.py @@ -14,7 +14,7 @@ class StorageCommandsLoader(AzCommandsLoader): def __init__(self, cli_ctx=None): from azure.cli.core.commands import CliCommandType - register_resource_type('latest', CUSTOM_DATA_STORAGE_BLOB, '2020-10-02') + register_resource_type('latest', CUSTOM_DATA_STORAGE_BLOB, '2021-04-10') storage_custom = CliCommandType(operations_tmpl='azure.cli.command_modules.storage.custom#{}') super(StorageCommandsLoader, self).__init__(cli_ctx=cli_ctx, resource_type=CUSTOM_DATA_STORAGE_BLOB, diff --git a/src/storage-blob-preview/azext_storage_blob_preview/_help.py b/src/storage-blob-preview/azext_storage_blob_preview/_help.py index 84132a80d3a..c8b615d0dcd 100644 --- a/src/storage-blob-preview/azext_storage_blob_preview/_help.py +++ b/src/storage-blob-preview/azext_storage_blob_preview/_help.py @@ -76,15 +76,11 @@ helps['storage blob filter'] = """ type: command short-summary: List blobs across all containers whose tags match a given search expression. -long-summary: > - Filter blobs searches across all containers within a storage account but can be scoped within the expression to - a single container. parameters: - name: --tag-filter short-summary: > The expression to find blobs whose tags matches the specified condition. eg. ""yourtagname"='firsttag' and "yourtagname2"='secondtag'" - To specify a container, eg. 
"@container='containerName' and "Name"='C'" """ helps['storage blob list'] = """ diff --git a/src/storage-blob-preview/azext_storage_blob_preview/_params.py b/src/storage-blob-preview/azext_storage_blob_preview/_params.py index e1a6301b823..3d4dc6a9959 100644 --- a/src/storage-blob-preview/azext_storage_blob_preview/_params.py +++ b/src/storage-blob-preview/azext_storage_blob_preview/_params.py @@ -264,6 +264,8 @@ def load_arguments(self, _): # pylint: disable=too-many-locals, too-many-statem with self.argument_context('storage blob filter') as c: c.argument('filter_expression', options_list=['--tag-filter']) + c.argument('container_name', container_name_type, + help='Used when you want to list blobs under a specified container') with self.argument_context('storage blob generate-sas') as c: from .completers import get_storage_acl_name_completion_list diff --git a/src/storage-blob-preview/azext_storage_blob_preview/commands.py b/src/storage-blob-preview/azext_storage_blob_preview/commands.py index 9ec50fc59fe..eeec5e7f6d6 100644 --- a/src/storage-blob-preview/azext_storage_blob_preview/commands.py +++ b/src/storage-blob-preview/azext_storage_blob_preview/commands.py @@ -85,7 +85,7 @@ def get_custom_sdk(custom_module, client_factory, resource_type=ResourceType.DAT with self.command_group('storage blob', blob_service_sdk, resource_type=CUSTOM_DATA_STORAGE_BLOB, min_api='2019-12-12', custom_command_type=blob_service_custom_sdk) as g: - g.storage_command_oauth('filter', 'find_blobs_by_tags', is_preview=True) + g.storage_custom_command_oauth('filter', 'find_blobs_by_tags', is_preview=True) blob_lease_client_sdk = CliCommandType( operations_tmpl='azure.multiapi.storagev2.blob._lease#BlobLeaseClient.{}', diff --git a/src/storage-blob-preview/azext_storage_blob_preview/operations/blob.py b/src/storage-blob-preview/azext_storage_blob_preview/operations/blob.py index 5bf8ce19ae7..1d5881164be 100644 --- a/src/storage-blob-preview/azext_storage_blob_preview/operations/blob.py +++ b/src/storage-blob-preview/azext_storage_blob_preview/operations/blob.py @@ -720,3 +720,9 @@ def query_blob(cmd, client, query_expression, input_config=None, output_config=N return None return reader.readall().decode("utf-8") + + +def find_blobs_by_tags(client, filter_expression, container_name=None): + if container_name: + client = client.get_container_client(container_name) + return client.find_blobs_by_tags(filter_expression=filter_expression) diff --git a/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_blob_tags_scenario.yaml b/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_blob_tags_scenario.yaml index 1b13abe23fa..822e4b73103 100644 --- a/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_blob_tags_scenario.yaml +++ b/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_blob_tags_scenario.yaml @@ -2,16 +2,24 @@ interactions: - request: body: null headers: + Accept: + - application/xml + Accept-Encoding: + - gzip, deflate + CommandName: + - storage container create Connection: - keep-alive Content-Length: - '0' + ParameterSetName: + - -n --account-name --account-key User-Agent: - - Azure-Storage/2.0.0-2.0.1 (Python CPython 3.8.7; Windows 10) AZURECLI/2.35.0 + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.12.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:38:20 GMT + - Mon, 24 Oct 2022 06:05:06 GMT x-ms-version: - - '2018-11-09' + - 
'2021-06-08' method: PUT uri: https://blobtag000002.blob.core.windows.net/cont1000003?restype=container response: @@ -21,31 +29,39 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:38:20 GMT + - Mon, 24 Oct 2022 06:05:07 GMT etag: - - '"0x8DA20FDA5F36E2A"' + - '"0x8DAB585B3F003AD"' last-modified: - - Mon, 18 Apr 2022 05:38:20 GMT + - Mon, 24 Oct 2022 06:05:08 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2018-11-09' + - '2021-06-08' status: code: 201 message: Created - request: body: null headers: + Accept: + - application/xml + Accept-Encoding: + - gzip, deflate + CommandName: + - storage container create Connection: - keep-alive Content-Length: - '0' + ParameterSetName: + - -n --account-name --account-key User-Agent: - - Azure-Storage/2.0.0-2.0.1 (Python CPython 3.8.7; Windows 10) AZURECLI/2.35.0 + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.12.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:38:21 GMT + - Mon, 24 Oct 2022 06:05:07 GMT x-ms-version: - - '2018-11-09' + - '2021-06-08' method: PUT uri: https://blobtag000002.blob.core.windows.net/cont2000004?restype=container response: @@ -55,15 +71,15 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:38:22 GMT + - Mon, 24 Oct 2022 06:05:08 GMT etag: - - '"0x8DA20FDA6E08956"' + - '"0x8DAB585B4CE520D"' last-modified: - - Mon, 18 Apr 2022 05:38:22 GMT + - Mon, 24 Oct 2022 06:05:09 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2018-11-09' + - '2021-06-08' status: code: 201 message: Created @@ -87,15 +103,15 @@ interactions: ParameterSetName: - -c -f -n --tags --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-blob-type: - BlockBlob x-ms-date: - - Mon, 18 Apr 2022 05:38:23 GMT + - Mon, 24 Oct 2022 06:05:09 GMT x-ms-tags: - date=2020-01-01&category=test x-ms-version: - - '2020-10-02' + - '2021-04-10' method: PUT uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005 response: @@ -107,11 +123,11 @@ interactions: content-md5: - DfvoqkwgtS4bi/PLbL3xkw== date: - - Mon, 18 Apr 2022 05:38:24 GMT + - Mon, 24 Oct 2022 06:05:11 GMT etag: - - '"0x8DA20FDA852192A"' + - '"0x8DAB585B646C1C7"' last-modified: - - Mon, 18 Apr 2022 05:38:24 GMT + - Mon, 24 Oct 2022 06:05:11 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-content-crc64: @@ -119,7 +135,7 @@ interactions: x-ms-request-server-encrypted: - 'true' x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 201 message: Created @@ -137,19 +153,19 @@ interactions: ParameterSetName: - -c --include --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:38:55 GMT + - Mon, 24 Oct 2022 06:05:41 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://blobtag000002.blob.core.windows.net/cont1000003?restype=container&comp=list&maxresults=5000&include=tags response: body: string: "\uFEFF5000blob000005Mon, - 18 Apr 2022 05:38:24 GMTMon, 18 Apr 2022 05:38:24 - GMT0x8DA20FDA852192A131072application/octet-streamMon, 24 Oct 2022 06:05:11 + 
GMT0x8DAB585B646C1C7131072application/octet-streamDfvoqkwgtS4bi/PLbL3xkw==BlockBlobHottrueunlockedavailabletrue2categorytestdate2020-01-01" @@ -157,13 +173,13 @@ interactions: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:38:55 GMT + - Mon, 24 Oct 2022 06:05:42 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -181,11 +197,11 @@ interactions: ParameterSetName: - -n -c --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:38:59 GMT + - Mon, 24 Oct 2022 06:05:43 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005?comp=tags response: @@ -197,11 +213,11 @@ interactions: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:38:59 GMT + - Mon, 24 Oct 2022 06:05:44 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -221,15 +237,15 @@ interactions: ParameterSetName: - --source-blob --source-container -c -b --tags --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.10.0 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.12.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-copy-source: - - https://blobtag7cecw7sf6hbq72peq.blob.core.windows.net/cont126k356pvuzwcxq4gwkt/blobr5yl5zh6jdy7ycxdg7cy + - https://blobtagrfdgx6wf7ljxth3ik.blob.core.windows.net/cont1urijtimap7657pj2y4s/blobwm3b4jnvm4us4yxdwah4 x-ms-date: - - Mon, 18 Apr 2022 05:39:01 GMT + - Mon, 24 Oct 2022 06:05:44 GMT x-ms-tags: - number=1 x-ms-version: - - '2021-04-10' + - '2021-06-08' method: PUT uri: https://blobtag000002.blob.core.windows.net/cont2000004/blob000006 response: @@ -239,19 +255,19 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:39:01 GMT + - Mon, 24 Oct 2022 06:05:46 GMT etag: - - '"0x8DA20FDBE730DC5"' + - '"0x8DAB585CAE6FD0C"' last-modified: - - Mon, 18 Apr 2022 05:39:02 GMT + - Mon, 24 Oct 2022 06:05:46 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-copy-id: - - af5e0626-3819-4fc3-b515-a8bcffdd8446 + - 96ed856b-21b8-4e1b-98d6-6fc87e3e37f8 x-ms-copy-status: - success x-ms-version: - - '2021-04-10' + - '2021-06-08' status: code: 202 message: Accepted @@ -269,11 +285,11 @@ interactions: ParameterSetName: - -n -c --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:02 GMT + - Mon, 24 Oct 2022 06:05:46 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://blobtag000002.blob.core.windows.net/cont2000004/blob000006?comp=tags response: @@ -285,11 +301,11 @@ interactions: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:39:03 GMT + - Mon, 24 Oct 2022 06:05:47 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -307,19 +323,19 @@ interactions: ParameterSetName: - -c --include --account-name --account-key User-Agent: - - AZURECLI/2.35.0 
azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:04 GMT + - Mon, 24 Oct 2022 06:05:47 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://blobtag000002.blob.core.windows.net/cont2000004?restype=container&comp=list&maxresults=5000&include=tags response: body: string: "\uFEFF5000blob000006Mon, - 18 Apr 2022 05:39:02 GMTMon, 18 Apr 2022 05:39:02 - GMT0x8DA20FDBE730DC5131072application/octet-streamMon, 24 Oct 2022 06:05:46 + GMT0x8DAB585CAE6FD0C131072application/octet-streamDfvoqkwgtS4bi/PLbL3xkw==BlockBlobHottrueunlockedavailabletrue1number1" @@ -327,13 +343,13 @@ interactions: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:39:05 GMT + - Mon, 24 Oct 2022 06:05:49 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -357,11 +373,11 @@ interactions: ParameterSetName: - -n -c --tags --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:06 GMT + - Mon, 24 Oct 2022 06:05:49 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: PUT uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005?comp=tags response: @@ -369,11 +385,11 @@ interactions: string: '' headers: date: - - Mon, 18 Apr 2022 05:39:07 GMT + - Mon, 24 Oct 2022 06:05:51 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 204 message: No Content @@ -391,11 +407,11 @@ interactions: ParameterSetName: - -n -c --tags --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:08 GMT + - Mon, 24 Oct 2022 06:05:51 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005?comp=tags response: @@ -407,11 +423,11 @@ interactions: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:39:07 GMT + - Mon, 24 Oct 2022 06:05:51 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -429,11 +445,11 @@ interactions: ParameterSetName: - -n -c --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:09 GMT + - Mon, 24 Oct 2022 06:05:51 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005?comp=tags response: @@ -445,11 +461,11 @@ interactions: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:39:09 GMT + - Mon, 24 Oct 2022 06:05:53 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -467,11 +483,11 @@ interactions: ParameterSetName: - --tag-filter --account-name 
--account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:11 GMT + - Mon, 24 Oct 2022 06:05:53 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://blobtag000002.blob.core.windows.net/?comp=blobs&where=test%3D%27tag%27 response: @@ -482,13 +498,52 @@ interactions: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:39:11 GMT + - Mon, 24 Oct 2022 06:05:55 GMT + server: + - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 + transfer-encoding: + - chunked + x-ms-version: + - '2021-04-10' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/xml + Accept-Encoding: + - gzip, deflate + CommandName: + - storage blob filter + Connection: + - keep-alive + ParameterSetName: + - --tag-filter -c --account-name --account-key + User-Agent: + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) + x-ms-date: + - Mon, 24 Oct 2022 06:05:54 GMT + x-ms-version: + - '2021-04-10' + method: GET + uri: https://blobtag000002.blob.core.windows.net/cont1000003?restype=container&comp=blobs&where=test%3D%27tag%27 + response: + body: + string: "\uFEFF\ntest='tag'blob000005cont1000003testtag" + headers: + content-type: + - application/xml + date: + - Mon, 24 Oct 2022 06:05:55 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -509,15 +564,15 @@ interactions: - --source-blob --source-container -c -b --source-tags-condition --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.10.0 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.12.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-copy-source: - - https://blobtag7cecw7sf6hbq72peq.blob.core.windows.net/cont126k356pvuzwcxq4gwkt/blobr5yl5zh6jdy7ycxdg7cy + - https://blobtagrfdgx6wf7ljxth3ik.blob.core.windows.net/cont1urijtimap7657pj2y4s/blobwm3b4jnvm4us4yxdwah4 x-ms-date: - - Mon, 18 Apr 2022 05:39:13 GMT + - Mon, 24 Oct 2022 06:05:56 GMT x-ms-source-if-tags: - test='tag' x-ms-version: - - '2021-04-10' + - '2021-06-08' method: PUT uri: https://blobtag000002.blob.core.windows.net/cont2000004/blob000006 response: @@ -527,19 +582,19 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:39:14 GMT + - Mon, 24 Oct 2022 06:05:57 GMT etag: - - '"0x8DA20FDC5B4D22F"' + - '"0x8DAB585D1F75187"' last-modified: - - Mon, 18 Apr 2022 05:39:14 GMT + - Mon, 24 Oct 2022 06:05:58 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-copy-id: - - cbb6281b-61d2-4f0c-8368-b895f32d27e3 + - f18e0e53-1844-4dd4-832b-5d81f59a2afd x-ms-copy-status: - success x-ms-version: - - '2021-04-10' + - '2021-06-08' status: code: 202 message: Accepted @@ -561,17 +616,17 @@ interactions: ParameterSetName: - -n -c --tags-condition -f --tags --overwrite --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-blob-type: - BlockBlob x-ms-date: - - Mon, 18 Apr 2022 05:39:15 GMT + - Mon, 24 Oct 2022 06:05:58 GMT x-ms-if-tags: - test='tag' x-ms-tags: - date=2020-01-01&category=test x-ms-version: - - 
'2020-10-02' + - '2021-04-10' method: PUT uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005 response: @@ -583,11 +638,11 @@ interactions: content-md5: - DfvoqkwgtS4bi/PLbL3xkw== date: - - Mon, 18 Apr 2022 05:39:16 GMT + - Mon, 24 Oct 2022 06:05:59 GMT etag: - - '"0x8DA20FDC7794AA2"' + - '"0x8DAB585D3658F87"' last-modified: - - Mon, 18 Apr 2022 05:39:17 GMT + - Mon, 24 Oct 2022 06:06:00 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-content-crc64: @@ -595,7 +650,7 @@ interactions: x-ms-request-server-encrypted: - 'true' x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 201 message: Created @@ -615,15 +670,15 @@ interactions: ParameterSetName: - -n -c --tags-condition --metadata --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:17 GMT + - Mon, 24 Oct 2022 06:06:00 GMT x-ms-if-tags: - category='test' x-ms-meta-a: - b x-ms-version: - - '2020-10-02' + - '2021-04-10' method: PUT uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005?comp=metadata response: @@ -633,17 +688,17 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:39:18 GMT + - Mon, 24 Oct 2022 06:06:02 GMT etag: - - '"0x8DA20FDC8AD7EFE"' + - '"0x8DAB585D45D6F0D"' last-modified: - - Mon, 18 Apr 2022 05:39:19 GMT + - Mon, 24 Oct 2022 06:06:02 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-request-server-encrypted: - 'true' x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -661,13 +716,13 @@ interactions: ParameterSetName: - -n -c --tags-condition --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:20 GMT + - Mon, 24 Oct 2022 06:06:02 GMT x-ms-if-tags: - category='test' x-ms-version: - - '2020-10-02' + - '2021-04-10' method: HEAD uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005 response: @@ -683,11 +738,11 @@ interactions: content-type: - application/octet-stream date: - - Mon, 18 Apr 2022 05:39:20 GMT + - Mon, 24 Oct 2022 06:06:04 GMT etag: - - '"0x8DA20FDC8AD7EFE"' + - '"0x8DAB585D45D6F0D"' last-modified: - - Mon, 18 Apr 2022 05:39:19 GMT + - Mon, 24 Oct 2022 06:06:02 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-access-tier: @@ -697,7 +752,7 @@ interactions: x-ms-blob-type: - BlockBlob x-ms-creation-time: - - Mon, 18 Apr 2022 05:38:24 GMT + - Mon, 24 Oct 2022 06:05:11 GMT x-ms-lease-state: - available x-ms-lease-status: @@ -709,7 +764,7 @@ interactions: x-ms-tag-count: - '2' x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -730,9 +785,9 @@ interactions: - --lease-duration -b -c --proposed-lease-id --tags-condition --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:21 GMT + - Mon, 24 Oct 2022 06:06:05 GMT x-ms-if-tags: - category='test' x-ms-lease-action: @@ -742,7 +797,7 @@ interactions: x-ms-proposed-lease-id: - abcdabcd-abcd-abcd-abcd-abcdabcdabcd x-ms-version: - - 
'2020-10-02' + - '2021-04-10' method: PUT uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005?comp=lease response: @@ -752,17 +807,17 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:39:22 GMT + - Mon, 24 Oct 2022 06:06:07 GMT etag: - - '"0x8DA20FDC8AD7EFE"' + - '"0x8DAB585D45D6F0D"' last-modified: - - Mon, 18 Apr 2022 05:39:19 GMT + - Mon, 24 Oct 2022 06:06:02 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-lease-id: - abcdabcd-abcd-abcd-abcd-abcdabcdabcd x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 201 message: Created @@ -780,11 +835,11 @@ interactions: ParameterSetName: - -n -c --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:23 GMT + - Mon, 24 Oct 2022 06:06:06 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: HEAD uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005 response: @@ -800,11 +855,11 @@ interactions: content-type: - application/octet-stream date: - - Mon, 18 Apr 2022 05:39:24 GMT + - Mon, 24 Oct 2022 06:06:08 GMT etag: - - '"0x8DA20FDC8AD7EFE"' + - '"0x8DAB585D45D6F0D"' last-modified: - - Mon, 18 Apr 2022 05:39:19 GMT + - Mon, 24 Oct 2022 06:06:02 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-access-tier: @@ -814,7 +869,7 @@ interactions: x-ms-blob-type: - BlockBlob x-ms-creation-time: - - Mon, 18 Apr 2022 05:38:24 GMT + - Mon, 24 Oct 2022 06:05:11 GMT x-ms-lease-duration: - fixed x-ms-lease-state: @@ -828,7 +883,7 @@ interactions: x-ms-tag-count: - '2' x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -848,9 +903,9 @@ interactions: ParameterSetName: - -b -c --lease-id --proposed-lease-id --tags-condition --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:25 GMT + - Mon, 24 Oct 2022 06:06:08 GMT x-ms-if-tags: - category='test' x-ms-lease-action: @@ -860,7 +915,7 @@ interactions: x-ms-proposed-lease-id: - dcbadcba-dcba-dcba-dcba-dcbadcbadcba x-ms-version: - - '2020-10-02' + - '2021-04-10' method: PUT uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005?comp=lease response: @@ -870,17 +925,17 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:39:26 GMT + - Mon, 24 Oct 2022 06:06:10 GMT etag: - - '"0x8DA20FDC8AD7EFE"' + - '"0x8DAB585D45D6F0D"' last-modified: - - Mon, 18 Apr 2022 05:39:19 GMT + - Mon, 24 Oct 2022 06:06:02 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-lease-id: - dcbadcba-dcba-dcba-dcba-dcbadcbadcba x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -900,9 +955,9 @@ interactions: ParameterSetName: - -b -c --lease-id --tags-condition --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:28 GMT + - Mon, 24 Oct 2022 06:06:10 GMT x-ms-if-tags: - category='test' x-ms-lease-action: @@ -910,7 +965,7 @@ interactions: x-ms-lease-id: - dcbadcba-dcba-dcba-dcba-dcbadcbadcba 
x-ms-version: - - '2020-10-02' + - '2021-04-10' method: PUT uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005?comp=lease response: @@ -920,17 +975,17 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:39:29 GMT + - Mon, 24 Oct 2022 06:06:13 GMT etag: - - '"0x8DA20FDC8AD7EFE"' + - '"0x8DAB585D45D6F0D"' last-modified: - - Mon, 18 Apr 2022 05:39:19 GMT + - Mon, 24 Oct 2022 06:06:02 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-lease-id: - dcbadcba-dcba-dcba-dcba-dcbadcbadcba x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -948,11 +1003,11 @@ interactions: ParameterSetName: - -n -c --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:30 GMT + - Mon, 24 Oct 2022 06:06:12 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: HEAD uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005 response: @@ -968,11 +1023,11 @@ interactions: content-type: - application/octet-stream date: - - Mon, 18 Apr 2022 05:39:30 GMT + - Mon, 24 Oct 2022 06:06:14 GMT etag: - - '"0x8DA20FDC8AD7EFE"' + - '"0x8DAB585D45D6F0D"' last-modified: - - Mon, 18 Apr 2022 05:39:19 GMT + - Mon, 24 Oct 2022 06:06:02 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-access-tier: @@ -982,7 +1037,7 @@ interactions: x-ms-blob-type: - BlockBlob x-ms-creation-time: - - Mon, 18 Apr 2022 05:38:24 GMT + - Mon, 24 Oct 2022 06:05:11 GMT x-ms-lease-duration: - fixed x-ms-lease-state: @@ -996,7 +1051,7 @@ interactions: x-ms-tag-count: - '2' x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -1016,9 +1071,9 @@ interactions: ParameterSetName: - -b -c --lease-break-period --tags-condition --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:32 GMT + - Mon, 24 Oct 2022 06:06:14 GMT x-ms-if-tags: - category='test' x-ms-lease-action: @@ -1026,7 +1081,7 @@ interactions: x-ms-lease-break-period: - '30' x-ms-version: - - '2020-10-02' + - '2021-04-10' method: PUT uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005?comp=lease response: @@ -1036,17 +1091,17 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:39:32 GMT + - Mon, 24 Oct 2022 06:06:15 GMT etag: - - '"0x8DA20FDC8AD7EFE"' + - '"0x8DAB585D45D6F0D"' last-modified: - - Mon, 18 Apr 2022 05:39:19 GMT + - Mon, 24 Oct 2022 06:06:02 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-lease-time: - '30' x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 202 message: Accepted @@ -1064,11 +1119,11 @@ interactions: ParameterSetName: - -n -c --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:33 GMT + - Mon, 24 Oct 2022 06:06:15 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: HEAD uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005 response: @@ -1084,11 +1139,11 @@ interactions: content-type: - 
application/octet-stream date: - - Mon, 18 Apr 2022 05:39:34 GMT + - Mon, 24 Oct 2022 06:06:17 GMT etag: - - '"0x8DA20FDC8AD7EFE"' + - '"0x8DAB585D45D6F0D"' last-modified: - - Mon, 18 Apr 2022 05:39:19 GMT + - Mon, 24 Oct 2022 06:06:02 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-access-tier: @@ -1098,7 +1153,7 @@ interactions: x-ms-blob-type: - BlockBlob x-ms-creation-time: - - Mon, 18 Apr 2022 05:38:24 GMT + - Mon, 24 Oct 2022 06:05:11 GMT x-ms-lease-state: - breaking x-ms-lease-status: @@ -1110,7 +1165,7 @@ interactions: x-ms-tag-count: - '2' x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -1130,9 +1185,9 @@ interactions: ParameterSetName: - -b -c --lease-id --tags-condition --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:35 GMT + - Mon, 24 Oct 2022 06:06:17 GMT x-ms-if-tags: - category='test' x-ms-lease-action: @@ -1140,7 +1195,7 @@ interactions: x-ms-lease-id: - dcbadcba-dcba-dcba-dcba-dcbadcbadcba x-ms-version: - - '2020-10-02' + - '2021-04-10' method: PUT uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005?comp=lease response: @@ -1150,15 +1205,15 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:39:36 GMT + - Mon, 24 Oct 2022 06:06:19 GMT etag: - - '"0x8DA20FDC8AD7EFE"' + - '"0x8DAB585D45D6F0D"' last-modified: - - Mon, 18 Apr 2022 05:39:19 GMT + - Mon, 24 Oct 2022 06:06:02 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -1176,11 +1231,11 @@ interactions: ParameterSetName: - -n -c --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:37 GMT + - Mon, 24 Oct 2022 06:06:19 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: HEAD uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005 response: @@ -1196,11 +1251,11 @@ interactions: content-type: - application/octet-stream date: - - Mon, 18 Apr 2022 05:39:38 GMT + - Mon, 24 Oct 2022 06:06:20 GMT etag: - - '"0x8DA20FDC8AD7EFE"' + - '"0x8DAB585D45D6F0D"' last-modified: - - Mon, 18 Apr 2022 05:39:19 GMT + - Mon, 24 Oct 2022 06:06:02 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-access-tier: @@ -1210,7 +1265,7 @@ interactions: x-ms-blob-type: - BlockBlob x-ms-creation-time: - - Mon, 18 Apr 2022 05:38:24 GMT + - Mon, 24 Oct 2022 06:05:11 GMT x-ms-lease-state: - available x-ms-lease-status: @@ -1222,7 +1277,7 @@ interactions: x-ms-tag-count: - '2' x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -1242,15 +1297,15 @@ interactions: ParameterSetName: - -n -c --tier --tags-condition --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-access-tier: - Hot x-ms-date: - - Mon, 18 Apr 2022 05:39:39 GMT + - Mon, 24 Oct 2022 06:06:20 GMT x-ms-if-tags: - category='test' x-ms-version: - - '2020-10-02' + - '2021-04-10' method: PUT uri: 
https://blobtag000002.blob.core.windows.net/cont1000003/blob000005?comp=tier response: @@ -1260,11 +1315,11 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:39:39 GMT + - Mon, 24 Oct 2022 06:06:24 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -1284,13 +1339,13 @@ interactions: ParameterSetName: - -n -c --tags-condition --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:40 GMT + - Mon, 24 Oct 2022 06:06:24 GMT x-ms-if-tags: - category='test' x-ms-version: - - '2020-10-02' + - '2021-04-10' method: PUT uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005?comp=snapshot response: @@ -1300,19 +1355,19 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:39:41 GMT + - Mon, 24 Oct 2022 06:06:28 GMT etag: - - '"0x8DA20FDC8AD7EFE"' + - '"0x8DAB585D45D6F0D"' last-modified: - - Mon, 18 Apr 2022 05:39:19 GMT + - Mon, 24 Oct 2022 06:06:02 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-request-server-encrypted: - 'false' x-ms-snapshot: - - '2022-04-18T05:39:41.9265144Z' + - '2022-10-24T06:06:28.8519523Z' x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 201 message: Created @@ -1336,13 +1391,13 @@ interactions: ParameterSetName: - -n -c --tags-condition --tags --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:42 GMT + - Mon, 24 Oct 2022 06:06:28 GMT x-ms-if-tags: - category='test' x-ms-version: - - '2020-10-02' + - '2021-04-10' method: PUT uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005?comp=tags response: @@ -1350,11 +1405,11 @@ interactions: string: '' headers: date: - - Mon, 18 Apr 2022 05:39:43 GMT + - Mon, 24 Oct 2022 06:06:30 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 204 message: No Content @@ -1372,11 +1427,11 @@ interactions: ParameterSetName: - -n -c --tags-condition --tags --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:44 GMT + - Mon, 24 Oct 2022 06:06:30 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005?comp=tags response: @@ -1388,11 +1443,11 @@ interactions: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:39:44 GMT + - Mon, 24 Oct 2022 06:06:30 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -1410,13 +1465,13 @@ interactions: ParameterSetName: - -n -c --tags-condition --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:44 GMT + - Mon, 24 Oct 
2022 06:06:30 GMT x-ms-if-tags: - category='test' x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005?comp=tags response: @@ -1428,11 +1483,11 @@ interactions: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:39:46 GMT + - Mon, 24 Oct 2022 06:06:32 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -1450,34 +1505,34 @@ interactions: ParameterSetName: - -c --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:47 GMT + - Mon, 24 Oct 2022 06:06:32 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://blobtag000002.blob.core.windows.net/cont1000003?restype=container&comp=list&maxresults=5000 response: body: string: "\uFEFF5000blob000005Mon, - 18 Apr 2022 05:38:24 GMTMon, 18 Apr 2022 05:39:19 - GMT0x8DA20FDC8AD7EFE131072application/octet-streamMon, 24 Oct 2022 06:06:02 + GMT0x8DAB585D45D6F0D131072application/octet-streamDfvoqkwgtS4bi/PLbL3xkw==BlockBlobHotMon, - 18 Apr 2022 05:39:40 GMTunlockedavailabletrue2unlockedavailabletrue2" headers: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:39:48 GMT + - Mon, 24 Oct 2022 06:06:34 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -1495,40 +1550,40 @@ interactions: ParameterSetName: - -c --include --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:49 GMT + - Mon, 24 Oct 2022 06:06:34 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://blobtag000002.blob.core.windows.net/cont1000003?restype=container&comp=list&maxresults=5000&include=snapshots response: body: string: "\uFEFF5000blob0000052022-04-18T05:39:41.9265144ZMon, - 18 Apr 2022 05:38:24 GMTMon, 18 Apr 2022 05:39:19 - GMT0x8DA20FDC8AD7EFE131072application/octet-stream5000blob0000052022-10-24T06:06:28.8519523ZMon, + 24 Oct 2022 06:05:11 GMTMon, 24 Oct 2022 06:06:02 + GMT0x8DAB585D45D6F0D131072application/octet-streamDfvoqkwgtS4bi/PLbL3xkw==BlockBlobHotMon, - 18 Apr 2022 05:39:40 GMTtrue2blob000005Mon, 18 Apr - 2022 05:38:24 GMTMon, 18 Apr 2022 05:39:19 - GMT0x8DA20FDC8AD7EFE131072application/octet-streamtrue2blob000005Mon, 24 Oct + 2022 06:05:11 GMTMon, 24 Oct 2022 06:06:02 + GMT0x8DAB585D45D6F0D131072application/octet-streamDfvoqkwgtS4bi/PLbL3xkw==BlockBlobHotMon, - 18 Apr 2022 05:39:40 GMTunlockedavailabletrue2unlockedavailabletrue2" headers: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:39:50 GMT + - Mon, 24 Oct 2022 06:06:35 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -1548,15 +1603,15 @@ interactions: ParameterSetName: - -n -c --tags-condition --snapshot --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 
(Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:51 GMT + - Mon, 24 Oct 2022 06:06:35 GMT x-ms-if-tags: - category='test' x-ms-version: - - '2020-10-02' + - '2021-04-10' method: DELETE - uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005?snapshot=2022-04-18T05:39:41.9265144Z + uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005?snapshot=2022-10-24T06:06:28.8519523Z response: body: string: '' @@ -1564,13 +1619,13 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:39:52 GMT + - Mon, 24 Oct 2022 06:06:36 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-delete-type-permanent: - 'true' x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 202 message: Accepted @@ -1590,13 +1645,13 @@ interactions: ParameterSetName: - -n -c --tags-condition --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:53 GMT + - Mon, 24 Oct 2022 06:06:37 GMT x-ms-if-tags: - category='test' x-ms-version: - - '2020-10-02' + - '2021-04-10' method: DELETE uri: https://blobtag000002.blob.core.windows.net/cont1000003/blob000005 response: @@ -1606,13 +1661,13 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:39:54 GMT + - Mon, 24 Oct 2022 06:06:40 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-delete-type-permanent: - 'true' x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 202 message: Accepted @@ -1630,11 +1685,11 @@ interactions: ParameterSetName: - -c --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:39:55 GMT + - Mon, 24 Oct 2022 06:06:40 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://blobtag000002.blob.core.windows.net/cont1000003?restype=container&comp=list&maxresults=5000 response: @@ -1646,13 +1701,13 @@ interactions: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:39:56 GMT + - Mon, 24 Oct 2022 06:06:42 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK diff --git a/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_blob_versioning.yaml b/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_blob_versioning.yaml index 7dd4b7998e0..dda0f58d464 100644 --- a/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_blob_versioning.yaml +++ b/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_blob_versioning.yaml @@ -13,9 +13,10 @@ interactions: ParameterSetName: - -n -g --enable-versioning User-Agent: - - AZURECLI/2.35.0 azsdk-python-azure-mgmt-storage/20.0.0 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-azure-mgmt-storage/20.1.0 Python/3.7.9 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/version000002/blobServices/default?api-version=2021-09-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/version000002/blobServices/default?api-version=2022-05-01 response: body: string: '{"sku":{"name":"Standard_LRS","tier":"Standard"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/version000002/blobServices/default","name":"default","type":"Microsoft.Storage/storageAccounts/blobServices","properties":{"cors":{"corsRules":[]},"deleteRetentionPolicy":{"allowPermanentDelete":false,"enabled":false}}}' @@ -27,7 +28,7 @@ interactions: content-type: - application/json date: - - Mon, 18 Apr 2022 05:41:33 GMT + - Mon, 24 Oct 2022 07:21:23 GMT expires: - '-1' pragma: @@ -64,9 +65,10 @@ interactions: ParameterSetName: - -n -g --enable-versioning User-Agent: - - AZURECLI/2.35.0 azsdk-python-azure-mgmt-storage/20.0.0 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-azure-mgmt-storage/20.1.0 Python/3.7.9 + (Windows-10-10.0.22621-SP0) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/version000002/blobServices/default?api-version=2021-09-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/version000002/blobServices/default?api-version=2022-05-01 response: body: string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/version000002/blobServices/default","name":"default","type":"Microsoft.Storage/storageAccounts/blobServices","properties":{"cors":{"corsRules":[]},"deleteRetentionPolicy":{"allowPermanentDelete":false,"enabled":false},"isVersioningEnabled":true}}' @@ -78,7 +80,7 @@ interactions: content-type: - application/json date: - - Mon, 18 Apr 2022 05:41:37 GMT + - Mon, 24 Oct 2022 07:21:25 GMT expires: - '-1' pragma: @@ -114,12 +116,13 @@ interactions: ParameterSetName: - -n -g --query -o User-Agent: - - AZURECLI/2.35.0 azsdk-python-azure-mgmt-storage/20.0.0 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-azure-mgmt-storage/20.1.0 Python/3.7.9 + (Windows-10-10.0.22621-SP0) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/version000002/listKeys?api-version=2021-09-01&$expand=kerb + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/version000002/listKeys?api-version=2022-05-01&$expand=kerb response: body: - string: '{"keys":[{"creationTime":"2022-04-18T05:41:06.0840412Z","keyName":"key1","value":"veryFakedStorageAccountKey==","permissions":"FULL"},{"creationTime":"2022-04-18T05:41:06.0840412Z","keyName":"key2","value":"veryFakedStorageAccountKey==","permissions":"FULL"}]}' + string: '{"keys":[{"creationTime":"2022-10-24T07:21:00.7657965Z","keyName":"key1","value":"veryFakedStorageAccountKey==","permissions":"FULL"},{"creationTime":"2022-10-24T07:21:00.7657965Z","keyName":"key2","value":"veryFakedStorageAccountKey==","permissions":"FULL"}]}' headers: cache-control: - no-cache @@ -128,7 +131,7 @@ interactions: content-type: - application/json date: - - Mon, 18 Apr 2022 
05:41:42 GMT + - Mon, 24 Oct 2022 07:21:27 GMT expires: - '-1' pragma: @@ -150,136 +153,46 @@ interactions: message: OK - request: body: null - headers: - Connection: - - keep-alive - Content-Length: - - '0' - User-Agent: - - Azure-Storage/2.0.0-2.0.1 (Python CPython 3.8.7; Windows 10) AZURECLI/2.35.0 - x-ms-date: - - Mon, 18 Apr 2022 05:41:43 GMT - x-ms-version: - - '2018-11-09' - method: PUT - uri: https://version000002.blob.core.windows.net/con000003?restype=container - response: - body: - string: '' - headers: - content-length: - - '0' - date: - - Mon, 18 Apr 2022 05:41:43 GMT - etag: - - '"0x8DA20FE1EDDFCB7"' - last-modified: - - Mon, 18 Apr 2022 05:41:43 GMT - server: - - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 - x-ms-version: - - '2018-11-09' - status: - code: 201 - message: Created -- request: - body: "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" headers: Accept: - application/xml Accept-Encoding: - gzip, deflate CommandName: - - storage blob upload + - storage container create Connection: - keep-alive Content-Length: - - '1024' - Content-Type: - - application/octet-stream - If-None-Match: - - '*' + - '0' ParameterSetName: - - -c -f -n --account-name --account-key + - -n --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) - x-ms-blob-type: - - BlockBlob + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.12.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:41:44 GMT + - Mon, 24 Oct 2022 07:21:27 
GMT x-ms-version: - - '2020-10-02' + - '2021-06-08' method: PUT - uri: https://version000002.blob.core.windows.net/con000003/blob000004 + uri: https://version000002.blob.core.windows.net/con000003?restype=container response: body: string: '' headers: content-length: - '0' - content-md5: - - DzQ7CTESaiDxM9Z8KwGKOw== date: - - Mon, 18 Apr 2022 05:41:44 GMT + - Mon, 24 Oct 2022 07:21:29 GMT etag: - - '"0x8DA20FE1FCFF519"' + - '"0x8DAB5905ED2537E"' last-modified: - - Mon, 18 Apr 2022 05:41:45 GMT + - Mon, 24 Oct 2022 07:21:29 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 - x-ms-content-crc64: - - iknlm7CyG2k= - x-ms-request-server-encrypted: - - 'true' x-ms-version: - - '2020-10-02' + - '2021-06-08' status: code: 201 message: Created -- request: - body: null - headers: - Accept: - - application/xml - Accept-Encoding: - - gzip, deflate - CommandName: - - storage blob list - Connection: - - keep-alive - ParameterSetName: - - -c --include --account-name --account-key - User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) - x-ms-date: - - Mon, 18 Apr 2022 05:41:46 GMT - x-ms-version: - - '2020-10-02' - method: GET - uri: https://version000002.blob.core.windows.net/con000003?restype=container&comp=list&maxresults=5000&include=versions - response: - body: - string: "\uFEFF5000blob000004Mon, - 18 Apr 2022 05:41:45 GMTMon, 18 Apr 2022 05:41:45 - GMT0x8DA20FE1FCFF5191024application/octet-streamDzQ7CTESaiDxM9Z8KwGKOw==BlockBlobHottrueunlockedavailabletrue" - headers: - content-type: - - application/xml - date: - - Mon, 18 Apr 2022 05:41:46 GMT - server: - - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 - transfer-encoding: - - chunked - x-ms-version: - - '2020-10-02' - status: - code: 200 - message: OK - request: body: 
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" headers: @@ -295,16 +208,18 @@ interactions: - '1024' Content-Type: - application/octet-stream + If-None-Match: + - '*' ParameterSetName: - - -c -f -n --overwrite --account-name --account-key + - -c -f -n --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-blob-type: - BlockBlob x-ms-date: - - Mon, 18 Apr 2022 05:41:57 GMT + - Mon, 24 Oct 2022 07:21:29 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: PUT uri: https://version000002.blob.core.windows.net/con000003/blob000004 response: @@ -316,11 +231,11 @@ interactions: content-md5: - DzQ7CTESaiDxM9Z8KwGKOw== date: - - Mon, 18 Apr 2022 05:41:58 GMT + - Mon, 24 Oct 2022 07:21:30 GMT etag: - - '"0x8DA20FE27D0328D"' + - '"0x8DAB5905FD53569"' last-modified: - - Mon, 18 Apr 2022 05:41:58 GMT + - Mon, 24 Oct 2022 07:21:31 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-content-crc64: @@ -328,9 +243,9 @@ interactions: x-ms-request-server-encrypted: - 'true' x-ms-version: - - '2020-10-02' + - '2021-04-10' x-ms-version-id: - - '2022-04-18T05:41:58.8174312Z' + - '2022-10-24T07:21:31.4082931Z' status: code: 201 message: Created @@ -348,24 +263,19 @@ interactions: ParameterSetName: - -c --include --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 
(Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:41:59 GMT + - Mon, 24 Oct 2022 07:21:31 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://version000002.blob.core.windows.net/con000003?restype=container&comp=list&maxresults=5000&include=versions response: body: string: "\uFEFF5000blob0000042022-04-18T05:41:45.3850905ZMon, - 18 Apr 2022 05:41:45 GMTMon, 18 Apr 2022 05:41:45 - GMT0x8DA20FE1FCFF5191024application/octet-streamDzQ7CTESaiDxM9Z8KwGKOw==BlockBlobHottruetrueblob0000042022-04-18T05:41:58.8174312ZtrueMon, - 18 Apr 2022 05:41:58 GMTMon, 18 Apr 2022 05:41:58 - GMT0x8DA20FE27D0328D1024application/octet-stream5000blob0000042022-10-24T07:21:31.4082931ZtrueMon, + 24 Oct 2022 07:21:31 GMTMon, 24 Oct 2022 07:21:31 + GMT0x8DAB5905FD535691024application/octet-streamDzQ7CTESaiDxM9Z8KwGKOw==BlockBlobHottrueunlockedavailabletrue" @@ -373,13 +283,13 @@ interactions: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:42:00 GMT + - Mon, 24 Oct 2022 07:21:32 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -401,13 +311,13 @@ interactions: ParameterSetName: - -c -f -n --overwrite --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-blob-type: - BlockBlob x-ms-date: - - Mon, 18 Apr 2022 05:42:01 GMT + - Mon, 24 Oct 2022 07:21:32 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: PUT uri: https://version000002.blob.core.windows.net/con000003/blob000004 response: @@ -419,11 +329,11 @@ interactions: content-md5: - yZp0xVU3GkM9Eh9VHWxjmA== date: - - Mon, 18 Apr 2022 05:42:01 GMT + - Mon, 24 Oct 2022 07:21:33 GMT etag: - - '"0x8DA20FE2A0EF92D"' + - '"0x8DAB59061A1EA2F"' last-modified: - - Mon, 18 Apr 2022 05:42:02 GMT + - Mon, 24 Oct 2022 07:21:34 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-content-crc64: @@ -431,9 +341,9 @@ interactions: x-ms-request-server-encrypted: - 'true' x-ms-version: - - '2020-10-02' + - '2021-04-10' x-ms-version-id: - - '2022-04-18T05:42:02.5762877Z' + - '2022-10-24T07:21:34.4275775Z' status: code: 201 message: Created @@ -451,13 +361,13 @@ interactions: ParameterSetName: - -c -n --version-id --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:42:03 GMT + - Mon, 24 Oct 2022 07:21:34 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: HEAD - uri: https://version000002.blob.core.windows.net/con000003/blob000004?versionid=2022-04-18T05%3A41%3A45.3850905Z + uri: https://version000002.blob.core.windows.net/con000003/blob000004?versionid=2022-10-24T07%3A21%3A31.4082931Z response: body: string: '' @@ -471,11 +381,11 @@ interactions: content-type: - application/octet-stream date: - - Mon, 18 Apr 2022 05:42:03 GMT + - Mon, 24 Oct 2022 07:21:35 GMT etag: - - '"0x8DA20FE1FCFF519"' + - '"0x8DAB5905FD53569"' last-modified: - - Mon, 18 Apr 2022 05:41:45 GMT + - Mon, 24 Oct 2022 07:21:31 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-access-tier: @@ -485,13 +395,13 @@ 
interactions: x-ms-blob-type: - BlockBlob x-ms-creation-time: - - Mon, 18 Apr 2022 05:41:45 GMT + - Mon, 24 Oct 2022 07:21:31 GMT x-ms-server-encrypted: - 'true' x-ms-version: - - '2020-10-02' + - '2021-04-10' x-ms-version-id: - - '2022-04-18T05:41:45.3850905Z' + - '2022-10-24T07:21:31.4082931Z' status: code: 200 message: OK @@ -509,15 +419,15 @@ interactions: ParameterSetName: - -c -n --version-id -f --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:42:05 GMT + - Mon, 24 Oct 2022 07:21:35 GMT x-ms-range: - bytes=0-33554431 x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET - uri: https://version000002.blob.core.windows.net/con000003/blob000004?versionid=2022-04-18T05%3A41%3A45.3850905Z + uri: https://version000002.blob.core.windows.net/con000003/blob000004?versionid=2022-10-24T07%3A21%3A31.4082931Z response: body: string: "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" @@ -531,11 +441,11 @@ interactions: content-type: - application/octet-stream date: - - Mon, 18 Apr 2022 05:42:06 GMT + - Mon, 24 Oct 2022 07:21:37 GMT etag: - - '"0x8DA20FE1FCFF519"' + - '"0x8DAB5905FD53569"' last-modified: - - Mon, 18 Apr 2022 05:41:45 GMT + - Mon, 24 Oct 2022 07:21:31 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-blob-content-md5: @@ -543,13 +453,13 @@ interactions: x-ms-blob-type: - BlockBlob 
x-ms-creation-time: - - Mon, 18 Apr 2022 05:41:45 GMT + - Mon, 24 Oct 2022 07:21:31 GMT x-ms-server-encrypted: - 'true' x-ms-version: - - '2020-10-02' + - '2021-04-10' x-ms-version-id: - - '2022-04-18T05:41:45.3850905Z' + - '2022-10-24T07:21:31.4082931Z' status: code: 206 message: Partial Content @@ -567,11 +477,11 @@ interactions: ParameterSetName: - -c -n --version-id -f --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:42:06 GMT + - Mon, 24 Oct 2022 07:21:36 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: HEAD uri: https://version000002.blob.core.windows.net/con000003/blob000004 response: @@ -587,11 +497,11 @@ interactions: content-type: - application/octet-stream date: - - Mon, 18 Apr 2022 05:42:06 GMT + - Mon, 24 Oct 2022 07:21:37 GMT etag: - - '"0x8DA20FE2A0EF92D"' + - '"0x8DAB59061A1EA2F"' last-modified: - - Mon, 18 Apr 2022 05:42:02 GMT + - Mon, 24 Oct 2022 07:21:34 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-access-tier: @@ -601,7 +511,7 @@ interactions: x-ms-blob-type: - BlockBlob x-ms-creation-time: - - Mon, 18 Apr 2022 05:42:02 GMT + - Mon, 24 Oct 2022 07:21:34 GMT x-ms-is-current-version: - 'true' x-ms-lease-state: @@ -611,9 +521,9 @@ interactions: x-ms-server-encrypted: - 'true' x-ms-version: - - '2020-10-02' + - '2021-04-10' x-ms-version-id: - - '2022-04-18T05:42:02.5762877Z' + - '2022-10-24T07:21:34.4275775Z' status: code: 200 message: OK @@ -633,15 +543,15 @@ interactions: ParameterSetName: - -c -n --version-id --tier --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-access-tier: - Cool x-ms-date: - - Mon, 18 Apr 2022 05:42:07 GMT + - Mon, 24 Oct 2022 07:21:37 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: PUT - uri: https://version000002.blob.core.windows.net/con000003/blob000004?comp=tier&versionid=2022-04-18T05%3A41%3A45.3850905Z + uri: https://version000002.blob.core.windows.net/con000003/blob000004?comp=tier&versionid=2022-10-24T07%3A21%3A31.4082931Z response: body: string: '' @@ -649,11 +559,11 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:42:07 GMT + - Mon, 24 Oct 2022 07:21:38 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -671,13 +581,13 @@ interactions: ParameterSetName: - -c -n --version-id --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:42:09 GMT + - Mon, 24 Oct 2022 07:21:40 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: HEAD - uri: https://version000002.blob.core.windows.net/con000003/blob000004?versionid=2022-04-18T05%3A41%3A45.3850905Z + uri: https://version000002.blob.core.windows.net/con000003/blob000004?versionid=2022-10-24T07%3A21%3A31.4082931Z response: body: string: '' @@ -691,27 +601,27 @@ interactions: content-type: - application/octet-stream date: - - Mon, 18 Apr 2022 05:42:09 GMT + - Mon, 24 Oct 2022 07:21:41 GMT etag: - - '"0x8DA20FE1FCFF519"' + - 
'"0x8DAB5905FD53569"' last-modified: - - Mon, 18 Apr 2022 05:41:45 GMT + - Mon, 24 Oct 2022 07:21:31 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-access-tier: - Cool x-ms-access-tier-change-time: - - Mon, 18 Apr 2022 05:42:08 GMT + - Mon, 24 Oct 2022 07:21:39 GMT x-ms-blob-type: - BlockBlob x-ms-creation-time: - - Mon, 18 Apr 2022 05:41:45 GMT + - Mon, 24 Oct 2022 07:21:31 GMT x-ms-server-encrypted: - 'true' x-ms-version: - - '2020-10-02' + - '2021-04-10' x-ms-version-id: - - '2022-04-18T05:41:45.3850905Z' + - '2022-10-24T07:21:31.4082931Z' status: code: 200 message: OK @@ -729,30 +639,25 @@ interactions: ParameterSetName: - -c --include --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:42:10 GMT + - Mon, 24 Oct 2022 07:21:41 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://version000002.blob.core.windows.net/con000003?restype=container&comp=list&maxresults=5000&include=versions response: body: string: "\uFEFF5000blob0000042022-04-18T05:41:45.3850905ZMon, - 18 Apr 2022 05:41:45 GMTMon, 18 Apr 2022 05:41:45 - GMT0x8DA20FE1FCFF5191024application/octet-stream5000blob0000042022-10-24T07:21:31.4082931ZMon, + 24 Oct 2022 07:21:31 GMTMon, 24 Oct 2022 07:21:31 + GMT0x8DAB5905FD535691024application/octet-streamDzQ7CTESaiDxM9Z8KwGKOw==BlockBlobCoolMon, - 18 Apr 2022 05:42:08 GMTtrueblob0000042022-04-18T05:41:58.8174312ZMon, - 18 Apr 2022 05:41:58 GMTMon, 18 Apr 2022 05:41:58 - GMT0x8DA20FE27D0328D1024application/octet-streamDzQ7CTESaiDxM9Z8KwGKOw==BlockBlobHottruetrueblob0000042022-04-18T05:42:02.5762877ZtrueMon, - 18 Apr 2022 05:42:02 GMTMon, 18 Apr 2022 05:42:02 - GMT0x8DA20FE2A0EF92D2048application/octet-streamtrueblob0000042022-10-24T07:21:34.4275775ZtrueMon, + 24 Oct 2022 07:21:34 GMTMon, 24 Oct 2022 07:21:34 + GMT0x8DAB59061A1EA2F2048application/octet-streamyZp0xVU3GkM9Eh9VHWxjmA==BlockBlobHottrueunlockedavailabletrue" @@ -760,13 +665,13 @@ interactions: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:42:11 GMT + - Mon, 24 Oct 2022 07:21:42 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -786,13 +691,13 @@ interactions: ParameterSetName: - -c -n --version-id --account-name --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:42:13 GMT + - Mon, 24 Oct 2022 07:21:43 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: DELETE - uri: https://version000002.blob.core.windows.net/con000003/blob000004?versionid=2022-04-18T05%3A41%3A45.3850905Z + uri: https://version000002.blob.core.windows.net/con000003/blob000004?versionid=2022-10-24T07%3A21%3A31.4082931Z response: body: string: '' @@ -800,13 +705,13 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:42:13 GMT + - Mon, 24 Oct 2022 07:21:44 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-delete-type-permanent: - 'true' x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 202 message: Accepted @@ -824,24 +729,19 @@ interactions: ParameterSetName: - -c --include --account-name --account-key 
User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:42:15 GMT + - Mon, 24 Oct 2022 07:21:44 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://version000002.blob.core.windows.net/con000003?restype=container&comp=list&maxresults=5000&include=versions response: body: string: "\uFEFF5000blob0000042022-04-18T05:41:58.8174312ZMon, - 18 Apr 2022 05:41:58 GMTMon, 18 Apr 2022 05:41:58 - GMT0x8DA20FE27D0328D1024application/octet-streamDzQ7CTESaiDxM9Z8KwGKOw==BlockBlobHottruetrueblob0000042022-04-18T05:42:02.5762877ZtrueMon, - 18 Apr 2022 05:42:02 GMTMon, 18 Apr 2022 05:42:02 - GMT0x8DA20FE2A0EF92D2048application/octet-stream5000blob0000042022-10-24T07:21:34.4275775ZtrueMon, + 24 Oct 2022 07:21:34 GMTMon, 24 Oct 2022 07:21:34 + GMT0x8DAB59061A1EA2F2048application/octet-streamyZp0xVU3GkM9Eh9VHWxjmA==BlockBlobHottrueunlockedavailabletrue" @@ -849,13 +749,13 @@ interactions: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:42:15 GMT + - Mon, 24 Oct 2022 07:21:46 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK diff --git a/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_blob_vlm.yaml b/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_blob_vlm.yaml index 06fb83c2716..ca0e68913c6 100644 --- a/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_blob_vlm.yaml +++ b/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_blob_vlm.yaml @@ -13,9 +13,10 @@ interactions: ParameterSetName: - -n -g --enable-versioning User-Agent: - - AZURECLI/2.35.0 azsdk-python-azure-mgmt-storage/20.0.0 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-azure-mgmt-storage/20.1.0 Python/3.7.9 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/version000002/blobServices/default?api-version=2021-09-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/version000002/blobServices/default?api-version=2022-05-01 response: body: string: '{"sku":{"name":"Standard_LRS","tier":"Standard"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/version000002/blobServices/default","name":"default","type":"Microsoft.Storage/storageAccounts/blobServices","properties":{"cors":{"corsRules":[]},"deleteRetentionPolicy":{"allowPermanentDelete":false,"enabled":false}}}' @@ -27,7 +28,7 @@ interactions: content-type: - application/json date: - - Mon, 18 Apr 2022 05:43:03 GMT + - Mon, 24 Oct 2022 07:21:24 GMT expires: - '-1' pragma: @@ -64,9 +65,10 @@ interactions: ParameterSetName: - -n -g --enable-versioning User-Agent: - - AZURECLI/2.35.0 azsdk-python-azure-mgmt-storage/20.0.0 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-azure-mgmt-storage/20.1.0 Python/3.7.9 + (Windows-10-10.0.22621-SP0) method: PUT - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/version000002/blobServices/default?api-version=2021-09-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/version000002/blobServices/default?api-version=2022-05-01 response: body: string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/version000002/blobServices/default","name":"default","type":"Microsoft.Storage/storageAccounts/blobServices","properties":{"cors":{"corsRules":[]},"deleteRetentionPolicy":{"allowPermanentDelete":false,"enabled":false},"isVersioningEnabled":true}}' @@ -78,7 +80,7 @@ interactions: content-type: - application/json date: - - Mon, 18 Apr 2022 05:43:06 GMT + - Mon, 24 Oct 2022 07:21:27 GMT expires: - '-1' pragma: @@ -94,7 +96,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - - '1198' + - '1199' status: code: 200 message: OK @@ -116,9 +118,10 @@ interactions: ParameterSetName: - -n --storage-account -g --enable-vlw User-Agent: - - AZURECLI/2.35.0 azsdk-python-azure-mgmt-storage/20.0.0 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-azure-mgmt-storage/20.1.0 Python/3.7.9 + (Windows-10-10.0.22621-SP0) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/version000002/blobServices/default/containers/container000003?api-version=2021-09-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/version000002/blobServices/default/containers/container000003?api-version=2022-05-01 response: body: string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/version000002/blobServices/default/containers/container000003","name":"container000003","type":"Microsoft.Storage/storageAccounts/blobServices/containers","properties":{"immutableStorageWithVersioning":{"enabled":true},"deleted":false,"remainingRetentionDays":0,"hasImmutabilityPolicy":false,"hasLegalHold":false}}' @@ -130,9 +133,9 @@ interactions: content-type: - application/json date: - - Mon, 18 Apr 2022 05:43:09 GMT + - Mon, 24 Oct 2022 07:21:28 GMT etag: - - '"0x8DA20FE51FDAAA7"' + - '"0x8DAB5905DE2FAF6"' expires: - '-1' pragma: @@ -144,7 +147,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - - '1199' + - '1198' status: code: 201 message: Created @@ -168,13 +171,13 @@ interactions: ParameterSetName: - -c -f -n --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-blob-type: - BlockBlob x-ms-date: - - Mon, 18 Apr 2022 05:43:10 GMT + - Mon, 24 Oct 2022 07:21:28 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: PUT uri: https://version000002.blob.core.windows.net/container000003/blob000004 response: @@ -186,11 +189,11 @@ interactions: content-md5: - EnZIEQLyGMmB4DJBgLr9nw== date: - - Mon, 18 Apr 2022 05:43:11 GMT + - Mon, 24 Oct 2022 07:21:29 GMT etag: 
- - '"0x8DA20FE53359BAD"' + - '"0x8DAB5905EE430A9"' last-modified: - - Mon, 18 Apr 2022 05:43:11 GMT + - Mon, 24 Oct 2022 07:21:29 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-content-crc64: @@ -198,7 +201,7 @@ interactions: x-ms-request-server-encrypted: - 'true' x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 201 message: Created @@ -218,15 +221,15 @@ interactions: ParameterSetName: - -n -c --expiry-time --policy-mode --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:43:12 GMT + - Mon, 24 Oct 2022 07:21:29 GMT x-ms-immutability-policy-mode: - Unlocked x-ms-immutability-policy-until-date: - - Mon, 18 Apr 2022 06:43:00 GMT + - Mon, 24 Oct 2022 08:21:00 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: PUT uri: https://version000002.blob.core.windows.net/container000003/blob000004?comp=immutabilityPolicies response: @@ -236,15 +239,15 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:43:12 GMT + - Mon, 24 Oct 2022 07:21:31 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-immutability-policy-mode: - unlocked x-ms-immutability-policy-until-date: - - Mon, 18 Apr 2022 06:43:00 GMT + - Mon, 24 Oct 2022 08:21:00 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -264,11 +267,11 @@ interactions: ParameterSetName: - -n -c --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:43:14 GMT + - Mon, 24 Oct 2022 07:21:31 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: DELETE uri: https://version000002.blob.core.windows.net/container000003/blob000004?comp=immutabilityPolicies response: @@ -278,11 +281,11 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:43:14 GMT + - Mon, 24 Oct 2022 07:21:34 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -302,13 +305,13 @@ interactions: ParameterSetName: - --legal-hold -n -c --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:43:16 GMT + - Mon, 24 Oct 2022 07:21:34 GMT x-ms-legal-hold: - 'true' x-ms-version: - - '2020-10-02' + - '2021-04-10' method: PUT uri: https://version000002.blob.core.windows.net/container000003/blob000004?comp=legalhold response: @@ -318,13 +321,13 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:43:18 GMT + - Mon, 24 Oct 2022 07:21:37 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-legal-hold: - 'true' x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -344,13 +347,13 @@ interactions: ParameterSetName: - --legal-hold -n -c --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:43:20 GMT + 
- Mon, 24 Oct 2022 07:21:37 GMT x-ms-legal-hold: - 'false' x-ms-version: - - '2020-10-02' + - '2021-04-10' method: PUT uri: https://version000002.blob.core.windows.net/container000003/blob000004?comp=legalhold response: @@ -360,13 +363,13 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:43:20 GMT + - Mon, 24 Oct 2022 07:21:39 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-legal-hold: - 'false' x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK diff --git a/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_container_list_scenarios.yaml b/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_container_list_scenarios.yaml index 5768fda6b87..baa3de43b09 100644 --- a/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_container_list_scenarios.yaml +++ b/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_container_list_scenarios.yaml @@ -2,16 +2,24 @@ interactions: - request: body: null headers: + Accept: + - application/xml + Accept-Encoding: + - gzip, deflate + CommandName: + - storage container create Connection: - keep-alive Content-Length: - '0' + ParameterSetName: + - -n --account-name --account-key User-Agent: - - Azure-Storage/2.0.0-2.0.1 (Python CPython 3.8.7; Windows 10) AZURECLI/2.35.0 + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.12.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:46:20 GMT + - Mon, 24 Oct 2022 07:21:26 GMT x-ms-version: - - '2018-11-09' + - '2021-06-08' method: PUT uri: https://clitest000002.blob.core.windows.net/con1000003?restype=container response: @@ -21,31 +29,39 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:46:22 GMT + - Mon, 24 Oct 2022 07:21:27 GMT etag: - - '"0x8DA20FEC5B30669"' + - '"0x8DAB5905E1090B1"' last-modified: - - Mon, 18 Apr 2022 05:46:23 GMT + - Mon, 24 Oct 2022 07:21:28 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2018-11-09' + - '2021-06-08' status: code: 201 message: Created - request: body: null headers: + Accept: + - application/xml + Accept-Encoding: + - gzip, deflate + CommandName: + - storage container create Connection: - keep-alive Content-Length: - '0' + ParameterSetName: + - -n --account-name --account-key User-Agent: - - Azure-Storage/2.0.0-2.0.1 (Python CPython 3.8.7; Windows 10) AZURECLI/2.35.0 + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.12.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:46:24 GMT + - Mon, 24 Oct 2022 07:21:28 GMT x-ms-version: - - '2018-11-09' + - '2021-06-08' method: PUT uri: https://clitest000002.blob.core.windows.net/con2000004?restype=container response: @@ -55,15 +71,15 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:46:24 GMT + - Mon, 24 Oct 2022 07:21:28 GMT etag: - - '"0x8DA20FEC6BD9183"' + - '"0x8DAB5905EF303E3"' last-modified: - - Mon, 18 Apr 2022 05:46:25 GMT + - Mon, 24 Oct 2022 07:21:29 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2018-11-09' + - '2021-06-08' status: code: 201 message: Created @@ -81,9 +97,10 @@ interactions: ParameterSetName: - -n -g --container-delete-retention-days --enable-container-delete-retention User-Agent: - - AZURECLI/2.35.0 azsdk-python-azure-mgmt-storage/20.0.0 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-azure-mgmt-storage/20.1.0 Python/3.7.9 
+ (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/blobServices/default?api-version=2021-09-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/blobServices/default?api-version=2022-05-01 response: body: string: '{"sku":{"name":"Standard_LRS","tier":"Standard"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/blobServices/default","name":"default","type":"Microsoft.Storage/storageAccounts/blobServices","properties":{"cors":{"corsRules":[]},"deleteRetentionPolicy":{"allowPermanentDelete":false,"enabled":false}}}' @@ -95,7 +112,7 @@ interactions: content-type: - application/json date: - - Mon, 18 Apr 2022 05:46:27 GMT + - Mon, 24 Oct 2022 07:21:30 GMT expires: - '-1' pragma: @@ -133,12 +150,13 @@ interactions: ParameterSetName: - -n -g --container-delete-retention-days --enable-container-delete-retention User-Agent: - - AZURECLI/2.35.0 azsdk-python-azure-mgmt-storage/20.0.0 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-azure-mgmt-storage/20.1.0 Python/3.7.9 + (Windows-10-10.0.22621-SP0) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/blobServices/default?api-version=2021-09-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/blobServices/default?api-version=2022-05-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/blobServices/default","name":"default","type":"Microsoft.Storage/storageAccounts/blobServices","properties":{"cors":{"corsRules":[]},"deleteRetentionPolicy":{"allowPermanentDelete":false,"enabled":false},"containerDeleteRetentionPolicy":{"enabled":true,"days":7}}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/blobServices/default","name":"default","type":"Microsoft.Storage/storageAccounts/blobServices","properties":{"containerDeleteRetentionPolicy":{"enabled":true,"days":7},"cors":{"corsRules":[]},"deleteRetentionPolicy":{"allowPermanentDelete":false,"enabled":false}}}' headers: cache-control: - no-cache @@ -147,7 +165,7 @@ interactions: content-type: - application/json date: - - Mon, 18 Apr 2022 05:46:30 GMT + - Mon, 24 Oct 2022 07:21:33 GMT expires: - '-1' pragma: @@ -181,49 +199,57 @@ interactions: ParameterSetName: - --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:46:33 GMT + - Mon, 24 Oct 2022 07:21:33 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://clitest000002.blob.core.windows.net/?comp=list&maxresults=5000&include= response: body: string: "\uFEFF5000con1000003Mon, - 18 Apr 2022 05:46:23 
GMT\"0x8DA20FEC5B30669\"unlockedavailable$account-encryption-keyfalsefalsefalsefalsecon2000004Mon, - 18 Apr 2022 05:46:25 GMT\"0x8DA20FEC6BD9183\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse\"0x8DAB5905E1090B1\"unlockedavailable$account-encryption-keyfalsefalsefalsefalsecon2000004Mon, + 24 Oct 2022 07:21:29 GMT\"0x8DAB5905EF303E3\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse" headers: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:46:33 GMT + - Mon, 24 Oct 2022 07:21:35 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK - request: body: null headers: + Accept: + - application/xml + Accept-Encoding: + - gzip, deflate + CommandName: + - storage container metadata update Connection: - keep-alive Content-Length: - '0' + ParameterSetName: + - -n --metadata --account-name --account-key User-Agent: - - Azure-Storage/2.0.0-2.0.1 (Python CPython 3.8.7; Windows 10) AZURECLI/2.35.0 + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.12.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:46:34 GMT + - Mon, 24 Oct 2022 07:21:34 GMT x-ms-meta-test: - '1' x-ms-version: - - '2018-11-09' + - '2021-06-08' method: PUT uri: https://clitest000002.blob.core.windows.net/con1000003?restype=container&comp=metadata response: @@ -233,31 +259,39 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:46:34 GMT + - Mon, 24 Oct 2022 07:21:36 GMT etag: - - '"0x8DA20FECCDEC8AB"' + - '"0x8DAB5906303D1CB"' last-modified: - - Mon, 18 Apr 2022 05:46:35 GMT + - Mon, 24 Oct 2022 07:21:36 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2018-11-09' + - '2021-06-08' status: code: 200 message: OK - request: body: null headers: + Accept: + - application/xml + Accept-Encoding: + - gzip, deflate + CommandName: + - storage container metadata show Connection: - keep-alive + ParameterSetName: + - -n --account-name --account-key User-Agent: - - Azure-Storage/2.0.0-2.0.1 (Python CPython 3.8.7; Windows 10) AZURECLI/2.35.0 + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.12.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:46:36 GMT + - Mon, 24 Oct 2022 07:21:36 GMT x-ms-version: - - '2018-11-09' + - '2021-06-08' method: GET - uri: https://clitest000002.blob.core.windows.net/con1000003?restype=container&comp=metadata + uri: https://clitest000002.blob.core.windows.net/con1000003?restype=container response: body: string: '' @@ -265,17 +299,31 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:46:36 GMT + - Mon, 24 Oct 2022 07:21:37 GMT etag: - - '"0x8DA20FECCDEC8AB"' + - '"0x8DAB5906303D1CB"' last-modified: - - Mon, 18 Apr 2022 05:46:35 GMT + - Mon, 24 Oct 2022 07:21:36 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 + x-ms-default-encryption-scope: + - $account-encryption-key + x-ms-deny-encryption-scope-override: + - 'false' + x-ms-has-immutability-policy: + - 'false' + x-ms-has-legal-hold: + - 'false' + x-ms-immutable-storage-with-versioning-enabled: + - 'false' + x-ms-lease-state: + - available + x-ms-lease-status: + - unlocked x-ms-meta-test: - '1' x-ms-version: - - '2018-11-09' + - '2021-06-08' status: code: 200 message: OK @@ -293,31 +341,31 @@ interactions: ParameterSetName: - --include-metadata --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - 
AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:46:38 GMT + - Mon, 24 Oct 2022 07:21:38 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://clitest000002.blob.core.windows.net/?comp=list&maxresults=5000&include=metadata response: body: string: "\uFEFF5000con1000003Mon, - 18 Apr 2022 05:46:35 GMT\"0x8DA20FECCDEC8AB\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse1con2000004Mon, - 18 Apr 2022 05:46:25 GMT\"0x8DA20FEC6BD9183\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse\"0x8DAB5906303D1CB\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse1con2000004Mon, + 24 Oct 2022 07:21:29 GMT\"0x8DAB5905EF303E3\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse" headers: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:46:38 GMT + - Mon, 24 Oct 2022 07:21:39 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -335,29 +383,29 @@ interactions: ParameterSetName: - --num-results --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:46:39 GMT + - Mon, 24 Oct 2022 07:21:40 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://clitest000002.blob.core.windows.net/?comp=list&maxresults=1&include= response: body: string: "\uFEFF1con1000003Mon, - 18 Apr 2022 05:46:35 GMT\"0x8DA20FECCDEC8AB\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse/clitest000002/con2000004" + 24 Oct 2022 07:21:36 GMT\"0x8DAB5906303D1CB\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse/clitest000002/con2000004" headers: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:46:40 GMT + - Mon, 24 Oct 2022 07:21:41 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -375,29 +423,29 @@ interactions: ParameterSetName: - --num-results --show-next-marker --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:46:41 GMT + - Mon, 24 Oct 2022 07:21:41 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://clitest000002.blob.core.windows.net/?comp=list&maxresults=1&include= response: body: string: "\uFEFF1con1000003Mon, - 18 Apr 2022 05:46:35 GMT\"0x8DA20FECCDEC8AB\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse/clitest000002/con2000004" + 24 Oct 2022 07:21:36 GMT\"0x8DAB5906303D1CB\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse/clitest000002/con2000004" headers: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:46:41 GMT + - Mon, 24 Oct 2022 07:21:43 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -415,30 +463,30 @@ interactions: ParameterSetName: - --marker --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) 
+ - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:46:43 GMT + - Mon, 24 Oct 2022 07:21:43 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://clitest000002.blob.core.windows.net/?comp=list&marker=%2Fclitest000002%2Fcon2000004&maxresults=5000&include= response: body: string: "\uFEFF/clitest000002/con20000045000con2000004Mon, - 18 Apr 2022 05:46:25 GMT\"0x8DA20FEC6BD9183\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse\"0x8DAB5905EF303E3\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse" headers: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:46:44 GMT + - Mon, 24 Oct 2022 07:21:45 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -456,46 +504,54 @@ interactions: ParameterSetName: - --prefix --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:46:46 GMT + - Mon, 24 Oct 2022 07:21:44 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://clitest000002.blob.core.windows.net/?comp=list&prefix=con1&maxresults=5000&include= response: body: string: "\uFEFFcon15000con1000003Mon, - 18 Apr 2022 05:46:35 GMT\"0x8DA20FECCDEC8AB\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse\"0x8DAB5906303D1CB\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse" headers: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:46:46 GMT + - Mon, 24 Oct 2022 07:21:46 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK - request: body: null headers: + Accept: + - application/xml + Accept-Encoding: + - gzip, deflate + CommandName: + - storage container delete Connection: - keep-alive Content-Length: - '0' + ParameterSetName: + - -n --account-name --account-key User-Agent: - - Azure-Storage/2.0.0-2.0.1 (Python CPython 3.8.7; Windows 10) AZURECLI/2.35.0 + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.12.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:46:47 GMT + - Mon, 24 Oct 2022 07:21:46 GMT x-ms-version: - - '2018-11-09' + - '2021-06-08' method: DELETE uri: https://clitest000002.blob.core.windows.net/con2000004?restype=container response: @@ -505,11 +561,11 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:46:49 GMT + - Mon, 24 Oct 2022 07:21:47 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2018-11-09' + - '2021-06-08' status: code: 202 message: Accepted @@ -527,30 +583,30 @@ interactions: ParameterSetName: - --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:46:50 GMT + - Mon, 24 Oct 2022 07:21:48 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://clitest000002.blob.core.windows.net/?comp=list&maxresults=5000&include= response: body: string: "\uFEFF5000con1000003Mon, - 18 Apr 2022 05:46:35 
GMT\"0x8DA20FECCDEC8AB\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse\"0x8DAB5906303D1CB\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse" headers: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:46:50 GMT + - Mon, 24 Oct 2022 07:21:49 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -568,32 +624,32 @@ interactions: ParameterSetName: - --include-deleted --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:46:52 GMT + - Mon, 24 Oct 2022 07:21:49 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://clitest000002.blob.core.windows.net/?comp=list&maxresults=5000&include=deleted response: body: string: "\uFEFF5000con1000003Mon, - 18 Apr 2022 05:46:35 GMT\"0x8DA20FECCDEC8AB\"unlockedavailable$account-encryption-keyfalsefalsefalsefalsecon2000004true01D852E7A44727B0Mon, - 18 Apr 2022 05:46:25 GMT\"0x8DA20FEC6BD9183\"lockedleasedfixed$account-encryption-keyfalsefalsefalsefalseMon, - 18 Apr 2022 05:46:49 GMT7\"0x8DAB5906303D1CB\"unlockedavailable$account-encryption-keyfalsefalsefalsefalsecon2000004true01D8E7793C7C3581Mon, + 24 Oct 2022 07:21:29 GMT\"0x8DAB5905EF303E3\"lockedleasedfixed$account-encryption-keyfalsefalsefalsefalseMon, + 24 Oct 2022 07:21:48 GMT7" headers: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:46:52 GMT + - Mon, 24 Oct 2022 07:21:50 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK diff --git a/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_container_soft_delete_oauth.yaml b/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_container_soft_delete_oauth.yaml index aa625bc566e..a9e6595f85c 100644 --- a/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_container_soft_delete_oauth.yaml +++ b/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_container_soft_delete_oauth.yaml @@ -15,12 +15,13 @@ interactions: ParameterSetName: - -n -g --query -o User-Agent: - - AZURECLI/2.35.0 azsdk-python-azure-mgmt-storage/20.0.0 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-azure-mgmt-storage/20.1.0 Python/3.7.9 + (Windows-10-10.0.22621-SP0) method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/listKeys?api-version=2021-09-01&$expand=kerb + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/listKeys?api-version=2022-05-01&$expand=kerb response: body: - string: '{"keys":[{"creationTime":"2022-04-18T07:11:32.4974824Z","keyName":"key1","value":"veryFakedStorageAccountKey==","permissions":"FULL"},{"creationTime":"2022-04-18T07:11:32.4974824Z","keyName":"key2","value":"veryFakedStorageAccountKey==","permissions":"FULL"}]}' + string: 
'{"keys":[{"creationTime":"2022-10-24T07:22:02.5962347Z","keyName":"key1","value":"veryFakedStorageAccountKey==","permissions":"FULL"},{"creationTime":"2022-10-24T07:22:02.5962347Z","keyName":"key2","value":"veryFakedStorageAccountKey==","permissions":"FULL"}]}' headers: cache-control: - no-cache @@ -29,7 +30,7 @@ interactions: content-type: - application/json date: - - Mon, 18 Apr 2022 07:11:57 GMT + - Mon, 24 Oct 2022 07:22:27 GMT expires: - '-1' pragma: @@ -52,16 +53,24 @@ interactions: - request: body: null headers: + Accept: + - application/xml + Accept-Encoding: + - gzip, deflate + CommandName: + - storage container create Connection: - keep-alive Content-Length: - '0' + ParameterSetName: + - -n --account-name --account-key User-Agent: - - Azure-Storage/2.0.0-2.0.1 (Python CPython 3.8.7; Windows 10) AZURECLI/2.35.0 + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.12.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 07:11:58 GMT + - Mon, 24 Oct 2022 07:22:26 GMT x-ms-version: - - '2018-11-09' + - '2021-06-08' method: PUT uri: https://clitest000002.blob.core.windows.net/con1000003?restype=container response: @@ -71,15 +80,15 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 07:12:06 GMT + - Mon, 24 Oct 2022 07:22:28 GMT etag: - - '"0x8DA210ABF8C772B"' + - '"0x8DAB5908220FD16"' last-modified: - - Mon, 18 Apr 2022 07:12:07 GMT + - Mon, 24 Oct 2022 07:22:28 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2018-11-09' + - '2021-06-08' status: code: 201 message: Created @@ -97,9 +106,10 @@ interactions: ParameterSetName: - -n -g --container-delete-retention-days --enable-container-delete-retention User-Agent: - - AZURECLI/2.35.0 azsdk-python-azure-mgmt-storage/20.0.0 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-azure-mgmt-storage/20.1.0 Python/3.7.9 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/blobServices/default?api-version=2021-09-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/blobServices/default?api-version=2022-05-01 response: body: string: '{"sku":{"name":"Standard_LRS","tier":"Standard"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/blobServices/default","name":"default","type":"Microsoft.Storage/storageAccounts/blobServices","properties":{"cors":{"corsRules":[]},"deleteRetentionPolicy":{"allowPermanentDelete":false,"enabled":false}}}' @@ -111,7 +121,7 @@ interactions: content-type: - application/json date: - - Mon, 18 Apr 2022 07:12:08 GMT + - Mon, 24 Oct 2022 07:22:30 GMT expires: - '-1' pragma: @@ -149,12 +159,13 @@ interactions: ParameterSetName: - -n -g --container-delete-retention-days --enable-container-delete-retention User-Agent: - - AZURECLI/2.35.0 azsdk-python-azure-mgmt-storage/20.0.0 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-azure-mgmt-storage/20.1.0 Python/3.7.9 + (Windows-10-10.0.22621-SP0) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/blobServices/default?api-version=2021-09-01 + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/blobServices/default?api-version=2022-05-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/blobServices/default","name":"default","type":"Microsoft.Storage/storageAccounts/blobServices","properties":{"cors":{"corsRules":[]},"deleteRetentionPolicy":{"allowPermanentDelete":false,"enabled":false},"containerDeleteRetentionPolicy":{"enabled":true,"days":7}}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/blobServices/default","name":"default","type":"Microsoft.Storage/storageAccounts/blobServices","properties":{"containerDeleteRetentionPolicy":{"enabled":true,"days":7},"cors":{"corsRules":[]},"deleteRetentionPolicy":{"allowPermanentDelete":false,"enabled":false}}}' headers: cache-control: - no-cache @@ -163,7 +174,7 @@ interactions: content-type: - application/json date: - - Mon, 18 Apr 2022 07:12:10 GMT + - Mon, 24 Oct 2022 07:22:32 GMT expires: - '-1' pragma: @@ -179,7 +190,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - - '1196' + - '1199' status: code: 200 message: OK @@ -197,46 +208,54 @@ interactions: ParameterSetName: - --account-name --auth-mode User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 07:12:12 GMT + - Mon, 24 Oct 2022 07:22:32 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://clitest000002.blob.core.windows.net/?comp=list&maxresults=5000&include= response: body: string: "\uFEFF5000con1000003Mon, - 18 Apr 2022 07:12:07 GMT\"0x8DA210ABF8C772B\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse\"0x8DAB5908220FD16\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse" headers: content-type: - application/xml date: - - Mon, 18 Apr 2022 07:12:14 GMT + - Mon, 24 Oct 2022 07:22:36 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK - request: body: null headers: + Accept: + - application/xml + Accept-Encoding: + - gzip, deflate + CommandName: + - storage container delete Connection: - keep-alive Content-Length: - '0' + ParameterSetName: + - -n --account-name --auth-mode User-Agent: - - Azure-Storage/2.0.0-2.0.1 (Python CPython 3.8.7; Windows 10) AZURECLI/2.35.0 + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.12.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 07:12:15 GMT + - Mon, 24 Oct 2022 07:22:36 GMT x-ms-version: - - '2018-11-09' + - '2021-06-08' method: DELETE uri: https://clitest000002.blob.core.windows.net/con1000003?restype=container response: @@ -246,11 +265,11 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 07:12:15 GMT + - Mon, 24 Oct 2022 07:22:37 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2018-11-09' + - '2021-06-08' status: code: 202 message: Accepted @@ -268,11 +287,11 @@ interactions: ParameterSetName: - --account-name --auth-mode User-Agent: - - AZURECLI/2.35.0 
azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 07:12:17 GMT + - Mon, 24 Oct 2022 07:22:38 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://clitest000002.blob.core.windows.net/?comp=list&maxresults=5000&include= response: @@ -284,13 +303,13 @@ interactions: content-type: - application/xml date: - - Mon, 18 Apr 2022 07:12:17 GMT + - Mon, 24 Oct 2022 07:22:39 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -308,31 +327,31 @@ interactions: ParameterSetName: - --include-deleted --account-name --auth-mode User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 07:12:18 GMT + - Mon, 24 Oct 2022 07:22:39 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://clitest000002.blob.core.windows.net/?comp=list&maxresults=5000&include=deleted response: body: string: "\uFEFF5000con1000003true01D852F39D169066Mon, - 18 Apr 2022 07:12:07 GMT\"0x8DA210ABF8C772B\"lockedleasedfixed$account-encryption-keyfalsefalsefalsefalseMon, - 18 Apr 2022 07:12:16 GMT75000con1000003true01D8E7795FAA863AMon, + 24 Oct 2022 07:22:28 GMT\"0x8DAB5908220FD16\"lockedleasedfixed$account-encryption-keyfalsefalsefalsefalseMon, + 24 Oct 2022 07:22:38 GMT7" headers: content-type: - application/xml date: - - Mon, 18 Apr 2022 07:12:18 GMT + - Mon, 24 Oct 2022 07:22:40 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -350,31 +369,31 @@ interactions: ParameterSetName: - --include-deleted --query -o --account-name --auth-mode User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 07:12:50 GMT + - Mon, 24 Oct 2022 07:23:11 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://clitest000002.blob.core.windows.net/?comp=list&maxresults=5000&include=deleted response: body: string: "\uFEFF5000con1000003true01D852F39D169066Mon, - 18 Apr 2022 07:12:07 GMT\"0x8DA210ABF8C772B\"unlockedexpired$account-encryption-keyfalsefalsefalsefalseMon, - 18 Apr 2022 07:12:16 GMT75000con1000003true01D8E7795FAA863AMon, + 24 Oct 2022 07:22:28 GMT\"0x8DAB5908220FD16\"unlockedexpired$account-encryption-keyfalsefalsefalsefalseMon, + 24 Oct 2022 07:22:38 GMT7" headers: content-type: - application/xml date: - - Mon, 18 Apr 2022 07:12:51 GMT + - Mon, 24 Oct 2022 07:23:12 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -394,15 +413,15 @@ interactions: ParameterSetName: - -n --deleted-version --account-name --auth-mode User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 07:12:52 GMT + - Mon, 24 Oct 2022 07:23:13 GMT 
x-ms-deleted-container-name: - - con1hxuybeeihdtjf7wblaqg + - con1pchkau3ecxkeerpai2gf x-ms-deleted-container-version: - - 01D852F39D169066 + - 01D8E7795FAA863A x-ms-version: - - '2020-10-02' + - '2021-04-10' method: PUT uri: https://clitest000002.blob.core.windows.net/con1000003?restype=container&comp=undelete response: @@ -412,11 +431,11 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 07:12:52 GMT + - Mon, 24 Oct 2022 07:23:15 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 201 message: Created @@ -434,30 +453,30 @@ interactions: ParameterSetName: - --account-name --auth-mode User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 07:12:54 GMT + - Mon, 24 Oct 2022 07:23:14 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://clitest000002.blob.core.windows.net/?comp=list&maxresults=5000&include= response: body: string: "\uFEFF5000con1000003Mon, - 18 Apr 2022 07:12:53 GMT\"0x8DA210ADAE72629\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse\"0x8DAB5909DBF9CA0\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse" headers: content-type: - application/xml date: - - Mon, 18 Apr 2022 07:12:54 GMT + - Mon, 24 Oct 2022 07:23:16 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -475,30 +494,30 @@ interactions: ParameterSetName: - --include-deleted --account-name --auth-mode User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 07:12:55 GMT + - Mon, 24 Oct 2022 07:23:16 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://clitest000002.blob.core.windows.net/?comp=list&maxresults=5000&include=deleted response: body: string: "\uFEFF5000con1000003Mon, - 18 Apr 2022 07:12:53 GMT\"0x8DA210ADAE72629\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse\"0x8DAB5909DBF9CA0\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse" headers: content-type: - application/xml date: - - Mon, 18 Apr 2022 07:12:56 GMT + - Mon, 24 Oct 2022 07:23:17 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK diff --git a/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_container_soft_delete_scenarios.yaml b/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_container_soft_delete_scenarios.yaml index c74b28bcd71..6abb12c4ead 100644 --- a/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_container_soft_delete_scenarios.yaml +++ b/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/recordings/test_storage_container_soft_delete_scenarios.yaml @@ -15,12 +15,13 @@ interactions: ParameterSetName: - -n -g --query -o User-Agent: - - AZURECLI/2.35.0 azsdk-python-azure-mgmt-storage/20.0.0 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-azure-mgmt-storage/20.1.0 Python/3.7.9 + (Windows-10-10.0.22621-SP0) method: 
POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/listKeys?api-version=2021-09-01&$expand=kerb + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/listKeys?api-version=2022-05-01&$expand=kerb response: body: - string: '{"keys":[{"creationTime":"2022-04-18T05:47:12.0713255Z","keyName":"key1","value":"veryFakedStorageAccountKey==","permissions":"FULL"},{"creationTime":"2022-04-18T05:47:12.0713255Z","keyName":"key2","value":"veryFakedStorageAccountKey==","permissions":"FULL"}]}' + string: '{"keys":[{"creationTime":"2022-10-24T07:20:59.8126352Z","keyName":"key1","value":"veryFakedStorageAccountKey==","permissions":"FULL"},{"creationTime":"2022-10-24T07:20:59.8126352Z","keyName":"key2","value":"veryFakedStorageAccountKey==","permissions":"FULL"}]}' headers: cache-control: - no-cache @@ -29,7 +30,7 @@ interactions: content-type: - application/json date: - - Mon, 18 Apr 2022 05:47:35 GMT + - Mon, 24 Oct 2022 07:21:24 GMT expires: - '-1' pragma: @@ -52,16 +53,24 @@ interactions: - request: body: null headers: + Accept: + - application/xml + Accept-Encoding: + - gzip, deflate + CommandName: + - storage container create Connection: - keep-alive Content-Length: - '0' + ParameterSetName: + - -n --account-name --account-key User-Agent: - - Azure-Storage/2.0.0-2.0.1 (Python CPython 3.8.7; Windows 10) AZURECLI/2.35.0 + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.12.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:47:36 GMT + - Mon, 24 Oct 2022 07:21:24 GMT x-ms-version: - - '2018-11-09' + - '2021-06-08' method: PUT uri: https://clitest000002.blob.core.windows.net/con1000003?restype=container response: @@ -71,15 +80,15 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:47:36 GMT + - Mon, 24 Oct 2022 07:21:25 GMT etag: - - '"0x8DA20FEF1C5EAD8"' + - '"0x8DAB5905D1C7001"' last-modified: - - Mon, 18 Apr 2022 05:47:37 GMT + - Mon, 24 Oct 2022 07:21:26 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2018-11-09' + - '2021-06-08' status: code: 201 message: Created @@ -97,9 +106,10 @@ interactions: ParameterSetName: - -n -g --container-delete-retention-days --enable-container-delete-retention User-Agent: - - AZURECLI/2.35.0 azsdk-python-azure-mgmt-storage/20.0.0 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-azure-mgmt-storage/20.1.0 Python/3.7.9 + (Windows-10-10.0.22621-SP0) method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/blobServices/default?api-version=2021-09-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/blobServices/default?api-version=2022-05-01 response: body: string: '{"sku":{"name":"Standard_LRS","tier":"Standard"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/blobServices/default","name":"default","type":"Microsoft.Storage/storageAccounts/blobServices","properties":{"cors":{"corsRules":[]},"deleteRetentionPolicy":{"allowPermanentDelete":false,"enabled":false}}}' @@ -111,7 +121,7 
@@ interactions: content-type: - application/json date: - - Mon, 18 Apr 2022 05:47:38 GMT + - Mon, 24 Oct 2022 07:21:28 GMT expires: - '-1' pragma: @@ -149,12 +159,13 @@ interactions: ParameterSetName: - -n -g --container-delete-retention-days --enable-container-delete-retention User-Agent: - - AZURECLI/2.35.0 azsdk-python-azure-mgmt-storage/20.0.0 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-azure-mgmt-storage/20.1.0 Python/3.7.9 + (Windows-10-10.0.22621-SP0) method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/blobServices/default?api-version=2021-09-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/blobServices/default?api-version=2022-05-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/blobServices/default","name":"default","type":"Microsoft.Storage/storageAccounts/blobServices","properties":{"cors":{"corsRules":[]},"deleteRetentionPolicy":{"allowPermanentDelete":false,"enabled":false},"containerDeleteRetentionPolicy":{"enabled":true,"days":7}}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000001/providers/Microsoft.Storage/storageAccounts/clitest000002/blobServices/default","name":"default","type":"Microsoft.Storage/storageAccounts/blobServices","properties":{"containerDeleteRetentionPolicy":{"enabled":true,"days":7},"cors":{"corsRules":[]},"deleteRetentionPolicy":{"allowPermanentDelete":false,"enabled":false}}}' headers: cache-control: - no-cache @@ -163,7 +174,7 @@ interactions: content-type: - application/json date: - - Mon, 18 Apr 2022 05:47:41 GMT + - Mon, 24 Oct 2022 07:21:29 GMT expires: - '-1' pragma: @@ -179,7 +190,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - - '1198' + - '1199' status: code: 200 message: OK @@ -197,46 +208,54 @@ interactions: ParameterSetName: - --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:47:43 GMT + - Mon, 24 Oct 2022 07:21:29 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://clitest000002.blob.core.windows.net/?comp=list&maxresults=5000&include= response: body: string: "\uFEFF5000con1000003Mon, - 18 Apr 2022 05:47:37 GMT\"0x8DA20FEF1C5EAD8\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse\"0x8DAB5905D1C7001\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse" headers: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:47:43 GMT + - Mon, 24 Oct 2022 07:21:30 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK - request: body: null headers: + Accept: + - application/xml + Accept-Encoding: + - gzip, deflate + CommandName: + - storage container delete Connection: - keep-alive Content-Length: - '0' + ParameterSetName: + - -n --account-name --account-key User-Agent: - - Azure-Storage/2.0.0-2.0.1 (Python CPython 3.8.7; Windows 10) AZURECLI/2.35.0 + - 
AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.12.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:47:44 GMT + - Mon, 24 Oct 2022 07:21:31 GMT x-ms-version: - - '2018-11-09' + - '2021-06-08' method: DELETE uri: https://clitest000002.blob.core.windows.net/con1000003?restype=container response: @@ -246,11 +265,11 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:47:44 GMT + - Mon, 24 Oct 2022 07:21:32 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2018-11-09' + - '2021-06-08' status: code: 202 message: Accepted @@ -268,11 +287,11 @@ interactions: ParameterSetName: - --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:47:46 GMT + - Mon, 24 Oct 2022 07:21:32 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://clitest000002.blob.core.windows.net/?comp=list&maxresults=5000&include= response: @@ -284,13 +303,13 @@ interactions: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:47:46 GMT + - Mon, 24 Oct 2022 07:21:33 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -308,31 +327,31 @@ interactions: ParameterSetName: - --include-deleted --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:47:48 GMT + - Mon, 24 Oct 2022 07:21:34 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://clitest000002.blob.core.windows.net/?comp=list&maxresults=5000&include=deleted response: body: string: "\uFEFF5000con1000003true01D852E7CF4EF55EMon, - 18 Apr 2022 05:47:37 GMT\"0x8DA20FEF1C5EAD8\"lockedleasedfixed$account-encryption-keyfalsefalsefalsefalseMon, - 18 Apr 2022 05:47:45 GMT75000con1000003true01D8E7793AA55F61Mon, + 24 Oct 2022 07:21:26 GMT\"0x8DAB5905D1C7001\"lockedleasedfixed$account-encryption-keyfalsefalsefalsefalseMon, + 24 Oct 2022 07:21:33 GMT7" headers: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:47:48 GMT + - Mon, 24 Oct 2022 07:21:36 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -350,31 +369,31 @@ interactions: ParameterSetName: - --include-deleted --query -o --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:48:19 GMT + - Mon, 24 Oct 2022 07:22:05 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://clitest000002.blob.core.windows.net/?comp=list&maxresults=5000&include=deleted response: body: string: "\uFEFF5000con1000003true01D852E7CF4EF55EMon, - 18 Apr 2022 05:47:37 GMT\"0x8DA20FEF1C5EAD8\"unlockedexpired$account-encryption-keyfalsefalsefalsefalseMon, - 18 Apr 2022 05:47:45 GMT75000con1000003true01D8E7793AA55F61Mon, + 24 Oct 2022 07:21:26 GMT\"0x8DAB5905D1C7001\"unlockedexpired$account-encryption-keyfalsefalsefalsefalseMon, + 24 
Oct 2022 07:21:33 GMT7" headers: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:48:19 GMT + - Mon, 24 Oct 2022 07:22:07 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -394,15 +413,15 @@ interactions: ParameterSetName: - -n --deleted-version --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:48:21 GMT + - Mon, 24 Oct 2022 07:22:07 GMT x-ms-deleted-container-name: - - con1lo4ptk7kri2rw2odiw4u + - con1w7ehfgrhmcwol5n7kit4 x-ms-deleted-container-version: - - 01D852E7CF4EF55E + - 01D8E7793AA55F61 x-ms-version: - - '2020-10-02' + - '2021-04-10' method: PUT uri: https://clitest000002.blob.core.windows.net/con1000003?restype=container&comp=undelete response: @@ -412,11 +431,11 @@ interactions: content-length: - '0' date: - - Mon, 18 Apr 2022 05:48:22 GMT + - Mon, 24 Oct 2022 07:22:10 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 201 message: Created @@ -434,30 +453,30 @@ interactions: ParameterSetName: - --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:48:24 GMT + - Mon, 24 Oct 2022 07:22:10 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://clitest000002.blob.core.windows.net/?comp=list&maxresults=5000&include= response: body: string: "\uFEFF5000con1000003Mon, - 18 Apr 2022 05:48:23 GMT\"0x8DA20FF0D13448F\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse\"0x8DAB590776AFF34\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse" headers: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:48:24 GMT + - Mon, 24 Oct 2022 07:22:12 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK @@ -475,30 +494,30 @@ interactions: ParameterSetName: - --include-deleted --account-name --account-key User-Agent: - - AZURECLI/2.35.0 azsdk-python-storage-blob/12.9.0b1 Python/3.8.7 (Windows-10-10.0.22000-SP0) + - AZURECLI/2.41.0 (PIP) azsdk-python-storage-blob/12.11.0 Python/3.7.9 (Windows-10-10.0.22621-SP0) x-ms-date: - - Mon, 18 Apr 2022 05:48:26 GMT + - Mon, 24 Oct 2022 07:22:12 GMT x-ms-version: - - '2020-10-02' + - '2021-04-10' method: GET uri: https://clitest000002.blob.core.windows.net/?comp=list&maxresults=5000&include=deleted response: body: string: "\uFEFF5000con1000003Mon, - 18 Apr 2022 05:48:23 GMT\"0x8DA20FF0D13448F\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse\"0x8DAB590776AFF34\"unlockedavailable$account-encryption-keyfalsefalsefalsefalse" headers: content-type: - application/xml date: - - Mon, 18 Apr 2022 05:48:27 GMT + - Mon, 24 Oct 2022 07:22:14 GMT server: - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked x-ms-version: - - '2020-10-02' + - '2021-04-10' status: code: 200 message: OK diff --git a/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/test_storage_blob_preview_scenario.py 
b/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/test_storage_blob_preview_scenario.py index d21c4427596..9ef5ac10516 100644 --- a/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/test_storage_blob_preview_scenario.py +++ b/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/test_storage_blob_preview_scenario.py @@ -240,6 +240,12 @@ def test_storage_blob_tags_scenario(self, resource_group, storage_account_info): JMESPathCheck('[0].containerName', container1), JMESPathCheck('[0].name', blob_name1)) + # find blobs in specified container with index tags + self.storage_cmd("storage blob filter --tag-filter \"test='tag'\" -c {}", account_info, container1). \ + assert_with_checks(JMESPathCheck('length(@)', 1), + JMESPathCheck('[0].containerName', container1), + JMESPathCheck('[0].name', blob_name1)) + # tag condition tag_condition = "test=\'tag\'" self.storage_cmd('storage blob copy start --source-blob {} --source-container {} -c {} -b {} ' diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/__init__.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/__init__.py new file mode 100644 index 00000000000..58442edc91e --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/__init__.py @@ -0,0 +1,239 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import os + +from typing import Union, Iterable, AnyStr, IO, Any, Dict # pylint: disable=unused-import +from ._version import VERSION +from ._blob_client import BlobClient +from ._container_client import ContainerClient +from ._blob_service_client import BlobServiceClient +from ._lease import BlobLeaseClient +from ._download import StorageStreamDownloader +from ._quick_query_helper import BlobQueryReader +from ._shared_access_signature import generate_account_sas, generate_container_sas, generate_blob_sas +from ._shared.policies import ExponentialRetry, LinearRetry +from ._shared.response_handlers import PartialBatchErrorException +from ._shared.models import( + LocationMode, + ResourceTypes, + AccountSasPermissions, + StorageErrorCode, + UserDelegationKey +) +from ._generated.models import ( + RehydratePriority, +) +from ._models import ( + BlobType, + BlockState, + StandardBlobTier, + PremiumPageBlobTier, + BlobImmutabilityPolicyMode, + SequenceNumberAction, + PublicAccess, + BlobAnalyticsLogging, + Metrics, + RetentionPolicy, + StaticWebsite, + CorsRule, + ContainerProperties, + BlobProperties, + FilteredBlob, + LeaseProperties, + ContentSettings, + CopyProperties, + BlobBlock, + PageRange, + AccessPolicy, + ContainerSasPermissions, + BlobSasPermissions, + CustomerProvidedEncryptionKey, + ContainerEncryptionScope, + BlobQueryError, + DelimitedJsonDialect, + DelimitedTextDialect, + QuickQueryDialect, + ArrowDialect, + ArrowType, + ObjectReplicationPolicy, + ObjectReplicationRule, + ImmutabilityPolicy +) +from ._list_blobs_helper import BlobPrefix + +__version__ = VERSION + + +def upload_blob_to_url( + blob_url, # type: str + data, # type: Union[Iterable[AnyStr], IO[AnyStr]] + credential=None, # type: Any + **kwargs): + # type: (...) 
-> Dict[str, Any] + """Upload data to a given URL + + The data will be uploaded as a block blob. + + :param str blob_url: + The full URI to the blob. This can also include a SAS token. + :param data: + The data to upload. This can be bytes, text, an iterable or a file-like object. + :type data: bytes or str or Iterable + :param credential: + The credentials with which to authenticate. This is optional if the + blob URL already has a SAS token. The value can be a SAS token string, + an instance of a AzureSasCredential from azure.core.credentials, an account + shared access key, or an instance of a TokenCredentials class from azure.identity. + If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential + - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + :keyword bool overwrite: + Whether the blob to be uploaded should overwrite the current data. + If True, upload_blob_to_url will overwrite any existing data. If set to False, the + operation will fail with a ResourceExistsError. + :keyword int max_concurrency: + The number of parallel connections with which to download. + :keyword int length: + Number of bytes to read from the stream. This is optional, but + should be supplied for optimal performance. + :keyword dict(str,str) metadata: + Name-value pairs associated with the blob as metadata. + :keyword bool validate_content: + If true, calculates an MD5 hash for each chunk of the blob. The storage + service checks the hash of the content that has arrived with the hash + that was sent. This is primarily valuable for detecting bitflips on + the wire if using http instead of https as https (the default) will + already validate. Note that this MD5 hash is not stored with the + blob. Also note that if enabled, the memory-efficient upload algorithm + will not be used, because computing the MD5 hash requires buffering + entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. + :keyword str encoding: + Encoding to use if text is supplied as input. Defaults to UTF-8. + :returns: Blob-updated property dict (Etag and last modified) + :rtype: dict(str, Any) + """ + with BlobClient.from_blob_url(blob_url, credential=credential) as client: + return client.upload_blob(data=data, blob_type=BlobType.BlockBlob, **kwargs) + + +def _download_to_stream(client, handle, **kwargs): + """Download data to specified open file-handle.""" + stream = client.download_blob(**kwargs) + stream.readinto(handle) + + +def download_blob_from_url( + blob_url, # type: str + output, # type: str + credential=None, # type: Any + **kwargs): + # type: (...) -> None + """Download the contents of a blob to a local file or stream. + + :param str blob_url: + The full URI to the blob. This can also include a SAS token. + :param output: + Where the data should be downloaded to. This could be either a file path to write to, + or an open IO handle to write to. + :type output: str or writable stream. + :param credential: + The credentials with which to authenticate. This is optional if the + blob URL already has a SAS token or the blob is public. The value can be a SAS token string, + an instance of a AzureSasCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. 
+ If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential + - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + :keyword bool overwrite: + Whether the local file should be overwritten if it already exists. The default value is + `False` - in which case a ValueError will be raised if the file already exists. If set to + `True`, an attempt will be made to write to the existing file. If a stream handle is passed + in, this value is ignored. + :keyword int max_concurrency: + The number of parallel connections with which to download. + :keyword int offset: + Start of byte range to use for downloading a section of the blob. + Must be set if length is provided. + :keyword int length: + Number of bytes to read from the stream. This is optional, but + should be supplied for optimal performance. + :keyword bool validate_content: + If true, calculates an MD5 hash for each chunk of the blob. The storage + service checks the hash of the content that has arrived with the hash + that was sent. This is primarily valuable for detecting bitflips on + the wire if using http instead of https as https (the default) will + already validate. Note that this MD5 hash is not stored with the + blob. Also note that if enabled, the memory-efficient upload algorithm + will not be used, because computing the MD5 hash requires buffering + entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. + :rtype: None + """ + overwrite = kwargs.pop('overwrite', False) + with BlobClient.from_blob_url(blob_url, credential=credential) as client: + if hasattr(output, 'write'): + _download_to_stream(client, output, **kwargs) + else: + if not overwrite and os.path.isfile(output): + raise ValueError("The file '{}' already exists.".format(output)) + with open(output, 'wb') as file_handle: + _download_to_stream(client, file_handle, **kwargs) + + +__all__ = [ + 'upload_blob_to_url', + 'download_blob_from_url', + 'BlobServiceClient', + 'ContainerClient', + 'BlobClient', + 'BlobType', + 'BlobLeaseClient', + 'StorageErrorCode', + 'UserDelegationKey', + 'ExponentialRetry', + 'LinearRetry', + 'LocationMode', + 'BlockState', + 'StandardBlobTier', + 'PremiumPageBlobTier', + 'SequenceNumberAction', + 'BlobImmutabilityPolicyMode', + 'ImmutabilityPolicy', + 'PublicAccess', + 'BlobAnalyticsLogging', + 'Metrics', + 'RetentionPolicy', + 'StaticWebsite', + 'CorsRule', + 'ContainerProperties', + 'BlobProperties', + 'BlobPrefix', + 'FilteredBlob', + 'LeaseProperties', + 'ContentSettings', + 'CopyProperties', + 'BlobBlock', + 'PageRange', + 'AccessPolicy', + 'QuickQueryDialect', + 'ContainerSasPermissions', + 'BlobSasPermissions', + 'ResourceTypes', + 'AccountSasPermissions', + 'StorageStreamDownloader', + 'CustomerProvidedEncryptionKey', + 'RehydratePriority', + 'generate_account_sas', + 'generate_container_sas', + 'generate_blob_sas', + 'PartialBatchErrorException', + 'ContainerEncryptionScope', + 'BlobQueryError', + 'DelimitedJsonDialect', + 'DelimitedTextDialect', + 'ArrowDialect', + 'ArrowType', + 'BlobQueryReader', + 'ObjectReplicationPolicy', + 'ObjectReplicationRule' +] diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_blob_client.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_blob_client.py new file mode 100644 index 00000000000..65f901fe9be --- /dev/null +++ 
b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_blob_client.py @@ -0,0 +1,4003 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# pylint: disable=too-many-lines,no-self-use +from functools import partial +from io import BytesIO +from typing import ( # pylint: disable=unused-import + Union, Optional, Any, IO, Iterable, AnyStr, Dict, List, Tuple, + TYPE_CHECKING, + TypeVar, Type) + +try: + from urllib.parse import urlparse, quote, unquote +except ImportError: + from urlparse import urlparse # type: ignore + from urllib2 import quote, unquote # type: ignore +import six +from azure.core.pipeline import Pipeline +from azure.core.tracing.decorator import distributed_trace +from azure.core.exceptions import ResourceNotFoundError, HttpResponseError, ResourceExistsError + +from ._shared import encode_base64 +from ._shared.base_client import StorageAccountHostsMixin, parse_connection_str, parse_query, TransportWrapper +from ._shared.encryption import generate_blob_encryption_data +from ._shared.uploads import IterStreamer +from ._shared.request_handlers import ( + add_metadata_headers, get_length, read_length, + validate_and_format_range_headers) +from ._shared.response_handlers import return_response_headers, process_storage_error, return_headers_and_deserialized +from ._generated import AzureBlobStorage +from ._generated.models import ( # pylint: disable=unused-import + DeleteSnapshotsOptionType, + BlobHTTPHeaders, + BlockLookupList, + AppendPositionAccessConditions, + SequenceNumberAccessConditions, + QueryRequest, + CpkInfo) +from ._serialize import ( + get_modify_conditions, + get_source_conditions, + get_cpk_scope_info, + get_api_version, + serialize_blob_tags_header, + serialize_blob_tags, + serialize_query_format, get_access_conditions +) +from ._deserialize import get_page_ranges_result, deserialize_blob_properties, deserialize_blob_stream, parse_tags, \ + deserialize_pipeline_response_into_cls +from ._quick_query_helper import BlobQueryReader +from ._upload_helpers import ( + upload_block_blob, + upload_append_blob, + upload_page_blob, _any_conditions) +from ._models import BlobType, BlobBlock, BlobProperties, BlobQueryError, QuickQueryDialect, \ + DelimitedJsonDialect, DelimitedTextDialect +from ._download import StorageStreamDownloader +from ._lease import BlobLeaseClient + +if TYPE_CHECKING: + from datetime import datetime + from ._generated.models import BlockList + from ._models import ( # pylint: disable=unused-import + ContentSettings, + ImmutabilityPolicy, + PremiumPageBlobTier, + StandardBlobTier, + SequenceNumberAction + ) + +_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION = ( + 'The require_encryption flag is set, but encryption is not supported' + ' for this method.') + +ClassType = TypeVar("ClassType") + + +class BlobClient(StorageAccountHostsMixin): # pylint: disable=too-many-public-methods + """A client to interact with a specific blob, although that blob may not yet exist. + + For more optional configuration, please click + `here `_. + + :param str account_url: + The URI to the storage account. In order to create a client given the full URI to the blob, + use the :func:`from_blob_url` classmethod. + :param container_name: The container name for the blob. 
+ :type container_name: str + :param blob_name: The name of the blob with which to interact. If specified, this value will override + a blob value specified in the blob URL. + :type blob_name: str + :param str snapshot: + The optional blob snapshot on which to operate. This can be the snapshot ID string + or the response returned from :func:`create_snapshot`. + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token. The value can be a SAS token string, + an instance of a AzureSasCredential from azure.core.credentials, an account + shared access key, or an instance of a TokenCredentials class from azure.identity. + If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential + - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + :keyword str api_version: + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. + + .. versionadded:: 12.2.0 + + :keyword str secondary_hostname: + The hostname of the secondary endpoint. + :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks. + Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_put_size: If the blob size is less than or equal max_single_put_size, then the blob will be + uploaded with only one http PUT request. If the blob size is larger than max_single_put_size, + the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. + :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient + algorithm when uploading a block blob. Defaults to 4*1024*1024+1. + :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False. + :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call, + the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. + :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, + or 4MB. + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_authentication.py + :start-after: [START create_blob_client] + :end-before: [END create_blob_client] + :language: python + :dedent: 8 + :caption: Creating the BlobClient from a URL to a public blob (no auth needed). + + .. literalinclude:: ../samples/blob_samples_authentication.py + :start-after: [START create_blob_client_sas_url] + :end-before: [END create_blob_client_sas_url] + :language: python + :dedent: 8 + :caption: Creating the BlobClient from a SAS URL to a blob. + """ + def __init__( + self, account_url, # type: str + container_name, # type: str + blob_name, # type: str + snapshot=None, # type: Optional[Union[str, Dict[str, Any]]] + credential=None, # type: Optional[Any] + **kwargs # type: Any + ): + # type: (...) 
-> None + try: + if not account_url.lower().startswith('http'): + account_url = "https://" + account_url + except AttributeError: + raise ValueError("Account URL must be a string.") + parsed_url = urlparse(account_url.rstrip('/')) + + if not (container_name and blob_name): + raise ValueError("Please specify a container name and blob name.") + if not parsed_url.netloc: + raise ValueError("Invalid URL: {}".format(account_url)) + + path_snapshot, sas_token = parse_query(parsed_url.query) + + self.container_name = container_name + self.blob_name = blob_name + try: + self.snapshot = snapshot.snapshot # type: ignore + except AttributeError: + try: + self.snapshot = snapshot['snapshot'] # type: ignore + except TypeError: + self.snapshot = snapshot or path_snapshot + + # This parameter is used for the hierarchy traversal. Give precedence to credential. + self._raw_credential = credential if credential else sas_token + self._query_str, credential = self._format_query_string(sas_token, credential, snapshot=self.snapshot) + super(BlobClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) + self._client = AzureBlobStorage(self.url, pipeline=self._pipeline) + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access + + def _format_url(self, hostname): + container_name = self.container_name + if isinstance(container_name, six.text_type): + container_name = container_name.encode('UTF-8') + return "{}://{}/{}/{}{}".format( + self.scheme, + hostname, + quote(container_name), + quote(self.blob_name, safe='~/'), + self._query_str) + + def _encode_source_url(self, source_url): + parsed_source_url = urlparse(source_url) + source_scheme = parsed_source_url.scheme + source_hostname = parsed_source_url.netloc.rstrip('/') + source_path = unquote(parsed_source_url.path) + source_query = parsed_source_url.query + result = ["{}://{}{}".format(source_scheme, source_hostname, quote(source_path, safe='~/'))] + if source_query: + result.append(source_query) + return '?'.join(result) + + @classmethod + def from_blob_url(cls, blob_url, credential=None, snapshot=None, **kwargs): + # type: (Type[ClassType], str, Optional[Any], Optional[Union[str, Dict[str, Any]]], Any) -> ClassType + """Create BlobClient from a blob url. This doesn't support customized blob url with '/' in blob name. + + :param str blob_url: + The full endpoint URL to the Blob, including SAS token and snapshot if used. This could be + either the primary endpoint, or the secondary endpoint depending on the current `location_mode`. + :type blob_url: str + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token, or the connection string already has shared + access key values. The value can be a SAS token string, + an instance of a AzureSasCredential from azure.core.credentials, an account shared access + key, or an instance of a TokenCredentials class from azure.identity. + If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential + - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + :param str snapshot: + The optional blob snapshot on which to operate. This can be the snapshot ID string + or the response returned from :func:`create_snapshot`. If specified, this will override + the snapshot in the url. + :returns: A Blob client. 
+ :rtype: ~azure.storage.blob.BlobClient + """ + try: + if not blob_url.lower().startswith('http'): + blob_url = "https://" + blob_url + except AttributeError: + raise ValueError("Blob URL must be a string.") + parsed_url = urlparse(blob_url.rstrip('/')) + + if not parsed_url.netloc: + raise ValueError("Invalid URL: {}".format(blob_url)) + + account_path = "" + if ".core." in parsed_url.netloc: + # .core. is indicating non-customized url. Blob name with directory info can also be parsed. + path_blob = parsed_url.path.lstrip('/').split('/', 1) + elif "localhost" in parsed_url.netloc or "127.0.0.1" in parsed_url.netloc: + path_blob = parsed_url.path.lstrip('/').split('/', 2) + account_path += '/' + path_blob[0] + else: + # for customized url. blob name that has directory info cannot be parsed. + path_blob = parsed_url.path.lstrip('/').split('/') + if len(path_blob) > 2: + account_path = "/" + "/".join(path_blob[:-2]) + account_url = "{}://{}{}?{}".format( + parsed_url.scheme, + parsed_url.netloc.rstrip('/'), + account_path, + parsed_url.query) + container_name, blob_name = unquote(path_blob[-2]), unquote(path_blob[-1]) + if not container_name or not blob_name: + raise ValueError("Invalid URL. Provide a blob_url with a valid blob and container name.") + + path_snapshot, _ = parse_query(parsed_url.query) + if snapshot: + try: + path_snapshot = snapshot.snapshot # type: ignore + except AttributeError: + try: + path_snapshot = snapshot['snapshot'] # type: ignore + except TypeError: + path_snapshot = snapshot + + return cls( + account_url, container_name=container_name, blob_name=blob_name, + snapshot=path_snapshot, credential=credential, **kwargs + ) + + @classmethod + def from_connection_string( + cls, # type: Type[ClassType] + conn_str, # type: str + container_name, # type: str + blob_name, # type: str + snapshot=None, # type: Optional[str] + credential=None, # type: Optional[Any] + **kwargs # type: Any + ): # type: (...) -> ClassType + """Create BlobClient from a Connection String. + + :param str conn_str: + A connection string to an Azure Storage account. + :param container_name: The container name for the blob. + :type container_name: str + :param blob_name: The name of the blob with which to interact. + :type blob_name: str + :param str snapshot: + The optional blob snapshot on which to operate. This can be the snapshot ID string + or the response returned from :func:`create_snapshot`. + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token, or the connection string already has shared + access key values. The value can be a SAS token string, + an instance of a AzureSasCredential from azure.core.credentials, an account shared access + key, or an instance of a TokenCredentials class from azure.identity. + Credentials provided here will take precedence over those in the connection string. + :returns: A Blob client. + :rtype: ~azure.storage.blob.BlobClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_authentication.py + :start-after: [START auth_from_connection_string_blob] + :end-before: [END auth_from_connection_string_blob] + :language: python + :dedent: 8 + :caption: Creating the BlobClient from a connection string. 
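Taken together, the constructors documented above support a short end-to-end sketch like the following; the connection string, account name, container/blob names and SAS token are placeholders rather than values taken from this patch:

from azure.storage.blob import BlobClient

connection_string = "DefaultEndpointsProtocol=https;AccountName=<account>;AccountKey=<key>;EndpointSuffix=core.windows.net"

# Build the client from a connection string plus container and blob names.
blob = BlobClient.from_connection_string(
    conn_str=connection_string,
    container_name="mycontainer",
    blob_name="hello.txt",
)

# The same client can also be built directly from a full blob URL
# (public or SAS-authenticated).
blob_from_url = BlobClient.from_blob_url(
    "https://<account>.blob.core.windows.net/mycontainer/hello.txt?<sas-token>"
)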
+ """ + account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob') + if 'secondary_hostname' not in kwargs: + kwargs['secondary_hostname'] = secondary + return cls( + account_url, container_name=container_name, blob_name=blob_name, + snapshot=snapshot, credential=credential, **kwargs + ) + + @distributed_trace + def get_account_information(self, **kwargs): + # type: (**Any) -> Dict[str, str] + """Gets information related to the storage account in which the blob resides. + + The information can also be retrieved if the user has a SAS to a container or blob. + The keys in the returned dictionary include 'sku_name' and 'account_kind'. + + :returns: A dict of account information (SKU and account type). + :rtype: dict(str, str) + """ + try: + return self._client.blob.get_account_info(cls=return_response_headers, **kwargs) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _upload_blob_options( # pylint:disable=too-many-statements + self, data, # type: Union[Iterable[AnyStr], IO[AnyStr]] + blob_type=BlobType.BlockBlob, # type: Union[str, BlobType] + length=None, # type: Optional[int] + metadata=None, # type: Optional[Dict[str, str]] + **kwargs + ): + # type: (...) -> Dict[str, Any] + if self.require_encryption and not self.key_encryption_key: + raise ValueError("Encryption required but no key was provided.") + encryption_options = { + 'required': self.require_encryption, + 'key': self.key_encryption_key, + 'resolver': self.key_resolver_function, + } + if self.key_encryption_key is not None: + cek, iv, encryption_data = generate_blob_encryption_data(self.key_encryption_key) + encryption_options['cek'] = cek + encryption_options['vector'] = iv + encryption_options['data'] = encryption_data + + encoding = kwargs.pop('encoding', 'UTF-8') + if isinstance(data, six.text_type): + data = data.encode(encoding) # type: ignore + if length is None: + length = get_length(data) + if isinstance(data, bytes): + data = data[:length] + + if isinstance(data, bytes): + stream = BytesIO(data) + elif hasattr(data, 'read'): + stream = data + elif hasattr(data, '__iter__'): + stream = IterStreamer(data, encoding=encoding) + else: + raise TypeError("Unsupported data type: {}".format(type(data))) + + validate_content = kwargs.pop('validate_content', False) + content_settings = kwargs.pop('content_settings', None) + overwrite = kwargs.pop('overwrite', False) + max_concurrency = kwargs.pop('max_concurrency', 1) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + kwargs['cpk_info'] = cpk_info + + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + kwargs['lease_access_conditions'] = get_access_conditions(kwargs.pop('lease', None)) + kwargs['modified_access_conditions'] = get_modify_conditions(kwargs) + kwargs['cpk_scope_info'] = get_cpk_scope_info(kwargs) + if content_settings: + kwargs['blob_headers'] = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=content_settings.content_md5, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + 
kwargs['blob_tags_string'] = serialize_blob_tags_header(kwargs.pop('tags', None)) + kwargs['stream'] = stream + kwargs['length'] = length + kwargs['overwrite'] = overwrite + kwargs['headers'] = headers + kwargs['validate_content'] = validate_content + kwargs['blob_settings'] = self._config + kwargs['max_concurrency'] = max_concurrency + kwargs['encryption_options'] = encryption_options + + if blob_type == BlobType.BlockBlob: + kwargs['client'] = self._client.block_blob + kwargs['data'] = data + elif blob_type == BlobType.PageBlob: + kwargs['client'] = self._client.page_blob + elif blob_type == BlobType.AppendBlob: + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + kwargs['client'] = self._client.append_blob + else: + raise ValueError("Unsupported BlobType: {}".format(blob_type)) + return kwargs + + def _upload_blob_from_url_options(self, source_url, **kwargs): + # type: (...) -> Dict[str, Any] + tier = kwargs.pop('standard_blob_tier', None) + overwrite = kwargs.pop('overwrite', False) + content_settings = kwargs.pop('content_settings', None) + source_authorization = kwargs.pop('source_authorization', None) + if content_settings: + kwargs['blob_http_headers'] = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=None, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'copy_source_authorization': source_authorization, + 'content_length': 0, + 'copy_source_blob_properties': kwargs.pop('include_source_blob_properties', True), + 'source_content_md5': kwargs.pop('source_content_md5', None), + 'copy_source': source_url, + 'modified_access_conditions': get_modify_conditions(kwargs), + 'blob_tags_string': serialize_blob_tags_header(kwargs.pop('tags', None)), + 'cls': return_response_headers, + 'lease_access_conditions': get_access_conditions(kwargs.pop('destination_lease', None)), + 'tier': tier.value if tier else None, + 'source_modified_access_conditions': get_source_conditions(kwargs), + 'cpk_info': cpk_info, + 'cpk_scope_info': get_cpk_scope_info(kwargs) + } + options.update(kwargs) + if not overwrite and not _any_conditions(**options): # pylint: disable=protected-access + options['modified_access_conditions'].if_none_match = '*' + return options + + @distributed_trace + def upload_blob_from_url(self, source_url, **kwargs): + # type: (str, Any) -> Dict[str, Any] + """ + Creates a new Block Blob where the content of the blob is read from a given URL. + The content of an existing blob is overwritten with the new blob. + + :param str source_url: + A URL of up to 2 KB in length that specifies a file or blob. + The value should be URL-encoded as it would appear in a request URI. + If the source is in another account, the source must either be public + or must be authenticated via a shared access signature. If the source + is public, no authentication is required. 
+ Examples: + https://myaccount.blob.core.windows.net/mycontainer/myblob + + https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + + https://otheraccount.blob.core.windows.net/mycontainer/myblob?sastoken + :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. + If True, upload_blob will overwrite the existing data. If set to False, the + operation will fail with ResourceExistsError. + :keyword bool include_source_blob_properties: + Indicates if properties from the source blob should be copied. Defaults to True. + :keyword tags: + Name-value pairs associated with the blob as tag. Tags are case-sensitive. + The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, + and tag values must be between 0 and 256 characters. + Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), + space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + :paramtype tags: dict(str, str) + :keyword bytearray source_content_md5: + Specify the md5 that is used to verify the integrity of the source bytes. + :keyword ~datetime.datetime source_if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the source resource has been modified since the specified time. + :keyword ~datetime.datetime source_if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the source resource has not been modified since the specified date/time. + :keyword str source_etag: + The source ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions source_match_condition: + The source match condition to use upon the etag. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + The destination ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The destination match condition to use upon the etag. + :keyword destination_lease: + The lease ID specified for this header must match the lease ID of the + destination blob. 
If the request does not include the lease ID or it is not + valid, the operation fails with status code 412 (Precondition Failed). + :paramtype destination_lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + The timeout parameter is expressed in seconds. + :keyword ~azure.storage.blob.ContentSettings content_settings: + ContentSettings object used to set blob properties. Used to set content type, encoding, + language, disposition, md5, and cache control. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: + A standard blob tier value to set the blob to. For this version of the library, + this is only applicable to block blobs on standard storage accounts. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. + """ + options = self._upload_blob_from_url_options( + source_url=self._encode_source_url(source_url), + **kwargs) + try: + return self._client.block_blob.put_blob_from_url(**options) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def upload_blob( # pylint: disable=too-many-locals + self, data, # type: Union[Iterable[AnyStr], IO[AnyStr]] + blob_type=BlobType.BlockBlob, # type: Union[str, BlobType] + length=None, # type: Optional[int] + metadata=None, # type: Optional[Dict[str, str]] + **kwargs + ): + # type: (...) -> Any + """Creates a new blob from a data source with automatic chunking. + + :param data: The blob data to upload. + :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be + either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob. + :param int length: + Number of bytes to read from the stream. This is optional, but + should be supplied for optimal performance. + :param metadata: + Name-value pairs associated with the blob as metadata. + :type metadata: dict(str, str) + :keyword tags: + Name-value pairs associated with the blob as tag. Tags are case-sensitive. + The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, + and tag values must be between 0 and 256 characters. + Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), + space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + + .. versionadded:: 12.4.0 + + :paramtype tags: dict(str, str) + :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. + If True, upload_blob will overwrite the existing data. If set to False, the + operation will fail with ResourceExistsError. 
The exception to the above is with Append + blob types: if set to False and the data already exists, an error will not be raised + and the data will be appended to the existing blob. If set overwrite=True, then the existing + append blob will be deleted, and a new one created. Defaults to False. + :keyword ~azure.storage.blob.ContentSettings content_settings: + ContentSettings object used to set blob properties. Used to set content type, encoding, + language, disposition, md5, and cache control. + :keyword bool validate_content: + If true, calculates an MD5 hash for each chunk of the blob. The storage + service checks the hash of the content that has arrived with the hash + that was sent. This is primarily valuable for detecting bitflips on + the wire if using http instead of https, as https (the default), will + already validate. Note that this MD5 hash is not stored with the + blob. Also note that if enabled, the memory-efficient upload algorithm + will not be used because computing the MD5 hash requires buffering + entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. + :keyword lease: + Required if the blob has an active lease. If specified, upload_blob only succeeds if the + blob's lease is active and matches this ID. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: + A page blob tier value to set the blob to. The tier correlates to the size of the + blob and number of allowed IOPS. This is only applicable to page blobs on + premium storage accounts. + :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: + A standard blob tier value to set the blob to. For this version of the library, + this is only applicable to block blobs on standard storage accounts. + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + Currently this parameter of upload_blob() API is for BlockBlob only. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. 
+ Currently this parameter of upload_blob() API is for BlockBlob only. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword int maxsize_condition: + Optional conditional header. The max length in bytes permitted for + the append blob. If the Append Block operation would cause the blob + to exceed that limit or if the blob size is already greater than the + value specified in this header, the request will fail with + MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). + :keyword int max_concurrency: + Maximum number of parallel connections to use when the blob size exceeds + 64MB. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword str encoding: + Defaults to UTF-8. + :keyword int timeout: + The timeout parameter is expressed in seconds. This method may make + multiple calls to the Azure service and the timeout will apply to + each call individually. + :returns: Blob-updated property dict (Etag and last modified) + :rtype: dict[str, Any] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_hello_world.py + :start-after: [START upload_a_blob] + :end-before: [END upload_a_blob] + :language: python + :dedent: 12 + :caption: Upload a blob to the container. 
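As a rough usage sketch of the upload path described above (the file name, metadata and connection string are placeholders; ContentSettings is the class referenced in the keyword list):

from azure.storage.blob import BlobClient, ContentSettings

connection_string = "<storage connection string>"
blob = BlobClient.from_connection_string(connection_string, "mycontainer", "report.csv")

# Upload a local file as a block blob, replacing any existing blob and
# setting the content type along with some user metadata.
with open("report.csv", "rb") as data:
    blob.upload_blob(
        data,
        overwrite=True,
        content_settings=ContentSettings(content_type="text/csv"),
        metadata={"source": "nightly-export"},
    )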
+ """ + options = self._upload_blob_options( + data, + blob_type=blob_type, + length=length, + metadata=metadata, + **kwargs) + if blob_type == BlobType.BlockBlob: + return upload_block_blob(**options) + if blob_type == BlobType.PageBlob: + return upload_page_blob(**options) + return upload_append_blob(**options) + + def _download_blob_options(self, offset=None, length=None, **kwargs): + # type: (Optional[int], Optional[int], **Any) -> Dict[str, Any] + if self.require_encryption and not self.key_encryption_key: + raise ValueError("Encryption required but no key was provided.") + if length is not None and offset is None: + raise ValueError("Offset value must not be None if length is set.") + if length is not None: + length = offset + length - 1 # Service actually uses an end-range inclusive index + + validate_content = kwargs.pop('validate_content', False) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'clients': self._client, + 'config': self._config, + 'start_range': offset, + 'end_range': length, + 'version_id': kwargs.pop('version_id', None), + 'validate_content': validate_content, + 'encryption_options': { + 'required': self.require_encryption, + 'key': self.key_encryption_key, + 'resolver': self.key_resolver_function}, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_info': cpk_info, + 'cls': kwargs.pop('cls', None) or deserialize_blob_stream, + 'max_concurrency':kwargs.pop('max_concurrency', 1), + 'encoding': kwargs.pop('encoding', None), + 'timeout': kwargs.pop('timeout', None), + 'name': self.blob_name, + 'container': self.container_name} + options.update(kwargs) + return options + + @distributed_trace + def download_blob(self, offset=None, length=None, **kwargs): + # type: (Optional[int], Optional[int], **Any) -> StorageStreamDownloader + """Downloads a blob to the StorageStreamDownloader. The readall() method must + be used to read all the content or readinto() must be used to download the blob into + a stream. Using chunks() returns an iterator which allows the user to iterate over the content in chunks. + + :param int offset: + Start of byte range to use for downloading a section of the blob. + Must be set if length is provided. + :param int length: + Number of bytes to read from the stream. This is optional, but + should be supplied for optimal performance. + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to download. + + .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. + + :keyword bool validate_content: + If true, calculates an MD5 hash for each chunk of the blob. The storage + service checks the hash of the content that has arrived with the hash + that was sent. This is primarily valuable for detecting bitflips on + the wire if using http instead of https, as https (the default), will + already validate. Note that this MD5 hash is not stored with the + blob. 
Also note that if enabled, the memory-efficient upload algorithm + will not be used because computing the MD5 hash requires buffering + entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. + :keyword lease: + Required if the blob has an active lease. If specified, download_blob only + succeeds if the blob's lease is active and matches this ID. Value can be a + BlobLeaseClient object or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword int max_concurrency: + The number of parallel connections with which to download. + :keyword str encoding: + Encoding to decode the downloaded bytes. Default is None, i.e. no decoding. + :keyword int timeout: + The timeout parameter is expressed in seconds. This method may make + multiple calls to the Azure service and the timeout will apply to + each call individually. + :returns: A streaming object (StorageStreamDownloader) + :rtype: ~azure.storage.blob.StorageStreamDownloader + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_hello_world.py + :start-after: [START download_a_blob] + :end-before: [END download_a_blob] + :language: python + :dedent: 12 + :caption: Download a blob. 
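A minimal sketch of the download path, assuming a client built as in the earlier examples; the file names and byte range are arbitrary:

from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string("<storage connection string>", "mycontainer", "report.csv")

# Whole-blob download into memory.
data = blob.download_blob().readall()

# Ranged download streamed into a local file with two parallel connections.
with open("first_kb.bin", "wb") as handle:
    blob.download_blob(offset=0, length=1024, max_concurrency=2).readinto(handle)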
+ """ + options = self._download_blob_options( + offset=offset, + length=length, + **kwargs) + return StorageStreamDownloader(**options) + + def _quick_query_options(self, query_expression, + **kwargs): + # type: (str, **Any) -> Dict[str, Any] + delimiter = '\n' + input_format = kwargs.pop('blob_format', None) + if input_format == QuickQueryDialect.DelimitedJson: + input_format = DelimitedJsonDialect() + if input_format == QuickQueryDialect.DelimitedText: + input_format = DelimitedTextDialect() + input_parquet_format = input_format == "ParquetDialect" + if input_format and not input_parquet_format: + try: + delimiter = input_format.lineterminator + except AttributeError: + try: + delimiter = input_format.delimiter + except AttributeError: + raise ValueError("The Type of blob_format can only be DelimitedTextDialect or " + "DelimitedJsonDialect or ParquetDialect") + output_format = kwargs.pop('output_format', None) + if output_format == QuickQueryDialect.DelimitedJson: + output_format = DelimitedJsonDialect() + if output_format == QuickQueryDialect.DelimitedText: + output_format = DelimitedTextDialect() + if output_format: + if output_format == "ParquetDialect": + raise ValueError("ParquetDialect is invalid as an output format.") + try: + delimiter = output_format.lineterminator + except AttributeError: + try: + delimiter = output_format.delimiter + except AttributeError: + pass + else: + output_format = input_format if not input_parquet_format else None + query_request = QueryRequest( + expression=query_expression, + input_serialization=serialize_query_format(input_format), + output_serialization=serialize_query_format(output_format) + ) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo( + encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm + ) + options = { + 'query_request': query_request, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_info': cpk_info, + 'snapshot': self.snapshot, + 'timeout': kwargs.pop('timeout', None), + 'cls': return_headers_and_deserialized, + } + options.update(kwargs) + return options, delimiter + + @distributed_trace + def query_blob(self, query_expression, **kwargs): + # type: (str, **Any) -> BlobQueryReader + """Enables users to select/project on blob/or blob snapshot data by providing simple query expressions. + This operations returns a BlobQueryReader, users need to use readall() or readinto() to get query data. + + :param str query_expression: + Required. a query statement. + :keyword Callable[~azure.storage.blob.BlobQueryError] on_error: + A function to be called on any processing errors returned by the service. + :keyword blob_format: + Optional. Defines the serialization of the data currently stored in the blob. The default is to + treat the blob data as CSV data formatted in the default dialect. This can be overridden with + a custom DelimitedTextDialect, or DelimitedJsonDialect or "ParquetDialect" (passed as a string or enum). 
+ These dialects can be passed through their respective classes, the QuickQueryDialect enum or as a string + :paramtype blob_format: ~azure.storage.blob.DelimitedTextDialect or ~azure.storage.blob.DelimitedJsonDialect + or ~azure.storage.blob.QuickQueryDialect or str + :keyword output_format: + Optional. Defines the output serialization for the data stream. By default the data will be returned + as it is represented in the blob (Parquet formats default to DelimitedTextDialect). + By providing an output format, the blob data will be reformatted according to that profile. + This value can be a DelimitedTextDialect or a DelimitedJsonDialect or ArrowDialect. + These dialects can be passed through their respective classes, the QuickQueryDialect enum or as a string + :paramtype output_format: ~azure.storage.blob.DelimitedTextDialect or ~azure.storage.blob.DelimitedJsonDialect + or list[~azure.storage.blob.ArrowDialect] or ~azure.storage.blob.QuickQueryDialect or str + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: A streaming object (BlobQueryReader) + :rtype: ~azure.storage.blob.BlobQueryReader + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_query.py + :start-after: [START query] + :end-before: [END query] + :language: python + :dedent: 4 + :caption: select/project on blob/or blob snapshot data by providing simple query expressions. 
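A hedged sketch of the quick-query flow described above, assuming the blob holds CSV data; the dialect settings and query text are illustrative only:

from azure.storage.blob import (
    BlobClient,
    DelimitedTextDialect,
    DelimitedJsonDialect,
)

blob = BlobClient.from_connection_string("<storage connection string>", "mycontainer", "records.csv")

# Query the CSV blob and read the result back as JSON lines.
reader = blob.query_blob(
    "SELECT * from BlobStorage",
    blob_format=DelimitedTextDialect(delimiter=",", has_header=True),
    output_format=DelimitedJsonDialect(delimiter="\n"),
    on_error=lambda error: print("query error:", error),
)
rows = reader.readall()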
+ """ + errors = kwargs.pop("on_error", None) + error_cls = kwargs.pop("error_cls", BlobQueryError) + encoding = kwargs.pop("encoding", None) + options, delimiter = self._quick_query_options(query_expression, **kwargs) + try: + headers, raw_response_body = self._client.blob.query(**options) + except HttpResponseError as error: + process_storage_error(error) + return BlobQueryReader( + name=self.blob_name, + container=self.container_name, + errors=errors, + record_delimiter=delimiter, + encoding=encoding, + headers=headers, + response=raw_response_body, + error_cls=error_cls) + + @staticmethod + def _generic_delete_blob_options(delete_snapshots=None, **kwargs): + # type: (str, **Any) -> Dict[str, Any] + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if delete_snapshots: + delete_snapshots = DeleteSnapshotsOptionType(delete_snapshots) + options = { + 'timeout': kwargs.pop('timeout', None), + 'snapshot': kwargs.pop('snapshot', None), # this is added for delete_blobs + 'delete_snapshots': delete_snapshots or None, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions} + options.update(kwargs) + return options + + def _delete_blob_options(self, delete_snapshots=None, **kwargs): + # type: (str, **Any) -> Dict[str, Any] + if self.snapshot and delete_snapshots: + raise ValueError("The delete_snapshots option cannot be used with a specific snapshot.") + options = self._generic_delete_blob_options(delete_snapshots, **kwargs) + options['snapshot'] = self.snapshot + options['version_id'] = kwargs.pop('version_id', None) + options['blob_delete_type'] = kwargs.pop('blob_delete_type', None) + return options + + @distributed_trace + def delete_blob(self, delete_snapshots=None, **kwargs): + # type: (str, **Any) -> None + """Marks the specified blob for deletion. + + The blob is later deleted during garbage collection. + Note that in order to delete a blob, you must delete all of its + snapshots. You can delete both at the same time with the delete_blob() + operation. + + If a delete retention policy is enabled for the service, then this operation soft deletes the blob + and retains the blob for a specified number of days. + After the specified number of days, the blob's data is removed from the service during garbage collection. + Soft deleted blob is accessible through :func:`~ContainerClient.list_blobs()` specifying `include=['deleted']` + option. Soft-deleted blob can be restored using :func:`undelete` operation. + + :param str delete_snapshots: + Required if the blob has associated snapshots. Values include: + - "only": Deletes only the blobs snapshots. + - "include": Deletes the blob along with all snapshots. + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to delete. + + .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. + + :keyword lease: + Required if the blob has an active lease. If specified, delete_blob only + succeeds if the blob's lease is active and matches this ID. Value can be a + BlobLeaseClient object or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. 
+ If a date is passed in without timezone info, it is assumed to be UTC.
+ Specify this header to perform the operation only
+ if the resource has been modified since the specified time.
+ :keyword ~datetime.datetime if_unmodified_since:
+ A DateTime value. Azure expects the date value passed in to be UTC.
+ If timezone is included, any non-UTC datetimes will be converted to UTC.
+ If a date is passed in without timezone info, it is assumed to be UTC.
+ Specify this header to perform the operation only if
+ the resource has not been modified since the specified date/time.
+ :keyword str etag:
+ An ETag value, or the wildcard character (*). Used to check if the resource has changed,
+ and act according to the condition specified by the `match_condition` parameter.
+ :keyword ~azure.core.MatchConditions match_condition:
+ The match condition to use upon the etag.
+ :keyword str if_tags_match_condition:
+ Specify a SQL where clause on blob tags to operate only on blob with a matching value.
+ eg. ``\"\\\"tagname\\\"='my tag'\"``
+
+ .. versionadded:: 12.4.0
+
+ :keyword int timeout:
+ The timeout parameter is expressed in seconds.
+ :rtype: None
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/blob_samples_hello_world.py
+ :start-after: [START delete_blob]
+ :end-before: [END delete_blob]
+ :language: python
+ :dedent: 12
+ :caption: Delete a blob.
+ """
+ options = self._delete_blob_options(delete_snapshots=delete_snapshots, **kwargs)
+ try:
+ self._client.blob.delete(**options)
+ except HttpResponseError as error:
+ process_storage_error(error)
+
+ @distributed_trace
+ def undelete_blob(self, **kwargs):
+ # type: (**Any) -> None
+ """Restores soft-deleted blobs or snapshots.
+
+ The operation will only be successful if used within the specified number of days
+ set in the delete retention policy.
+
+ :keyword int timeout:
+ The timeout parameter is expressed in seconds.
+ :rtype: None
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/blob_samples_common.py
+ :start-after: [START undelete_blob]
+ :end-before: [END undelete_blob]
+ :language: python
+ :dedent: 8
+ :caption: Undeleting a blob.
+ """
+ try:
+ self._client.blob.undelete(timeout=kwargs.pop('timeout', None), **kwargs)
+ except HttpResponseError as error:
+ process_storage_error(error)
+
+ @distributed_trace()
+ def exists(self, **kwargs):
+ # type: (**Any) -> bool
+ """
+ Returns True if a blob exists with the defined parameters, and returns
+ False otherwise.
+
+ :keyword str version_id:
+ The version id parameter is an opaque DateTime
+ value that, when present, specifies the version of the blob to check if it exists.
+ :keyword int timeout:
+ The timeout parameter is expressed in seconds.
+ :returns: boolean
+ """
+ try:
+ self._client.blob.get_properties(
+ snapshot=self.snapshot,
+ **kwargs)
+ return True
+ # Encrypted with CPK
+ except ResourceExistsError:
+ return True
+ except HttpResponseError as error:
+ try:
+ process_storage_error(error)
+ except ResourceNotFoundError:
+ return False
+
+ @distributed_trace
+ def get_blob_properties(self, **kwargs):
+ # type: (**Any) -> BlobProperties
+ """Returns all user-defined metadata, standard HTTP properties, and
+ system properties for the blob. It does not return the content of the blob.
+
+ :keyword lease:
+ Required if the blob has an active lease. Value can be a BlobLeaseClient object
+ or the lease ID as a string.
+ :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to get properties. + + .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. + + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: BlobProperties + :rtype: ~azure.storage.blob.BlobProperties + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_common.py + :start-after: [START get_blob_properties] + :end-before: [END get_blob_properties] + :language: python + :dedent: 8 + :caption: Getting the properties for a blob. 
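A short sketch combining exists(), get_blob_properties() and set_blob_metadata() as documented in this section; the client construction and metadata values are placeholders:

from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string("<storage connection string>", "mycontainer", "report.csv")

if blob.exists():
    # BlobProperties exposes the standard HTTP properties and system metadata.
    props = blob.get_blob_properties()
    print(props.name, props.size, props.last_modified, props.content_settings.content_type)
    # Replace all user-defined metadata on the blob.
    blob.set_blob_metadata({"reviewed": "true"})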
+ """ + # TODO: extract this out as _get_blob_properties_options + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + try: + cls_method = kwargs.pop('cls', None) + if cls_method: + kwargs['cls'] = partial(deserialize_pipeline_response_into_cls, cls_method) + blob_props = self._client.blob.get_properties( + timeout=kwargs.pop('timeout', None), + version_id=kwargs.pop('version_id', None), + snapshot=self.snapshot, + lease_access_conditions=access_conditions, + modified_access_conditions=mod_conditions, + cls=kwargs.pop('cls', None) or deserialize_blob_properties, + cpk_info=cpk_info, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + blob_props.name = self.blob_name + if isinstance(blob_props, BlobProperties): + blob_props.container = self.container_name + blob_props.snapshot = self.snapshot + return blob_props # type: ignore + + def _set_http_headers_options(self, content_settings=None, **kwargs): + # type: (Optional[ContentSettings], **Any) -> Dict[str, Any] + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + blob_headers = None + if content_settings: + blob_headers = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=content_settings.content_md5, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + options = { + 'timeout': kwargs.pop('timeout', None), + 'blob_http_headers': blob_headers, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cls': return_response_headers} + options.update(kwargs) + return options + + @distributed_trace + def set_http_headers(self, content_settings=None, **kwargs): + # type: (Optional[ContentSettings], **Any) -> None + """Sets system properties on the blob. + + If one property is set for the content_settings, all properties will be overridden. + + :param ~azure.storage.blob.ContentSettings content_settings: + ContentSettings object used to set blob properties. Used to set content type, encoding, + language, disposition, md5, and cache control. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. 
+ Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified) + :rtype: Dict[str, Any] + """ + options = self._set_http_headers_options(content_settings=content_settings, **kwargs) + try: + return self._client.blob.set_http_headers(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _set_blob_metadata_options(self, metadata=None, **kwargs): + # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Any] + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + options = { + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers, + 'headers': headers} + options.update(kwargs) + return options + + @distributed_trace + def set_blob_metadata(self, metadata=None, **kwargs): + # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Union[str, datetime]] + """Sets user-defined metadata for the blob as one or more name-value pairs. + + :param metadata: + Dict containing name and value pairs. Each call to this operation + replaces all existing metadata attached to the blob. To remove all + metadata from the blob, call this operation with no metadata headers. + :type metadata: dict(str, str) + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). 
Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified) + """ + options = self._set_blob_metadata_options(metadata=metadata, **kwargs) + try: + return self._client.blob.set_metadata(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def set_immutability_policy(self, immutability_policy, **kwargs): + # type: (ImmutabilityPolicy, **Any) -> Dict[str, str] + """The Set Immutability Policy operation sets the immutability policy on the blob. + + .. versionadded:: 12.10.0 + This operation was introduced in API version '2020-10-02'. + + :param ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Key value pairs of blob tags. + :rtype: Dict[str, str] + """ + + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + return self._client.blob.set_immutability_policy(cls=return_response_headers, **kwargs) + + @distributed_trace + def delete_immutability_policy(self, **kwargs): + # type: (**Any) -> None + """The Delete Immutability Policy operation deletes the immutability policy on the blob. + + .. versionadded:: 12.10.0 + This operation was introduced in API version '2020-10-02'. + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Key value pairs of blob tags. + :rtype: Dict[str, str] + """ + + self._client.blob.delete_immutability_policy(**kwargs) + + @distributed_trace + def set_legal_hold(self, legal_hold, **kwargs): + # type: (bool, **Any) -> Dict[str, Union[str, datetime, bool]] + """The Set Legal Hold operation sets a legal hold on the blob. + + .. versionadded:: 12.10.0 + This operation was introduced in API version '2020-10-02'. + + :param bool legal_hold: + Specified if a legal hold should be set on the blob. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Key value pairs of blob tags. 
+ :rtype: Dict[str, Union[str, datetime, bool]] + """ + + return self._client.blob.set_legal_hold(legal_hold, cls=return_response_headers, **kwargs) + + def _create_page_blob_options( # type: ignore + self, size, # type: int + content_settings=None, # type: Optional[ContentSettings] + metadata=None, # type: Optional[Dict[str, str]] + premium_page_blob_tier=None, # type: Optional[Union[str, PremiumPageBlobTier]] + **kwargs + ): + # type: (...) -> Dict[str, Any] + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + blob_headers = None + if content_settings: + blob_headers = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=content_settings.content_md5, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + + sequence_number = kwargs.pop('sequence_number', None) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + immutability_policy = kwargs.pop('immutability_policy', None) + if immutability_policy: + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + + if premium_page_blob_tier: + try: + headers['x-ms-access-tier'] = premium_page_blob_tier.value # type: ignore + except AttributeError: + headers['x-ms-access-tier'] = premium_page_blob_tier # type: ignore + + blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) + + options = { + 'content_length': 0, + 'blob_content_length': size, + 'blob_sequence_number': sequence_number, + 'blob_http_headers': blob_headers, + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'blob_tags_string': blob_tags_string, + 'cls': return_response_headers, + 'headers': headers} + options.update(kwargs) + return options + + @distributed_trace + def create_page_blob( # type: ignore + self, size, # type: int + content_settings=None, # type: Optional[ContentSettings] + metadata=None, # type: Optional[Dict[str, str]] + premium_page_blob_tier=None, # type: Optional[Union[str, PremiumPageBlobTier]] + **kwargs + ): + # type: (...) -> Dict[str, Union[str, datetime]] + """Creates a new Page Blob of the specified size. + + :param int size: + This specifies the maximum size for the page blob, up to 1 TB. + The page blob size must be aligned to a 512-byte boundary. + :param ~azure.storage.blob.ContentSettings content_settings: + ContentSettings object used to set blob properties. Used to set content type, encoding, + language, disposition, md5, and cache control. + :param metadata: + Name-value pairs associated with the blob as metadata. 
+ :type metadata: dict(str, str) + :param ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: + A page blob tier value to set the blob to. The tier correlates to the size of the + blob and number of allowed IOPS. This is only applicable to page blobs on + premium storage accounts. + :keyword tags: + Name-value pairs associated with the blob as tag. Tags are case-sensitive. + The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, + and tag values must be between 0 and 256 characters. + Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), + space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + + .. versionadded:: 12.4.0 + + :paramtype tags: dict(str, str) + :keyword int sequence_number: + Only for Page blobs. The sequence number is a user-controlled value that you can use to + track requests. The value of the sequence number must be between 0 + and 2^63 - 1.The default value is 0. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified). 
+ :rtype: dict[str, Any] + """ + options = self._create_page_blob_options( + size, + content_settings=content_settings, + metadata=metadata, + premium_page_blob_tier=premium_page_blob_tier, + **kwargs) + try: + return self._client.page_blob.create(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _create_append_blob_options(self, content_settings=None, metadata=None, **kwargs): + # type: (Optional[ContentSettings], Optional[Dict[str, str]], **Any) -> Dict[str, Any] + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + blob_headers = None + if content_settings: + blob_headers = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=content_settings.content_md5, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + immutability_policy = kwargs.pop('immutability_policy', None) + if immutability_policy: + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + + blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) + + options = { + 'content_length': 0, + 'blob_http_headers': blob_headers, + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'blob_tags_string': blob_tags_string, + 'cls': return_response_headers, + 'headers': headers} + options.update(kwargs) + return options + + @distributed_trace + def create_append_blob(self, content_settings=None, metadata=None, **kwargs): + # type: (Optional[ContentSettings], Optional[Dict[str, str]], **Any) -> Dict[str, Union[str, datetime]] + """Creates a new Append Blob. + + :param ~azure.storage.blob.ContentSettings content_settings: + ContentSettings object used to set blob properties. Used to set content type, encoding, + language, disposition, md5, and cache control. + :param metadata: + Name-value pairs associated with the blob as metadata. + :type metadata: dict(str, str) + :keyword tags: + Name-value pairs associated with the blob as tag. Tags are case-sensitive. + The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, + and tag values must be between 0 and 256 characters. + Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), + space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + + .. versionadded:: 12.4.0 + + :paramtype tags: dict(str, str) + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. 
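For illustration, a minimal create_page_blob call under the constraints described above (the size must be 512-byte aligned); the names are placeholders.

from azure.storage.blob import BlobClient

page_blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="disks", blob_name="scratch.vhd")

# Size must be a multiple of 512 bytes; the sequence number defaults to 0.
page_blob.create_page_blob(size=4 * 1024 * 1024, sequence_number=0)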
+ :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified). 
+ :rtype: dict[str, Any] + """ + options = self._create_append_blob_options( + content_settings=content_settings, + metadata=metadata, + **kwargs) + try: + return self._client.append_blob.create(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _create_snapshot_options(self, metadata=None, **kwargs): + # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Any] + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers, + 'headers': headers} + options.update(kwargs) + return options + + @distributed_trace + def create_snapshot(self, metadata=None, **kwargs): + # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Union[str, datetime]] + """Creates a snapshot of the blob. + + A snapshot is a read-only version of a blob that's taken at a point in time. + It can be read, copied, or deleted, but not modified. Snapshots provide a way + to back up a blob as it appears at a moment in time. + + A snapshot of a blob has the same name as the base blob from which the snapshot + is taken, with a DateTime value appended to indicate the time at which the + snapshot was taken. + + :param metadata: + Name-value pairs associated with the blob as metadata. + :type metadata: dict(str, str) + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on destination blob with a matching value. + + .. versionadded:: 12.4.0 + + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. 
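A small sketch of create_append_blob followed by an append; names are placeholders, and append_block is the companion call used to add data to the end of an append blob.

from azure.storage.blob import BlobClient

log_blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="logs", blob_name="app.log")

# Create the (empty) append blob, then append blocks to it.
log_blob.create_append_blob()
log_blob.append_block(b"2022-10-18T19:20:53Z starting up\n")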
+ Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Snapshot ID, Etag, and last modified). + :rtype: dict[str, Any] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_common.py + :start-after: [START create_blob_snapshot] + :end-before: [END create_blob_snapshot] + :language: python + :dedent: 8 + :caption: Create a snapshot of the blob. + """ + options = self._create_snapshot_options(metadata=metadata, **kwargs) + try: + return self._client.blob.create_snapshot(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _start_copy_from_url_options(self, source_url, metadata=None, incremental_copy=False, **kwargs): + # type: (str, Optional[Dict[str, str]], bool, **Any) -> Dict[str, Any] + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + if 'source_lease' in kwargs: + source_lease = kwargs.pop('source_lease') + try: + headers['x-ms-source-lease-id'] = source_lease.id # type: str + except AttributeError: + headers['x-ms-source-lease-id'] = source_lease + + tier = kwargs.pop('premium_page_blob_tier', None) or kwargs.pop('standard_blob_tier', None) + requires_sync = kwargs.pop('requires_sync', None) + encryption_scope_str = kwargs.pop('encryption_scope', None) + source_authorization = kwargs.pop('source_authorization', None) + + if not requires_sync and encryption_scope_str: + raise ValueError("Encryption_scope is only supported for sync copy, please specify requires_sync=True") + if source_authorization and incremental_copy: + raise ValueError("Source authorization tokens are not applicable for incremental copying.") + # + # TODO: refactor start_copy_from_url api in _blob_client.py. Call _generated/_blob_operations.py copy_from_url + # when requires_sync=True is set. + # Currently both sync copy and async copy are calling _generated/_blob_operations.py start_copy_from_url. + # As sync copy diverges more from async copy, more problem will surface. 
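A minimal create_snapshot sketch: the returned dictionary's 'snapshot' value identifies the read-only copy and can be passed back when constructing a client scoped to it. Names are placeholders.

from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="data", blob_name="config.json")

snap = blob.create_snapshot(metadata={"reason": "pre-deploy backup"})
print(snap["snapshot"])  # opaque DateTime id of the new snapshot

# A client scoped to that snapshot can then read it.
snap_client = BlobClient.from_connection_string(
    "<connection-string>", container_name="data", blob_name="config.json",
    snapshot=snap["snapshot"])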
+ if encryption_scope_str: + headers.update({'x-ms-encryption-scope': encryption_scope_str}) + + if requires_sync is True: + headers['x-ms-requires-sync'] = str(requires_sync) + if source_authorization: + headers['x-ms-copy-source-authorization'] = source_authorization + else: + if source_authorization: + raise ValueError("Source authorization tokens are only applicable for synchronous copy operations.") + timeout = kwargs.pop('timeout', None) + dest_mod_conditions = get_modify_conditions(kwargs) + blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) + + immutability_policy = kwargs.pop('immutability_policy', None) + if immutability_policy: + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + + options = { + 'copy_source': source_url, + 'seal_blob': kwargs.pop('seal_destination_blob', None), + 'timeout': timeout, + 'modified_access_conditions': dest_mod_conditions, + 'blob_tags_string': blob_tags_string, + 'headers': headers, + 'cls': return_response_headers, + } + if not incremental_copy: + source_mod_conditions = get_source_conditions(kwargs) + dest_access_conditions = get_access_conditions(kwargs.pop('destination_lease', None)) + options['source_modified_access_conditions'] = source_mod_conditions + options['lease_access_conditions'] = dest_access_conditions + options['tier'] = tier.value if tier else None + options.update(kwargs) + return options + + @distributed_trace + def start_copy_from_url(self, source_url, metadata=None, incremental_copy=False, **kwargs): + # type: (str, Optional[Dict[str, str]], bool, **Any) -> Dict[str, Union[str, datetime]] + """Copies a blob asynchronously. + + This operation returns a copy operation + object that can be used to wait on the completion of the operation, + as well as check status or abort the copy operation. + The Blob service copies blobs on a best-effort basis. + + The source blob for a copy operation may be a block blob, an append blob, + or a page blob. If the destination blob already exists, it must be of the + same blob type as the source blob. Any existing destination blob will be + overwritten. The destination blob cannot be modified while a copy operation + is in progress. + + When copying from a page blob, the Blob service creates a destination page + blob of the source blob's length, initially containing all zeroes. Then + the source page ranges are enumerated, and non-empty ranges are copied. + + For a block blob or an append blob, the Blob service creates a committed + blob of zero length before returning from this operation. When copying + from a block blob, all committed blocks and their block IDs are copied. + Uncommitted blocks are not copied. At the end of the copy operation, the + destination blob will have the same committed block count as the source. + + When copying from an append blob, all committed blocks are copied. At the + end of the copy operation, the destination blob will have the same committed + block count as the source. + + For all blob types, you can call status() on the returned polling object + to check the status of the copy operation, or wait() to block until the + operation is complete. The final blob will be committed when the copy completes. + + :param str source_url: + A URL of up to 2 KB in length that specifies a file or blob. + The value should be URL-encoded as it would appear in a request URI. 
+ If the source is in another account, the source must either be public + or must be authenticated via a shared access signature. If the source + is public, no authentication is required. + Examples: + https://myaccount.blob.core.windows.net/mycontainer/myblob + + https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + + https://otheraccount.blob.core.windows.net/mycontainer/myblob?sastoken + :param metadata: + Name-value pairs associated with the blob as metadata. If no name-value + pairs are specified, the operation will copy the metadata from the + source blob or file to the destination blob. If one or more name-value + pairs are specified, the destination blob is created with the specified + metadata, and metadata is not copied from the source blob or file. + :type metadata: dict(str, str) + :param bool incremental_copy: + Copies the snapshot of the source page blob to a destination page blob. + The snapshot is copied such that only the differential changes between + the previously copied snapshot are transferred to the destination. + The copied snapshots are complete copies of the original snapshot and + can be read or copied from as usual. Defaults to False. + :keyword tags: + Name-value pairs associated with the blob as tag. Tags are case-sensitive. + The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, + and tag values must be between 0 and 256 characters. + Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), + space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + + .. versionadded:: 12.4.0 + + :paramtype tags: dict(str, str) + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword ~datetime.datetime source_if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this conditional header to copy the blob only if the source + blob has been modified since the specified date/time. + :keyword ~datetime.datetime source_if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this conditional header to copy the blob only if the source blob + has not been modified since the specified date/time. + :keyword str source_etag: + The source ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions source_match_condition: + The source match condition to use upon the etag. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. 
+ Specify this conditional header to copy the blob only + if the destination blob has been modified since the specified date/time. + If the destination blob has not been modified, the Blob service returns + status code 412 (Precondition Failed). + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this conditional header to copy the blob only + if the destination blob has not been modified since the specified + date/time. If the destination blob has been modified, the Blob service + returns status code 412 (Precondition Failed). + :keyword str etag: + The destination ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The destination match condition to use upon the etag. + :keyword destination_lease: + The lease ID specified for this header must match the lease ID of the + destination blob. If the request does not include the lease ID or it is not + valid, the operation fails with status code 412 (Precondition Failed). + :paramtype destination_lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword source_lease: + Specify this to perform the Copy Blob operation only if + the lease ID given matches the active lease ID of the source blob. + :paramtype source_lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + The timeout parameter is expressed in seconds. + :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: + A page blob tier value to set the blob to. The tier correlates to the size of the + blob and number of allowed IOPS. This is only applicable to page blobs on + premium storage accounts. + :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: + A standard blob tier value to set the blob to. For this version of the library, + this is only applicable to block blobs on standard storage accounts. + :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority: + Indicates the priority with which to rehydrate an archived blob + :keyword bool seal_destination_blob: + Seal the destination append blob. This operation is only for append blob. + + .. versionadded:: 12.4.0 + + :keyword bool requires_sync: + Enforces that the service will not return a response until the copy is complete. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. This option is only available when `incremental_copy` is + set to False and `requires_sync` is set to True. + + .. versionadded:: 12.9.0 + + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the sync copied blob. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.10.0 + + :returns: A dictionary of copy properties (etag, last_modified, copy_id, copy_status). + :rtype: dict[str, Union[str, ~datetime.datetime]] + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/blob_samples_common.py + :start-after: [START copy_blob_from_url] + :end-before: [END copy_blob_from_url] + :language: python + :dedent: 12 + :caption: Copy a blob from a URL. + """ + options = self._start_copy_from_url_options( + source_url=self._encode_source_url(source_url), + metadata=metadata, + incremental_copy=incremental_copy, + **kwargs) + try: + if incremental_copy: + return self._client.page_blob.copy_incremental(**options) + return self._client.blob.start_copy_from_url(**options) + except HttpResponseError as error: + process_storage_error(error) + + def _abort_copy_options(self, copy_id, **kwargs): + # type: (Union[str, Dict[str, Any], BlobProperties], **Any) -> Dict[str, Any] + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + try: + copy_id = copy_id.copy.id + except AttributeError: + try: + copy_id = copy_id['copy_id'] + except TypeError: + pass + options = { + 'copy_id': copy_id, + 'lease_access_conditions': access_conditions, + 'timeout': kwargs.pop('timeout', None)} + options.update(kwargs) + return options + + @distributed_trace + def abort_copy(self, copy_id, **kwargs): + # type: (Union[str, Dict[str, Any], BlobProperties], **Any) -> None + """Abort an ongoing copy operation. + + This will leave a destination blob with zero length and full metadata. + This will raise an error if the copy operation has already ended. + + :param copy_id: + The copy operation to abort. This can be either an ID string, or an + instance of BlobProperties. + :type copy_id: str or ~azure.storage.blob.BlobProperties + :rtype: None + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_common.py + :start-after: [START abort_copy_blob_from_url] + :end-before: [END abort_copy_blob_from_url] + :language: python + :dedent: 12 + :caption: Abort copying a blob from URL. + """ + options = self._abort_copy_options(copy_id, **kwargs) + try: + self._client.blob.abort_copy_from_url(**options) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def acquire_lease(self, lease_duration=-1, lease_id=None, **kwargs): + # type: (int, Optional[str], **Any) -> BlobLeaseClient + """Requests a new lease. + + If the blob does not have an active lease, the Blob + Service creates a lease on the blob and returns a new lease. + + :param int lease_duration: + Specifies the duration of the lease, in seconds, or negative one + (-1) for a lease that never expires. A non-infinite lease can be + between 15 and 60 seconds. A lease duration cannot be changed + using renew or change. Default is -1 (infinite lease). + :param str lease_id: + Proposed lease ID, in a GUID string format. The Blob Service + returns 400 (Invalid request) if the proposed lease ID is not + in the correct format. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. 
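A sketch of the asynchronous copy flow described above: start the copy, poll its status via blob properties, and abort it if it is still pending. The source URL must be publicly readable or carry a SAS token; all names are placeholders.

from azure.storage.blob import BlobClient

dest = BlobClient.from_connection_string(
    "<connection-string>", container_name="backup", blob_name="myblob")

copy = dest.start_copy_from_url(
    "https://myaccount.blob.core.windows.net/mycontainer/myblob?<sas-token>")

status = dest.get_blob_properties().copy.status  # 'pending', 'success', ...
if status == "pending":
    dest.abort_copy(copy["copy_id"])  # leaves a zero-length destination blob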
+ Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: A BlobLeaseClient object. + :rtype: ~azure.storage.blob.BlobLeaseClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_common.py + :start-after: [START acquire_lease_on_blob] + :end-before: [END acquire_lease_on_blob] + :language: python + :dedent: 8 + :caption: Acquiring a lease on a blob. + """ + lease = BlobLeaseClient(self, lease_id=lease_id) # type: ignore + lease.acquire(lease_duration=lease_duration, **kwargs) + return lease + + @distributed_trace + def set_standard_blob_tier(self, standard_blob_tier, **kwargs): + # type: (Union[str, StandardBlobTier], Any) -> None + """This operation sets the tier on a block blob. + + A block blob's tier determines Hot/Cool/Archive storage type. + This operation does not update the blob's ETag. + + :param standard_blob_tier: + Indicates the tier to be set on the blob. Options include 'Hot', 'Cool', + 'Archive'. The hot tier is optimized for storing data that is accessed + frequently. The cool storage tier is optimized for storing data that + is infrequently accessed and stored for at least a month. The archive + tier is optimized for storing data that is rarely accessed and stored + for at least six months with flexible latency requirements. + :type standard_blob_tier: str or ~azure.storage.blob.StandardBlobTier + :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority: + Indicates the priority with which to rehydrate an archived blob + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to download. + + .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + :keyword int timeout: + The timeout parameter is expressed in seconds. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. 
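A brief acquire_lease sketch: the returned BlobLeaseClient can be passed as the lease keyword to subsequent operations and released when finished. Names are placeholders.

from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="data", blob_name="state.bin")

lease = blob.acquire_lease(lease_duration=15)   # 15-60 seconds, or -1 for infinite
try:
    blob.set_blob_metadata({"owner": "worker-1"}, lease=lease)
finally:
    lease.release()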
+ :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :rtype: None + """ + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if standard_blob_tier is None: + raise ValueError("A StandardBlobTier must be specified") + if self.snapshot and kwargs.get('version_id'): + raise ValueError("Snapshot and version_id cannot be set at the same time") + try: + self._client.blob.set_tier( + tier=standard_blob_tier, + snapshot=self.snapshot, + timeout=kwargs.pop('timeout', None), + modified_access_conditions=mod_conditions, + lease_access_conditions=access_conditions, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + + def _stage_block_options( + self, block_id, # type: str + data, # type: Union[Iterable[AnyStr], IO[AnyStr]] + length=None, # type: Optional[int] + **kwargs + ): + # type: (...) -> Dict[str, Any] + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + block_id = encode_base64(str(block_id)) + if isinstance(data, six.text_type): + data = data.encode(kwargs.pop('encoding', 'UTF-8')) # type: ignore + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + if length is None: + length = get_length(data) + if length is None: + length, data = read_length(data) + if isinstance(data, bytes): + data = data[:length] + + validate_content = kwargs.pop('validate_content', False) + cpk_scope_info = get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'block_id': block_id, + 'content_length': length, + 'body': data, + 'transactional_content_md5': None, + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'validate_content': validate_content, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers, + } + options.update(kwargs) + return options + + @distributed_trace + def stage_block( + self, block_id, # type: str + data, # type: Union[Iterable[AnyStr], IO[AnyStr]] + length=None, # type: Optional[int] + **kwargs + ): + # type: (...) -> Dict[str, Any] + """Creates a new block to be committed as part of a blob. + + :param str block_id: A string value that identifies the block. + The string should be less than or equal to 64 bytes in size. + For a given blob, the block_id must be the same size for each block. + :param data: The blob data. + :param int length: Size of the block. + :keyword bool validate_content: + If true, calculates an MD5 hash for each chunk of the blob. The storage + service checks the hash of the content that has arrived with the hash + that was sent. This is primarily valuable for detecting bitflips on + the wire if using http instead of https, as https (the default), will + already validate. Note that this MD5 hash is not stored with the + blob. Also note that if enabled, the memory-efficient upload algorithm + will not be used because computing the MD5 hash requires buffering + entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. 
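For example, moving a block blob between access tiers with set_standard_blob_tier (placeholder names):

from azure.storage.blob import BlobClient, StandardBlobTier

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="archive", blob_name="2021-backup.tar")

# Cool for infrequently accessed data; Archive for rarely accessed data.
blob.set_standard_blob_tier(StandardBlobTier.Cool)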
+ :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword str encoding: + Defaults to UTF-8. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob property dict. + :rtype: dict[str, Any] + """ + options = self._stage_block_options( + block_id, + data, + length=length, + **kwargs) + try: + return self._client.block_blob.stage_block(**options) + except HttpResponseError as error: + process_storage_error(error) + + def _stage_block_from_url_options( + self, block_id, # type: str + source_url, # type: str + source_offset=None, # type: Optional[int] + source_length=None, # type: Optional[int] + source_content_md5=None, # type: Optional[Union[bytes, bytearray]] + **kwargs + ): + # type: (...) -> Dict[str, Any] + source_authorization = kwargs.pop('source_authorization', None) + if source_length is not None and source_offset is None: + raise ValueError("Source offset value must not be None if length is set.") + if source_length is not None: + source_length = source_offset + source_length - 1 + block_id = encode_base64(str(block_id)) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + range_header = None + if source_offset is not None: + range_header, _ = validate_and_format_range_headers(source_offset, source_length) + + cpk_scope_info = get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + options = { + 'copy_source_authorization': source_authorization, + 'block_id': block_id, + 'content_length': 0, + 'source_url': source_url, + 'source_range': range_header, + 'source_content_md5': bytearray(source_content_md5) if source_content_md5 else None, + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers, + } + options.update(kwargs) + return options + + @distributed_trace + def stage_block_from_url( + self, block_id, # type: Union[str, int] + source_url, # type: str + source_offset=None, # type: Optional[int] + source_length=None, # type: Optional[int] + source_content_md5=None, # type: Optional[Union[bytes, bytearray]] + **kwargs + ): + # type: (...) -> Dict[str, Any] + """Creates a new block to be committed as part of a blob where + the contents are read from a URL. + + :param str block_id: A string value that identifies the block. + The string should be less than or equal to 64 bytes in size. + For a given blob, the block_id must be the same size for each block. + :param str source_url: The URL. 
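A small stage_block sketch; blocks are uploaded individually and only become part of the blob once committed (see commit_block_list further below). The names and block id are placeholders.

import uuid
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="uploads", blob_name="big-file.bin")

block_id = uuid.uuid4().hex          # use the same id length for every block of this blob
blob.stage_block(block_id, b"first chunk of data")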
+ :param int source_offset: + Start of byte range to use for the block. + Must be set if source length is provided. + :param int source_length: The size of the block in bytes. + :param bytearray source_content_md5: + Specify the md5 calculated for the range of + bytes that must be read from the copy source. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. + :returns: Blob property dict. + :rtype: dict[str, Any] + """ + options = self._stage_block_from_url_options( + block_id, + source_url=self._encode_source_url(source_url), + source_offset=source_offset, + source_length=source_length, + source_content_md5=source_content_md5, + **kwargs) + try: + return self._client.block_blob.stage_block_from_url(**options) + except HttpResponseError as error: + process_storage_error(error) + + def _get_block_list_result(self, blocks): + # type: (BlockList) -> Tuple[List[BlobBlock], List[BlobBlock]] + committed = [] # type: List + uncommitted = [] # type: List + if blocks.committed_blocks: + committed = [BlobBlock._from_generated(b) for b in blocks.committed_blocks] # pylint: disable=protected-access + if blocks.uncommitted_blocks: + uncommitted = [BlobBlock._from_generated(b) for b in blocks.uncommitted_blocks] # pylint: disable=protected-access + return committed, uncommitted + + @distributed_trace + def get_block_list(self, block_list_type="committed", **kwargs): + # type: (Optional[str], **Any) -> Tuple[List[BlobBlock], List[BlobBlock]] + """The Get Block List operation retrieves the list of blocks that have + been uploaded as part of a block blob. + + :param str block_list_type: + Specifies whether to return the list of committed + blocks, the list of uncommitted blocks, or both lists together. + Possible values include: 'committed', 'uncommitted', 'all' + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on destination blob with a matching value. + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. 
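A sketch of stage_block_from_url, which copies a byte range from another blob URL into an uncommitted block of this blob; the source must be readable by the service (public or SAS-authorized). Names and the SAS token are placeholders.

import uuid
from azure.storage.blob import BlobClient

dest = BlobClient.from_connection_string(
    "<connection-string>", container_name="uploads", blob_name="assembled.bin")

dest.stage_block_from_url(
    block_id=uuid.uuid4().hex,
    source_url="https://myaccount.blob.core.windows.net/mycontainer/source-blob?<sas-token>",
    source_offset=0,
    source_length=4 * 1024 * 1024)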
+ :returns: A tuple of two lists - committed and uncommitted blocks + :rtype: tuple(list(~azure.storage.blob.BlobBlock), list(~azure.storage.blob.BlobBlock)) + """ + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + try: + blocks = self._client.block_blob.get_block_list( + list_type=block_list_type, + snapshot=self.snapshot, + timeout=kwargs.pop('timeout', None), + lease_access_conditions=access_conditions, + modified_access_conditions=mod_conditions, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + return self._get_block_list_result(blocks) + + def _commit_block_list_options( # type: ignore + self, block_list, # type: List[BlobBlock] + content_settings=None, # type: Optional[ContentSettings] + metadata=None, # type: Optional[Dict[str, str]] + **kwargs + ): + # type: (...) -> Dict[str, Any] + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + block_lookup = BlockLookupList(committed=[], uncommitted=[], latest=[]) + for block in block_list: + try: + if block.state.value == 'committed': + block_lookup.committed.append(encode_base64(str(block.id))) + elif block.state.value == 'uncommitted': + block_lookup.uncommitted.append(encode_base64(str(block.id))) + else: + block_lookup.latest.append(encode_base64(str(block.id))) + except AttributeError: + block_lookup.latest.append(encode_base64(str(block))) + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + blob_headers = None + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if content_settings: + blob_headers = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=content_settings.content_md5, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + + validate_content = kwargs.pop('validate_content', False) + cpk_scope_info = get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + immutability_policy = kwargs.pop('immutability_policy', None) + if immutability_policy: + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + + tier = kwargs.pop('standard_blob_tier', None) + blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) + + options = { + 'blocks': block_lookup, + 'blob_http_headers': blob_headers, + 'lease_access_conditions': access_conditions, + 'timeout': kwargs.pop('timeout', None), + 'modified_access_conditions': mod_conditions, + 'cls': return_response_headers, + 'validate_content': validate_content, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'tier': tier.value if tier else None, + 'blob_tags_string': blob_tags_string, + 'headers': headers + } + options.update(kwargs) + return options + + @distributed_trace + def commit_block_list( # type: ignore + self, block_list, # type: List[BlobBlock] + content_settings=None, # type: 
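A get_block_list sketch showing the committed/uncommitted split returned by the call above (placeholder names):

from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="uploads", blob_name="big-file.bin")

committed, uncommitted = blob.get_block_list("all")
for block in uncommitted:
    print(block.id, block.size)   # blocks staged but not yet committed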
Optional[ContentSettings] + metadata=None, # type: Optional[Dict[str, str]] + **kwargs + ): + # type: (...) -> Dict[str, Union[str, datetime]] + """The Commit Block List operation writes a blob by specifying the list of + block IDs that make up the blob. + + :param list block_list: + List of Blockblobs. + :param ~azure.storage.blob.ContentSettings content_settings: + ContentSettings object used to set blob properties. Used to set content type, encoding, + language, disposition, md5, and cache control. + :param metadata: + Name-value pairs associated with the blob as metadata. + :type metadata: dict[str, str] + :keyword tags: + Name-value pairs associated with the blob as tag. Tags are case-sensitive. + The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, + and tag values must be between 0 and 256 characters. + Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), + space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + + .. versionadded:: 12.4.0 + + :paramtype tags: dict(str, str) + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool validate_content: + If true, calculates an MD5 hash of the page content. The storage + service checks the hash of the content that has arrived + with the hash that was sent. This is primarily valuable for detecting + bitflips on the wire if using http instead of https, as https (the default), + will already validate. Note that this MD5 hash is not stored with the + blob. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on destination blob with a matching value. + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: + A standard blob tier value to set the blob to. For this version of the library, + this is only applicable to block blobs on standard storage accounts. 
+ :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified). + :rtype: dict(str, Any) + """ + options = self._commit_block_list_options( + block_list, + content_settings=content_settings, + metadata=metadata, + **kwargs) + try: + return self._client.block_blob.commit_block_list(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def set_premium_page_blob_tier(self, premium_page_blob_tier, **kwargs): + # type: (Union[str, PremiumPageBlobTier], **Any) -> None + """Sets the page blob tiers on the blob. This API is only supported for page blobs on premium accounts. + + :param premium_page_blob_tier: + A page blob tier value to set the blob to. The tier correlates to the size of the + blob and number of allowed IOPS. This is only applicable to page blobs on + premium storage accounts. + :type premium_page_blob_tier: ~azure.storage.blob.PremiumPageBlobTier + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. This method may make + multiple calls to the Azure service and the timeout will apply to + each call individually. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :rtype: None + """ + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if premium_page_blob_tier is None: + raise ValueError("A PremiumPageBlobTier must be specified") + try: + self._client.blob.set_tier( + tier=premium_page_blob_tier, + timeout=kwargs.pop('timeout', None), + lease_access_conditions=access_conditions, + modified_access_conditions=mod_conditions, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + + def _set_blob_tags_options(self, tags=None, **kwargs): + # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Any] + tags = serialize_blob_tags(tags) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + options = { + 'tags': tags, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cls': return_response_headers} + options.update(kwargs) + return options + + @distributed_trace + def set_blob_tags(self, tags=None, **kwargs): + # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Any] + """The Set Tags operation enables users to set tags on a blob or specific blob version, but not snapshot. 
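Putting the block APIs together, an illustrative end-to-end sketch: stage two blocks, then commit them in order with commit_block_list. The content settings, ids, and names are placeholders.

from azure.storage.blob import BlobBlock, BlobClient, ContentSettings

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="uploads", blob_name="big-file.bin")

block_ids = ["0" * 32, "1" * 32]          # equal-length ids, as required
blob.stage_block(block_ids[0], b"part one,")
blob.stage_block(block_ids[1], b" part two")

blob.commit_block_list(
    [BlobBlock(block_id=bid) for bid in block_ids],
    content_settings=ContentSettings(content_type="text/plain"))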
+ Each call to this operation replaces all existing tags attached to the blob. To remove all + tags from the blob, call this operation with no tags set. + + .. versionadded:: 12.4.0 + This operation was introduced in API version '2019-12-12'. + + :param tags: + Name-value pairs associated with the blob as tag. Tags are case-sensitive. + The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, + and tag values must be between 0 and 256 characters. + Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), + space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + :type tags: dict(str, str) + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to add tags to. + :keyword bool validate_content: + If true, calculates an MD5 hash of the tags content. The storage + service checks the hash of the content that has arrived + with the hash that was sent. This is primarily valuable for detecting + bitflips on the wire if using http instead of https, as https (the default), + will already validate. Note that this MD5 hash is not stored with the + blob. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on destination blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified) + :rtype: Dict[str, Any] + """ + options = self._set_blob_tags_options(tags=tags, **kwargs) + try: + return self._client.blob.set_tags(**options) + except HttpResponseError as error: + process_storage_error(error) + + def _get_blob_tags_options(self, **kwargs): + # type: (**Any) -> Dict[str, str] + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + options = { + 'version_id': kwargs.pop('version_id', None), + 'snapshot': self.snapshot, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'timeout': kwargs.pop('timeout', None), + 'cls': return_headers_and_deserialized} + return options + + @distributed_trace + def get_blob_tags(self, **kwargs): + # type: (**Any) -> Dict[str, str] + """The Get Tags operation enables users to get tags on a blob or specific blob version, or snapshot. + + .. versionadded:: 12.4.0 + This operation was introduced in API version '2019-12-12'. + + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to add tags to. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on destination blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Key value pairs of blob tags. 
+ :rtype: Dict[str, str] + """ + options = self._get_blob_tags_options(**kwargs) + try: + _, tags = self._client.blob.get_tags(**options) + return parse_tags(tags) # pylint: disable=protected-access + except HttpResponseError as error: + process_storage_error(error) + + def _get_page_ranges_options( # type: ignore + self, offset=None, # type: Optional[int] + length=None, # type: Optional[int] + previous_snapshot_diff=None, # type: Optional[Union[str, Dict[str, Any]]] + **kwargs + ): + # type: (...) -> Dict[str, Any] + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if length is not None and offset is None: + raise ValueError("Offset value must not be None if length is set.") + if length is not None: + length = offset + length - 1 # Reformat to an inclusive range index + page_range, _ = validate_and_format_range_headers( + offset, length, start_range_required=False, end_range_required=False, align_to_page=True + ) + options = { + 'snapshot': self.snapshot, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'timeout': kwargs.pop('timeout', None), + 'range': page_range} + if previous_snapshot_diff: + try: + options['prevsnapshot'] = previous_snapshot_diff.snapshot # type: ignore + except AttributeError: + try: + options['prevsnapshot'] = previous_snapshot_diff['snapshot'] # type: ignore + except TypeError: + options['prevsnapshot'] = previous_snapshot_diff + options.update(kwargs) + return options + + @distributed_trace + def get_page_ranges( # type: ignore + self, offset=None, # type: Optional[int] + length=None, # type: Optional[int] + previous_snapshot_diff=None, # type: Optional[Union[str, Dict[str, Any]]] + **kwargs + ): + # type: (...) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]] + """Returns the list of valid page ranges for a Page Blob or snapshot + of a page blob. + + :param int offset: + Start of byte range to use for getting valid page ranges. + If no length is given, all bytes after the offset will be searched. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :param int length: + Number of bytes to use for getting valid page ranges. + If length is given, offset must be provided. + This range will return valid page ranges from the offset start up to + the specified length. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :param str previous_snapshot_diff: + The snapshot diff parameter that contains an opaque DateTime value that + specifies a previous blob snapshot to be compared + against a more recent snapshot or the current blob. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. 
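A short sketch of the tag round trip with set_blob_tags and get_blob_tags described above; note that each set call replaces the whole tag set. Names are placeholders.

from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="data", blob_name="report.csv")

blob.set_blob_tags({"project": "alpha", "stage": "raw"})
print(blob.get_blob_tags())      # {'project': 'alpha', 'stage': 'raw'}

blob.set_blob_tags()             # calling with no tags removes all tags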
+ If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: + A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys. + The first element are filled page ranges, the 2nd element is cleared page ranges. + :rtype: tuple(list(dict(str, str), list(dict(str, str)) + """ + options = self._get_page_ranges_options( + offset=offset, + length=length, + previous_snapshot_diff=previous_snapshot_diff, + **kwargs) + try: + if previous_snapshot_diff: + ranges = self._client.page_blob.get_page_ranges_diff(**options) + else: + ranges = self._client.page_blob.get_page_ranges(**options) + except HttpResponseError as error: + process_storage_error(error) + return get_page_ranges_result(ranges) + + @distributed_trace + def get_page_range_diff_for_managed_disk( + self, previous_snapshot_url, # type: str + offset=None, # type: Optional[int] + length=None, # type: Optional[int] + **kwargs + ): + # type: (...) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]] + """Returns the list of valid page ranges for a managed disk or snapshot. + + .. note:: + This operation is only available for managed disk accounts. + + .. versionadded:: 12.2.0 + This operation was introduced in API version '2019-07-07'. + + :param previous_snapshot_url: + Specifies the URL of a previous snapshot of the managed disk. + The response will only contain pages that were changed between the target blob and + its previous snapshot. + :param int offset: + Start of byte range to use for getting valid page ranges. + If no length is given, all bytes after the offset will be searched. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :param int length: + Number of bytes to use for getting valid page ranges. + If length is given, offset must be provided. + This range will return valid page ranges from the offset start up to + the specified length. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. 
Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: + A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys. + The first element are filled page ranges, the 2nd element is cleared page ranges. + :rtype: tuple(list(dict(str, str), list(dict(str, str)) + """ + options = self._get_page_ranges_options( + offset=offset, + length=length, + prev_snapshot_url=previous_snapshot_url, + **kwargs) + try: + ranges = self._client.page_blob.get_page_ranges_diff(**options) + except HttpResponseError as error: + process_storage_error(error) + return get_page_ranges_result(ranges) + + def _set_sequence_number_options(self, sequence_number_action, sequence_number=None, **kwargs): + # type: (Union[str, SequenceNumberAction], Optional[str], **Any) -> Dict[str, Any] + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if sequence_number_action is None: + raise ValueError("A sequence number action must be specified") + options = { + 'sequence_number_action': sequence_number_action, + 'timeout': kwargs.pop('timeout', None), + 'blob_sequence_number': sequence_number, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cls': return_response_headers} + options.update(kwargs) + return options + + @distributed_trace + def set_sequence_number(self, sequence_number_action, sequence_number=None, **kwargs): + # type: (Union[str, SequenceNumberAction], Optional[str], **Any) -> Dict[str, Union[str, datetime]] + """Sets the blob sequence number. + + :param str sequence_number_action: + This property indicates how the service should modify the blob's sequence + number. See :class:`~azure.storage.blob.SequenceNumberAction` for more information. + :param str sequence_number: + This property sets the blob's sequence number. The sequence number is a + user-controlled property that you can use to track requests and manage + concurrency issues. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. 
+ Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified). + :rtype: dict(str, Any) + """ + options = self._set_sequence_number_options( + sequence_number_action, sequence_number=sequence_number, **kwargs) + try: + return self._client.page_blob.update_sequence_number(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _resize_blob_options(self, size, **kwargs): + # type: (int, **Any) -> Dict[str, Any] + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if size is None: + raise ValueError("A content length must be specified for a Page Blob.") + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + options = { + 'blob_content_length': size, + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_info': cpk_info, + 'cls': return_response_headers} + options.update(kwargs) + return options + + @distributed_trace + def resize_blob(self, size, **kwargs): + # type: (int, **Any) -> Dict[str, Union[str, datetime]] + """Resizes a page blob to the specified size. + + If the specified value is less than the current size of the blob, + then all pages above the specified value are cleared. + + :param int size: + Size used to resize blob. Maximum size for a page blob is up to 1 TB. + The page blob size must be aligned to a 512-byte boundary. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). 
Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: + A page blob tier value to set the blob to. The tier correlates to the size of the + blob and number of allowed IOPS. This is only applicable to page blobs on + premium storage accounts. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified). + :rtype: dict(str, Any) + """ + options = self._resize_blob_options(size, **kwargs) + try: + return self._client.page_blob.resize(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _upload_page_options( # type: ignore + self, page, # type: bytes + offset, # type: int + length, # type: int + **kwargs + ): + # type: (...) -> Dict[str, Any] + if isinstance(page, six.text_type): + page = page.encode(kwargs.pop('encoding', 'UTF-8')) + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + + if offset is None or offset % 512 != 0: + raise ValueError("offset must be an integer that aligns with 512 page size") + if length is None or length % 512 != 0: + raise ValueError("length must be an integer that aligns with 512 page size") + end_range = offset + length - 1 # Reformat to an inclusive range index + content_range = 'bytes={0}-{1}'.format(offset, end_range) # type: ignore + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + seq_conditions = SequenceNumberAccessConditions( + if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None), + if_sequence_number_less_than=kwargs.pop('if_sequence_number_lt', None), + if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None) + ) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + validate_content = kwargs.pop('validate_content', False) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + options = { + 'body': page[:length], + 'content_length': length, + 'transactional_content_md5': None, + 'timeout': kwargs.pop('timeout', None), + 'range': content_range, + 'lease_access_conditions': access_conditions, + 'sequence_number_access_conditions': seq_conditions, + 'modified_access_conditions': mod_conditions, + 'validate_content': validate_content, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers} + options.update(kwargs) + return options + + @distributed_trace + def upload_page( # type: ignore + self, page, # type: bytes + offset, # type: int + length, # type: int + **kwargs + ): + # type: (...) -> Dict[str, Union[str, datetime]] + """The Upload Pages operation writes a range of pages to a page blob. + + :param bytes page: + Content of the page. + :param int offset: + Start of byte range to use for writing to a section of the blob. 
+ Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :param int length: + Number of bytes to use for writing to a section of the blob. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword bool validate_content: + If true, calculates an MD5 hash of the page content. The storage + service checks the hash of the content that has arrived + with the hash that was sent. This is primarily valuable for detecting + bitflips on the wire if using http instead of https, as https (the default), + will already validate. Note that this MD5 hash is not stored with the + blob. + :keyword int if_sequence_number_lte: + If the blob's sequence number is less than or equal to + the specified value, the request proceeds; otherwise it fails. + :keyword int if_sequence_number_lt: + If the blob's sequence number is less than the specified + value, the request proceeds; otherwise it fails. + :keyword int if_sequence_number_eq: + If the blob's sequence number is equal to the specified + value, the request proceeds; otherwise it fails. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword str encoding: + Defaults to UTF-8. + :keyword int timeout: + The timeout parameter is expressed in seconds. 
+ :returns: Blob-updated property dict (Etag and last modified).
+ :rtype: dict(str, Any)
+ """
+ options = self._upload_page_options(
+ page=page,
+ offset=offset,
+ length=length,
+ **kwargs)
+ try:
+ return self._client.page_blob.upload_pages(**options) # type: ignore
+ except HttpResponseError as error:
+ process_storage_error(error)
+
+ def _upload_pages_from_url_options( # type: ignore
+ self, source_url, # type: str
+ offset, # type: int
+ length, # type: int
+ source_offset, # type: int
+ **kwargs
+ ):
+ # type: (...) -> Dict[str, Any]
+ if self.require_encryption or (self.key_encryption_key is not None):
+ raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
+
+ # TODO: extract the code to a method format_range
+ if offset is None or offset % 512 != 0:
+ raise ValueError("offset must be an integer that aligns with 512 page size")
+ if length is None or length % 512 != 0:
+ raise ValueError("length must be an integer that aligns with 512 page size")
+ if source_offset is None or source_offset % 512 != 0:
+ raise ValueError("source_offset must be an integer that aligns with 512 page size")
+
+ # Format range
+ end_range = offset + length - 1
+ destination_range = 'bytes={0}-{1}'.format(offset, end_range)
+ source_range = 'bytes={0}-{1}'.format(source_offset, source_offset + length - 1) # Inclusive end of the source range
+
+ seq_conditions = SequenceNumberAccessConditions(
+ if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None),
+ if_sequence_number_less_than=kwargs.pop('if_sequence_number_lt', None),
+ if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None)
+ )
+ source_authorization = kwargs.pop('source_authorization', None)
+ access_conditions = get_access_conditions(kwargs.pop('lease', None))
+ mod_conditions = get_modify_conditions(kwargs)
+ source_mod_conditions = get_source_conditions(kwargs)
+ cpk_scope_info = get_cpk_scope_info(kwargs)
+ source_content_md5 = kwargs.pop('source_content_md5', None)
+ cpk = kwargs.pop('cpk', None)
+ cpk_info = None
+ if cpk:
+ if self.scheme.lower() != 'https':
+ raise ValueError("Customer provided encryption key must be used over HTTPS.")
+ cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash,
+ encryption_algorithm=cpk.algorithm)
+
+ options = {
+ 'copy_source_authorization': source_authorization,
+ 'source_url': source_url,
+ 'content_length': 0,
+ 'source_range': source_range,
+ 'range': destination_range,
+ 'source_content_md5': bytearray(source_content_md5) if source_content_md5 else None,
+ 'timeout': kwargs.pop('timeout', None),
+ 'lease_access_conditions': access_conditions,
+ 'sequence_number_access_conditions': seq_conditions,
+ 'modified_access_conditions': mod_conditions,
+ 'source_modified_access_conditions': source_mod_conditions,
+ 'cpk_scope_info': cpk_scope_info,
+ 'cpk_info': cpk_info,
+ 'cls': return_response_headers}
+ options.update(kwargs)
+ return options
+
+ @distributed_trace
+ def upload_pages_from_url(self, source_url, # type: str
+ offset, # type: int
+ length, # type: int
+ source_offset, # type: int
+ **kwargs
+ ):
+ # type: (...) -> Dict[str, Any]
+ """
+ The Upload Pages operation writes a range of pages to a page blob where
+ the contents are read from a URL.
+
+ :param str source_url:
+ The URL of the source data. It can point to any Azure Blob or File, that is either public or has a
+ shared access signature attached.
+ :param int offset:
+ Start of byte range to use for writing to a section of the blob.
+ Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :param int length: + Number of bytes to use for writing to a section of the blob. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :param int source_offset: + This indicates the start of the range of bytes(inclusive) that has to be taken from the copy source. + The service will read the same number of bytes as the destination range (length-offset). + :keyword bytes source_content_md5: + If given, the service will calculate the MD5 hash of the block content and compare against this value. + :keyword ~datetime.datetime source_if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the source resource has been modified since the specified time. + :keyword ~datetime.datetime source_if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the source resource has not been modified since the specified date/time. + :keyword str source_etag: + The source ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions source_match_condition: + The source match condition to use upon the etag. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int if_sequence_number_lte: + If the blob's sequence number is less than or equal to + the specified value, the request proceeds; otherwise it fails. + :keyword int if_sequence_number_lt: + If the blob's sequence number is less than the specified + value, the request proceeds; otherwise it fails. + :keyword int if_sequence_number_eq: + If the blob's sequence number is equal to the specified + value, the request proceeds; otherwise it fails. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + The destination ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. 
+ :keyword ~azure.core.MatchConditions match_condition: + The destination match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. + """ + options = self._upload_pages_from_url_options( + source_url=self._encode_source_url(source_url), + offset=offset, + length=length, + source_offset=source_offset, + **kwargs + ) + try: + return self._client.page_blob.upload_pages_from_url(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _clear_page_options(self, offset, length, **kwargs): + # type: (int, int, **Any) -> Dict[str, Any] + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + seq_conditions = SequenceNumberAccessConditions( + if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None), + if_sequence_number_less_than=kwargs.pop('if_sequence_number_lt', None), + if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None) + ) + mod_conditions = get_modify_conditions(kwargs) + if offset is None or offset % 512 != 0: + raise ValueError("offset must be an integer that aligns with 512 page size") + if length is None or length % 512 != 0: + raise ValueError("length must be an integer that aligns with 512 page size") + end_range = length + offset - 1 # Reformat to an inclusive range index + content_range = 'bytes={0}-{1}'.format(offset, end_range) + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'content_length': 0, + 'timeout': kwargs.pop('timeout', None), + 'range': content_range, + 'lease_access_conditions': access_conditions, + 'sequence_number_access_conditions': seq_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_info': cpk_info, + 'cls': return_response_headers} + options.update(kwargs) + return options + + @distributed_trace + def clear_page(self, offset, length, **kwargs): + # type: (int, int, **Any) -> Dict[str, Union[str, datetime]] + """Clears a range of pages. 
+ + :param int offset: + Start of byte range to use for writing to a section of the blob. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :param int length: + Number of bytes to use for writing to a section of the blob. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int if_sequence_number_lte: + If the blob's sequence number is less than or equal to + the specified value, the request proceeds; otherwise it fails. + :keyword int if_sequence_number_lt: + If the blob's sequence number is less than the specified + value, the request proceeds; otherwise it fails. + :keyword int if_sequence_number_eq: + If the blob's sequence number is equal to the specified + value, the request proceeds; otherwise it fails. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified). + :rtype: dict(str, Any) + """ + options = self._clear_page_options(offset, length, **kwargs) + try: + return self._client.page_blob.clear_pages(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _append_block_options( # type: ignore + self, data, # type: Union[AnyStr, Iterable[AnyStr], IO[AnyStr]] + length=None, # type: Optional[int] + **kwargs + ): + # type: (...) 
-> Dict[str, Any] + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + + if isinstance(data, six.text_type): + data = data.encode(kwargs.pop('encoding', 'UTF-8')) # type: ignore + if length is None: + length = get_length(data) + if length is None: + length, data = read_length(data) + if length == 0: + return {} + if isinstance(data, bytes): + data = data[:length] + + appendpos_condition = kwargs.pop('appendpos_condition', None) + maxsize_condition = kwargs.pop('maxsize_condition', None) + validate_content = kwargs.pop('validate_content', False) + append_conditions = None + if maxsize_condition or appendpos_condition is not None: + append_conditions = AppendPositionAccessConditions( + max_size=maxsize_condition, + append_position=appendpos_condition + ) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + options = { + 'body': data, + 'content_length': length, + 'timeout': kwargs.pop('timeout', None), + 'transactional_content_md5': None, + 'lease_access_conditions': access_conditions, + 'append_position_access_conditions': append_conditions, + 'modified_access_conditions': mod_conditions, + 'validate_content': validate_content, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers} + options.update(kwargs) + return options + + @distributed_trace + def append_block( # type: ignore + self, data, # type: Union[AnyStr, Iterable[AnyStr], IO[AnyStr]] + length=None, # type: Optional[int] + **kwargs + ): + # type: (...) -> Dict[str, Union[str, datetime, int]] + """Commits a new block of data to the end of the existing append blob. + + :param data: + Content of the block. This can be bytes, text, an iterable or a file-like object. + :type data: bytes or str or Iterable + :param int length: + Size of the block in bytes. + :keyword bool validate_content: + If true, calculates an MD5 hash of the block content. The storage + service checks the hash of the content that has arrived + with the hash that was sent. This is primarily valuable for detecting + bitflips on the wire if using http instead of https, as https (the default), + will already validate. Note that this MD5 hash is not stored with the + blob. + :keyword int maxsize_condition: + Optional conditional header. The max length in bytes permitted for + the append blob. If the Append Block operation would cause the blob + to exceed that limit or if the blob size is already greater than the + value specified in this header, the request will fail with + MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). + :keyword int appendpos_condition: + Optional conditional header, used only for the Append Block operation. + A number indicating the byte offset to compare. Append Block will + succeed only if the append position is equal to this number. If it + is not, the request will fail with the AppendPositionConditionNotMet error + (HTTP status code 412 - Precondition Failed). + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. 
+ :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword str encoding: + Defaults to UTF-8. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag, last modified, append offset, committed block count). + :rtype: dict(str, Any) + """ + options = self._append_block_options( + data, + length=length, + **kwargs + ) + try: + return self._client.append_blob.append_block(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _append_block_from_url_options( # type: ignore + self, copy_source_url, # type: str + source_offset=None, # type: Optional[int] + source_length=None, # type: Optional[int] + **kwargs + ): + # type: (...) 
-> Dict[str, Any] + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + + # If end range is provided, start range must be provided + if source_length is not None and source_offset is None: + raise ValueError("source_offset should also be specified if source_length is specified") + # Format based on whether length is present + source_range = None + if source_length is not None: + end_range = source_offset + source_length - 1 + source_range = 'bytes={0}-{1}'.format(source_offset, end_range) + elif source_offset is not None: + source_range = "bytes={0}-".format(source_offset) + + appendpos_condition = kwargs.pop('appendpos_condition', None) + maxsize_condition = kwargs.pop('maxsize_condition', None) + source_content_md5 = kwargs.pop('source_content_md5', None) + append_conditions = None + if maxsize_condition or appendpos_condition is not None: + append_conditions = AppendPositionAccessConditions( + max_size=maxsize_condition, + append_position=appendpos_condition + ) + source_authorization = kwargs.pop('source_authorization', None) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + source_mod_conditions = get_source_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'copy_source_authorization': source_authorization, + 'source_url': copy_source_url, + 'content_length': 0, + 'source_range': source_range, + 'source_content_md5': source_content_md5, + 'transactional_content_md5': None, + 'lease_access_conditions': access_conditions, + 'append_position_access_conditions': append_conditions, + 'modified_access_conditions': mod_conditions, + 'source_modified_access_conditions': source_mod_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers, + 'timeout': kwargs.pop('timeout', None)} + options.update(kwargs) + return options + + @distributed_trace + def append_block_from_url(self, copy_source_url, # type: str + source_offset=None, # type: Optional[int] + source_length=None, # type: Optional[int] + **kwargs): + # type: (...) -> Dict[str, Union[str, datetime, int]] + """ + Creates a new block to be committed as part of a blob, where the contents are read from a source url. + + :param str copy_source_url: + The URL of the source data. It can point to any Azure Blob or File, that is either public or has a + shared access signature attached. + :param int source_offset: + This indicates the start of the range of bytes (inclusive) that has to be taken from the copy source. + :param int source_length: + This indicates the end of the range of bytes that has to be taken from the copy source. + :keyword bytearray source_content_md5: + If given, the service will calculate the MD5 hash of the block content and compare against this value. + :keyword int maxsize_condition: + Optional conditional header. The max length in bytes permitted for + the append blob. 
If the Append Block operation would cause the blob + to exceed that limit or if the blob size is already greater than the + value specified in this header, the request will fail with + MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). + :keyword int appendpos_condition: + Optional conditional header, used only for the Append Block operation. + A number indicating the byte offset to compare. Append Block will + succeed only if the append position is equal to this number. If it + is not, the request will fail with the + AppendPositionConditionNotMet error + (HTTP status code 412 - Precondition Failed). + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + The destination ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The destination match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~datetime.datetime source_if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the source resource has been modified since the specified time. + :keyword ~datetime.datetime source_if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the source resource has not been modified since the specified date/time. + :keyword str source_etag: + The source ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions source_match_condition: + The source match condition to use upon the etag. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. 
+ :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. + """ + options = self._append_block_from_url_options( + copy_source_url=self._encode_source_url(copy_source_url), + source_offset=source_offset, + source_length=source_length, + **kwargs + ) + try: + return self._client.append_blob.append_block_from_url(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _seal_append_blob_options(self, **kwargs): + # type: (...) -> Dict[str, Any] + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + + appendpos_condition = kwargs.pop('appendpos_condition', None) + append_conditions = None + if appendpos_condition is not None: + append_conditions = AppendPositionAccessConditions( + append_position=appendpos_condition + ) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + options = { + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'append_position_access_conditions': append_conditions, + 'modified_access_conditions': mod_conditions, + 'cls': return_response_headers} + options.update(kwargs) + return options + + @distributed_trace + def seal_append_blob(self, **kwargs): + # type: (...) -> Dict[str, Union[str, datetime, int]] + """The Seal operation seals the Append Blob to make it read-only. + + .. versionadded:: 12.4.0 + + :keyword int appendpos_condition: + Optional conditional header, used only for the Append Block operation. + A number indicating the byte offset to compare. Append Block will + succeed only if the append position is equal to this number. If it + is not, the request will fail with the AppendPositionConditionNotMet error + (HTTP status code 412 - Precondition Failed). + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). 
Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag, last modified, append offset, committed block count). + :rtype: dict(str, Any) + """ + options = self._seal_append_blob_options(**kwargs) + try: + return self._client.append_blob.seal(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def _get_container_client(self): # pylint: disable=client-method-missing-kwargs + # type: (...) -> ContainerClient + """Get a client to interact with the blob's parent container. + + The container need not already exist. Defaults to current blob's credentials. + + :returns: A ContainerClient. + :rtype: ~azure.storage.blob.ContainerClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START get_container_client_from_blob_client] + :end-before: [END get_container_client_from_blob_client] + :language: python + :dedent: 8 + :caption: Get container client from blob object. + """ + from ._container_client import ContainerClient + if not isinstance(self._pipeline._transport, TransportWrapper): # pylint: disable = protected-access + _pipeline = Pipeline( + transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + policies=self._pipeline._impl_policies # pylint: disable = protected-access + ) + else: + _pipeline = self._pipeline # pylint: disable = protected-access + return ContainerClient( + "{}://{}".format(self.scheme, self.primary_hostname), container_name=self.container_name, + credential=self._raw_credential, api_version=self.api_version, _configuration=self._config, + _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, + require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, + key_resolver_function=self.key_resolver_function) diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_blob_service_client.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_blob_service_client.py new file mode 100644 index 00000000000..6740dc36062 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_blob_service_client.py @@ -0,0 +1,740 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +import functools +import warnings +from typing import ( # pylint: disable=unused-import + Union, Optional, Any, Iterable, Dict, List, + TYPE_CHECKING, + TypeVar) + + +try: + from urllib.parse import urlparse +except ImportError: + from urlparse import urlparse # type: ignore + +from azure.core.paging import ItemPaged +from azure.core.exceptions import HttpResponseError +from azure.core.pipeline import Pipeline +from azure.core.tracing.decorator import distributed_trace + +from ._shared.models import LocationMode +from ._shared.base_client import StorageAccountHostsMixin, TransportWrapper, parse_connection_str, parse_query +from ._shared.parser import _to_utc_datetime +from ._shared.response_handlers import return_response_headers, process_storage_error, \ + parse_to_internal_user_delegation_key +from ._generated import AzureBlobStorage +from ._generated.models import StorageServiceProperties, KeyInfo +from ._container_client import ContainerClient +from ._blob_client import BlobClient +from ._models import ContainerPropertiesPaged +from ._list_blobs_helper import FilteredBlobPaged +from ._serialize import get_api_version +from ._deserialize import service_stats_deserialize, service_properties_deserialize + +if TYPE_CHECKING: + from datetime import datetime + from ._shared.models import UserDelegationKey + from ._lease import BlobLeaseClient + from ._models import ( + ContainerProperties, + BlobProperties, + PublicAccess, + BlobAnalyticsLogging, + Metrics, + CorsRule, + RetentionPolicy, + StaticWebsite, + FilteredBlob + ) + +ClassType = TypeVar("ClassType") + + +class BlobServiceClient(StorageAccountHostsMixin): + """A client to interact with the Blob Service at the account level. + + This client provides operations to retrieve and configure the account properties + as well as list, create and delete containers within the account. + For operations relating to a specific container or blob, clients for those entities + can also be retrieved using the `get_client` functions. + + For more optional configuration, please click + `here `_. + + :param str account_url: + The URL to the blob storage account. Any other entities included + in the URL path (e.g. container or blob) will be discarded. This URL can be optionally + authenticated with a SAS token. + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token. The value can be a SAS token string, + an instance of a AzureSasCredential from azure.core.credentials, an account + shared access key, or an instance of a TokenCredentials class from azure.identity. + If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential + - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + :keyword str api_version: + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. + + .. versionadded:: 12.2.0 + + :keyword str secondary_hostname: + The hostname of the secondary endpoint. + :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks. + Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_put_size: If the blob size is less than or equal max_single_put_size, then the blob will be + uploaded with only one http PUT request. 
If the blob size is larger than max_single_put_size, + the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. + :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient + algorithm when uploading a block blob. Defaults to 4*1024*1024+1. + :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False. + :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call, + the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. + :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, + or 4MB. + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_authentication.py + :start-after: [START create_blob_service_client] + :end-before: [END create_blob_service_client] + :language: python + :dedent: 8 + :caption: Creating the BlobServiceClient with account url and credential. + + .. literalinclude:: ../samples/blob_samples_authentication.py + :start-after: [START create_blob_service_client_oauth] + :end-before: [END create_blob_service_client_oauth] + :language: python + :dedent: 8 + :caption: Creating the BlobServiceClient with Azure Identity credentials. + """ + + def __init__( + self, account_url, # type: str + credential=None, # type: Optional[Any] + **kwargs # type: Any + ): + # type: (...) -> None + try: + if not account_url.lower().startswith('http'): + account_url = "https://" + account_url + except AttributeError: + raise ValueError("Account URL must be a string.") + parsed_url = urlparse(account_url.rstrip('/')) + if not parsed_url.netloc: + raise ValueError("Invalid URL: {}".format(account_url)) + + _, sas_token = parse_query(parsed_url.query) + self._query_str, credential = self._format_query_string(sas_token, credential) + super(BlobServiceClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) + self._client = AzureBlobStorage(self.url, pipeline=self._pipeline) + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access + + def _format_url(self, hostname): + """Format the endpoint URL according to the current location + mode hostname. + """ + return "{}://{}/{}".format(self.scheme, hostname, self._query_str) + + @classmethod + def from_connection_string( + cls, # type: Type[ClassType] + conn_str, # type: str + credential=None, # type: Optional[Any] + **kwargs # type: Any + ): # type: (...) -> ClassType + """Create BlobServiceClient from a Connection String. + + :param str conn_str: + A connection string to an Azure Storage account. + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token, or the connection string already has shared + access key values. The value can be a SAS token string, + an instance of a AzureSasCredential from azure.core.credentials, an account shared access + key, or an instance of a TokenCredentials class from azure.identity. + Credentials provided here will take precedence over those in the connection string. + :returns: A Blob service client. + :rtype: ~azure.storage.blob.BlobServiceClient + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/blob_samples_authentication.py + :start-after: [START auth_from_connection_string] + :end-before: [END auth_from_connection_string] + :language: python + :dedent: 8 + :caption: Creating the BlobServiceClient from a connection string. + """ + account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob') + if 'secondary_hostname' not in kwargs: + kwargs['secondary_hostname'] = secondary + return cls(account_url, credential=credential, **kwargs) + + @distributed_trace + def get_user_delegation_key(self, key_start_time, # type: datetime + key_expiry_time, # type: datetime + **kwargs # type: Any + ): + # type: (...) -> UserDelegationKey + """ + Obtain a user delegation key for the purpose of signing SAS tokens. + A token credential must be present on the service object for this request to succeed. + + :param ~datetime.datetime key_start_time: + A DateTime value. Indicates when the key becomes valid. + :param ~datetime.datetime key_expiry_time: + A DateTime value. Indicates when the key stops being valid. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :return: The user delegation key. + :rtype: ~azure.storage.blob.UserDelegationKey + """ + key_info = KeyInfo(start=_to_utc_datetime(key_start_time), expiry=_to_utc_datetime(key_expiry_time)) + timeout = kwargs.pop('timeout', None) + try: + user_delegation_key = self._client.service.get_user_delegation_key(key_info=key_info, + timeout=timeout, + **kwargs) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + return parse_to_internal_user_delegation_key(user_delegation_key) # type: ignore + + @distributed_trace + def get_account_information(self, **kwargs): + # type: (Any) -> Dict[str, str] + """Gets information related to the storage account. + + The information can also be retrieved if the user has a SAS to a container or blob. + The keys in the returned dictionary include 'sku_name' and 'account_kind'. + + :returns: A dict of account information (SKU and account type). + :rtype: dict(str, str) + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service.py + :start-after: [START get_blob_service_account_info] + :end-before: [END get_blob_service_account_info] + :language: python + :dedent: 8 + :caption: Getting account information for the blob service. + """ + try: + return self._client.service.get_account_info(cls=return_response_headers, **kwargs) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def get_service_stats(self, **kwargs): + # type: (**Any) -> Dict[str, Any] + """Retrieves statistics related to replication for the Blob service. + + It is only available when read-access geo-redundant replication is enabled for + the storage account. + + With geo-redundant replication, Azure Storage maintains your data durable + in two locations. In both locations, Azure Storage constantly maintains + multiple healthy replicas of your data. The location where you read, + create, update, or delete data is the primary storage account location. + The primary location exists in the region you choose at the time you + create an account via the Azure Management Azure classic portal, for + example, North Central US. The location to which your data is replicated + is the secondary location. The secondary location is automatically + determined based on the location of the primary; it is in a second data + center that resides in the same region as the primary location. 
Read-only + access is available from the secondary location, if read-access geo-redundant + replication is enabled for your storage account. + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :return: The blob service stats. + :rtype: Dict[str, Any] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service.py + :start-after: [START get_blob_service_stats] + :end-before: [END get_blob_service_stats] + :language: python + :dedent: 8 + :caption: Getting service stats for the blob service. + """ + timeout = kwargs.pop('timeout', None) + try: + stats = self._client.service.get_statistics( # type: ignore + timeout=timeout, use_location=LocationMode.SECONDARY, **kwargs) + return service_stats_deserialize(stats) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def get_service_properties(self, **kwargs): + # type: (Any) -> Dict[str, Any] + """Gets the properties of a storage account's Blob service, including + Azure Storage Analytics. + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: An object containing blob service properties such as + analytics logging, hour/minute metrics, cors rules, etc. + :rtype: Dict[str, Any] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service.py + :start-after: [START get_blob_service_properties] + :end-before: [END get_blob_service_properties] + :language: python + :dedent: 8 + :caption: Getting service properties for the blob service. + """ + timeout = kwargs.pop('timeout', None) + try: + service_props = self._client.service.get_properties(timeout=timeout, **kwargs) + return service_properties_deserialize(service_props) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def set_service_properties( + self, analytics_logging=None, # type: Optional[BlobAnalyticsLogging] + hour_metrics=None, # type: Optional[Metrics] + minute_metrics=None, # type: Optional[Metrics] + cors=None, # type: Optional[List[CorsRule]] + target_version=None, # type: Optional[str] + delete_retention_policy=None, # type: Optional[RetentionPolicy] + static_website=None, # type: Optional[StaticWebsite] + **kwargs + ): + # type: (...) -> None + """Sets the properties of a storage account's Blob service, including + Azure Storage Analytics. + + If an element (e.g. analytics_logging) is left as None, the + existing settings on the service for that functionality are preserved. + + :param analytics_logging: + Groups the Azure Analytics Logging settings. + :type analytics_logging: ~azure.storage.blob.BlobAnalyticsLogging + :param hour_metrics: + The hour metrics settings provide a summary of request + statistics grouped by API in hourly aggregates for blobs. + :type hour_metrics: ~azure.storage.blob.Metrics + :param minute_metrics: + The minute metrics settings provide request statistics + for each minute for blobs. + :type minute_metrics: ~azure.storage.blob.Metrics + :param cors: + You can include up to five CorsRule elements in the + list. If an empty list is specified, all CORS rules will be deleted, + and CORS will be disabled for the service. + :type cors: list[~azure.storage.blob.CorsRule] + :param str target_version: + Indicates the default version to use for requests if an incoming + request's version is not specified. + :param delete_retention_policy: + The delete retention policy specifies whether to retain deleted blobs. + It also specifies the number of days and versions of blob to keep. 
+ :type delete_retention_policy: ~azure.storage.blob.RetentionPolicy + :param static_website: + Specifies whether the static website feature is enabled, + and if yes, indicates the index document and 404 error document to use. + :type static_website: ~azure.storage.blob.StaticWebsite + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: None + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service.py + :start-after: [START set_blob_service_properties] + :end-before: [END set_blob_service_properties] + :language: python + :dedent: 8 + :caption: Setting service properties for the blob service. + """ + if all(parameter is None for parameter in [ + analytics_logging, hour_metrics, minute_metrics, cors, + target_version, delete_retention_policy, static_website]): + raise ValueError("set_service_properties should be called with at least one parameter") + + props = StorageServiceProperties( + logging=analytics_logging, + hour_metrics=hour_metrics, + minute_metrics=minute_metrics, + cors=cors, + default_service_version=target_version, + delete_retention_policy=delete_retention_policy, + static_website=static_website + ) + timeout = kwargs.pop('timeout', None) + try: + self._client.service.set_properties(props, timeout=timeout, **kwargs) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def list_containers( + self, name_starts_with=None, # type: Optional[str] + include_metadata=False, # type: Optional[bool] + **kwargs + ): + # type: (...) -> ItemPaged[ContainerProperties] + """Returns a generator to list the containers under the specified account. + + The generator will lazily follow the continuation tokens returned by + the service and stop when all containers have been returned. + + :param str name_starts_with: + Filters the results to return only containers whose names + begin with the specified prefix. + :param bool include_metadata: + Specifies that container metadata to be returned in the response. + The default value is `False`. + :keyword bool include_deleted: + Specifies that deleted containers to be returned in the response. This is for container restore enabled + account. The default value is `False`. + .. versionadded:: 12.4.0 + :keyword bool include_system: + Flag specifying that system containers should be included. + .. versionadded:: 12.10.0 + :keyword int results_per_page: + The maximum number of container names to retrieve per API + call. If the request does not specify the server will return up to 5,000 items. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: An iterable (auto-paging) of ContainerProperties. + :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.ContainerProperties] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service.py + :start-after: [START bsc_list_containers] + :end-before: [END bsc_list_containers] + :language: python + :dedent: 12 + :caption: Listing the containers in the blob service. 
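+
+        A minimal inline sketch of typical usage, with a hypothetical connection
+        string and container-name prefix (any supported credential works the same)::
+
+            from azure.storage.blob import BlobServiceClient
+
+            # Hypothetical connection string; a SAS token, account key, or token
+            # credential could be passed to the constructor instead.
+            service = BlobServiceClient.from_connection_string(
+                "DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=<key>;EndpointSuffix=core.windows.net")
+            # Lazily pages through containers whose names start with "logs",
+            # asking the service to include container metadata in each entry.
+            for container in service.list_containers(name_starts_with="logs", include_metadata=True):
+                print(container.name, container.metadata)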
+ """ + include = ['metadata'] if include_metadata else [] + include_deleted = kwargs.pop('include_deleted', None) + if include_deleted: + include.append("deleted") + include_system = kwargs.pop('include_system', None) + if include_system: + include.append("system") + + timeout = kwargs.pop('timeout', None) + results_per_page = kwargs.pop('results_per_page', None) + command = functools.partial( + self._client.service.list_containers_segment, + prefix=name_starts_with, + include=include, + timeout=timeout, + **kwargs) + return ItemPaged( + command, + prefix=name_starts_with, + results_per_page=results_per_page, + page_iterator_class=ContainerPropertiesPaged + ) + + @distributed_trace + def find_blobs_by_tags(self, filter_expression, **kwargs): + # type: (str, **Any) -> ItemPaged[FilteredBlob] + """The Filter Blobs operation enables callers to list blobs across all + containers whose tags match a given search expression. Filter blobs + searches across all containers within a storage account but can be + scoped within the expression to a single container. + + :param str filter_expression: + The expression to find blobs whose tags matches the specified condition. + eg. "\"yourtagname\"='firsttag' and \"yourtagname2\"='secondtag'" + To specify a container, eg. "@container='containerName' and \"Name\"='C'" + :keyword int results_per_page: + The max result per page when paginating. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: An iterable (auto-paging) response of BlobProperties. + :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.FilteredBlob] + """ + + results_per_page = kwargs.pop('results_per_page', None) + timeout = kwargs.pop('timeout', None) + command = functools.partial( + self._client.service.filter_blobs, + where=filter_expression, + timeout=timeout, + **kwargs) + return ItemPaged( + command, results_per_page=results_per_page, + page_iterator_class=FilteredBlobPaged) + + @distributed_trace + def create_container( + self, name, # type: str + metadata=None, # type: Optional[Dict[str, str]] + public_access=None, # type: Optional[Union[PublicAccess, str]] + **kwargs + ): + # type: (...) -> ContainerClient + """Creates a new container under the specified account. + + If the container with the same name already exists, a ResourceExistsError will + be raised. This method returns a client with which to interact with the newly + created container. + + :param str name: The name of the container to create. + :param metadata: + A dict with name-value pairs to associate with the + container as metadata. Example: `{'Category':'test'}` + :type metadata: dict(str, str) + :param public_access: + Possible values include: 'container', 'blob'. + :type public_access: str or ~azure.storage.blob.PublicAccess + :keyword container_encryption_scope: + Specifies the default encryption scope to set on the container and use for + all future writes. + + .. versionadded:: 12.2.0 + + :paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: ~azure.storage.blob.ContainerClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service.py + :start-after: [START bsc_create_container] + :end-before: [END bsc_create_container] + :language: python + :dedent: 12 + :caption: Creating a container in the blob service. 
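+
+        A minimal inline sketch, assuming a hypothetical account URL, container
+        name, and placeholder credential::
+
+            from azure.storage.blob import BlobServiceClient
+
+            # Placeholder account URL and credential (SAS token, account key, or token credential).
+            service = BlobServiceClient("https://myaccount.blob.core.windows.net", credential="<credential>")
+            # Creates the container and returns a ContainerClient scoped to it;
+            # a ResourceExistsError is raised if the container already exists.
+            reports = service.create_container("reports", metadata={"Category": "test"})
+            print(reports.container_name)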
+ """ + container = self.get_container_client(name) + kwargs.setdefault('merge_span', True) + timeout = kwargs.pop('timeout', None) + container.create_container( + metadata=metadata, public_access=public_access, timeout=timeout, **kwargs) + return container + + @distributed_trace + def delete_container( + self, container, # type: Union[ContainerProperties, str] + lease=None, # type: Optional[Union[BlobLeaseClient, str]] + **kwargs + ): + # type: (...) -> None + """Marks the specified container for deletion. + + The container and any blobs contained within it are later deleted during garbage collection. + If the container is not found, a ResourceNotFoundError will be raised. + + :param container: + The container to delete. This can either be the name of the container, + or an instance of ContainerProperties. + :type container: str or ~azure.storage.blob.ContainerProperties + :param lease: + If specified, delete_container only succeeds if the + container's lease is active and matches this ID. + Required if the container has an active lease. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: None + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service.py + :start-after: [START bsc_delete_container] + :end-before: [END bsc_delete_container] + :language: python + :dedent: 12 + :caption: Deleting a container in the blob service. + """ + container = self.get_container_client(container) # type: ignore + kwargs.setdefault('merge_span', True) + timeout = kwargs.pop('timeout', None) + container.delete_container( # type: ignore + lease=lease, + timeout=timeout, + **kwargs) + + @distributed_trace + def _rename_container(self, name, new_name, **kwargs): + # type: (str, str, **Any) -> ContainerClient + """Renames a container. + + Operation is successful only if the source container exists. + + :param str name: + The name of the container to rename. + :param str new_name: + The new container name the user wants to rename to. + :keyword lease: + Specify this to perform only if the lease ID given + matches the active lease ID of the source container. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + The timeout parameter is expressed in seconds. 
+ :rtype: ~azure.storage.blob.ContainerClient + """ + renamed_container = self.get_container_client(new_name) + lease = kwargs.pop('lease', None) + try: + kwargs['source_lease_id'] = lease.id # type: str + except AttributeError: + kwargs['source_lease_id'] = lease + try: + renamed_container._client.container.rename(name, **kwargs) # pylint: disable = protected-access + return renamed_container + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def undelete_container(self, deleted_container_name, deleted_container_version, **kwargs): + # type: (str, str, **Any) -> ContainerClient + """Restores soft-deleted container. + + Operation will only be successful if used within the specified number of days + set in the delete retention policy. + + .. versionadded:: 12.4.0 + This operation was introduced in API version '2019-12-12'. + + :param str deleted_container_name: + Specifies the name of the deleted container to restore. + :param str deleted_container_version: + Specifies the version of the deleted container to restore. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: ~azure.storage.blob.ContainerClient + """ + new_name = kwargs.pop('new_name', None) + if new_name: + warnings.warn("`new_name` is no longer supported.", DeprecationWarning) + container = self.get_container_client(new_name or deleted_container_name) + try: + container._client.container.restore(deleted_container_name=deleted_container_name, # pylint: disable = protected-access + deleted_container_version=deleted_container_version, + timeout=kwargs.pop('timeout', None), **kwargs) + return container + except HttpResponseError as error: + process_storage_error(error) + + def get_container_client(self, container): + # type: (Union[ContainerProperties, str]) -> ContainerClient + """Get a client to interact with the specified container. + + The container need not already exist. + + :param container: + The container. This can either be the name of the container, + or an instance of ContainerProperties. + :type container: str or ~azure.storage.blob.ContainerProperties + :returns: A ContainerClient. + :rtype: ~azure.storage.blob.ContainerClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service.py + :start-after: [START bsc_get_container_client] + :end-before: [END bsc_get_container_client] + :language: python + :dedent: 8 + :caption: Getting the container client to interact with a specific container. + """ + try: + container_name = container.name + except AttributeError: + container_name = container + _pipeline = Pipeline( + transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + policies=self._pipeline._impl_policies # pylint: disable = protected-access + ) + return ContainerClient( + self.url, container_name=container_name, + credential=self.credential, api_version=self.api_version, _configuration=self._config, + _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, + require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, + key_resolver_function=self.key_resolver_function) + + def get_blob_client( + self, container, # type: Union[ContainerProperties, str] + blob, # type: Union[BlobProperties, str] + snapshot=None # type: Optional[Union[Dict[str, Any], str]] + ): + # type: (...) -> BlobClient + """Get a client to interact with the specified blob. + + The blob need not already exist. 
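+
+        A minimal inline sketch, assuming a hypothetical account URL, container
+        name, and blob name::
+
+            from azure.storage.blob import BlobServiceClient
+
+            # Placeholder account URL and credential.
+            service = BlobServiceClient("https://myaccount.blob.core.windows.net", credential="<credential>")
+            # Neither the container nor the blob has to exist yet; these calls only build clients.
+            container_client = service.get_container_client("reports")
+            blob_client = service.get_blob_client("reports", "2022/10/summary.csv")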
+ + :param container: + The container that the blob is in. This can either be the name of the container, + or an instance of ContainerProperties. + :type container: str or ~azure.storage.blob.ContainerProperties + :param blob: + The blob with which to interact. This can either be the name of the blob, + or an instance of BlobProperties. + :type blob: str or ~azure.storage.blob.BlobProperties + :param snapshot: + The optional blob snapshot on which to operate. This can either be the ID of the snapshot, + or a dictionary output returned by :func:`~azure.storage.blob.BlobClient.create_snapshot()`. + :type snapshot: str or dict(str, Any) + :returns: A BlobClient. + :rtype: ~azure.storage.blob.BlobClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service.py + :start-after: [START bsc_get_blob_client] + :end-before: [END bsc_get_blob_client] + :language: python + :dedent: 12 + :caption: Getting the blob client to interact with a specific blob. + """ + try: + container_name = container.name + except AttributeError: + container_name = container + try: + blob_name = blob.name + except AttributeError: + blob_name = blob + _pipeline = Pipeline( + transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + policies=self._pipeline._impl_policies # pylint: disable = protected-access + ) + return BlobClient( # type: ignore + self.url, container_name=container_name, blob_name=blob_name, snapshot=snapshot, + credential=self.credential, api_version=self.api_version, _configuration=self._config, + _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, + require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, + key_resolver_function=self.key_resolver_function) diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_container_client.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_container_client.py new file mode 100644 index 00000000000..b5cbd58ba77 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_container_client.py @@ -0,0 +1,1601 @@ +# pylint: disable=too-many-lines +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +import functools +from typing import ( # pylint: disable=unused-import + Union, Optional, Any, Iterable, AnyStr, Dict, List, Tuple, IO, Iterator, + TYPE_CHECKING, + TypeVar) + + +try: + from urllib.parse import urlparse, quote, unquote +except ImportError: + from urlparse import urlparse # type: ignore + from urllib2 import quote, unquote # type: ignore + +import six + +from azure.core import MatchConditions +from azure.core.exceptions import HttpResponseError, ResourceNotFoundError +from azure.core.paging import ItemPaged +from azure.core.tracing.decorator import distributed_trace +from azure.core.pipeline import Pipeline +from azure.core.pipeline.transport import HttpRequest + +from ._shared.base_client import StorageAccountHostsMixin, TransportWrapper, parse_connection_str, parse_query +from ._shared.request_handlers import add_metadata_headers, serialize_iso +from ._shared.response_handlers import ( + process_storage_error, + return_response_headers, + return_headers_and_deserialized) +from ._generated import AzureBlobStorage +from ._generated.models import SignedIdentifier +from ._deserialize import deserialize_container_properties +from ._serialize import get_modify_conditions, get_container_cpk_scope_info, get_api_version, get_access_conditions +from ._models import ( # pylint: disable=unused-import + ContainerProperties, + BlobProperties, + BlobType, + FilteredBlob) +from ._list_blobs_helper import BlobPrefix, BlobPropertiesPaged, FilteredBlobPaged +from ._lease import BlobLeaseClient +from ._blob_client import BlobClient + +if TYPE_CHECKING: + from azure.core.pipeline.transport import HttpTransport, HttpResponse # pylint: disable=ungrouped-imports + from azure.core.pipeline.policies import HTTPPolicy # pylint: disable=ungrouped-imports + from datetime import datetime + from ._models import ( # pylint: disable=unused-import + PublicAccess, + AccessPolicy, + ContentSettings, + StandardBlobTier, + PremiumPageBlobTier) + + +def _get_blob_name(blob): + """Return the blob name. + + :param blob: A blob string or BlobProperties + :rtype: str + """ + try: + return blob.get('name') + except AttributeError: + return blob + + +ClassType = TypeVar("ClassType") + + +class ContainerClient(StorageAccountHostsMixin): # pylint: disable=too-many-public-methods + """A client to interact with a specific container, although that container + may not yet exist. + + For operations relating to a specific blob within this container, a blob client can be + retrieved using the :func:`~get_blob_client` function. + + For more optional configuration, please click + `here `_. + + :param str account_url: + The URI to the storage account. In order to create a client given the full URI to the container, + use the :func:`from_container_url` classmethod. + :param container_name: + The name of the container for the blob. + :type container_name: str + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token. The value can be a SAS token string, + an instance of a AzureSasCredential from azure.core.credentials, an account + shared access key, or an instance of a TokenCredentials class from azure.identity. + If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential + - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. 
+ :keyword str api_version: + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. + + .. versionadded:: 12.2.0 + + :keyword str secondary_hostname: + The hostname of the secondary endpoint. + :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks. + Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_put_size: If the blob size is less than or equal max_single_put_size, then the blob will be + uploaded with only one http PUT request. If the blob size is larger than max_single_put_size, + the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. + :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient + algorithm when uploading a block blob. Defaults to 4*1024*1024+1. + :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False. + :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call, + the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. + :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, + or 4MB. + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START create_container_client_from_service] + :end-before: [END create_container_client_from_service] + :language: python + :dedent: 8 + :caption: Get a ContainerClient from an existing BlobServiceClient. + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START create_container_client_sasurl] + :end-before: [END create_container_client_sasurl] + :language: python + :dedent: 8 + :caption: Creating the container client directly. + """ + def __init__( + self, account_url, # type: str + container_name, # type: str + credential=None, # type: Optional[Any] + **kwargs # type: Any + ): + # type: (...) -> None + try: + if not account_url.lower().startswith('http'): + account_url = "https://" + account_url + except AttributeError: + raise ValueError("Container URL must be a string.") + parsed_url = urlparse(account_url.rstrip('/')) + if not container_name: + raise ValueError("Please specify a container name.") + if not parsed_url.netloc: + raise ValueError("Invalid URL: {}".format(account_url)) + + _, sas_token = parse_query(parsed_url.query) + self.container_name = container_name + # This parameter is used for the hierarchy traversal. Give precedence to credential. 
+ self._raw_credential = credential if credential else sas_token + self._query_str, credential = self._format_query_string(sas_token, credential) + super(ContainerClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) + self._client = AzureBlobStorage(self.url, pipeline=self._pipeline) + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access + + def _format_url(self, hostname): + container_name = self.container_name + if isinstance(container_name, six.text_type): + container_name = container_name.encode('UTF-8') + return "{}://{}/{}{}".format( + self.scheme, + hostname, + quote(container_name), + self._query_str) + + @classmethod + def from_container_url(cls, container_url, credential=None, **kwargs): + # type: (Type[ClassType], str, Optional[Any], Any) -> ClassType + """Create ContainerClient from a container url. + + :param str container_url: + The full endpoint URL to the Container, including SAS token if used. This could be + either the primary endpoint, or the secondary endpoint depending on the current `location_mode`. + :type container_url: str + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token, or the connection string already has shared + access key values. The value can be a SAS token string, + an instance of a AzureSasCredential from azure.core.credentials, an account shared access + key, or an instance of a TokenCredentials class from azure.identity. + If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential + - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + :returns: A container client. + :rtype: ~azure.storage.blob.ContainerClient + """ + try: + if not container_url.lower().startswith('http'): + container_url = "https://" + container_url + except AttributeError: + raise ValueError("Container URL must be a string.") + parsed_url = urlparse(container_url.rstrip('/')) + if not parsed_url.netloc: + raise ValueError("Invalid URL: {}".format(container_url)) + + container_path = parsed_url.path.lstrip('/').split('/') + account_path = "" + if len(container_path) > 1: + account_path = "/" + "/".join(container_path[:-1]) + account_url = "{}://{}{}?{}".format( + parsed_url.scheme, + parsed_url.netloc.rstrip('/'), + account_path, + parsed_url.query) + container_name = unquote(container_path[-1]) + if not container_name: + raise ValueError("Invalid URL. Please provide a URL with a valid container name") + return cls(account_url, container_name=container_name, credential=credential, **kwargs) + + @classmethod + def from_connection_string( + cls, # type: Type[ClassType] + conn_str, # type: str + container_name, # type: str + credential=None, # type: Optional[Any] + **kwargs # type: Any + ): # type: (...) -> ClassType + """Create ContainerClient from a Connection String. + + :param str conn_str: + A connection string to an Azure Storage account. + :param container_name: + The container name for the blob. + :type container_name: str + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token, or the connection string already has shared + access key values. The value can be a SAS token string, + an instance of a AzureSasCredential from azure.core.credentials, an account shared access + key, or an instance of a TokenCredentials class from azure.identity. 
+ Credentials provided here will take precedence over those in the connection string. + :returns: A container client. + :rtype: ~azure.storage.blob.ContainerClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_authentication.py + :start-after: [START auth_from_connection_string_container] + :end-before: [END auth_from_connection_string_container] + :language: python + :dedent: 8 + :caption: Creating the ContainerClient from a connection string. + """ + account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob') + if 'secondary_hostname' not in kwargs: + kwargs['secondary_hostname'] = secondary + return cls( + account_url, container_name=container_name, credential=credential, **kwargs) + + @distributed_trace + def create_container(self, metadata=None, public_access=None, **kwargs): + # type: (Optional[Dict[str, str]], Optional[Union[PublicAccess, str]], **Any) -> None + """ + Creates a new container under the specified account. If the container + with the same name already exists, the operation fails. + + :param metadata: + A dict with name_value pairs to associate with the + container as metadata. Example:{'Category':'test'} + :type metadata: dict[str, str] + :param ~azure.storage.blob.PublicAccess public_access: + Possible values include: 'container', 'blob'. + :keyword container_encryption_scope: + Specifies the default encryption scope to set on the container and use for + all future writes. + + .. versionadded:: 12.2.0 + + :paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: None + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START create_container] + :end-before: [END create_container] + :language: python + :dedent: 12 + :caption: Creating a container to store blobs. + """ + headers = kwargs.pop('headers', {}) + timeout = kwargs.pop('timeout', None) + headers.update(add_metadata_headers(metadata)) # type: ignore + container_cpk_scope_info = get_container_cpk_scope_info(kwargs) + try: + return self._client.container.create( # type: ignore + timeout=timeout, + access=public_access, + container_cpk_scope_info=container_cpk_scope_info, + cls=return_response_headers, + headers=headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def _rename_container(self, new_name, **kwargs): + # type: (str, **Any) -> ContainerClient + """Renames a container. + + Operation is successful only if the source container exists. + + :param str new_name: + The new container name the user wants to rename to. + :keyword lease: + Specify this to perform only if the lease ID given + matches the active lease ID of the source container. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + The timeout parameter is expressed in seconds. 
+ :rtype: ~azure.storage.blob.ContainerClient + """ + lease = kwargs.pop('lease', None) + try: + kwargs['source_lease_id'] = lease.id # type: str + except AttributeError: + kwargs['source_lease_id'] = lease + try: + renamed_container = ContainerClient( + "{}://{}".format(self.scheme, self.primary_hostname), container_name=new_name, + credential=self.credential, api_version=self.api_version, _configuration=self._config, + _pipeline=self._pipeline, _location_mode=self._location_mode, _hosts=self._hosts, + require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, + key_resolver_function=self.key_resolver_function) + renamed_container._client.container.rename(self.container_name, **kwargs) # pylint: disable = protected-access + return renamed_container + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def delete_container( + self, **kwargs): + # type: (Any) -> None + """ + Marks the specified container for deletion. The container and any blobs + contained within it are later deleted during garbage collection. + + :keyword lease: + If specified, delete_container only succeeds if the + container's lease is active and matches this ID. + Required if the container has an active lease. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: None + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START delete_container] + :end-before: [END delete_container] + :language: python + :dedent: 12 + :caption: Delete a container. + """ + lease = kwargs.pop('lease', None) + access_conditions = get_access_conditions(lease) + mod_conditions = get_modify_conditions(kwargs) + timeout = kwargs.pop('timeout', None) + try: + self._client.container.delete( + timeout=timeout, + lease_access_conditions=access_conditions, + modified_access_conditions=mod_conditions, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def acquire_lease( + self, lease_duration=-1, # type: int + lease_id=None, # type: Optional[str] + **kwargs): + # type: (...) -> BlobLeaseClient + """ + Requests a new lease. If the container does not have an active lease, + the Blob service creates a lease on the container and returns a new + lease ID. 
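+
+        A minimal inline sketch, assuming a hypothetical account URL, container
+        name, and credential::
+
+            from azure.storage.blob import ContainerClient
+
+            # Placeholder account URL and credential.
+            container = ContainerClient(
+                "https://myaccount.blob.core.windows.net", container_name="reports", credential="<credential>")
+            # A 30-second lease; the default of -1 requests an infinite lease.
+            lease = container.acquire_lease(lease_duration=30)
+            # Operations on a leased container must present the lease.
+            container.delete_container(lease=lease)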
+ + :param int lease_duration: + Specifies the duration of the lease, in seconds, or negative one + (-1) for a lease that never expires. A non-infinite lease can be + between 15 and 60 seconds. A lease duration cannot be changed + using renew or change. Default is -1 (infinite lease). + :param str lease_id: + Proposed lease ID, in a GUID string format. The Blob service returns + 400 (Invalid request) if the proposed lease ID is not in the correct format. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: A BlobLeaseClient object, that can be run in a context manager. + :rtype: ~azure.storage.blob.BlobLeaseClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START acquire_lease_on_container] + :end-before: [END acquire_lease_on_container] + :language: python + :dedent: 8 + :caption: Acquiring a lease on the container. + """ + lease = BlobLeaseClient(self, lease_id=lease_id) # type: ignore + kwargs.setdefault('merge_span', True) + timeout = kwargs.pop('timeout', None) + lease.acquire(lease_duration=lease_duration, timeout=timeout, **kwargs) + return lease + + @distributed_trace + def get_account_information(self, **kwargs): + # type: (**Any) -> Dict[str, str] + """Gets information related to the storage account. + + The information can also be retrieved if the user has a SAS to a container or blob. + The keys in the returned dictionary include 'sku_name' and 'account_kind'. + + :returns: A dict of account information (SKU and account type). + :rtype: dict(str, str) + """ + try: + return self._client.container.get_account_info(cls=return_response_headers, **kwargs) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def get_container_properties(self, **kwargs): + # type: (Any) -> ContainerProperties + """Returns all user-defined metadata and system properties for the specified + container. The data returned does not include the container's list of blobs. + + :keyword lease: + If specified, get_container_properties only succeeds if the + container's lease is active and matches this ID. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + The timeout parameter is expressed in seconds. + :return: Properties for the specified container within a container object. + :rtype: ~azure.storage.blob.ContainerProperties + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START get_container_properties] + :end-before: [END get_container_properties] + :language: python + :dedent: 12 + :caption: Getting properties on the container. + """ + lease = kwargs.pop('lease', None) + access_conditions = get_access_conditions(lease) + timeout = kwargs.pop('timeout', None) + try: + response = self._client.container.get_properties( + timeout=timeout, + lease_access_conditions=access_conditions, + cls=deserialize_container_properties, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + response.name = self.container_name + return response # type: ignore + + @distributed_trace + def exists(self, **kwargs): + # type: (**Any) -> bool + """ + Returns True if a container exists and returns False otherwise. + + :kwarg int timeout: + The timeout parameter is expressed in seconds. + :returns: boolean + """ + try: + self._client.container.get_properties(**kwargs) + return True + except HttpResponseError as error: + try: + process_storage_error(error) + except ResourceNotFoundError: + return False + + @distributed_trace + def set_container_metadata( # type: ignore + self, metadata=None, # type: Optional[Dict[str, str]] + **kwargs + ): + # type: (...) -> Dict[str, Union[str, datetime]] + """Sets one or more user-defined name-value pairs for the specified + container. Each call to this operation replaces all existing metadata + attached to the container. To remove all metadata from the container, + call this operation with no metadata dict. + + :param metadata: + A dict containing name-value pairs to associate with the container as + metadata. Example: {'category':'test'} + :type metadata: dict[str, str] + :keyword lease: + If specified, set_container_metadata only succeeds if the + container's lease is active and matches this ID. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Container-updated property dict (Etag and last modified). + :rtype: dict[str, str or datetime] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START set_container_metadata] + :end-before: [END set_container_metadata] + :language: python + :dedent: 12 + :caption: Setting metadata on the container. 
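+
+        A minimal inline sketch, assuming a hypothetical account URL, container
+        name, and account key::
+
+            from azure.storage.blob import ContainerClient
+
+            # Placeholder account URL and credential.
+            container = ContainerClient(
+                "https://myaccount.blob.core.windows.net", container_name="reports", credential="<account-key>")
+            # Replaces any metadata currently set on the container with this dict.
+            container.set_container_metadata({"category": "test", "owner": "data-team"})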
+ """ + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + lease = kwargs.pop('lease', None) + access_conditions = get_access_conditions(lease) + mod_conditions = get_modify_conditions(kwargs) + timeout = kwargs.pop('timeout', None) + try: + return self._client.container.set_metadata( # type: ignore + timeout=timeout, + lease_access_conditions=access_conditions, + modified_access_conditions=mod_conditions, + cls=return_response_headers, + headers=headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def _get_blob_service_client(self): # pylint: disable=client-method-missing-kwargs + # type: (...) -> BlobServiceClient + """Get a client to interact with the container's parent service account. + + Defaults to current container's credentials. + + :returns: A BlobServiceClient. + :rtype: ~azure.storage.blob.BlobServiceClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service.py + :start-after: [START get_blob_service_client_from_container_client] + :end-before: [END get_blob_service_client_from_container_client] + :language: python + :dedent: 8 + :caption: Get blob service client from container object. + """ + from ._blob_service_client import BlobServiceClient + if not isinstance(self._pipeline._transport, TransportWrapper): # pylint: disable = protected-access + _pipeline = Pipeline( + transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + policies=self._pipeline._impl_policies # pylint: disable = protected-access + ) + else: + _pipeline = self._pipeline # pylint: disable = protected-access + return BlobServiceClient( + "{}://{}".format(self.scheme, self.primary_hostname), + credential=self._raw_credential, api_version=self.api_version, _configuration=self._config, + _location_mode=self._location_mode, _hosts=self._hosts, require_encryption=self.require_encryption, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function, + _pipeline=_pipeline) + + @distributed_trace + def get_container_access_policy(self, **kwargs): + # type: (Any) -> Dict[str, Any] + """Gets the permissions for the specified container. + The permissions indicate whether container data may be accessed publicly. + + :keyword lease: + If specified, get_container_access_policy only succeeds if the + container's lease is active and matches this ID. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Access policy information in a dict. + :rtype: dict[str, Any] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START get_container_access_policy] + :end-before: [END get_container_access_policy] + :language: python + :dedent: 12 + :caption: Getting the access policy on the container. 
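+
+        A minimal inline sketch, assuming a hypothetical account URL, container
+        name, and account key::
+
+            from azure.storage.blob import ContainerClient
+
+            # Placeholder account URL and credential.
+            container = ContainerClient(
+                "https://myaccount.blob.core.windows.net", container_name="reports", credential="<account-key>")
+            policy = container.get_container_access_policy()
+            print(policy["public_access"], policy["signed_identifiers"])
+            # Clear any stored access policies and allow anonymous read access to blobs only.
+            container.set_container_access_policy(signed_identifiers={}, public_access="blob")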
+ """ + lease = kwargs.pop('lease', None) + access_conditions = get_access_conditions(lease) + timeout = kwargs.pop('timeout', None) + try: + response, identifiers = self._client.container.get_access_policy( + timeout=timeout, + lease_access_conditions=access_conditions, + cls=return_headers_and_deserialized, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + return { + 'public_access': response.get('blob_public_access'), + 'signed_identifiers': identifiers or [] + } + + @distributed_trace + def set_container_access_policy( + self, signed_identifiers, # type: Dict[str, AccessPolicy] + public_access=None, # type: Optional[Union[str, PublicAccess]] + **kwargs + ): # type: (...) -> Dict[str, Union[str, datetime]] + """Sets the permissions for the specified container or stored access + policies that may be used with Shared Access Signatures. The permissions + indicate whether blobs in a container may be accessed publicly. + + :param signed_identifiers: + A dictionary of access policies to associate with the container. The + dictionary may contain up to 5 elements. An empty dictionary + will clear the access policies set on the service. + :type signed_identifiers: dict[str, ~azure.storage.blob.AccessPolicy] + :param ~azure.storage.blob.PublicAccess public_access: + Possible values include: 'container', 'blob'. + :keyword lease: + Required if the container has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A datetime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified date/time. + :keyword ~datetime.datetime if_unmodified_since: + A datetime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Container-updated property dict (Etag and last modified). + :rtype: dict[str, str or ~datetime.datetime] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START set_container_access_policy] + :end-before: [END set_container_access_policy] + :language: python + :dedent: 12 + :caption: Setting access policy on the container. + """ + if len(signed_identifiers) > 5: + raise ValueError( + 'Too many access policies provided. 
The server does not support setting ' + 'more than 5 access policies on a single resource.') + identifiers = [] + for key, value in signed_identifiers.items(): + if value: + value.start = serialize_iso(value.start) + value.expiry = serialize_iso(value.expiry) + identifiers.append(SignedIdentifier(id=key, access_policy=value)) # type: ignore + signed_identifiers = identifiers # type: ignore + lease = kwargs.pop('lease', None) + mod_conditions = get_modify_conditions(kwargs) + access_conditions = get_access_conditions(lease) + timeout = kwargs.pop('timeout', None) + try: + return self._client.container.set_access_policy( + container_acl=signed_identifiers or None, + timeout=timeout, + access=public_access, + lease_access_conditions=access_conditions, + modified_access_conditions=mod_conditions, + cls=return_response_headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def list_blobs(self, name_starts_with=None, include=None, **kwargs): + # type: (Optional[str], Optional[Union[str, List[str]]], **Any) -> ItemPaged[BlobProperties] + """Returns a generator to list the blobs under the specified container. + The generator will lazily follow the continuation tokens returned by + the service. + + :param str name_starts_with: + Filters the results to return only blobs whose names + begin with the specified prefix. + :param list[str] or str include: + Specifies one or more additional datasets to include in the response. + Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'deletedwithversions', + 'tags', 'versions', 'immutabilitypolicy', 'legalhold'. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: An iterable (auto-paging) response of BlobProperties. + :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.BlobProperties] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START list_blobs_in_container] + :end-before: [END list_blobs_in_container] + :language: python + :dedent: 8 + :caption: List the blobs in the container. + """ + if include and not isinstance(include, list): + include = [include] + + results_per_page = kwargs.pop('results_per_page', None) + timeout = kwargs.pop('timeout', None) + command = functools.partial( + self._client.container.list_blob_flat_segment, + include=include, + timeout=timeout, + **kwargs) + return ItemPaged( + command, prefix=name_starts_with, results_per_page=results_per_page, + page_iterator_class=BlobPropertiesPaged) + + @distributed_trace + def walk_blobs( + self, name_starts_with=None, # type: Optional[str] + include=None, # type: Optional[Any] + delimiter="/", # type: str + **kwargs # type: Optional[Any] + ): + # type: (...) -> ItemPaged[BlobProperties] + """Returns a generator to list the blobs under the specified container. + The generator will lazily follow the continuation tokens returned by + the service. This operation will list blobs in accordance with a hierarchy, + as delimited by the specified delimiter character. + + :param str name_starts_with: + Filters the results to return only blobs whose names + begin with the specified prefix. + :param list[str] include: + Specifies one or more additional datasets to include in the response. + Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted'. 
+ :param str delimiter: + When the request includes this parameter, the operation returns a BlobPrefix + element in the response body that acts as a placeholder for all blobs whose + names begin with the same substring up to the appearance of the delimiter + character. The delimiter may be a single character or a string. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: An iterable (auto-paging) response of BlobProperties. + :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.BlobProperties] + """ + if include and not isinstance(include, list): + include = [include] + + results_per_page = kwargs.pop('results_per_page', None) + timeout = kwargs.pop('timeout', None) + command = functools.partial( + self._client.container.list_blob_hierarchy_segment, + delimiter=delimiter, + include=include, + timeout=timeout, + **kwargs) + return BlobPrefix( + command, + prefix=name_starts_with, + results_per_page=results_per_page, + delimiter=delimiter) + + @distributed_trace + def find_blobs_by_tags( + self, filter_expression, # type: str + **kwargs # type: Optional[Any] + ): + # type: (...) -> ItemPaged[FilteredBlob] + """Returns a generator to list the blobs under the specified container whose tags + match the given search expression. + The generator will lazily follow the continuation tokens returned by + the service. + + :param str filter_expression: + The expression to find blobs whose tags matches the specified condition. + eg. "\"yourtagname\"='firsttag' and \"yourtagname2\"='secondtag'" + :keyword int results_per_page: + The max result per page when paginating. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: An iterable (auto-paging) response of FilteredBlob. + :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.BlobProperties] + """ + results_per_page = kwargs.pop('results_per_page', None) + timeout = kwargs.pop('timeout', None) + command = functools.partial( + self._client.container.filter_blobs, + timeout=timeout, + where=filter_expression, + **kwargs) + return ItemPaged( + command, results_per_page=results_per_page, + page_iterator_class=FilteredBlobPaged) + + @distributed_trace + def upload_blob( + self, name, # type: Union[str, BlobProperties] + data, # type: Union[Iterable[AnyStr], IO[AnyStr]] + blob_type=BlobType.BlockBlob, # type: Union[str, BlobType] + length=None, # type: Optional[int] + metadata=None, # type: Optional[Dict[str, str]] + **kwargs + ): + # type: (...) -> BlobClient + """Creates a new blob from a data source with automatic chunking. + + :param name: The blob with which to interact. If specified, this value will override + a blob value specified in the blob URL. + :type name: str or ~azure.storage.blob.BlobProperties + :param data: The blob data to upload. + :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be + either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob. + :param int length: + Number of bytes to read from the stream. This is optional, but + should be supplied for optimal performance. + :param metadata: + Name-value pairs associated with the blob as metadata. + :type metadata: dict(str, str) + :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. + If True, upload_blob will overwrite the existing data. If set to False, the + operation will fail with ResourceExistsError. 
The exception to the above is with Append + blob types: if set to False and the data already exists, an error will not be raised + and the data will be appended to the existing blob. If set overwrite=True, then the existing + append blob will be deleted, and a new one created. Defaults to False. + :keyword ~azure.storage.blob.ContentSettings content_settings: + ContentSettings object used to set blob properties. Used to set content type, encoding, + language, disposition, md5, and cache control. + :keyword bool validate_content: + If true, calculates an MD5 hash for each chunk of the blob. The storage + service checks the hash of the content that has arrived with the hash + that was sent. This is primarily valuable for detecting bitflips on + the wire if using http instead of https, as https (the default), will + already validate. Note that this MD5 hash is not stored with the + blob. Also note that if enabled, the memory-efficient upload algorithm + will not be used, because computing the MD5 hash requires buffering + entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. + :keyword lease: + Required if the container has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. This method may make + multiple calls to the Azure service and the timeout will apply to + each call individually. + :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: + A page blob tier value to set the blob to. The tier correlates to the size of the + blob and number of allowed IOPS. This is only applicable to page blobs on + premium storage accounts. + :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: + A standard blob tier value to set the blob to. For this version of the library, + this is only applicable to block blobs on standard storage accounts. + :keyword int maxsize_condition: + Optional conditional header. The max length in bytes permitted for + the append blob. 
If the Append Block operation would cause the blob + to exceed that limit or if the blob size is already greater than the + value specified in this header, the request will fail with + MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). + :keyword int max_concurrency: + Maximum number of parallel connections to use when the blob size exceeds + 64MB. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword str encoding: + Defaults to UTF-8. + :returns: A BlobClient to interact with the newly uploaded blob. + :rtype: ~azure.storage.blob.BlobClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START upload_blob_to_container] + :end-before: [END upload_blob_to_container] + :language: python + :dedent: 8 + :caption: Upload blob to the container. + """ + blob = self.get_blob_client(name) + kwargs.setdefault('merge_span', True) + timeout = kwargs.pop('timeout', None) + encoding = kwargs.pop('encoding', 'UTF-8') + blob.upload_blob( + data, + blob_type=blob_type, + length=length, + metadata=metadata, + timeout=timeout, + encoding=encoding, + **kwargs + ) + return blob + + @distributed_trace + def delete_blob( + self, blob, # type: Union[str, BlobProperties] + delete_snapshots=None, # type: Optional[str] + **kwargs + ): + # type: (...) -> None + """Marks the specified blob or snapshot for deletion. + + The blob is later deleted during garbage collection. + Note that in order to delete a blob, you must delete all of its + snapshots. You can delete both at the same time with the delete_blob + operation. + + If a delete retention policy is enabled for the service, then this operation soft deletes the blob or snapshot + and retains the blob or snapshot for specified number of days. + After specified number of days, blob's data is removed from the service during garbage collection. + Soft deleted blob or snapshot is accessible through :func:`list_blobs()` specifying `include=["deleted"]` + option. Soft-deleted blob or snapshot can be restored using :func:`~BlobClient.undelete()` + + :param blob: The blob with which to interact. If specified, this value will override + a blob value specified in the blob URL. + :type blob: str or ~azure.storage.blob.BlobProperties + :param str delete_snapshots: + Required if the blob has associated snapshots. Values include: + - "only": Deletes only the blobs snapshots. + - "include": Deletes the blob along with all snapshots. + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to delete. + + .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. + + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. 
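As a hedged sketch of the upload/delete round trip offered by upload_blob and delete_blob, assuming the same illustrative container_client and a placeholder local file:

# Illustrative only; names and paths are placeholders.
with open("report.csv", "rb") as data:
    blob_client = container_client.upload_blob("reports/report.csv", data, overwrite=True)

# upload_blob returns a BlobClient for the blob that was just written.
print(blob_client.url)

# delete_blob marks the blob (and, with "include", its snapshots) for
# later garbage collection.
container_client.delete_blob("reports/report.csv", delete_snapshots="include")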
+ :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: None + """ + blob_client = self.get_blob_client(blob) # type: ignore + kwargs.setdefault('merge_span', True) + timeout = kwargs.pop('timeout', None) + blob_client.delete_blob( # type: ignore + delete_snapshots=delete_snapshots, + timeout=timeout, + **kwargs) + + @distributed_trace + def download_blob(self, blob, offset=None, length=None, **kwargs): + # type: (Union[str, BlobProperties], Optional[int], Optional[int], **Any) -> StorageStreamDownloader + """Downloads a blob to the StorageStreamDownloader. The readall() method must + be used to read all the content or readinto() must be used to download the blob into + a stream. Using chunks() returns an iterator which allows the user to iterate over the content in chunks. + + :param blob: The blob with which to interact. If specified, this value will override + a blob value specified in the blob URL. + :type blob: str or ~azure.storage.blob.BlobProperties + :param int offset: + Start of byte range to use for downloading a section of the blob. + Must be set if length is provided. + :param int length: + Number of bytes to read from the stream. This is optional, but + should be supplied for optimal performance. + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to download. + + .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. + + :keyword bool validate_content: + If true, calculates an MD5 hash for each chunk of the blob. The storage + service checks the hash of the content that has arrived with the hash + that was sent. This is primarily valuable for detecting bitflips on + the wire if using http instead of https, as https (the default), will + already validate. Note that this MD5 hash is not stored with the + blob. Also note that if enabled, the memory-efficient upload algorithm + will not be used because computing the MD5 hash requires buffering + entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. + :keyword lease: + Required if the blob has an active lease. 
If specified, download_blob only + succeeds if the blob's lease is active and matches this ID. Value can be a + BlobLeaseClient object or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword int max_concurrency: + The number of parallel connections with which to download. + :keyword str encoding: + Encoding to decode the downloaded bytes. Default is None, i.e. no decoding. + :keyword int timeout: + The timeout parameter is expressed in seconds. This method may make + multiple calls to the Azure service and the timeout will apply to + each call individually. + :returns: A streaming object (StorageStreamDownloader) + :rtype: ~azure.storage.blob.StorageStreamDownloader + """ + blob_client = self.get_blob_client(blob) # type: ignore + kwargs.setdefault('merge_span', True) + return blob_client.download_blob(offset=offset, length=length, **kwargs) + + def _generate_delete_blobs_subrequest_options( + self, snapshot=None, + delete_snapshots=None, + lease_access_conditions=None, + modified_access_conditions=None, + **kwargs + ): + """This code is a copy from _generated. + + Once Autorest is able to provide request preparation this code should be removed. 
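download_blob returns a StorageStreamDownloader rather than raw bytes; a short sketch, again assuming the illustrative container_client and blob name used above:

# Illustrative only.
downloader = container_client.download_blob("reports/report.csv", max_concurrency=2)
data = downloader.readall()  # buffer the whole blob in memory

# Or fetch just a byte range; offset/length map to the parameters documented above.
first_kb = container_client.download_blob("reports/report.csv", offset=0, length=1024).readall()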
+ """ + lease_id = None + if lease_access_conditions is not None: + lease_id = lease_access_conditions.lease_id + if_modified_since = None + if modified_access_conditions is not None: + if_modified_since = modified_access_conditions.if_modified_since + if_unmodified_since = None + if modified_access_conditions is not None: + if_unmodified_since = modified_access_conditions.if_unmodified_since + if_match = None + if modified_access_conditions is not None: + if_match = modified_access_conditions.if_match + if_none_match = None + if modified_access_conditions is not None: + if_none_match = modified_access_conditions.if_none_match + if_tags = None + if modified_access_conditions is not None: + if_tags = modified_access_conditions.if_tags + + # Construct parameters + timeout = kwargs.pop('timeout', None) + query_parameters = {} + if snapshot is not None: + query_parameters['snapshot'] = self._client._serialize.query("snapshot", snapshot, 'str') # pylint: disable=protected-access + if timeout is not None: + query_parameters['timeout'] = self._client._serialize.query("timeout", timeout, 'int', minimum=0) # pylint: disable=protected-access + + # Construct headers + header_parameters = {} + if delete_snapshots is not None: + header_parameters['x-ms-delete-snapshots'] = self._client._serialize.header( # pylint: disable=protected-access + "delete_snapshots", delete_snapshots, 'DeleteSnapshotsOptionType') + if lease_id is not None: + header_parameters['x-ms-lease-id'] = self._client._serialize.header( # pylint: disable=protected-access + "lease_id", lease_id, 'str') + if if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._client._serialize.header( # pylint: disable=protected-access + "if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._client._serialize.header( # pylint: disable=protected-access + "if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + header_parameters['If-Match'] = self._client._serialize.header( # pylint: disable=protected-access + "if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = self._client._serialize.header( # pylint: disable=protected-access + "if_none_match", if_none_match, 'str') + if if_tags is not None: + header_parameters['x-ms-if-tags'] = self._client._serialize.header("if_tags", if_tags, 'str') # pylint: disable=protected-access + + return query_parameters, header_parameters + + def _generate_delete_blobs_options(self, + *blobs, # type: List[Union[str, BlobProperties, dict]] + **kwargs + ): + timeout = kwargs.pop('timeout', None) + raise_on_any_failure = kwargs.pop('raise_on_any_failure', True) + delete_snapshots = kwargs.pop('delete_snapshots', None) + if_modified_since = kwargs.pop('if_modified_since', None) + if_unmodified_since = kwargs.pop('if_unmodified_since', None) + if_tags_match_condition = kwargs.pop('if_tags_match_condition', None) + kwargs.update({'raise_on_any_failure': raise_on_any_failure, + 'sas': self._query_str.replace('?', '&'), + 'timeout': '&timeout=' + str(timeout) if timeout else "", + 'path': self.container_name, + 'restype': 'restype=container&' + }) + + reqs = [] + for blob in blobs: + blob_name = _get_blob_name(blob) + container_name = self.container_name + + try: + options = BlobClient._generic_delete_blob_options( # pylint: disable=protected-access + snapshot=blob.get('snapshot'), + delete_snapshots=delete_snapshots or blob.get('delete_snapshots'), 
+ lease=blob.get('lease_id'), + if_modified_since=if_modified_since or blob.get('if_modified_since'), + if_unmodified_since=if_unmodified_since or blob.get('if_unmodified_since'), + etag=blob.get('etag'), + if_tags_match_condition=if_tags_match_condition or blob.get('if_tags_match_condition'), + match_condition=blob.get('match_condition') or MatchConditions.IfNotModified if blob.get('etag') + else None, + timeout=blob.get('timeout'), + ) + except AttributeError: + options = BlobClient._generic_delete_blob_options( # pylint: disable=protected-access + delete_snapshots=delete_snapshots, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags_match_condition=if_tags_match_condition + ) + + query_parameters, header_parameters = self._generate_delete_blobs_subrequest_options(**options) + + req = HttpRequest( + "DELETE", + "/{}/{}{}".format(quote(container_name), quote(blob_name, safe='/~'), self._query_str), + headers=header_parameters + ) + req.format_parameters(query_parameters) + reqs.append(req) + + return reqs, kwargs + + @distributed_trace + def delete_blobs(self, *blobs, **kwargs): + # type: (...) -> Iterator[HttpResponse] + """Marks the specified blobs or snapshots for deletion. + + The blobs are later deleted during garbage collection. + Note that in order to delete blobs, you must delete all of their + snapshots. You can delete both at the same time with the delete_blobs operation. + + If a delete retention policy is enabled for the service, then this operation soft deletes the blobs or snapshots + and retains the blobs or snapshots for specified number of days. + After specified number of days, blobs' data is removed from the service during garbage collection. + Soft deleted blobs or snapshots are accessible through :func:`list_blobs()` specifying `include=["deleted"]` + Soft-deleted blobs or snapshots can be restored using :func:`~BlobClient.undelete()` + + The maximum number of blobs that can be deleted in a single request is 256. + + :param blobs: + The blobs to delete. This can be a single blob, or multiple values can + be supplied, where each value is either the name of the blob (str) or BlobProperties. + + .. note:: + When the blob type is dict, here's a list of keys, value rules. + + blob name: + key: 'name', value type: str + snapshot you want to delete: + key: 'snapshot', value type: str + whether to delete snapthots when deleting blob: + key: 'delete_snapshots', value: 'include' or 'only' + if the blob modified or not: + key: 'if_modified_since', 'if_unmodified_since', value type: datetime + etag: + key: 'etag', value type: str + match the etag or not: + key: 'match_condition', value type: MatchConditions + tags match condition: + key: 'if_tags_match_condition', value type: str + lease: + key: 'lease_id', value type: Union[str, LeaseClient] + timeout for subrequest: + key: 'timeout', value type: int + + :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties] + :keyword str delete_snapshots: + Required if a blob has associated snapshots. Values include: + - "only": Deletes only the blobs snapshots. + - "include": Deletes the blob along with all snapshots. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. 
+ Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword bool raise_on_any_failure: + This is a boolean param which defaults to True. When this is set, an exception + is raised even if there is a single operation failure. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :return: An iterator of responses, one for each blob in order + :rtype: Iterator[~azure.core.pipeline.transport.HttpResponse] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_common.py + :start-after: [START delete_multiple_blobs] + :end-before: [END delete_multiple_blobs] + :language: python + :dedent: 8 + :caption: Deleting multiple blobs. + """ + if len(blobs) == 0: + return iter(list()) + + reqs, options = self._generate_delete_blobs_options(*blobs, **kwargs) + + return self._batch_send(*reqs, **options) + + def _generate_set_tiers_subrequest_options( + self, tier, snapshot=None, version_id=None, rehydrate_priority=None, lease_access_conditions=None, **kwargs + ): + """This code is a copy from _generated. + + Once Autorest is able to provide request preparation this code should be removed. 
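A sketch of the batch form provided by delete_blobs, which folds up to 256 deletions into a single request; container_client and the blob names are illustrative:

# Illustrative only; dict entries follow the key/value rules listed in the docstring above.
responses = container_client.delete_blobs(
    "logs/2022-10-01.txt",
    "logs/2022-10-02.txt",
    {"name": "logs/2022-10-03.txt", "delete_snapshots": "include"},
    raise_on_any_failure=False,
)
for response in responses:
    print(response.request.url, response.status_code)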
+ """ + if not tier: + raise ValueError("A blob tier must be specified") + if snapshot and version_id: + raise ValueError("Snapshot and version_id cannot be set at the same time") + if_tags = kwargs.pop('if_tags', None) + + lease_id = None + if lease_access_conditions is not None: + lease_id = lease_access_conditions.lease_id + + comp = "tier" + timeout = kwargs.pop('timeout', None) + # Construct parameters + query_parameters = {} + if snapshot is not None: + query_parameters['snapshot'] = self._client._serialize.query("snapshot", snapshot, 'str') # pylint: disable=protected-access + if version_id is not None: + query_parameters['versionid'] = self._client._serialize.query("version_id", version_id, 'str') # pylint: disable=protected-access + if timeout is not None: + query_parameters['timeout'] = self._client._serialize.query("timeout", timeout, 'int', minimum=0) # pylint: disable=protected-access + query_parameters['comp'] = self._client._serialize.query("comp", comp, 'str') # pylint: disable=protected-access, specify-parameter-names-in-call + + # Construct headers + header_parameters = {} + header_parameters['x-ms-access-tier'] = self._client._serialize.header("tier", tier, 'str') # pylint: disable=protected-access, specify-parameter-names-in-call + if rehydrate_priority is not None: + header_parameters['x-ms-rehydrate-priority'] = self._client._serialize.header( # pylint: disable=protected-access + "rehydrate_priority", rehydrate_priority, 'str') + if lease_id is not None: + header_parameters['x-ms-lease-id'] = self._client._serialize.header("lease_id", lease_id, 'str') # pylint: disable=protected-access + if if_tags is not None: + header_parameters['x-ms-if-tags'] = self._client._serialize.header("if_tags", if_tags, 'str') # pylint: disable=protected-access + + return query_parameters, header_parameters + + def _generate_set_tiers_options(self, + blob_tier, # type: Optional[Union[str, StandardBlobTier, PremiumPageBlobTier]] + *blobs, # type: List[Union[str, BlobProperties, dict]] + **kwargs + ): + timeout = kwargs.pop('timeout', None) + raise_on_any_failure = kwargs.pop('raise_on_any_failure', True) + rehydrate_priority = kwargs.pop('rehydrate_priority', None) + if_tags = kwargs.pop('if_tags_match_condition', None) + kwargs.update({'raise_on_any_failure': raise_on_any_failure, + 'sas': self._query_str.replace('?', '&'), + 'timeout': '&timeout=' + str(timeout) if timeout else "", + 'path': self.container_name, + 'restype': 'restype=container&' + }) + + reqs = [] + for blob in blobs: + blob_name = _get_blob_name(blob) + container_name = self.container_name + + try: + tier = blob_tier or blob.get('blob_tier') + query_parameters, header_parameters = self._generate_set_tiers_subrequest_options( + tier=tier, + snapshot=blob.get('snapshot'), + version_id=blob.get('version_id'), + rehydrate_priority=rehydrate_priority or blob.get('rehydrate_priority'), + lease_access_conditions=blob.get('lease_id'), + if_tags=if_tags or blob.get('if_tags_match_condition'), + timeout=timeout or blob.get('timeout') + ) + except AttributeError: + query_parameters, header_parameters = self._generate_set_tiers_subrequest_options( + blob_tier, rehydrate_priority=rehydrate_priority, if_tags=if_tags) + + req = HttpRequest( + "PUT", + "/{}/{}{}".format(quote(container_name), quote(blob_name, safe='/~'), self._query_str), + headers=header_parameters + ) + req.format_parameters(query_parameters) + reqs.append(req) + + return reqs, kwargs + + @distributed_trace + def set_standard_blob_tier_blobs( + self, + 
standard_blob_tier, # type: Optional[Union[str, StandardBlobTier]] + *blobs, # type: List[Union[str, BlobProperties, dict]] + **kwargs + ): + # type: (...) -> Iterator[HttpResponse] + """This operation sets the tier on block blobs. + + A block blob's tier determines Hot/Cool/Archive storage type. + This operation does not update the blob's ETag. + + The maximum number of blobs that can be updated in a single request is 256. + + :param standard_blob_tier: + Indicates the tier to be set on all blobs. Options include 'Hot', 'Cool', + 'Archive'. The hot tier is optimized for storing data that is accessed + frequently. The cool storage tier is optimized for storing data that + is infrequently accessed and stored for at least a month. The archive + tier is optimized for storing data that is rarely accessed and stored + for at least six months with flexible latency requirements. + + .. note:: + If you want to set different tier on different blobs please set this positional parameter to None. + Then the blob tier on every BlobProperties will be taken. + + :type standard_blob_tier: str or ~azure.storage.blob.StandardBlobTier + :param blobs: + The blobs with which to interact. This can be a single blob, or multiple values can + be supplied, where each value is either the name of the blob (str) or BlobProperties. + + .. note:: + When the blob type is dict, here's a list of keys, value rules. + + blob name: + key: 'name', value type: str + standard blob tier: + key: 'blob_tier', value type: StandardBlobTier + rehydrate priority: + key: 'rehydrate_priority', value type: RehydratePriority + lease: + key: 'lease_id', value type: Union[str, LeaseClient] + snapshot: + key: "snapshost", value type: str + version id: + key: "version_id", value type: str + tags match condition: + key: 'if_tags_match_condition', value type: str + timeout for subrequest: + key: 'timeout', value type: int + + :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties] + :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority: + Indicates the priority with which to rehydrate an archived blob + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :keyword bool raise_on_any_failure: + This is a boolean param which defaults to True. When this is set, an exception + is raised even if there is a single operation failure. + :return: An iterator of responses, one for each blob in order + :rtype: Iterator[~azure.core.pipeline.transport.HttpResponse] + """ + reqs, options = self._generate_set_tiers_options(standard_blob_tier, *blobs, **kwargs) + + return self._batch_send(*reqs, **options) + + @distributed_trace + def set_premium_page_blob_tier_blobs( + self, + premium_page_blob_tier, # type: Optional[Union[str, PremiumPageBlobTier]] + *blobs, # type: List[Union[str, BlobProperties, dict]] + **kwargs + ): + # type: (...) -> Iterator[HttpResponse] + """Sets the page blob tiers on all blobs. This API is only supported for page blobs on premium accounts. + + The maximum number of blobs that can be updated in a single request is 256. + + :param premium_page_blob_tier: + A page blob tier value to set the blob to. The tier correlates to the size of the + blob and number of allowed IOPS. This is only applicable to page blobs on + premium storage accounts. + + .. 
note:: + If you want to set different tier on different blobs please set this positional parameter to None. + Then the blob tier on every BlobProperties will be taken. + + :type premium_page_blob_tier: ~azure.storage.blob.PremiumPageBlobTier + :param blobs: + The blobs with which to interact. This can be a single blob, or multiple values can + be supplied, where each value is either the name of the blob (str) or BlobProperties. + + .. note:: + When the blob type is dict, here's a list of keys, value rules. + + blob name: + key: 'name', value type: str + premium blob tier: + key: 'blob_tier', value type: PremiumPageBlobTier + lease: + key: 'lease_id', value type: Union[str, LeaseClient] + timeout for subrequest: + key: 'timeout', value type: int + + :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties] + :keyword int timeout: + The timeout parameter is expressed in seconds. This method may make + multiple calls to the Azure service and the timeout will apply to + each call individually. + :keyword bool raise_on_any_failure: + This is a boolean param which defaults to True. When this is set, an exception + is raised even if there is a single operation failure. + :return: An iterator of responses, one for each blob in order + :rtype: iterator[~azure.core.pipeline.transport.HttpResponse] + """ + reqs, options = self._generate_set_tiers_options(premium_page_blob_tier, *blobs, **kwargs) + + return self._batch_send(*reqs, **options) + + def get_blob_client( + self, blob, # type: Union[str, BlobProperties] + snapshot=None # type: str + ): + # type: (...) -> BlobClient + """Get a client to interact with the specified blob. + + The blob need not already exist. + + :param blob: + The blob with which to interact. + :type blob: str or ~azure.storage.blob.BlobProperties + :param str snapshot: + The optional blob snapshot on which to operate. This can be the snapshot ID string + or the response returned from :func:`~BlobClient.create_snapshot()`. + :returns: A BlobClient. + :rtype: ~azure.storage.blob.BlobClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START get_blob_client] + :end-before: [END get_blob_client] + :language: python + :dedent: 8 + :caption: Get the blob client. + """ + blob_name = _get_blob_name(blob) + _pipeline = Pipeline( + transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + policies=self._pipeline._impl_policies # pylint: disable = protected-access + ) + return BlobClient( + self.url, container_name=self.container_name, blob_name=blob_name, snapshot=snapshot, + credential=self.credential, api_version=self.api_version, _configuration=self._config, + _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, + require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, + key_resolver_function=self.key_resolver_function) diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_deserialize.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_deserialize.py new file mode 100644 index 00000000000..f7101e05d6a --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_deserialize.py @@ -0,0 +1,174 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# pylint: disable=no-self-use +from typing import ( # pylint: disable=unused-import + Tuple, Dict, List, + TYPE_CHECKING +) +try: + from urllib.parse import unquote +except ImportError: + from urllib import unquote +from ._models import BlobType, CopyProperties, ContentSettings, LeaseProperties, BlobProperties, ImmutabilityPolicy +from ._shared.models import get_enum_value +from ._shared.response_handlers import deserialize_metadata +from ._models import ContainerProperties, BlobAnalyticsLogging, Metrics, CorsRule, RetentionPolicy, \ + StaticWebsite, ObjectReplicationPolicy, ObjectReplicationRule + +if TYPE_CHECKING: + from ._generated.models import PageList + + +def deserialize_pipeline_response_into_cls(cls_method, response, obj, headers): + try: + deserialized_response = response.http_response + except AttributeError: + deserialized_response = response + return cls_method(deserialized_response, obj, headers) + + +def deserialize_blob_properties(response, obj, headers): + blob_properties = BlobProperties( + metadata=deserialize_metadata(response, obj, headers), + object_replication_source_properties=deserialize_ors_policies(response.http_response.headers), + **headers + ) + if 'Content-Range' in headers: + if 'x-ms-blob-content-md5' in headers: + blob_properties.content_settings.content_md5 = headers['x-ms-blob-content-md5'] + else: + blob_properties.content_settings.content_md5 = None + return blob_properties + + +def deserialize_ors_policies(policy_dictionary): + + if policy_dictionary is None: + return None + # For source blobs (blobs that have policy ids and rule ids applied to them), + # the header will be formatted as "x-ms-or-_: {Complete, Failed}". + # The value of this header is the status of the replication. 
+ or_policy_status_headers = {key: val for key, val in policy_dictionary.items() + if 'or-' in key and key != 'x-ms-or-policy-id'} + + parsed_result = {} + + for key, val in or_policy_status_headers.items(): + # list blobs gives or-policy_rule and get blob properties gives x-ms-or-policy_rule + policy_and_rule_ids = key.split('or-')[1].split('_') + policy_id = policy_and_rule_ids[0] + rule_id = policy_and_rule_ids[1] + + # If we are seeing this policy for the first time, create a new list to store rule_id -> result + parsed_result[policy_id] = parsed_result.get(policy_id) or list() + parsed_result[policy_id].append(ObjectReplicationRule(rule_id=rule_id, status=val)) + + result_list = [ObjectReplicationPolicy(policy_id=k, rules=v) for k, v in parsed_result.items()] + + return result_list + + +def deserialize_blob_stream(response, obj, headers): + blob_properties = deserialize_blob_properties(response, obj, headers) + obj.properties = blob_properties + return response.http_response.location_mode, obj + + +def deserialize_container_properties(response, obj, headers): + metadata = deserialize_metadata(response, obj, headers) + container_properties = ContainerProperties( + metadata=metadata, + **headers + ) + return container_properties + + +def get_page_ranges_result(ranges): + # type: (PageList) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]] + page_range = [] # type: ignore + clear_range = [] # type: List + if ranges.page_range: + page_range = [{'start': b.start, 'end': b.end} for b in ranges.page_range] # type: ignore + if ranges.clear_range: + clear_range = [{'start': b.start, 'end': b.end} for b in ranges.clear_range] + return page_range, clear_range # type: ignore + + +def service_stats_deserialize(generated): + """Deserialize a ServiceStats objects into a dict. + """ + return { + 'geo_replication': { + 'status': generated.geo_replication.status, + 'last_sync_time': generated.geo_replication.last_sync_time, + } + } + + +def service_properties_deserialize(generated): + """Deserialize a ServiceProperties objects into a dict. 
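To make the object-replication header convention parsed by deserialize_ors_policies concrete, a hypothetical header dict and the structure it yields (the policy and rule ids are invented for illustration):

# Hypothetical input: status headers attached to a source blob's response.
headers = {
    "x-ms-or-policy-id": "3f9bdfb6",          # skipped by the parser
    "x-ms-or-3f9bdfb6_2e06aeeb": "Complete",  # "<policy id>_<rule id>": status
}
policies = deserialize_ors_policies(headers)
# -> [ObjectReplicationPolicy(policy_id="3f9bdfb6",
#         rules=[ObjectReplicationRule(rule_id="2e06aeeb", status="Complete")])]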
+ """ + return { + 'analytics_logging': BlobAnalyticsLogging._from_generated(generated.logging), # pylint: disable=protected-access + 'hour_metrics': Metrics._from_generated(generated.hour_metrics), # pylint: disable=protected-access + 'minute_metrics': Metrics._from_generated(generated.minute_metrics), # pylint: disable=protected-access + 'cors': [CorsRule._from_generated(cors) for cors in generated.cors], # pylint: disable=protected-access + 'target_version': generated.default_service_version, # pylint: disable=protected-access + 'delete_retention_policy': RetentionPolicy._from_generated(generated.delete_retention_policy), # pylint: disable=protected-access + 'static_website': StaticWebsite._from_generated(generated.static_website), # pylint: disable=protected-access + } + + +def get_blob_properties_from_generated_code(generated): + blob = BlobProperties() + if generated.name.encoded: + blob.name = unquote(generated.name.content) + else: + blob.name = generated.name.content + blob_type = get_enum_value(generated.properties.blob_type) + blob.blob_type = BlobType(blob_type) if blob_type else None + blob.etag = generated.properties.etag + blob.deleted = generated.deleted + blob.snapshot = generated.snapshot + blob.is_append_blob_sealed = generated.properties.is_sealed + blob.metadata = generated.metadata.additional_properties if generated.metadata else {} + blob.encrypted_metadata = generated.metadata.encrypted if generated.metadata else None + blob.lease = LeaseProperties._from_generated(generated) # pylint: disable=protected-access + blob.copy = CopyProperties._from_generated(generated) # pylint: disable=protected-access + blob.last_modified = generated.properties.last_modified + blob.creation_time = generated.properties.creation_time + blob.content_settings = ContentSettings._from_generated(generated) # pylint: disable=protected-access + blob.size = generated.properties.content_length + blob.page_blob_sequence_number = generated.properties.blob_sequence_number + blob.server_encrypted = generated.properties.server_encrypted + blob.encryption_scope = generated.properties.encryption_scope + blob.deleted_time = generated.properties.deleted_time + blob.remaining_retention_days = generated.properties.remaining_retention_days + blob.blob_tier = generated.properties.access_tier + blob.rehydrate_priority = generated.properties.rehydrate_priority + blob.blob_tier_inferred = generated.properties.access_tier_inferred + blob.archive_status = generated.properties.archive_status + blob.blob_tier_change_time = generated.properties.access_tier_change_time + blob.version_id = generated.version_id + blob.is_current_version = generated.is_current_version + blob.tag_count = generated.properties.tag_count + blob.tags = parse_tags(generated.blob_tags) # pylint: disable=protected-access + blob.object_replication_source_properties = deserialize_ors_policies(generated.object_replication_metadata) + blob.last_accessed_on = generated.properties.last_accessed_on + blob.immutability_policy = ImmutabilityPolicy._from_generated(generated) # pylint: disable=protected-access + blob.has_legal_hold = generated.properties.legal_hold + blob.has_versions_only = generated.has_versions_only + return blob + + +def parse_tags(generated_tags): + # type: (Optional[List[BlobTag]]) -> Union[Dict[str, str], None] + """Deserialize a list of BlobTag objects into a dict. 
+ """ + if generated_tags: + tag_dict = {t.key: t.value for t in generated_tags.blob_tag_set} + return tag_dict + return None diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_download.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_download.py new file mode 100644 index 00000000000..c74af2f3ce7 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_download.py @@ -0,0 +1,637 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import sys +import threading +import time + +import warnings +from io import BytesIO +from typing import Iterator + +import requests +from azure.core.exceptions import HttpResponseError, ServiceResponseError + +from azure.core.tracing.common import with_current_context +from ._shared.encryption import decrypt_blob +from ._shared.request_handlers import validate_and_format_range_headers +from ._shared.response_handlers import process_storage_error, parse_length_from_content_range +from ._deserialize import get_page_ranges_result + + +def process_range_and_offset(start_range, end_range, length, encryption): + start_offset, end_offset = 0, 0 + if encryption.get("key") is not None or encryption.get("resolver") is not None: + if start_range is not None: + # Align the start of the range along a 16 byte block + start_offset = start_range % 16 + start_range -= start_offset + + # Include an extra 16 bytes for the IV if necessary + # Because of the previous offsetting, start_range will always + # be a multiple of 16. 
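# Worked example (editorial illustration): requesting bytes 20-100 of an
# encrypted blob gives
#   start_offset = 20 % 16 = 4           -> start_range becomes 16
#   start_range > 0, so the preceding 16-byte block is fetched for the IV:
#   start_offset = 4 + 16 = 20           -> start_range becomes 0
#   end_offset = 15 - (100 % 16) = 11    -> end_range becomes 111
# The service is asked for bytes 0-111, and the (20, 11) offsets are later
# used to trim the decrypted plaintext back to the requested range.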
+ if start_range > 0: + start_offset += 16 + start_range -= 16 + + if length is not None: + # Align the end of the range along a 16 byte block + end_offset = 15 - (end_range % 16) + end_range += end_offset + + return (start_range, end_range), (start_offset, end_offset) + + +def process_content(data, start_offset, end_offset, encryption): + if data is None: + raise ValueError("Response cannot be None.") + + content = b"".join(list(data)) + + if content and encryption.get("key") is not None or encryption.get("resolver") is not None: + try: + return decrypt_blob( + encryption.get("required"), + encryption.get("key"), + encryption.get("resolver"), + content, + start_offset, + end_offset, + data.response.headers, + ) + except Exception as error: + raise HttpResponseError(message="Decryption failed.", response=data.response, error=error) + return content + + +class _ChunkDownloader(object): # pylint: disable=too-many-instance-attributes + def __init__( + self, + client=None, + non_empty_ranges=None, + total_size=None, + chunk_size=None, + current_progress=None, + start_range=None, + end_range=None, + stream=None, + parallel=None, + validate_content=None, + encryption_options=None, + **kwargs + ): + self.client = client + self.non_empty_ranges = non_empty_ranges + + # Information on the download range/chunk size + self.chunk_size = chunk_size + self.total_size = total_size + self.start_index = start_range + self.end_index = end_range + + # The destination that we will write to + self.stream = stream + self.stream_lock = threading.Lock() if parallel else None + self.progress_lock = threading.Lock() if parallel else None + + # For a parallel download, the stream is always seekable, so we note down the current position + # in order to seek to the right place when out-of-order chunks come in + self.stream_start = stream.tell() if parallel else None + + # Download progress so far + self.progress_total = current_progress + + # Encryption + self.encryption_options = encryption_options + + # Parameters for each get operation + self.validate_content = validate_content + self.request_options = kwargs + + def _calculate_range(self, chunk_start): + if chunk_start + self.chunk_size > self.end_index: + chunk_end = self.end_index + else: + chunk_end = chunk_start + self.chunk_size + return chunk_start, chunk_end + + def get_chunk_offsets(self): + index = self.start_index + while index < self.end_index: + yield index + index += self.chunk_size + + def process_chunk(self, chunk_start): + chunk_start, chunk_end = self._calculate_range(chunk_start) + chunk_data = self._download_chunk(chunk_start, chunk_end - 1) + length = chunk_end - chunk_start + if length > 0: + self._write_to_stream(chunk_data, chunk_start) + self._update_progress(length) + + def yield_chunk(self, chunk_start): + chunk_start, chunk_end = self._calculate_range(chunk_start) + return self._download_chunk(chunk_start, chunk_end - 1) + + def _update_progress(self, length): + if self.progress_lock: + with self.progress_lock: # pylint: disable=not-context-manager + self.progress_total += length + else: + self.progress_total += length + + def _write_to_stream(self, chunk_data, chunk_start): + if self.stream_lock: + with self.stream_lock: # pylint: disable=not-context-manager + self.stream.seek(self.stream_start + (chunk_start - self.start_index)) + self.stream.write(chunk_data) + else: + self.stream.write(chunk_data) + + def _do_optimize(self, given_range_start, given_range_end): + # If we have no page range list stored, then assume there's data 
everywhere for that page blob + # or it's a block blob or append blob + if self.non_empty_ranges is None: + return False + + for source_range in self.non_empty_ranges: + # Case 1: As the range list is sorted, if we've reached such a source_range + # we've checked all the appropriate source_range already and haven't found any overlapping. + # so the given range doesn't have any data and download optimization could be applied. + # given range: | | + # source range: | | + if given_range_end < source_range['start']: # pylint:disable=no-else-return + return True + # Case 2: the given range comes after source_range, continue checking. + # given range: | | + # source range: | | + elif source_range['end'] < given_range_start: + pass + # Case 3: source_range and given range overlap somehow, no need to optimize. + else: + return False + # Went through all src_ranges, but nothing overlapped. Optimization will be applied. + return True + + def _download_chunk(self, chunk_start, chunk_end): + download_range, offset = process_range_and_offset( + chunk_start, chunk_end, chunk_end, self.encryption_options + ) + + # No need to download the empty chunk from server if there's no data in the chunk to be downloaded. + # Do optimize and create empty chunk locally if condition is met. + if self._do_optimize(download_range[0], download_range[1]): + chunk_data = b"\x00" * self.chunk_size + else: + range_header, range_validation = validate_and_format_range_headers( + download_range[0], + download_range[1], + check_content_md5=self.validate_content + ) + + retry_active = True + retry_total = 3 + while retry_active: + try: + _, response = self.client.download( + range=range_header, + range_get_content_md5=range_validation, + validate_content=self.validate_content, + data_stream_total=self.total_size, + download_stream_current=self.progress_total, + **self.request_options + ) + except HttpResponseError as error: + process_storage_error(error) + + try: + chunk_data = process_content(response, offset[0], offset[1], self.encryption_options) + retry_active = False + except (requests.exceptions.ChunkedEncodingError, requests.exceptions.ConnectionError) as error: + retry_total -= 1 + if retry_total <= 0: + raise ServiceResponseError(error, error=error) + time.sleep(1) + + # This makes sure that if_match is set so that we can validate + # that subsequent downloads are to an unmodified blob + if self.request_options.get("modified_access_conditions"): + self.request_options["modified_access_conditions"].if_match = response.properties.etag + + return chunk_data + + +class _ChunkIterator(object): + """Async iterator for chunks in blob download stream.""" + + def __init__(self, size, content, downloader, chunk_size): + self.size = size + self._chunk_size = chunk_size + self._current_content = content + self._iter_downloader = downloader + self._iter_chunks = None + self._complete = (size == 0) + + def __len__(self): + return self.size + + def __iter__(self): + return self + + def __next__(self): + """Iterate through responses.""" + if self._complete: + raise StopIteration("Download complete") + if not self._iter_downloader: + # cut the data obtained from initial GET into chunks + if len(self._current_content) > self._chunk_size: + return self._get_chunk_data() + self._complete = True + return self._current_content + + if not self._iter_chunks: + self._iter_chunks = self._iter_downloader.get_chunk_offsets() + + # initial GET result still has more than _chunk_size bytes of data + if len(self._current_content) >= self._chunk_size: + 
return self._get_chunk_data() + + try: + chunk = next(self._iter_chunks) + self._current_content += self._iter_downloader.yield_chunk(chunk) + except StopIteration as e: + self._complete = True + if self._current_content: + return self._current_content + raise e + + # the current content from the first get is still there but smaller than chunk size + # therefore we want to make sure its also included + return self._get_chunk_data() + + next = __next__ # Python 2 compatibility. + + def _get_chunk_data(self): + chunk_data = self._current_content[: self._chunk_size] + self._current_content = self._current_content[self._chunk_size:] + return chunk_data + + +class StorageStreamDownloader(object): # pylint: disable=too-many-instance-attributes + """A streaming object to download from Azure Storage. + + :ivar str name: + The name of the blob being downloaded. + :ivar str container: + The name of the container where the blob is. + :ivar ~azure.storage.blob.BlobProperties properties: + The properties of the blob being downloaded. If only a range of the data is being + downloaded, this will be reflected in the properties. + :ivar int size: + The size of the total data in the stream. This will be the byte range if specified, + otherwise the total size of the blob. + """ + + def __init__( + self, + clients=None, + config=None, + start_range=None, + end_range=None, + validate_content=None, + encryption_options=None, + max_concurrency=1, + name=None, + container=None, + encoding=None, + **kwargs + ): + self.name = name + self.container = container + self.properties = None + self.size = None + + self._clients = clients + self._config = config + self._start_range = start_range + self._end_range = end_range + self._max_concurrency = max_concurrency + self._encoding = encoding + self._validate_content = validate_content + self._encryption_options = encryption_options or {} + self._request_options = kwargs + self._location_mode = None + self._download_complete = False + self._current_content = None + self._file_size = None + self._non_empty_ranges = None + self._response = None + + # The service only provides transactional MD5s for chunks under 4MB. + # If validate_content is on, get only self.MAX_CHUNK_GET_SIZE for the first + # chunk so a transactional MD5 can be retrieved. 
+ self._first_get_size = ( + self._config.max_single_get_size if not self._validate_content else self._config.max_chunk_get_size + ) + initial_request_start = self._start_range if self._start_range is not None else 0 + if self._end_range is not None and self._end_range - self._start_range < self._first_get_size: + initial_request_end = self._end_range + else: + initial_request_end = initial_request_start + self._first_get_size - 1 + + self._initial_range, self._initial_offset = process_range_and_offset( + initial_request_start, initial_request_end, self._end_range, self._encryption_options + ) + + self._response = self._initial_request() + self.properties = self._response.properties + self.properties.name = self.name + self.properties.container = self.container + + # Set the content length to the download size instead of the size of + # the last range + self.properties.size = self.size + + # Overwrite the content range to the user requested range + self.properties.content_range = "bytes {0}-{1}/{2}".format( + self._start_range, + self._end_range, + self._file_size + ) + + # Overwrite the content MD5 as it is the MD5 for the last range instead + # of the stored MD5 + # TODO: Set to the stored MD5 when the service returns this + self.properties.content_md5 = None + + def __len__(self): + return self.size + + def _initial_request(self): + range_header, range_validation = validate_and_format_range_headers( + self._initial_range[0], + self._initial_range[1], + start_range_required=False, + end_range_required=False, + check_content_md5=self._validate_content + ) + + retry_active = True + retry_total = 3 + while retry_active: + try: + location_mode, response = self._clients.blob.download( + range=range_header, + range_get_content_md5=range_validation, + validate_content=self._validate_content, + data_stream_total=None, + download_stream_current=0, + **self._request_options + ) + + # Check the location we read from to ensure we use the same one + # for subsequent requests. + self._location_mode = location_mode + + # Parse the total file size and adjust the download size if ranges + # were specified + self._file_size = parse_length_from_content_range(response.properties.content_range) + if self._end_range is not None: + # Use the end range index unless it is over the end of the file + self.size = min(self._file_size, self._end_range - self._start_range + 1) + elif self._start_range is not None: + self.size = self._file_size - self._start_range + else: + self.size = self._file_size + + except HttpResponseError as error: + if self._start_range is None and error.response.status_code == 416: + # Get range will fail on an empty file. If the user did not + # request a range, do a regular get request in order to get + # any properties. 
+ try: + _, response = self._clients.blob.download( + validate_content=self._validate_content, + data_stream_total=0, + download_stream_current=0, + **self._request_options + ) + except HttpResponseError as error: + process_storage_error(error) + + # Set the download size to empty + self.size = 0 + self._file_size = 0 + else: + process_storage_error(error) + + try: + if self.size == 0: + self._current_content = b"" + else: + self._current_content = process_content( + response, + self._initial_offset[0], + self._initial_offset[1], + self._encryption_options + ) + retry_active = False + except (requests.exceptions.ChunkedEncodingError, requests.exceptions.ConnectionError) as error: + retry_total -= 1 + if retry_total <= 0: + raise ServiceResponseError(error, error=error) + time.sleep(1) + + # get page ranges to optimize downloading sparse page blob + if response.properties.blob_type == 'PageBlob': + try: + page_ranges = self._clients.page_blob.get_page_ranges() + self._non_empty_ranges = get_page_ranges_result(page_ranges)[0] + # according to the REST API documentation: + # in a highly fragmented page blob with a large number of writes, + # a Get Page Ranges request can fail due to an internal server timeout. + # thus, if the page blob is not sparse, it's ok for it to fail + except HttpResponseError: + pass + + # If the file is small, the download is complete at this point. + # If file size is large, download the rest of the file in chunks. + if response.properties.size != self.size: + if self._request_options.get("modified_access_conditions"): + self._request_options["modified_access_conditions"].if_match = response.properties.etag + else: + self._download_complete = True + return response + + def chunks(self): + # type: () -> Iterator[bytes] + """Iterate over chunks in the download stream. + + :rtype: Iterator[bytes] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_hello_world.py + :start-after: [START download_a_blob_in_chunk] + :end-before: [END download_a_blob_in_chunk] + :language: python + :dedent: 12 + :caption: Download a blob using chunks(). + """ + if self.size == 0 or self._download_complete: + iter_downloader = None + else: + data_end = self._file_size + if self._end_range is not None: + # Use the end range index unless it is over the end of the file + data_end = min(self._file_size, self._end_range + 1) + iter_downloader = _ChunkDownloader( + client=self._clients.blob, + non_empty_ranges=self._non_empty_ranges, + total_size=self.size, + chunk_size=self._config.max_chunk_get_size, + current_progress=self._first_get_size, + start_range=self._initial_range[1] + 1, # start where the first download ended + end_range=data_end, + stream=None, + parallel=False, + validate_content=self._validate_content, + encryption_options=self._encryption_options, + use_location=self._location_mode, + **self._request_options + ) + return _ChunkIterator( + size=self.size, + content=self._current_content, + downloader=iter_downloader, + chunk_size=self._config.max_chunk_get_size) + + def readall(self): + # type: () -> Union[bytes, str] + """Download the contents of this blob. + + This operation is blocking until all data is downloaded. + + :rtype: bytes or str + """ + stream = BytesIO() + self.readinto(stream) + data = stream.getvalue() + if self._encoding: + return data.decode(self._encoding) + return data + + def content_as_bytes(self, max_concurrency=1): + """Download the contents of this file. + + This operation is blocking until all data is downloaded. 
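A sketch of the usual ways a StorageStreamDownloader is consumed (readall(), chunks(), and readinto(), the last of which is defined just below); blob_client is an assumed, pre-existing BlobClient:

# Illustrative only.
data = blob_client.download_blob(max_concurrency=4).readall()  # whole blob in memory

# Sequential, low-memory iteration over the download.
for chunk in blob_client.download_blob().chunks():
    handle_chunk(chunk)  # `handle_chunk` is a placeholder for application code

# Stream straight into an open (seekable, if parallel) file handle.
with open("report.csv", "wb") as stream:
    blob_client.download_blob(max_concurrency=4).readinto(stream)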
+ + :keyword int max_concurrency: + The number of parallel connections with which to download. + :rtype: bytes + """ + warnings.warn( + "content_as_bytes is deprecated, use readall instead", + DeprecationWarning + ) + self._max_concurrency = max_concurrency + return self.readall() + + def content_as_text(self, max_concurrency=1, encoding="UTF-8"): + """Download the contents of this blob, and decode as text. + + This operation is blocking until all data is downloaded. + + :keyword int max_concurrency: + The number of parallel connections with which to download. + :param str encoding: + Test encoding to decode the downloaded bytes. Default is UTF-8. + :rtype: str + """ + warnings.warn( + "content_as_text is deprecated, use readall instead", + DeprecationWarning + ) + self._max_concurrency = max_concurrency + self._encoding = encoding + return self.readall() + + def readinto(self, stream): + """Download the contents of this file to a stream. + + :param stream: + The stream to download to. This can be an open file-handle, + or any writable stream. The stream must be seekable if the download + uses more than one parallel connection. + :returns: The number of bytes read. + :rtype: int + """ + # The stream must be seekable if parallel download is required + parallel = self._max_concurrency > 1 + if parallel: + error_message = "Target stream handle must be seekable." + if sys.version_info >= (3,) and not stream.seekable(): + raise ValueError(error_message) + + try: + stream.seek(stream.tell()) + except (NotImplementedError, AttributeError): + raise ValueError(error_message) + + # Write the content to the user stream + stream.write(self._current_content) + if self._download_complete: + return self.size + + data_end = self._file_size + if self._end_range is not None: + # Use the length unless it is over the end of the file + data_end = min(self._file_size, self._end_range + 1) + + downloader = _ChunkDownloader( + client=self._clients.blob, + non_empty_ranges=self._non_empty_ranges, + total_size=self.size, + chunk_size=self._config.max_chunk_get_size, + current_progress=self._first_get_size, + start_range=self._initial_range[1] + 1, # Start where the first download ended + end_range=data_end, + stream=stream, + parallel=parallel, + validate_content=self._validate_content, + encryption_options=self._encryption_options, + use_location=self._location_mode, + **self._request_options + ) + if parallel: + import concurrent.futures + with concurrent.futures.ThreadPoolExecutor(self._max_concurrency) as executor: + list(executor.map( + with_current_context(downloader.process_chunk), + downloader.get_chunk_offsets() + )) + else: + for chunk in downloader.get_chunk_offsets(): + downloader.process_chunk(chunk) + return self.size + + def download_to_stream(self, stream, max_concurrency=1): + """Download the contents of this blob to a stream. + + :param stream: + The stream to download to. This can be an open file-handle, + or any writable stream. The stream must be seekable if the download + uses more than one parallel connection. + :returns: The properties of the downloaded blob. 
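+        :keyword int max_concurrency:
+            The number of parallel connections with which to download.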
+ :rtype: Any + """ + warnings.warn( + "download_to_stream is deprecated, use readinto instead", + DeprecationWarning + ) + self._max_concurrency = max_concurrency + self.readinto(stream) + return self.properties diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/__init__.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/__init__.py new file mode 100644 index 00000000000..cc760e7efd2 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/__init__.py @@ -0,0 +1,16 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._azure_blob_storage import AzureBlobStorage +__all__ = ['AzureBlobStorage'] + +try: + from ._patch import patch_sdk # type: ignore + patch_sdk() +except ImportError: + pass diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/_azure_blob_storage.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/_azure_blob_storage.py new file mode 100644 index 00000000000..578f65816c0 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/_azure_blob_storage.py @@ -0,0 +1,106 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.core import PipelineClient +from msrest import Deserializer, Serializer + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any + + from azure.core.pipeline.transport import HttpRequest, HttpResponse + +from ._configuration import AzureBlobStorageConfiguration +from .operations import ServiceOperations +from .operations import ContainerOperations +from .operations import BlobOperations +from .operations import PageBlobOperations +from .operations import AppendBlobOperations +from .operations import BlockBlobOperations +from . import models + + +class AzureBlobStorage(object): + """AzureBlobStorage. 
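+
+    Low-level client for the Azure Blob Storage REST API, generated by AutoRest.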
+ + :ivar service: ServiceOperations operations + :vartype service: azure.storage.blob.operations.ServiceOperations + :ivar container: ContainerOperations operations + :vartype container: azure.storage.blob.operations.ContainerOperations + :ivar blob: BlobOperations operations + :vartype blob: azure.storage.blob.operations.BlobOperations + :ivar page_blob: PageBlobOperations operations + :vartype page_blob: azure.storage.blob.operations.PageBlobOperations + :ivar append_blob: AppendBlobOperations operations + :vartype append_blob: azure.storage.blob.operations.AppendBlobOperations + :ivar block_blob: BlockBlobOperations operations + :vartype block_blob: azure.storage.blob.operations.BlockBlobOperations + :param url: The URL of the service account, container, or blob that is the target of the desired operation. + :type url: str + """ + + def __init__( + self, + url, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + base_url = '{url}' + self._config = AzureBlobStorageConfiguration(url, **kwargs) + self._client = PipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False + self._deserialize = Deserializer(client_models) + + self.service = ServiceOperations( + self._client, self._config, self._serialize, self._deserialize) + self.container = ContainerOperations( + self._client, self._config, self._serialize, self._deserialize) + self.blob = BlobOperations( + self._client, self._config, self._serialize, self._deserialize) + self.page_blob = PageBlobOperations( + self._client, self._config, self._serialize, self._deserialize) + self.append_blob = AppendBlobOperations( + self._client, self._config, self._serialize, self._deserialize) + self.block_blob = BlockBlobOperations( + self._client, self._config, self._serialize, self._deserialize) + + def _send_request(self, http_request, **kwargs): + # type: (HttpRequest, Any) -> HttpResponse + """Runs the network request through the client's chained policies. + + :param http_request: The network request you want to make. Required. + :type http_request: ~azure.core.pipeline.transport.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to True. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.pipeline.transport.HttpResponse + """ + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + http_request.url = self._client.format_url(http_request.url, **path_format_arguments) + stream = kwargs.pop("stream", True) + pipeline_response = self._client._pipeline.run(http_request, stream=stream, **kwargs) + return pipeline_response.http_response + + def close(self): + # type: () -> None + self._client.close() + + def __enter__(self): + # type: () -> AzureBlobStorage + self._client.__enter__() + return self + + def __exit__(self, *exc_details): + # type: (Any) -> None + self._client.__exit__(*exc_details) diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/_configuration.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/_configuration.py new file mode 100644 index 00000000000..e25c0cdfe7d --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/_configuration.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any + +VERSION = "unknown" + +class AzureBlobStorageConfiguration(Configuration): + """Configuration for AzureBlobStorage. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param url: The URL of the service account, container, or blob that is the target of the desired operation. + :type url: str + """ + + def __init__( + self, + url, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + if url is None: + raise ValueError("Parameter 'url' must not be None.") + super(AzureBlobStorageConfiguration, self).__init__(**kwargs) + + self.url = url + self.version = "2021-04-10" + kwargs.setdefault('sdk_moniker', 'azureblobstorage/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs # type: Any + ): + # type: (...) 
-> None + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/__init__.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/__init__.py new file mode 100644 index 00000000000..12cfcf636c4 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/__init__.py @@ -0,0 +1,10 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._azure_blob_storage import AzureBlobStorage +__all__ = ['AzureBlobStorage'] diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/_azure_blob_storage.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/_azure_blob_storage.py new file mode 100644 index 00000000000..68f116aaaa9 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/_azure_blob_storage.py @@ -0,0 +1,96 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any + +from azure.core import AsyncPipelineClient +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from msrest import Deserializer, Serializer + +from ._configuration import AzureBlobStorageConfiguration +from .operations import ServiceOperations +from .operations import ContainerOperations +from .operations import BlobOperations +from .operations import PageBlobOperations +from .operations import AppendBlobOperations +from .operations import BlockBlobOperations +from .. import models + + +class AzureBlobStorage(object): + """AzureBlobStorage. 
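+
+    Async variant of the low-level Azure Blob Storage client, generated by AutoRest.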
+ + :ivar service: ServiceOperations operations + :vartype service: azure.storage.blob.aio.operations.ServiceOperations + :ivar container: ContainerOperations operations + :vartype container: azure.storage.blob.aio.operations.ContainerOperations + :ivar blob: BlobOperations operations + :vartype blob: azure.storage.blob.aio.operations.BlobOperations + :ivar page_blob: PageBlobOperations operations + :vartype page_blob: azure.storage.blob.aio.operations.PageBlobOperations + :ivar append_blob: AppendBlobOperations operations + :vartype append_blob: azure.storage.blob.aio.operations.AppendBlobOperations + :ivar block_blob: BlockBlobOperations operations + :vartype block_blob: azure.storage.blob.aio.operations.BlockBlobOperations + :param url: The URL of the service account, container, or blob that is the target of the desired operation. + :type url: str + """ + + def __init__( + self, + url: str, + **kwargs: Any + ) -> None: + base_url = '{url}' + self._config = AzureBlobStorageConfiguration(url, **kwargs) + self._client = AsyncPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False + self._deserialize = Deserializer(client_models) + + self.service = ServiceOperations( + self._client, self._config, self._serialize, self._deserialize) + self.container = ContainerOperations( + self._client, self._config, self._serialize, self._deserialize) + self.blob = BlobOperations( + self._client, self._config, self._serialize, self._deserialize) + self.page_blob = PageBlobOperations( + self._client, self._config, self._serialize, self._deserialize) + self.append_blob = AppendBlobOperations( + self._client, self._config, self._serialize, self._deserialize) + self.block_blob = BlockBlobOperations( + self._client, self._config, self._serialize, self._deserialize) + + async def _send_request(self, http_request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse: + """Runs the network request through the client's chained policies. + + :param http_request: The network request you want to make. Required. + :type http_request: ~azure.core.pipeline.transport.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to True. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.pipeline.transport.AsyncHttpResponse + """ + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + http_request.url = self._client.format_url(http_request.url, **path_format_arguments) + stream = kwargs.pop("stream", True) + pipeline_response = await self._client._pipeline.run(http_request, stream=stream, **kwargs) + return pipeline_response.http_response + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> "AzureBlobStorage": + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details) -> None: + await self._client.__aexit__(*exc_details) diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/_configuration.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/_configuration.py new file mode 100644 index 00000000000..bcf04ce4a72 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/_configuration.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies + +VERSION = "unknown" + +class AzureBlobStorageConfiguration(Configuration): + """Configuration for AzureBlobStorage. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param url: The URL of the service account, container, or blob that is the target of the desired operation. 
+ :type url: str + """ + + def __init__( + self, + url: str, + **kwargs: Any + ) -> None: + if url is None: + raise ValueError("Parameter 'url' must not be None.") + super(AzureBlobStorageConfiguration, self).__init__(**kwargs) + + self.url = url + self.version = "2021-04-10" + kwargs.setdefault('sdk_moniker', 'azureblobstorage/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs: Any + ) -> None: + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/__init__.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/__init__.py new file mode 100644 index 00000000000..902269d05ed --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/__init__.py @@ -0,0 +1,23 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._service_operations import ServiceOperations +from ._container_operations import ContainerOperations +from ._blob_operations import BlobOperations +from ._page_blob_operations import PageBlobOperations +from ._append_blob_operations import AppendBlobOperations +from ._block_blob_operations import BlockBlobOperations + +__all__ = [ + 'ServiceOperations', + 'ContainerOperations', + 'BlobOperations', + 'PageBlobOperations', + 'AppendBlobOperations', + 'BlockBlobOperations', +] diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_append_blob_operations.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_append_blob_operations.py new file mode 100644 index 00000000000..4d18668ce67 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_append_blob_operations.py @@ -0,0 +1,726 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import datetime +from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class AppendBlobOperations: + """AppendBlobOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.storage.blob.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def create( + self, + content_length: int, + timeout: Optional[int] = None, + metadata: Optional[str] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] = None, + legal_hold: Optional[bool] = None, + blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Create Append Blob operation creates a new append blob. + + :param content_length: The length of the request. + :type content_length: long + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. + :type metadata: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. 
+ :type blob_tags_string: str + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. + :type legal_hold: bool + :param blob_http_headers: Parameter group. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _blob_content_type = None + _blob_content_encoding = None + _blob_content_language = None + _blob_content_md5 = None + _blob_cache_control = None + _lease_id = None + _blob_content_disposition = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if blob_http_headers is not None: + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + blob_type = "AppendBlob" + accept = "application/xml" + + # Construct URL + url = self.create.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', 
minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + if _blob_content_type is not None: + header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') + if _blob_content_encoding is not None: + header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') + if _blob_content_language is not None: + header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') + if _blob_content_md5 is not None: + header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') + if _blob_cache_control is not None: + header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _blob_content_disposition is not None: + header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if blob_tags_string is not None: + header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') + if immutability_policy_expiry is not None: + header_parameters['x-ms-immutability-policy-until-date'] = self._serialize.header("immutability_policy_expiry", immutability_policy_expiry, 'rfc-1123') + if immutability_policy_mode is not None: + header_parameters['x-ms-immutability-policy-mode'] = 
self._serialize.header("immutability_policy_mode", immutability_policy_mode, 'str') + if legal_hold is not None: + header_parameters['x-ms-legal-hold'] = self._serialize.header("legal_hold", legal_hold, 'bool') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + create.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def append_block( + self, + content_length: int, + body: IO, + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytearray] = None, + transactional_content_crc64: Optional[bytearray] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + append_position_access_conditions: Optional["_models.AppendPositionAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Append Block operation commits a new block of data to the end of an existing append blob. + The Append Block operation is permitted only if the blob was created with x-ms-blob-type set to + AppendBlob. Append Block is supported only on version 2015-02-21 version or later. + + :param content_length: The length of the request. + :type content_length: long + :param body: Initial data. + :type body: IO + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. 
+ :type transactional_content_md5: bytearray + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. + :type transactional_content_crc64: bytearray + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param append_position_access_conditions: Parameter group. + :type append_position_access_conditions: ~azure.storage.blob.models.AppendPositionAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _max_size = None + _append_position = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if append_position_access_conditions is not None: + _max_size = append_position_access_conditions.max_size + _append_position = append_position_access_conditions.append_position + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "appendblock" + content_type = kwargs.pop("content_type", "application/octet-stream") + accept = "application/xml" + + # Construct URL + url = self.append_block.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + if transactional_content_md5 is not None: + 
header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') + if transactional_content_crc64 is not None: + header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _max_size is not None: + header_parameters['x-ms-blob-condition-maxsize'] = self._serialize.header("max_size", _max_size, 'long') + if _append_position is not None: + header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("append_position", _append_position, 'long') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content_kwargs['stream_content'] = body + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + 
response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-append-offset']=self._deserialize('str', response.headers.get('x-ms-blob-append-offset')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + append_block.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def append_block_from_url( + self, + source_url: str, + content_length: int, + source_range: Optional[str] = None, + source_content_md5: Optional[bytearray] = None, + source_contentcrc64: Optional[bytearray] = None, + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytearray] = None, + request_id_parameter: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + append_position_access_conditions: Optional["_models.AppendPositionAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Append Block operation commits a new block of data to the end of an existing append blob + where the contents are read from a source url. The Append Block operation is permitted only if + the blob was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on + version 2015-02-21 version or later. + + :param source_url: Specify a URL to the copy source. + :type source_url: str + :param content_length: The length of the request. + :type content_length: long + :param source_range: Bytes of source data in the specified range. + :type source_range: str + :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read + from the copy source. + :type source_content_md5: bytearray + :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be + read from the copy source. + :type source_contentcrc64: bytearray + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. + :type transactional_content_md5: bytearray + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. 
+ :type request_id_parameter: str + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. + :type copy_source_authorization: str + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param append_position_access_conditions: Parameter group. + :type append_position_access_conditions: ~azure.storage.blob.models.AppendPositionAccessConditions + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param source_modified_access_conditions: Parameter group. + :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _lease_id = None + _max_size = None + _append_position = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + if append_position_access_conditions is not None: + _max_size = append_position_access_conditions.max_size + _append_position = append_position_access_conditions.append_position + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + comp = "appendblock" + accept = "application/xml" + + # Construct URL + url = self.append_block_from_url.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = 
self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-copy-source'] = self._serialize.header("source_url", source_url, 'str') + if source_range is not None: + header_parameters['x-ms-source-range'] = self._serialize.header("source_range", source_range, 'str') + if source_content_md5 is not None: + header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') + if source_contentcrc64 is not None: + header_parameters['x-ms-source-content-crc64'] = self._serialize.header("source_contentcrc64", source_contentcrc64, 'bytearray') + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + if transactional_content_md5 is not None: + header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _max_size is not None: + header_parameters['x-ms-blob-condition-maxsize'] = self._serialize.header("max_size", _max_size, 'long') + if _append_position is not None: + header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("append_position", _append_position, 'long') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + if _source_if_modified_since is not None: + header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') + if _source_if_unmodified_since is not None: + header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') + if _source_if_match is not None: + header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') + if _source_if_none_match is not None: + header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') + header_parameters['x-ms-version'] = 
self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if copy_source_authorization is not None: + header_parameters['x-ms-copy-source-authorization'] = self._serialize.header("copy_source_authorization", copy_source_authorization, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-append-offset']=self._deserialize('str', response.headers.get('x-ms-blob-append-offset')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + + if cls: + return cls(pipeline_response, None, response_headers) + + append_block_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def seal( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + append_position_access_conditions: Optional["_models.AppendPositionAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Seal operation seals the Append Blob to make it read-only. Seal is supported only on + version 2019-12-12 version or later. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. 
+ :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param append_position_access_conditions: Parameter group. + :type append_position_access_conditions: ~azure.storage.blob.models.AppendPositionAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _append_position = None + if append_position_access_conditions is not None: + _append_position = append_position_access_conditions.append_position + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + comp = "seal" + accept = "application/xml" + + # Construct URL + url = self.seal.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _append_position is not None: + header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("append_position", _append_position, 'long') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + 
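+            # Seal succeeds with 200; any other status is mapped to a typed azure-core error and the parsed StorageError body is raised as an HttpResponseError below.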
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) + + if cls: + return cls(pipeline_response, None, response_headers) + + seal.metadata = {'url': '/{containerName}/{blob}'} # type: ignore diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_blob_operations.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_blob_operations.py new file mode 100644 index 00000000000..46a5ad21ca3 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_blob_operations.py @@ -0,0 +1,3008 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import datetime +from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class BlobOperations: + """BlobOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.storage.blob.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. 
+ """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def download( + self, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + range_get_content_md5: Optional[bool] = None, + range_get_content_crc64: Optional[bool] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> IO: + """The Download operation reads or downloads a blob from the system, including its metadata and + properties. You can also call Download to read a snapshot. + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + :type version_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param range: Return only the bytes of the blob in the specified range. + :type range: str + :param range_get_content_md5: When set to true and specified together with the Range, the + service returns the MD5 hash for the range, as long as the range is less than or equal to 4 MB + in size. + :type range_get_content_md5: bool + :param range_get_content_crc64: When set to true and specified together with the Range, the + service returns the CRC64 hash for the range, as long as the range is less than or equal to 4 + MB in size. + :type range_get_content_crc64: bool + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IO, or the result of cls(response) + :rtype: IO + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[IO] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + accept = "application/xml" + + # Construct URL + url = self.download.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if snapshot is not None: + query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') + if version_id is not None: + query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if range is not None: + header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if range_get_content_md5 is not None: + header_parameters['x-ms-range-get-content-md5'] = self._serialize.header("range_get_content_md5", range_get_content_md5, 'bool') + if range_get_content_crc64 is not None: + header_parameters['x-ms-range-get-content-crc64'] = self._serialize.header("range_get_content_crc64", range_get_content_crc64, 'bool') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + 
header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=True, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 206]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + if response.status_code == 200: + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) + response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id')) + response_headers['x-ms-or']=self._deserialize('str', response.headers.get('x-ms-or')) + response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) + response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) + response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) + response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) + response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) + response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) + response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) + 
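+            # The remaining standard and x-ms-* headers of the 200 (full blob) response are surfaced through response_headers; the blob content itself is exposed as a download stream (see stream_download below) rather than buffered in memory.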
response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) + response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['x-ms-is-current-version']=self._deserialize('bool', response.headers.get('x-ms-is-current-version')) + response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) + response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count')) + response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) + response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time')) + response_headers['x-ms-immutability-policy-until-date']=self._deserialize('rfc-1123', response.headers.get('x-ms-immutability-policy-until-date')) + response_headers['x-ms-immutability-policy-mode']=self._deserialize('str', response.headers.get('x-ms-immutability-policy-mode')) + response_headers['x-ms-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-legal-hold')) + deserialized = response.stream_download(self._client._pipeline) + + if response.status_code == 206: + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) + response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id')) + response_headers['x-ms-or']=self._deserialize('str', response.headers.get('x-ms-or')) + response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) + response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) + response_headers['Content-Disposition']=self._deserialize('str', 
response.headers.get('Content-Disposition')) + response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) + response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) + response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) + response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) + response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['x-ms-is-current-version']=self._deserialize('bool', response.headers.get('x-ms-is-current-version')) + response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) + response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count')) + response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) + response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time')) + response_headers['x-ms-immutability-policy-until-date']=self._deserialize('rfc-1123', response.headers.get('x-ms-immutability-policy-until-date')) + response_headers['x-ms-immutability-policy-mode']=self._deserialize('str', response.headers.get('x-ms-immutability-policy-mode')) + response_headers['x-ms-legal-hold']=self._deserialize('bool', 
response.headers.get('x-ms-legal-hold')) + deserialized = response.stream_download(self._client._pipeline) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + download.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def get_properties( + self, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Get Properties operation returns all user-defined metadata, standard HTTP properties, and + system properties for the blob. It does not return the content of the blob. + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + :type version_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + accept = "application/xml" + + # Construct URL + url = self.get_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if snapshot is not None: + query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') + if version_id is not None: + query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if 
request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.head(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-creation-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-creation-time')) + response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) + response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id')) + response_headers['x-ms-or']=self._deserialize('str', response.headers.get('x-ms-or')) + response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) + response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) + response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) + response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers['x-ms-incremental-copy']=self._deserialize('bool', response.headers.get('x-ms-incremental-copy')) + response_headers['x-ms-copy-destination-snapshot']=self._deserialize('str', response.headers.get('x-ms-copy-destination-snapshot')) + response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) + response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) + response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) + response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) + response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) + response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) + response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + 
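+        # Get Properties issues a HEAD request, so the response carries no body: every blob property is returned as a response header and deserialized into response_headers here. A caller can receive this mapping directly by passing a custom cls callback, e.g. cls=lambda pipeline_response, deserialized, headers: headers, and then read values such as headers['ETag'] or headers['x-ms-access-tier'].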
response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers['x-ms-access-tier']=self._deserialize('str', response.headers.get('x-ms-access-tier')) + response_headers['x-ms-access-tier-inferred']=self._deserialize('bool', response.headers.get('x-ms-access-tier-inferred')) + response_headers['x-ms-archive-status']=self._deserialize('str', response.headers.get('x-ms-archive-status')) + response_headers['x-ms-access-tier-change-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-access-tier-change-time')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['x-ms-is-current-version']=self._deserialize('bool', response.headers.get('x-ms-is-current-version')) + response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count')) + response_headers['x-ms-expiry-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-expiry-time')) + response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) + response_headers['x-ms-rehydrate-priority']=self._deserialize('str', response.headers.get('x-ms-rehydrate-priority')) + response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time')) + response_headers['x-ms-immutability-policy-until-date']=self._deserialize('rfc-1123', response.headers.get('x-ms-immutability-policy-until-date')) + response_headers['x-ms-immutability-policy-mode']=self._deserialize('str', response.headers.get('x-ms-immutability-policy-mode')) + response_headers['x-ms-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-legal-hold')) + + if cls: + return cls(pipeline_response, None, response_headers) + + get_properties.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def delete( + self, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + delete_snapshots: Optional[Union[str, "_models.DeleteSnapshotsOptionType"]] = None, + request_id_parameter: Optional[str] = None, + blob_delete_type: Optional[str] = "Permanent", + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """If the storage account's soft delete feature is disabled then, when a blob is deleted, it is + permanently removed from the storage account. 
If the storage account's soft delete feature is + enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible + immediately. However, the blob service retains the blob or snapshot for the number of days + specified by the DeleteRetentionPolicy section of [Storage service properties] + (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's + data is permanently removed from the storage account. Note that you continue to be charged for + the soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and + specify the "include=deleted" query parameter to discover which blobs and snapshots have been + soft deleted. You can then use the Undelete Blob API to restore a soft-deleted blob. All other + operations on a soft-deleted blob or snapshot cause the service to return an HTTP status code + of 404 (ResourceNotFound). + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + :type version_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param delete_snapshots: Required if the blob has associated snapshots. Specify one of the + following two options: include: Delete the base blob and all of its snapshots. only: Delete + only the blob's snapshots and not the blob itself. + :type delete_snapshots: str or ~azure.storage.blob.models.DeleteSnapshotsOptionType + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param blob_delete_type: Optional. Only possible value is 'permanent', which specifies to + permanently delete a blob if blob soft delete is enabled. + :type blob_delete_type: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group.
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + accept = "application/xml" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if snapshot is not None: + query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') + if version_id is not None: + query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if blob_delete_type is not None: + query_parameters['deletetype'] = self._serialize.query("blob_delete_type", blob_delete_type, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if delete_snapshots is not None: + header_parameters['x-ms-delete-snapshots'] = self._serialize.header("delete_snapshots", delete_snapshots, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in 
[202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + delete.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def undelete( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> None: + """Undelete a blob that was previously soft deleted. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + comp = "undelete" + accept = "application/xml" + + # Construct URL + url = self.undelete.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + 
response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + undelete.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def set_expiry( + self, + expiry_options: Union[str, "_models.BlobExpiryOptions"], + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + expires_on: Optional[str] = None, + **kwargs: Any + ) -> None: + """Sets the time a blob will expire and be deleted. + + :param expiry_options: Required. Indicates mode of the expiry time. + :type expiry_options: str or ~azure.storage.blob.models.BlobExpiryOptions + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param expires_on: The time to set the blob to expiry. + :type expires_on: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + comp = "expiry" + accept = "application/xml" + + # Construct URL + url = self.set_expiry.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['x-ms-expiry-option'] = self._serialize.header("expiry_options", expiry_options, 'str') + if expires_on is not None: + header_parameters['x-ms-expiry-time'] = self._serialize.header("expires_on", expires_on, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', 
response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_expiry.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def set_http_headers( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Set HTTP Headers operation sets system properties on the blob. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param blob_http_headers: Parameter group. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _blob_cache_control = None + _blob_content_type = None + _blob_content_md5 = None + _blob_content_encoding = None + _blob_content_language = None + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _blob_content_disposition = None + if blob_http_headers is not None: + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_disposition = blob_http_headers.blob_content_disposition + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "properties" + accept = "application/xml" + + # Construct URL + url = self.set_http_headers.metadata['url'] # type: ignore + 
path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _blob_cache_control is not None: + header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') + if _blob_content_type is not None: + header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') + if _blob_content_md5 is not None: + header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') + if _blob_content_encoding is not None: + header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') + if _blob_content_language is not None: + header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + if _blob_content_disposition is not None: + header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + 
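+        # A successful properties update returns the blob's new ETag and Last-Modified time (and, for page blobs, the current sequence number), which callers can use for subsequent conditional requests.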
response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_http_headers.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def set_immutability_policy( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Set Immutability Policy operation sets the immutability policy on the blob. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_unmodified_since = modified_access_conditions.if_unmodified_since + comp = "immutabilityPolicies" + accept = "application/xml" + + # Construct URL + url = self.set_immutability_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if immutability_policy_expiry is not None: + header_parameters['x-ms-immutability-policy-until-date'] = self._serialize.header("immutability_policy_expiry", immutability_policy_expiry, 'rfc-1123') + if immutability_policy_mode is not None: + header_parameters['x-ms-immutability-policy-mode'] = self._serialize.header("immutability_policy_mode", immutability_policy_mode, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-immutability-policy-until-date']=self._deserialize('rfc-1123', response.headers.get('x-ms-immutability-policy-until-date')) + response_headers['x-ms-immutability-policy-mode']=self._deserialize('str', response.headers.get('x-ms-immutability-policy-mode')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_immutability_policy.metadata = {'url': 
'/{containerName}/{blob}'} # type: ignore + + async def delete_immutability_policy( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> None: + """The Delete Immutability Policy operation deletes the immutability policy on the blob. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + comp = "immutabilityPolicies" + accept = "application/xml" + + # Construct URL + url = self.delete_immutability_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + delete_immutability_policy.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def set_legal_hold( + self, + legal_hold: bool, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> None: + """The Set Legal Hold operation sets a legal hold on the blob. + + :param legal_hold: Specified if a legal hold should be set on the blob. 
+ :type legal_hold: bool + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + comp = "legalhold" + accept = "application/xml" + + # Construct URL + url = self.set_legal_hold.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['x-ms-legal-hold'] = self._serialize.header("legal_hold", legal_hold, 'bool') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-legal-hold')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_legal_hold.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def set_metadata( + self, + timeout: Optional[int] = None, + metadata: Optional[str] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The 
Set Blob Metadata operation sets user-defined metadata for the specified blob as one or + more name-value pairs. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. + :type metadata: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "metadata" + accept = "application/xml" + + # Construct URL + url = self.set_metadata.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # 
Construct headers + header_parameters = {} # type: Dict[str, Any] + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + 
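+ # The headers parsed above carry the new ETag/version id and the server-side encryption
+ # details for the metadata write; they are handed to the optional `cls` callback below
+ # together with the raw pipeline response.
+ #
+ # Hedged usage sketch (client construction and names are illustrative assumptions,
+ # not part of this patch):
+ #
+ #     ops = AzureBlobStorage(url=blob_url).blob   # generated async client, assumed
+ #     await ops.set_metadata(
+ #         metadata="project=demo",
+ #         lease_access_conditions=LeaseAccessConditions(lease_id=lease_id),
+ #     )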
if cls: + return cls(pipeline_response, None, response_headers) + + set_metadata.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def acquire_lease( + self, + timeout: Optional[int] = None, + duration: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a + lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease + duration cannot be changed using renew or change. + :type duration: int + :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns + 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid + Constructor (String) for a list of valid GUID string formats. + :type proposed_lease_id: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "lease" + action = "acquire" + accept = "application/xml" + + # Construct URL + url = self.acquire_lease.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') + if duration is not None: + header_parameters['x-ms-lease-duration'] = self._serialize.header("duration", duration, 'int') + if proposed_lease_id is not None: + header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 
'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + acquire_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def release_lease( + self, + lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "lease" + action = "release" + accept = "application/xml" + + # Construct URL + url = self.release_lease.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + 
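+ # A successful release returns 200 with the blob's current ETag/Last-Modified (parsed
+ # above); the headers parsed below are the usual request-tracking values.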
response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + release_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def renew_lease( + self, + lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "lease" + action = "renew" + accept = "application/xml" + + # Construct URL + url = self.renew_lease.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", 
_if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + renew_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def change_lease( + self, + lease_id: str, + proposed_lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns + 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid + Constructor (String) for a list of valid GUID string formats. + :type proposed_lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "lease" + action = "change" + accept = "application/xml" + + # Construct URL + url = self.change_lease.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') + header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', 
response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + change_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def break_lease( + self, + timeout: Optional[int] = None, + break_period: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param break_period: For a break operation, proposed duration the lease should continue before + it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter + than the time remaining on the lease. If longer, the time remaining on the lease is used. A new + lease will not be available before the break period has expired, but the lease may be held for + longer than the break period. If this header does not appear with a break operation, a + fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease + breaks immediately. + :type break_period: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "lease" + action = "break" + accept = "application/xml" + + # Construct URL + url = self.break_lease.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') + if break_period is not None: + header_parameters['x-ms-lease-break-period'] = self._serialize.header("break_period", break_period, 'int') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + 
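+ # break_lease succeeds with 202 Accepted; x-ms-lease-time (parsed below) reports the
+ # approximate number of seconds remaining before the broken lease is fully released.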
response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-time']=self._deserialize('int', response.headers.get('x-ms-lease-time')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + break_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def create_snapshot( + self, + timeout: Optional[int] = None, + metadata: Optional[str] = None, + request_id_parameter: Optional[str] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Create Snapshot operation creates a read-only snapshot of a blob. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. + :type metadata: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param lease_access_conditions: Parameter group. 
+ :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _lease_id = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "snapshot" + accept = "application/xml" + + # Construct URL + url = self.create_snapshot.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + if 
_lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-snapshot']=self._deserialize('str', response.headers.get('x-ms-snapshot')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + + if cls: + return cls(pipeline_response, None, response_headers) + + create_snapshot.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def start_copy_from_url( + self, + copy_source: str, + timeout: Optional[int] = None, + metadata: Optional[str] = None, + tier: Optional[Union[str, "_models.AccessTierOptional"]] = None, + rehydrate_priority: Optional[Union[str, "_models.RehydratePriority"]] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + seal_blob: Optional[bool] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] = None, + legal_hold: Optional[bool] = None, + source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Start Copy From URL operation copies a blob or an internet resource to a new blob. + + :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of + up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it + would appear in a request URI. The source blob must either be public or must be authenticated + via a shared access signature. + :type copy_source: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. 
+ :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. + :type metadata: str + :param tier: Optional. Indicates the tier to be set on the blob. + :type tier: str or ~azure.storage.blob.models.AccessTierOptional + :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived + blob. + :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :type blob_tags_string: str + :param seal_blob: Overrides the sealed state of the destination blob. Service version + 2019-12-12 and newer. + :type seal_blob: bool + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. + :type legal_hold: bool + :param source_modified_access_conditions: Parameter group. + :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param lease_access_conditions: Parameter group. 
+ :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + _source_if_tags = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _lease_id = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + _source_if_tags = source_modified_access_conditions.source_if_tags + accept = "application/xml" + + # Construct URL + url = self.start_copy_from_url.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if tier is not None: + header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') + if rehydrate_priority is not None: + header_parameters['x-ms-rehydrate-priority'] = self._serialize.header("rehydrate_priority", rehydrate_priority, 'str') + if _source_if_modified_since is not None: + header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') + if _source_if_unmodified_since is not None: + header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') + if _source_if_match is not None: + header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') + if _source_if_none_match is not None: + header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') + if _source_if_tags is not None: + header_parameters['x-ms-source-if-tags'] = self._serialize.header("source_if_tags", _source_if_tags, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = 
self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if blob_tags_string is not None: + header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') + if seal_blob is not None: + header_parameters['x-ms-seal-blob'] = self._serialize.header("seal_blob", seal_blob, 'bool') + if immutability_policy_expiry is not None: + header_parameters['x-ms-immutability-policy-until-date'] = self._serialize.header("immutability_policy_expiry", immutability_policy_expiry, 'rfc-1123') + if immutability_policy_mode is not None: + header_parameters['x-ms-immutability-policy-mode'] = self._serialize.header("immutability_policy_mode", immutability_policy_mode, 'str') + if legal_hold is not None: + header_parameters['x-ms-legal-hold'] = self._serialize.header("legal_hold", legal_hold, 'bool') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + + if cls: + return cls(pipeline_response, None, response_headers) + + start_copy_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def copy_from_url( + self, + 
copy_source: str, + timeout: Optional[int] = None, + metadata: Optional[str] = None, + tier: Optional[Union[str, "_models.AccessTierOptional"]] = None, + request_id_parameter: Optional[str] = None, + source_content_md5: Optional[bytearray] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] = None, + legal_hold: Optional[bool] = None, + copy_source_authorization: Optional[str] = None, + source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + **kwargs: Any + ) -> None: + """The Copy From URL operation copies a blob or an internet resource to a new blob. It will not + return a response until the copy is complete. + + :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of + up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it + would appear in a request URI. The source blob must either be public or must be authenticated + via a shared access signature. + :type copy_source: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. + :type metadata: str + :param tier: Optional. Indicates the tier to be set on the blob. + :type tier: str or ~azure.storage.blob.models.AccessTierOptional + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read + from the copy source. + :type source_content_md5: bytearray + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :type blob_tags_string: str + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. + :type legal_hold: bool + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. + :type copy_source_authorization: str + :param source_modified_access_conditions: Parameter group. 
+ :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _lease_id = None + _encryption_scope = None + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + x_ms_requires_sync = "true" + accept = "application/xml" + + # Construct URL + url = self.copy_from_url.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-requires-sync'] = self._serialize.header("x_ms_requires_sync", x_ms_requires_sync, 'str') + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if tier is not None: + header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') + if _source_if_modified_since is not None: + header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') + if _source_if_unmodified_since is not None: + header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') + if _source_if_match is not None: + header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", 
_source_if_match, 'str') + if _source_if_none_match is not None: + header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if source_content_md5 is not None: + header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') + if blob_tags_string is not None: + header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') + if immutability_policy_expiry is not None: + header_parameters['x-ms-immutability-policy-until-date'] = self._serialize.header("immutability_policy_expiry", immutability_policy_expiry, 'rfc-1123') + if immutability_policy_mode is not None: + header_parameters['x-ms-immutability-policy-mode'] = self._serialize.header("immutability_policy_mode", immutability_policy_mode, 'str') + if legal_hold is not None: + header_parameters['x-ms-legal-hold'] = self._serialize.header("legal_hold", legal_hold, 'bool') + if copy_source_authorization is not None: + header_parameters['x-ms-copy-source-authorization'] = self._serialize.header("copy_source_authorization", copy_source_authorization, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + 
response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + copy_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def abort_copy_from_url( + self, + copy_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a + destination blob with zero length and full metadata. + + :param copy_id: The copy identifier provided in the x-ms-copy-id header of the original Copy + Blob operation. + :type copy_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. 
+ :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + comp = "copy" + copy_action_abort_constant = "abort" + accept = "application/xml" + + # Construct URL + url = self.abort_copy_from_url.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + query_parameters['copyid'] = self._serialize.query("copy_id", copy_id, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-copy-action'] = self._serialize.header("copy_action_abort_constant", copy_action_abort_constant, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + abort_copy_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def set_tier( + self, + tier: Union[str, "_models.AccessTierRequired"], + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + rehydrate_priority: Optional[Union[str, "_models.RehydratePriority"]] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + 
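The three copy operations above (start_copy_from_url, copy_from_url and abort_copy_from_url) are normally exercised through the public async BlobClient rather than by calling this generated class directly. A minimal sketch, assuming placeholder account, container and blob names and SAS-authenticated URLs:

import asyncio
from azure.storage.blob.aio import BlobClient

async def copy_example():
    # Destination blob; the account URL, container and blob names are placeholders.
    dest = BlobClient.from_blob_url(
        "https://myaccount.blob.core.windows.net/mycontainer/dest-blob?<sas>"
    )
    async with dest:
        # Server-side scheduled copy (maps to start_copy_from_url); passing
        # requires_sync=True instead routes through the synchronous copy_from_url path.
        props = await dest.start_copy_from_url(
            "https://myaccount.blob.core.windows.net/mycontainer/source-blob?<sas>"
        )
        if props["copy_status"] == "pending":
            # Maps to abort_copy_from_url; leaves a zero-length destination blob.
            await dest.abort_copy(props["copy_id"])

asyncio.run(copy_example())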
"""The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a + premium storage account and on a block blob in a blob storage account (locally redundant + storage only). A premium page blob's tier determines the allowed size, IOPS, and bandwidth of + the blob. A block blob's tier determines Hot/Cool/Archive storage type. This operation does not + update the blob's ETag. + + :param tier: Indicates the tier to be set on the blob. + :type tier: str or ~azure.storage.blob.models.AccessTierRequired + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + :type version_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived + blob. + :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_tags = modified_access_conditions.if_tags + comp = "tier" + accept = "application/xml" + + # Construct URL + url = self.set_tier.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if snapshot is not None: + query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') + if version_id is not None: + query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') + if rehydrate_priority is not None: + header_parameters['x-ms-rehydrate-priority'] = 
self._serialize.header("rehydrate_priority", rehydrate_priority, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + if response.status_code == 200: + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + if response.status_code == 202: + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_tier.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def get_account_info( + self, + **kwargs: Any + ) -> None: + """Returns the sku name and account kind. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "account" + comp = "properties" + accept = "application/xml" + + # Construct URL + url = self.get_account_info.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) + response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) + + if cls: + return cls(pipeline_response, None, response_headers) + + get_account_info.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def query( + self, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + query_request: Optional["_models.QueryRequest"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> IO: + """The Query operation enables users to select/project on blob data by providing simple query + expressions. + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. + :type snapshot: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. 
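The get_account_info operation above is exposed as get_account_information on the public clients; a minimal sketch against the async BlobServiceClient (account URL and key are placeholders):

import asyncio
from azure.storage.blob.aio import BlobServiceClient

async def account_info_example():
    async with BlobServiceClient(
        "https://myaccount.blob.core.windows.net", credential="<account-key>"
    ) as service:
        info = await service.get_account_information()
        # Parsed from the x-ms-sku-name and x-ms-account-kind response headers.
        print(info["sku_name"], info["account_kind"])

asyncio.run(account_info_example())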
+ :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param query_request: the query request. + :type query_request: ~azure.storage.blob.models.QueryRequest + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IO, or the result of cls(response) + :rtype: IO + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[IO] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "query" + content_type = kwargs.pop("content_type", "application/xml") + accept = "application/xml" + + # Construct URL + url = self.query.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if snapshot is not None: + query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if 
_if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if query_request is not None: + body_content = self._serialize.body(query_request, 'QueryRequest', is_xml=True) + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=True, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 206]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + if response.status_code == 200: + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) + response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) + response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) + response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) + response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) + response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) + response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-progress']=self._deserialize('str', 
response.headers.get('x-ms-copy-progress')) + response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) + response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) + response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) + deserialized = response.stream_download(self._client._pipeline) + + if response.status_code == 206: + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) + response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) + response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) + response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) + response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) + response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) + 
response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) + response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) + response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) + response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) + deserialized = response.stream_download(self._client._pipeline) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + query.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def get_tags( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + **kwargs: Any + ) -> "_models.BlobTags": + """The Get Tags operation enables users to get the tags associated with a blob. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + :type version_id: str + :param modified_access_conditions: Parameter group. 
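The get_tags operation described here, together with set_tags further below, backs the public get_blob_tags/set_blob_tags helpers on the async BlobClient. A minimal sketch with placeholder names:

import asyncio
from azure.storage.blob.aio import BlobClient

async def tags_example():
    # Placeholder account URL, container and blob name.
    blob = BlobClient.from_blob_url(
        "https://myaccount.blob.core.windows.net/mycontainer/myblob?<sas>"
    )
    async with blob:
        # set_blob_tags issues comp=tags with a BlobTags body; get_blob_tags reads it back.
        await blob.set_blob_tags({"project": "demo", "stage": "test"})
        tags = await blob.get_blob_tags()
        print(tags)  # {'project': 'demo', 'stage': 'test'}

asyncio.run(tags_example())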
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BlobTags, or the result of cls(response) + :rtype: ~azure.storage.blob.models.BlobTags + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobTags"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_tags = None + _lease_id = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_tags = modified_access_conditions.if_tags + comp = "tags" + accept = "application/xml" + + # Construct URL + url = self.get_tags.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if snapshot is not None: + query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') + if version_id is not None: + query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('BlobTags', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_tags.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def set_tags( + self, + 
timeout: Optional[int] = None, + version_id: Optional[str] = None, + transactional_content_md5: Optional[bytearray] = None, + transactional_content_crc64: Optional[bytearray] = None, + request_id_parameter: Optional[str] = None, + tags: Optional["_models.BlobTags"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Set Tags operation enables users to set tags on a blob. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + :type version_id: str + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. + :type transactional_content_md5: bytearray + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. + :type transactional_content_crc64: bytearray + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param tags: Blob tags. + :type tags: ~azure.storage.blob.models.BlobTags + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_tags = None + _lease_id = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_tags = modified_access_conditions.if_tags + comp = "tags" + content_type = kwargs.pop("content_type", "application/xml") + accept = "application/xml" + + # Construct URL + url = self.set_tags.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if version_id is not None: + query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if transactional_content_md5 is not None: + header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 
'bytearray') + if transactional_content_crc64 is not None: + header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if tags is not None: + body_content = self._serialize.body(tags, 'BlobTags', is_xml=True) + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_tags.metadata = {'url': '/{containerName}/{blob}'} # type: ignore diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_block_blob_operations.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_block_blob_operations.py new file mode 100644 index 00000000000..c45f674cb75 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_block_blob_operations.py @@ -0,0 +1,1138 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import datetime +from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest + +from ... 
import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class BlockBlobOperations: + """BlockBlobOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.storage.blob.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def upload( + self, + content_length: int, + body: IO, + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytearray] = None, + metadata: Optional[str] = None, + tier: Optional[Union[str, "_models.AccessTierOptional"]] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] = None, + legal_hold: Optional[bool] = None, + blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Upload Block Blob operation updates the content of an existing block blob. Updating an + existing block blob overwrites any existing metadata on the blob. Partial updates are not + supported with Put Blob; the content of the existing blob is overwritten with the content of + the new blob. To perform a partial update of the content of a block blob, use the Put Block + List operation. + + :param content_length: The length of the request. + :type content_length: long + :param body: Initial data. + :type body: IO + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. + :type transactional_content_md5: bytearray + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. + :type metadata: str + :param tier: Optional. Indicates the tier to be set on the blob. 
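The Upload Block Blob operation described above is what the public async BlobClient.upload_blob typically uses for small, single-shot uploads. A minimal sketch with placeholder names:

import asyncio
from azure.storage.blob.aio import BlobClient

async def upload_example():
    # Placeholder account URL, container and blob name.
    blob = BlobClient.from_blob_url(
        "https://myaccount.blob.core.windows.net/mycontainer/new-blob?<sas>"
    )
    async with blob:
        # Single Put Blob request; overwrite=True replaces existing content and
        # metadata, matching the behaviour documented for this operation.
        await blob.upload_blob(b"hello block blob", overwrite=True)

asyncio.run(upload_example())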
+ :type tier: str or ~azure.storage.blob.models.AccessTierOptional + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :type blob_tags_string: str + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. + :type legal_hold: bool + :param blob_http_headers: Parameter group. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _blob_content_type = None + _blob_content_encoding = None + _blob_content_language = None + _blob_content_md5 = None + _blob_cache_control = None + _lease_id = None + _blob_content_disposition = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if blob_http_headers is not None: + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + blob_type = "BlockBlob" + content_type = kwargs.pop("content_type", "application/octet-stream") + accept = 
"application/xml" + + # Construct URL + url = self.upload.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') + if transactional_content_md5 is not None: + header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + if _blob_content_type is not None: + header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') + if _blob_content_encoding is not None: + header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') + if _blob_content_language is not None: + header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') + if _blob_content_md5 is not None: + header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') + if _blob_cache_control is not None: + header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _blob_content_disposition is not None: + header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if tier is not None: + header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = 
self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if blob_tags_string is not None: + header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') + if immutability_policy_expiry is not None: + header_parameters['x-ms-immutability-policy-until-date'] = self._serialize.header("immutability_policy_expiry", immutability_policy_expiry, 'rfc-1123') + if immutability_policy_mode is not None: + header_parameters['x-ms-immutability-policy-mode'] = self._serialize.header("immutability_policy_mode", immutability_policy_mode, 'str') + if legal_hold is not None: + header_parameters['x-ms-legal-hold'] = self._serialize.header("legal_hold", legal_hold, 'bool') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content_kwargs['stream_content'] = body + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + upload.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def put_blob_from_url( + self, + content_length: int, + copy_source: str, + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytearray] = None, + metadata: Optional[str] = None, + tier: Optional[Union[str, "_models.AccessTierOptional"]] = None, + request_id_parameter: Optional[str] = None, + source_content_md5: Optional[bytearray] = None, + blob_tags_string: Optional[str] = None, + copy_source_blob_properties: Optional[bool] = None, + 
copy_source_authorization: Optional[str] = None, + blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Put Blob from URL operation creates a new Block Blob where the contents of the blob are + read from a given URL. This API is supported beginning with the 2020-04-08 version. Partial + updates are not supported with Put Blob from URL; the content of an existing blob is + overwritten with the content of the new blob. To perform partial updates to a block blob’s + contents using a source URL, use the Put Block from URL API in conjunction with Put Block List. + + :param content_length: The length of the request. + :type content_length: long + :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of + up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it + would appear in a request URI. The source blob must either be public or must be authenticated + via a shared access signature. + :type copy_source: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. + :type transactional_content_md5: bytearray + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. + :type metadata: str + :param tier: Optional. Indicates the tier to be set on the blob. + :type tier: str or ~azure.storage.blob.models.AccessTierOptional + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read + from the copy source. + :type source_content_md5: bytearray + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :type blob_tags_string: str + :param copy_source_blob_properties: Optional, default is true. Indicates if properties from + the source blob should be copied. + :type copy_source_blob_properties: bool + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. + :type copy_source_authorization: str + :param blob_http_headers: Parameter group. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. 
+ :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param source_modified_access_conditions: Parameter group. + :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _blob_content_type = None + _blob_content_encoding = None + _blob_content_language = None + _blob_content_md5 = None + _blob_cache_control = None + _lease_id = None + _blob_content_disposition = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + _source_if_tags = None + if blob_http_headers is not None: + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + _source_if_tags = source_modified_access_conditions.source_if_tags + blob_type = "BlockBlob" + accept = "application/xml" + + # Construct URL + url = self.put_blob_from_url.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: 
Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') + if transactional_content_md5 is not None: + header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + if _blob_content_type is not None: + header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') + if _blob_content_encoding is not None: + header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') + if _blob_content_language is not None: + header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') + if _blob_content_md5 is not None: + header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') + if _blob_cache_control is not None: + header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _blob_content_disposition is not None: + header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if tier is not None: + header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + if _source_if_modified_since is not None: + header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') + if _source_if_unmodified_since is not None: + header_parameters['x-ms-source-if-unmodified-since'] = 
self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') + if _source_if_match is not None: + header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') + if _source_if_none_match is not None: + header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') + if _source_if_tags is not None: + header_parameters['x-ms-source-if-tags'] = self._serialize.header("source_if_tags", _source_if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if source_content_md5 is not None: + header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') + if blob_tags_string is not None: + header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') + header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') + if copy_source_blob_properties is not None: + header_parameters['x-ms-copy-source-blob-properties'] = self._serialize.header("copy_source_blob_properties", copy_source_blob_properties, 'bool') + if copy_source_authorization is not None: + header_parameters['x-ms-copy-source-authorization'] = self._serialize.header("copy_source_authorization", copy_source_authorization, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + put_blob_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def stage_block( + self, 
+ block_id: str, + content_length: int, + body: IO, + transactional_content_md5: Optional[bytearray] = None, + transactional_content_crc64: Optional[bytearray] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + **kwargs: Any + ) -> None: + """The Stage Block operation creates a new block to be committed as part of a blob. + + :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the + string must be less than or equal to 64 bytes in size. For a given blob, the length of the + value specified for the blockid parameter must be the same size for each block. + :type block_id: str + :param content_length: The length of the request. + :type content_length: long + :param body: Initial data. + :type body: IO + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. + :type transactional_content_md5: bytearray + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. + :type transactional_content_crc64: bytearray + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. 
+ :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + comp = "block" + content_type = kwargs.pop("content_type", "application/octet-stream") + accept = "application/xml" + + # Construct URL + url = self.stage_block.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + query_parameters['blockid'] = self._serialize.query("block_id", block_id, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + if transactional_content_md5 is not None: + header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') + if transactional_content_crc64 is not None: + header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content_kwargs['stream_content'] = body 
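+ # The block payload is sent as stream content in a single PUT request; the service
+ # responds with 201 (Created) once the block has been staged.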
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + stage_block.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def stage_block_from_url( + self, + block_id: str, + content_length: int, + source_url: str, + source_range: Optional[str] = None, + source_content_md5: Optional[bytearray] = None, + source_contentcrc64: Optional[bytearray] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Stage Block operation creates a new block to be committed as part of a blob where the + contents are read from a URL. + + :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the + string must be less than or equal to 64 bytes in size. For a given blob, the length of the + value specified for the blockid parameter must be the same size for each block. + :type block_id: str + :param content_length: The length of the request. + :type content_length: long + :param source_url: Specify a URL to the copy source. + :type source_url: str + :param source_range: Bytes of source data in the specified range. + :type source_range: str + :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read + from the copy source. + :type source_content_md5: bytearray + :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be + read from the copy source. + :type source_contentcrc64: bytearray + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. 
+ :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. + :type copy_source_authorization: str + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param source_modified_access_conditions: Parameter group. + :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _lease_id = None + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + comp = "block" + accept = "application/xml" + + # Construct URL + url = self.stage_block_from_url.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + query_parameters['blockid'] = self._serialize.query("block_id", block_id, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + header_parameters['x-ms-copy-source'] = self._serialize.header("source_url", source_url, 'str') + if source_range is not None: + header_parameters['x-ms-source-range'] = self._serialize.header("source_range", source_range, 'str') + if source_content_md5 is not None: + header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", 
source_content_md5, 'bytearray') + if source_contentcrc64 is not None: + header_parameters['x-ms-source-content-crc64'] = self._serialize.header("source_contentcrc64", source_contentcrc64, 'bytearray') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _source_if_modified_since is not None: + header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') + if _source_if_unmodified_since is not None: + header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') + if _source_if_match is not None: + header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') + if _source_if_none_match is not None: + header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if copy_source_authorization is not None: + header_parameters['x-ms-copy-source-authorization'] = self._serialize.header("copy_source_authorization", copy_source_authorization, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', 
response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + stage_block_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def commit_block_list( + self, + blocks: "_models.BlockLookupList", + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytearray] = None, + transactional_content_crc64: Optional[bytearray] = None, + metadata: Optional[str] = None, + tier: Optional[Union[str, "_models.AccessTierOptional"]] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] = None, + legal_hold: Optional[bool] = None, + blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Commit Block List operation writes a blob by specifying the list of block IDs that make up + the blob. In order to be written as part of a blob, a block must have been successfully written + to the server in a prior Put Block operation. You can call Put Block List to update a blob by + uploading only those blocks that have changed, then committing the new and existing blocks + together. You can do this by specifying whether to commit a block from the committed block list + or from the uncommitted block list, or to commit the most recently uploaded version of the + block, whichever list it may belong to. + + :param blocks: Blob Blocks. + :type blocks: ~azure.storage.blob.models.BlockLookupList + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. + :type transactional_content_md5: bytearray + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. + :type transactional_content_crc64: bytearray + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. + :type metadata: str + :param tier: Optional. Indicates the tier to be set on the blob. + :type tier: str or ~azure.storage.blob.models.AccessTierOptional + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param blob_tags_string: Optional. 
Used to set blob tags in various blob operations. + :type blob_tags_string: str + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. + :type legal_hold: bool + :param blob_http_headers: Parameter group. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _blob_cache_control = None + _blob_content_type = None + _blob_content_encoding = None + _blob_content_language = None + _blob_content_md5 = None + _lease_id = None + _blob_content_disposition = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if blob_http_headers is not None: + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_content_disposition = blob_http_headers.blob_content_disposition + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "blocklist" + content_type = kwargs.pop("content_type", "application/xml") + accept = "application/xml" + + # Construct URL + url = self.commit_block_list.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: 
Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _blob_cache_control is not None: + header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') + if _blob_content_type is not None: + header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') + if _blob_content_encoding is not None: + header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') + if _blob_content_language is not None: + header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') + if _blob_content_md5 is not None: + header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') + if transactional_content_md5 is not None: + header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') + if transactional_content_crc64 is not None: + header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _blob_content_disposition is not None: + header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if tier is not None: + header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", 
request_id_parameter, 'str') + if blob_tags_string is not None: + header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') + if immutability_policy_expiry is not None: + header_parameters['x-ms-immutability-policy-until-date'] = self._serialize.header("immutability_policy_expiry", immutability_policy_expiry, 'rfc-1123') + if immutability_policy_mode is not None: + header_parameters['x-ms-immutability-policy-mode'] = self._serialize.header("immutability_policy_mode", immutability_policy_mode, 'str') + if legal_hold is not None: + header_parameters['x-ms-legal-hold'] = self._serialize.header("legal_hold", legal_hold, 'bool') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(blocks, 'BlockLookupList', is_xml=True) + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + commit_block_list.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def get_block_list( + self, + snapshot: Optional[str] = None, + list_type: Union[str, "_models.BlockListType"] = "committed", + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> "_models.BlockList": + """The Get Block List operation retrieves the list of blocks that have been uploaded as part of a + block blob. 
+ + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. + :type snapshot: str + :param list_type: Specifies whether to return the list of committed blocks, the list of + uncommitted blocks, or both lists together. + :type list_type: str or ~azure.storage.blob.models.BlockListType + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BlockList, or the result of cls(response) + :rtype: ~azure.storage.blob.models.BlockList + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlockList"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_tags = modified_access_conditions.if_tags + comp = "blocklist" + accept = "application/xml" + + # Construct URL + url = self.get_block_list.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if snapshot is not None: + query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') + query_parameters['blocklisttype'] = self._serialize.query("list_type", list_type, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('BlockList', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_block_list.metadata = {'url': '/{containerName}/{blob}'} # type: ignore diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_container_operations.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_container_operations.py new file mode 100644 index 00000000000..18c70f533d8 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_container_operations.py @@ -0,0 +1,1748 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import datetime +from typing import Any, Callable, Dict, Generic, IO, List, Optional, TypeVar, Union +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ContainerOperations: + """ContainerOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.storage.blob.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. 
+ """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def create( + self, + timeout: Optional[int] = None, + metadata: Optional[str] = None, + access: Optional[Union[str, "_models.PublicAccessType"]] = None, + request_id_parameter: Optional[str] = None, + container_cpk_scope_info: Optional["_models.ContainerCpkScopeInfo"] = None, + **kwargs: Any + ) -> None: + """creates a new container under the specified account. If the container with the same name + already exists, the operation fails. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. + :type metadata: str + :param access: Specifies whether data in the container may be accessed publicly and the level + of access. + :type access: str or ~azure.storage.blob.models.PublicAccessType + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param container_cpk_scope_info: Parameter group. 
+ :type container_cpk_scope_info: ~azure.storage.blob.models.ContainerCpkScopeInfo + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _default_encryption_scope = None + _prevent_encryption_scope_override = None + if container_cpk_scope_info is not None: + _default_encryption_scope = container_cpk_scope_info.default_encryption_scope + _prevent_encryption_scope_override = container_cpk_scope_info.prevent_encryption_scope_override + restype = "container" + accept = "application/xml" + + # Construct URL + url = self.create.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if access is not None: + header_parameters['x-ms-blob-public-access'] = self._serialize.header("access", access, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if _default_encryption_scope is not None: + header_parameters['x-ms-default-encryption-scope'] = self._serialize.header("default_encryption_scope", _default_encryption_scope, 'str') + if _prevent_encryption_scope_override is not None: + header_parameters['x-ms-deny-encryption-scope-override'] = self._serialize.header("prevent_encryption_scope_override", _prevent_encryption_scope_override, 'bool') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + 
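+        # A successful Create returns 201 with no body; the headers parsed above are the only
+        # output and are handed to the optional `cls` callback below.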
if cls: + return cls(pipeline_response, None, response_headers) + + create.metadata = {'url': '/{containerName}'} # type: ignore + + async def get_properties( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + **kwargs: Any + ) -> None: + """returns all user-defined metadata and system properties for the specified container. The data + returned does not include the container's list of blobs. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + restype = "container" + accept = "application/xml" + + # Construct URL + url = self.get_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-duration']=self._deserialize('str', 
response.headers.get('x-ms-lease-duration')) + response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) + response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-public-access']=self._deserialize('str', response.headers.get('x-ms-blob-public-access')) + response_headers['x-ms-has-immutability-policy']=self._deserialize('bool', response.headers.get('x-ms-has-immutability-policy')) + response_headers['x-ms-has-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-has-legal-hold')) + response_headers['x-ms-default-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-default-encryption-scope')) + response_headers['x-ms-deny-encryption-scope-override']=self._deserialize('bool', response.headers.get('x-ms-deny-encryption-scope-override')) + response_headers['x-ms-immutable-storage-with-versioning-enabled']=self._deserialize('bool', response.headers.get('x-ms-immutable-storage-with-versioning-enabled')) + + if cls: + return cls(pipeline_response, None, response_headers) + + get_properties.metadata = {'url': '/{containerName}'} # type: ignore + + async def delete( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """operation marks the specified container for deletion. The container and any blobs contained + within it are later deleted during garbage collection. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + restype = "container" + accept = "application/xml" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + delete.metadata = {'url': '/{containerName}'} # type: ignore + + async def set_metadata( + self, + timeout: Optional[int] = None, + metadata: Optional[str] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: 
Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """operation sets one or more user-defined name-value pairs for the specified container. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. + :type metadata: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_modified_since = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + restype = "container" + comp = "metadata" + accept = "application/xml" + + # Construct URL + url = self.set_metadata.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = 
self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_metadata.metadata = {'url': '/{containerName}'} # type: ignore + + async def get_access_policy( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + **kwargs: Any + ) -> List["_models.SignedIdentifier"]: + """gets the permissions for the specified container. The permissions indicate whether container + data may be accessed publicly. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. 
+ :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: list of SignedIdentifier, or the result of cls(response) + :rtype: list[~azure.storage.blob.models.SignedIdentifier] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[List["_models.SignedIdentifier"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + restype = "container" + comp = "acl" + accept = "application/xml" + + # Construct URL + url = self.get_access_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-blob-public-access']=self._deserialize('str', response.headers.get('x-ms-blob-public-access')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('[SignedIdentifier]', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_access_policy.metadata = {'url': '/{containerName}'} # type: ignore + + async def set_access_policy( + self, + timeout: Optional[int] = None, + access: Optional[Union[str, "_models.PublicAccessType"]] = None, + 
request_id_parameter: Optional[str] = None, + container_acl: Optional[List["_models.SignedIdentifier"]] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """sets the permissions for the specified container. The permissions indicate whether blobs in a + container may be accessed publicly. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param access: Specifies whether data in the container may be accessed publicly and the level + of access. + :type access: str or ~azure.storage.blob.models.PublicAccessType + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param container_acl: the acls for the container. + :type container_acl: list[~azure.storage.blob.models.SignedIdentifier] + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + restype = "container" + comp = "acl" + content_type = kwargs.pop("content_type", "application/xml") + accept = "application/xml" + + # Construct URL + url = self.set_access_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if access is not None: + header_parameters['x-ms-blob-public-access'] = self._serialize.header("access", access, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 
'rfc-1123') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + serialization_ctxt = {'xml': {'name': 'SignedIdentifiers', 'wrapped': True, 'itemsName': 'SignedIdentifier'}} + if container_acl is not None: + body_content = self._serialize.body(container_acl, '[SignedIdentifier]', is_xml=True, serialization_ctxt=serialization_ctxt) + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_access_policy.metadata = {'url': '/{containerName}'} # type: ignore + + async def restore( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + deleted_container_name: Optional[str] = None, + deleted_container_version: Optional[str] = None, + **kwargs: Any + ) -> None: + """Restores a previously-deleted container. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param deleted_container_name: Optional. Version 2019-12-12 and later. Specifies the name of + the deleted container to restore. + :type deleted_container_name: str + :param deleted_container_version: Optional. Version 2019-12-12 and later. Specifies the + version of the deleted container to restore. 
+ :type deleted_container_version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "container" + comp = "undelete" + accept = "application/xml" + + # Construct URL + url = self.restore.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if deleted_container_name is not None: + header_parameters['x-ms-deleted-container-name'] = self._serialize.header("deleted_container_name", deleted_container_name, 'str') + if deleted_container_version is not None: + header_parameters['x-ms-deleted-container-version'] = self._serialize.header("deleted_container_version", deleted_container_version, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + restore.metadata = {'url': '/{containerName}'} # type: ignore + + async def rename( + self, + source_container_name: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + source_lease_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """Renames an existing container. + + :param source_container_name: Required. Specifies the name of the container to rename. + :type source_container_name: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. 
+ :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param source_lease_id: A lease ID for the source path. If specified, the source path must have + an active lease and the lease ID must match. + :type source_lease_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "container" + comp = "rename" + accept = "application/xml" + + # Construct URL + url = self.rename.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['x-ms-source-container-name'] = self._serialize.header("source_container_name", source_container_name, 'str') + if source_lease_id is not None: + header_parameters['x-ms-source-lease-id'] = self._serialize.header("source_lease_id", source_lease_id, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + rename.metadata = {'url': '/{containerName}'} # type: ignore + + async def submit_batch( + self, + content_length: int, + multipart_content_type: str, + body: IO, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> IO: + """The Batch operation allows multiple API calls to be embedded into a single HTTP request. 
+ + :param content_length: The length of the request. + :type content_length: long + :param multipart_content_type: Required. The value of this header must be multipart/mixed with + a batch boundary. Example header value: multipart/mixed; boundary=batch_:code:``. + :type multipart_content_type: str + :param body: Initial data. + :type body: IO + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IO, or the result of cls(response) + :rtype: IO + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[IO] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "container" + comp = "batch" + content_type = kwargs.pop("content_type", "application/xml") + accept = "application/xml" + + # Construct URL + url = self.submit_batch.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + header_parameters['Content-Type'] = self._serialize.header("multipart_content_type", multipart_content_type, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(body, 'IO', is_xml=True) + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=True, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + 
response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+        deserialized = response.stream_download(self._client._pipeline)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    submit_batch.metadata = {'url': '/{containerName}'}  # type: ignore
+
+    async def filter_blobs(
+        self,
+        timeout: Optional[int] = None,
+        request_id_parameter: Optional[str] = None,
+        where: Optional[str] = None,
+        marker: Optional[str] = None,
+        maxresults: Optional[int] = None,
+        **kwargs: Any
+    ) -> "_models.FilterBlobSegment":
+        """The Filter Blobs operation enables callers to list blobs in a container whose tags match a
+        given search expression. Filter blobs searches within the given container.
+
+        :param timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting
+         Timeouts for Blob Service Operations.`.
+        :type timeout: int
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+         limit that is recorded in the analytics logs when storage analytics logging is enabled.
+        :type request_id_parameter: str
+        :param where: Filters the results to return only blobs whose tags match the specified
+         expression.
+        :type where: str
+        :param marker: A string value that identifies the portion of the list of containers to be
+         returned with the next listing operation. The operation returns the NextMarker value within the
+         response body if the listing operation did not return all containers remaining to be listed
+         with the current page. The NextMarker value can be used as the value for the marker parameter
+         in a subsequent call to request the next page of list items. The marker value is opaque to the
+         client.
+        :type marker: str
+        :param maxresults: Specifies the maximum number of containers to return. If the request does
+         not specify maxresults, or specifies a value greater than 5000, the server will return up to
+         5000 items. Note that if the listing operation crosses a partition boundary, then the service
+         will return a continuation token for retrieving the remainder of the results. For this reason,
+         it is possible that the service will return fewer results than specified by maxresults, or than
+         the default of 5000.
+ :type maxresults: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FilterBlobSegment, or the result of cls(response) + :rtype: ~azure.storage.blob.models.FilterBlobSegment + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FilterBlobSegment"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "container" + comp = "blobs" + accept = "application/xml" + + # Construct URL + url = self.filter_blobs.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if where is not None: + query_parameters['where'] = self._serialize.query("where", where, 'str') + if marker is not None: + query_parameters['marker'] = self._serialize.query("marker", marker, 'str') + if maxresults is not None: + query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('FilterBlobSegment', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + filter_blobs.metadata = {'url': '/{containerName}'} # type: ignore + + async def acquire_lease( + self, + timeout: Optional[int] = None, + duration: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """[Update] establishes and manages a lock on a container for delete operations. 
The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a + lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease + duration cannot be changed using renew or change. + :type duration: int + :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns + 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid + Constructor (String) for a list of valid GUID string formats. + :type proposed_lease_id: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + comp = "lease" + restype = "container" + action = "acquire" + accept = "application/xml" + + # Construct URL + url = self.acquire_lease.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') + if duration is not None: + header_parameters['x-ms-lease-duration'] = self._serialize.header("duration", duration, 'int') + if proposed_lease_id is not None: + header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + 
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + acquire_lease.metadata = {'url': '/{containerName}'} # type: ignore + + async def release_lease( + self, + lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + comp = "lease" + restype = "container" + action = "release" + accept = "application/xml" + + # Construct URL + url = self.release_lease.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + 
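+    # The route template stored on the bound method below is what the body reads back through
+    # self.release_lease.metadata['url'] when constructing the request URL.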
release_lease.metadata = {'url': '/{containerName}'} # type: ignore + + async def renew_lease( + self, + lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + comp = "lease" + restype = "container" + action = "renew" + accept = "application/xml" + + # Construct URL + url = self.renew_lease.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = 
pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + renew_lease.metadata = {'url': '/{containerName}'} # type: ignore + + async def break_lease( + self, + timeout: Optional[int] = None, + break_period: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param break_period: For a break operation, proposed duration the lease should continue before + it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter + than the time remaining on the lease. If longer, the time remaining on the lease is used. A new + lease will not be available before the break period has expired, but the lease may be held for + longer than the break period. If this header does not appear with a break operation, a + fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease + breaks immediately. + :type break_period: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + comp = "lease" + restype = "container" + action = "break" + accept = "application/xml" + + # Construct URL + url = self.break_lease.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') + if break_period is not None: + header_parameters['x-ms-lease-break-period'] = self._serialize.header("break_period", break_period, 'int') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-time']=self._deserialize('int', response.headers.get('x-ms-lease-time')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + 
response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + break_lease.metadata = {'url': '/{containerName}'} # type: ignore + + async def change_lease( + self, + lease_id: str, + proposed_lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns + 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid + Constructor (String) for a list of valid GUID string formats. + :type proposed_lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + comp = "lease" + restype = "container" + action = "change" + accept = "application/xml" + + # Construct URL + url = self.change_lease.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') + header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = 
self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + change_lease.metadata = {'url': '/{containerName}'} # type: ignore + + async def list_blob_flat_segment( + self, + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, "_models.ListBlobsIncludeItem"]]] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> "_models.ListBlobsFlatSegmentResponse": + """[Update] The List Blobs operation returns a list of the blobs under the specified container. + + :param prefix: Filters the results to return only containers whose name begins with the + specified prefix. + :type prefix: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. + :type maxresults: int + :param include: Include this parameter to specify one or more datasets to include in the + response. 
+ :type include: list[str or ~azure.storage.blob.models.ListBlobsIncludeItem] + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListBlobsFlatSegmentResponse, or the result of cls(response) + :rtype: ~azure.storage.blob.models.ListBlobsFlatSegmentResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListBlobsFlatSegmentResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "container" + comp = "list" + accept = "application/xml" + + # Construct URL + url = self.list_blob_flat_segment.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if prefix is not None: + query_parameters['prefix'] = self._serialize.query("prefix", prefix, 'str') + if marker is not None: + query_parameters['marker'] = self._serialize.query("marker", marker, 'str') + if maxresults is not None: + query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) + if include is not None: + query_parameters['include'] = self._serialize.query("include", include, '[str]', div=',') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', 
response.headers.get('Date')) + deserialized = self._deserialize('ListBlobsFlatSegmentResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + list_blob_flat_segment.metadata = {'url': '/{containerName}'} # type: ignore + + async def list_blob_hierarchy_segment( + self, + delimiter: str, + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, "_models.ListBlobsIncludeItem"]]] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> "_models.ListBlobsHierarchySegmentResponse": + """[Update] The List Blobs operation returns a list of the blobs under the specified container. + + :param delimiter: When the request includes this parameter, the operation returns a BlobPrefix + element in the response body that acts as a placeholder for all blobs whose names begin with + the same substring up to the appearance of the delimiter character. The delimiter may be a + single character or a string. + :type delimiter: str + :param prefix: Filters the results to return only containers whose name begins with the + specified prefix. + :type prefix: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. + :type maxresults: int + :param include: Include this parameter to specify one or more datasets to include in the + response. + :type include: list[str or ~azure.storage.blob.models.ListBlobsIncludeItem] + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. 
+ :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListBlobsHierarchySegmentResponse, or the result of cls(response) + :rtype: ~azure.storage.blob.models.ListBlobsHierarchySegmentResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListBlobsHierarchySegmentResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "container" + comp = "list" + accept = "application/xml" + + # Construct URL + url = self.list_blob_hierarchy_segment.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if prefix is not None: + query_parameters['prefix'] = self._serialize.query("prefix", prefix, 'str') + query_parameters['delimiter'] = self._serialize.query("delimiter", delimiter, 'str') + if marker is not None: + query_parameters['marker'] = self._serialize.query("marker", marker, 'str') + if maxresults is not None: + query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) + if include is not None: + query_parameters['include'] = self._serialize.query("include", include, '[str]', div=',') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('ListBlobsHierarchySegmentResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + list_blob_hierarchy_segment.metadata = {'url': '/{containerName}'} # type: ignore + + async def get_account_info( + 
self, + **kwargs: Any + ) -> None: + """Returns the sku name and account kind. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "account" + comp = "properties" + accept = "application/xml" + + # Construct URL + url = self.get_account_info.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) + response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) + + if cls: + return cls(pipeline_response, None, response_headers) + + get_account_info.metadata = {'url': '/{containerName}'} # type: ignore diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_page_blob_operations.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_page_blob_operations.py new file mode 100644 index 00000000000..06f17558bd2 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_page_blob_operations.py @@ -0,0 +1,1424 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import datetime +from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class PageBlobOperations: + """PageBlobOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.storage.blob.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def create( + self, + content_length: int, + blob_content_length: int, + timeout: Optional[int] = None, + tier: Optional[Union[str, "_models.PremiumPageBlobAccessTier"]] = None, + metadata: Optional[str] = None, + blob_sequence_number: Optional[int] = 0, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] = None, + legal_hold: Optional[bool] = None, + blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Create operation creates a new page blob. + + :param content_length: The length of the request. + :type content_length: long + :param blob_content_length: This header specifies the maximum size for the page blob, up to 1 + TB. The page blob size must be aligned to a 512-byte boundary. + :type blob_content_length: long + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param tier: Optional. Indicates the tier to be set on the page blob. + :type tier: str or ~azure.storage.blob.models.PremiumPageBlobAccessTier + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. 
+ :type metadata: str + :param blob_sequence_number: Set for page blobs only. The sequence number is a user-controlled + value that you can use to track requests. The value of the sequence number must be between 0 + and 2^63 - 1. + :type blob_sequence_number: long + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :type blob_tags_string: str + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. + :type legal_hold: bool + :param blob_http_headers: Parameter group. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _blob_content_type = None + _blob_content_encoding = None + _blob_content_language = None + _blob_content_md5 = None + _blob_cache_control = None + _lease_id = None + _blob_content_disposition = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if blob_http_headers is not None: + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = 
modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + blob_type = "PageBlob" + accept = "application/xml" + + # Construct URL + url = self.create.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + if tier is not None: + header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') + if _blob_content_type is not None: + header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') + if _blob_content_encoding is not None: + header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') + if _blob_content_language is not None: + header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') + if _blob_content_md5 is not None: + header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') + if _blob_cache_control is not None: + header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _blob_content_disposition is not None: + header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + 
header_parameters['x-ms-blob-content-length'] = self._serialize.header("blob_content_length", blob_content_length, 'long') + if blob_sequence_number is not None: + header_parameters['x-ms-blob-sequence-number'] = self._serialize.header("blob_sequence_number", blob_sequence_number, 'long') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if blob_tags_string is not None: + header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') + if immutability_policy_expiry is not None: + header_parameters['x-ms-immutability-policy-until-date'] = self._serialize.header("immutability_policy_expiry", immutability_policy_expiry, 'rfc-1123') + if immutability_policy_mode is not None: + header_parameters['x-ms-immutability-policy-mode'] = self._serialize.header("immutability_policy_mode", immutability_policy_mode, 'str') + if legal_hold is not None: + header_parameters['x-ms-legal-hold'] = self._serialize.header("legal_hold", legal_hold, 'bool') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + create.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def upload_pages( + self, + content_length: int, + body: IO, + transactional_content_md5: Optional[bytearray] = None, + transactional_content_crc64: Optional[bytearray] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: 
Optional["_models.CpkScopeInfo"] = None, + sequence_number_access_conditions: Optional["_models.SequenceNumberAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Upload Pages operation writes a range of pages to a page blob. + + :param content_length: The length of the request. + :type content_length: long + :param body: Initial data. + :type body: IO + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. + :type transactional_content_md5: bytearray + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. + :type transactional_content_crc64: bytearray + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param range: Return only the bytes of the blob in the specified range. + :type range: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param sequence_number_access_conditions: Parameter group. + :type sequence_number_access_conditions: ~azure.storage.blob.models.SequenceNumberAccessConditions + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_sequence_number_less_than_or_equal_to = None + _if_sequence_number_less_than = None + _if_sequence_number_equal_to = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if sequence_number_access_conditions is not None: + _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to + _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than + _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to + comp = "page" + page_write = "update" + content_type = kwargs.pop("content_type", "application/octet-stream") + accept = "application/xml" + + # Construct URL + url = self.upload_pages.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-page-write'] = self._serialize.header("page_write", page_write, 'str') + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + if transactional_content_md5 is not None: + header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') + if transactional_content_crc64 is not None: + header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') + if range is not None: + header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _encryption_key is not None: + 
header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if _if_sequence_number_less_than_or_equal_to is not None: + header_parameters['x-ms-if-sequence-number-le'] = self._serialize.header("if_sequence_number_less_than_or_equal_to", _if_sequence_number_less_than_or_equal_to, 'long') + if _if_sequence_number_less_than is not None: + header_parameters['x-ms-if-sequence-number-lt'] = self._serialize.header("if_sequence_number_less_than", _if_sequence_number_less_than, 'long') + if _if_sequence_number_equal_to is not None: + header_parameters['x-ms-if-sequence-number-eq'] = self._serialize.header("if_sequence_number_equal_to", _if_sequence_number_equal_to, 'long') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content_kwargs['stream_content'] = body + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + 
response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + upload_pages.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def clear_pages( + self, + content_length: int, + timeout: Optional[int] = None, + range: Optional[str] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + sequence_number_access_conditions: Optional["_models.SequenceNumberAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Clear Pages operation clears a set of pages from a page blob. + + :param content_length: The length of the request. + :type content_length: long + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param range: Return only the bytes of the blob in the specified range. + :type range: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param sequence_number_access_conditions: Parameter group. + :type sequence_number_access_conditions: ~azure.storage.blob.models.SequenceNumberAccessConditions + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_sequence_number_less_than_or_equal_to = None + _if_sequence_number_less_than = None + _if_sequence_number_equal_to = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if sequence_number_access_conditions is not None: + _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to + _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than + _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to + comp = "page" + page_write = "clear" + accept = "application/xml" + + # Construct URL + url = self.clear_pages.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-page-write'] = self._serialize.header("page_write", page_write, 'str') + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + if range is not None: + header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if 
_encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if _if_sequence_number_less_than_or_equal_to is not None: + header_parameters['x-ms-if-sequence-number-le'] = self._serialize.header("if_sequence_number_less_than_or_equal_to", _if_sequence_number_less_than_or_equal_to, 'long') + if _if_sequence_number_less_than is not None: + header_parameters['x-ms-if-sequence-number-lt'] = self._serialize.header("if_sequence_number_less_than", _if_sequence_number_less_than, 'long') + if _if_sequence_number_equal_to is not None: + header_parameters['x-ms-if-sequence-number-eq'] = self._serialize.header("if_sequence_number_equal_to", _if_sequence_number_equal_to, 'long') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + clear_pages.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def upload_pages_from_url( + self, + source_url: str, + source_range: str, + content_length: int, + range: str, + source_content_md5: Optional[bytearray] = 
None, + source_contentcrc64: Optional[bytearray] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + sequence_number_access_conditions: Optional["_models.SequenceNumberAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Upload Pages operation writes a range of pages to a page blob where the contents are read + from a URL. + + :param source_url: Specify a URL to the copy source. + :type source_url: str + :param source_range: Bytes of source data in the specified range. The length of this range + should match the ContentLength header and x-ms-range/Range destination range header. + :type source_range: str + :param content_length: The length of the request. + :type content_length: long + :param range: The range of bytes to which the source range would be written. The range should + be 512 aligned and range-end is required. + :type range: str + :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read + from the copy source. + :type source_content_md5: bytearray + :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be + read from the copy source. + :type source_contentcrc64: bytearray + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. + :type copy_source_authorization: str + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param sequence_number_access_conditions: Parameter group. + :type sequence_number_access_conditions: ~azure.storage.blob.models.SequenceNumberAccessConditions + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param source_modified_access_conditions: Parameter group. 
+ :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _lease_id = None + _if_sequence_number_less_than_or_equal_to = None + _if_sequence_number_less_than = None + _if_sequence_number_equal_to = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if sequence_number_access_conditions is not None: + _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to + _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than + _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + comp = "page" + page_write = "update" + accept = "application/xml" + + # Construct URL + url = self.upload_pages_from_url.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-page-write'] = self._serialize.header("page_write", page_write, 'str') + header_parameters['x-ms-copy-source'] = self._serialize.header("source_url", source_url, 'str') + header_parameters['x-ms-source-range'] = self._serialize.header("source_range", source_range, 'str') + if source_content_md5 is not None: + header_parameters['x-ms-source-content-md5'] = 
self._serialize.header("source_content_md5", source_content_md5, 'bytearray') + if source_contentcrc64 is not None: + header_parameters['x-ms-source-content-crc64'] = self._serialize.header("source_contentcrc64", source_contentcrc64, 'bytearray') + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _if_sequence_number_less_than_or_equal_to is not None: + header_parameters['x-ms-if-sequence-number-le'] = self._serialize.header("if_sequence_number_less_than_or_equal_to", _if_sequence_number_less_than_or_equal_to, 'long') + if _if_sequence_number_less_than is not None: + header_parameters['x-ms-if-sequence-number-lt'] = self._serialize.header("if_sequence_number_less_than", _if_sequence_number_less_than, 'long') + if _if_sequence_number_equal_to is not None: + header_parameters['x-ms-if-sequence-number-eq'] = self._serialize.header("if_sequence_number_equal_to", _if_sequence_number_equal_to, 'long') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + if _source_if_modified_since is not None: + header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') + if _source_if_unmodified_since is not None: + header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') + if _source_if_match is not None: + header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') + if _source_if_none_match is not None: + header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if copy_source_authorization is not None: + 
header_parameters['x-ms-copy-source-authorization'] = self._serialize.header("copy_source_authorization", copy_source_authorization, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + upload_pages_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def get_page_ranges( + self, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> "_models.PageList": + """The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot + of a page blob. + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. + :type snapshot: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param range: Return only the bytes of the blob in the specified range. + :type range: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PageList, or the result of cls(response) + :rtype: ~azure.storage.blob.models.PageList + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PageList"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "pagelist" + accept = "application/xml" + + # Construct URL + url = self.get_page_ranges.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if snapshot is not None: + query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if range is not None: + header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('PageList', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_page_ranges.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def get_page_ranges_diff( + self, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + prevsnapshot: Optional[str] = None, + prev_snapshot_url: Optional[str] = None, + range: Optional[str] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> "_models.PageList": + """The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that + were changed between target blob and previous snapshot. + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. + :type snapshot: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param prevsnapshot: Optional in version 2015-07-08 and newer. The prevsnapshot parameter is a + DateTime value that specifies that the response will contain only pages that were changed + between target blob and previous snapshot. Changed pages include both updated and cleared + pages. The target blob may be a snapshot, as long as the snapshot specified by prevsnapshot is + the older of the two. Note that incremental snapshots are currently supported only for blobs + created on or after January 1, 2016. + :type prevsnapshot: str + :param prev_snapshot_url: Optional. This header is only supported in service versions + 2019-04-19 and after and specifies the URL of a previous snapshot of the target blob. The + response will only contain pages that were changed between the target blob and its previous + snapshot. + :type prev_snapshot_url: str + :param range: Return only the bytes of the blob in the specified range. + :type range: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PageList, or the result of cls(response) + :rtype: ~azure.storage.blob.models.PageList + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PageList"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "pagelist" + accept = "application/xml" + + # Construct URL + url = self.get_page_ranges_diff.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if snapshot is not None: + query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if prevsnapshot is not None: + query_parameters['prevsnapshot'] = self._serialize.query("prevsnapshot", prevsnapshot, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if prev_snapshot_url is not None: + header_parameters['x-ms-previous-snapshot-url'] = self._serialize.header("prev_snapshot_url", prev_snapshot_url, 'str') + if range is not None: + header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await 
self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('PageList', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_page_ranges_diff.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def resize( + self, + blob_content_length: int, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """Resize the Blob. + + :param blob_content_length: This header specifies the maximum size for the page blob, up to 1 + TB. The page blob size must be aligned to a 512-byte boundary. + :type blob_content_length: long + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "properties" + accept = "application/xml" + + # Construct URL + url = self.resize.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') 
+ header_parameters['x-ms-blob-content-length'] = self._serialize.header("blob_content_length", blob_content_length, 'long') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + resize.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def update_sequence_number( + self, + sequence_number_action: Union[str, "_models.SequenceNumberActionType"], + timeout: Optional[int] = None, + blob_sequence_number: Optional[int] = 0, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """Update the sequence number of the blob. + + :param sequence_number_action: Required if the x-ms-blob-sequence-number header is set for the + request. This property applies to page blobs only. This property indicates how the service + should modify the blob's sequence number. + :type sequence_number_action: str or ~azure.storage.blob.models.SequenceNumberActionType + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param blob_sequence_number: Set for page blobs only. The sequence number is a user-controlled + value that you can use to track requests. The value of the sequence number must be between 0 + and 2^63 - 1. + :type blob_sequence_number: long + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "properties" + accept = "application/xml" + + # Construct URL + url = self.update_sequence_number.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-sequence-number-action'] = self._serialize.header("sequence_number_action", sequence_number_action, 'str') + if blob_sequence_number is not None: + header_parameters['x-ms-blob-sequence-number'] = self._serialize.header("blob_sequence_number", blob_sequence_number, 'long') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + update_sequence_number.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + async def copy_incremental( + self, + copy_source: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Copy Incremental operation copies a snapshot of the source page blob to a destination page + blob. The snapshot is copied such that only the differential changes between the previously + copied snapshot are transferred to the destination. The copied snapshots are complete copies of + the original snapshot and can be read or copied from as usual. This API is supported since REST + version 2016-05-31. + + :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of + up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it + would appear in a request URI. The source blob must either be public or must be authenticated + via a shared access signature. + :type copy_source: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "incrementalcopy" + accept = "application/xml" + + # Construct URL + url = self.copy_incremental.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', 
response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + + if cls: + return cls(pipeline_response, None, response_headers) + + copy_incremental.metadata = {'url': '/{containerName}/{blob}'} # type: ignore diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_service_operations.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_service_operations.py new file mode 100644 index 00000000000..a6592a33f7f --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/aio/operations/_service_operations.py @@ -0,0 +1,698 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, IO, List, Optional, TypeVar, Union +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ServiceOperations: + """ServiceOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.storage.blob.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def set_properties( + self, + storage_service_properties: "_models.StorageServiceProperties", + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> None: + """Sets properties for a storage account's Blob service endpoint, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param storage_service_properties: The StorageService properties. 
+ :type storage_service_properties: ~azure.storage.blob.models.StorageServiceProperties + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "service" + comp = "properties" + content_type = kwargs.pop("content_type", "application/xml") + accept = "application/xml" + + # Construct URL + url = self.set_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(storage_service_properties, 'StorageServiceProperties', is_xml=True) + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_properties.metadata = {'url': '/'} # type: ignore + + async def get_properties( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> "_models.StorageServiceProperties": + """gets the properties of a 
storage account's Blob service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageServiceProperties, or the result of cls(response) + :rtype: ~azure.storage.blob.models.StorageServiceProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageServiceProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "service" + comp = "properties" + accept = "application/xml" + + # Construct URL + url = self.get_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + deserialized = self._deserialize('StorageServiceProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_properties.metadata = {'url': '/'} # type: ignore + + async def get_statistics( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> "_models.StorageServiceStats": + """Retrieves statistics related to replication for the Blob service. 
It is only available on the + secondary location endpoint when read-access geo-redundant replication is enabled for the + storage account. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageServiceStats, or the result of cls(response) + :rtype: ~azure.storage.blob.models.StorageServiceStats + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageServiceStats"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "service" + comp = "stats" + accept = "application/xml" + + # Construct URL + url = self.get_statistics.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('StorageServiceStats', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_statistics.metadata = {'url': '/'} # type: ignore + + async def list_containers_segment( + self, + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, "_models.ListContainersIncludeType"]]] = None, + timeout: Optional[int] = None, + 
request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> "_models.ListContainersSegmentResponse": + """The List Containers Segment operation returns a list of the containers under the specified + account. + + :param prefix: Filters the results to return only containers whose name begins with the + specified prefix. + :type prefix: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. + :type maxresults: int + :param include: Include this parameter to specify that the container's metadata be returned as + part of the response body. + :type include: list[str or ~azure.storage.blob.models.ListContainersIncludeType] + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. 
+ :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListContainersSegmentResponse, or the result of cls(response) + :rtype: ~azure.storage.blob.models.ListContainersSegmentResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListContainersSegmentResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + comp = "list" + accept = "application/xml" + + # Construct URL + url = self.list_containers_segment.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if prefix is not None: + query_parameters['prefix'] = self._serialize.query("prefix", prefix, 'str') + if marker is not None: + query_parameters['marker'] = self._serialize.query("marker", marker, 'str') + if maxresults is not None: + query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) + if include is not None: + query_parameters['include'] = self._serialize.query("include", include, '[str]', div=',') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + deserialized = self._deserialize('ListContainersSegmentResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + list_containers_segment.metadata = {'url': '/'} # type: ignore + + async def get_user_delegation_key( + self, + key_info: "_models.KeyInfo", + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> "_models.UserDelegationKey": + """Retrieves a user delegation key for the Blob service. This is only a valid operation when using + bearer token authentication. + + :param key_info: Key information. 
+ :type key_info: ~azure.storage.blob.models.KeyInfo + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: UserDelegationKey, or the result of cls(response) + :rtype: ~azure.storage.blob.models.UserDelegationKey + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.UserDelegationKey"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "service" + comp = "userdelegationkey" + content_type = kwargs.pop("content_type", "application/xml") + accept = "application/xml" + + # Construct URL + url = self.get_user_delegation_key.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(key_info, 'KeyInfo', is_xml=True) + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('UserDelegationKey', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_user_delegation_key.metadata = {'url': '/'} 
# type: ignore + + async def get_account_info( + self, + **kwargs: Any + ) -> None: + """Returns the sku name and account kind. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "account" + comp = "properties" + accept = "application/xml" + + # Construct URL + url = self.get_account_info.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) + response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) + response_headers['x-ms-is-hns-enabled']=self._deserialize('bool', response.headers.get('x-ms-is-hns-enabled')) + + if cls: + return cls(pipeline_response, None, response_headers) + + get_account_info.metadata = {'url': '/'} # type: ignore + + async def submit_batch( + self, + content_length: int, + multipart_content_type: str, + body: IO, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> IO: + """The Batch operation allows multiple API calls to be embedded into a single HTTP request. + + :param content_length: The length of the request. + :type content_length: long + :param multipart_content_type: Required. The value of this header must be multipart/mixed with + a batch boundary. Example header value: multipart/mixed; boundary=batch_:code:``. + :type multipart_content_type: str + :param body: Initial data. + :type body: IO + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. 
+ :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IO, or the result of cls(response) + :rtype: IO + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[IO] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + comp = "batch" + content_type = kwargs.pop("content_type", "application/xml") + accept = "application/xml" + + # Construct URL + url = self.submit_batch.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + header_parameters['Content-Type'] = self._serialize.header("multipart_content_type", multipart_content_type, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(body, 'IO', is_xml=True) + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=True, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + deserialized = response.stream_download(self._client._pipeline) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + submit_batch.metadata = {'url': '/'} # type: ignore + + async def filter_blobs( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + where: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + **kwargs: Any + ) -> "_models.FilterBlobSegment": + """The Filter Blobs operation enables 
callers to list blobs across all containers whose tags match + a given search expression. Filter blobs searches across all containers within a storage + account but can be scoped within the expression to a single container. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param where: Filters the results to return only to return only blobs whose tags match the + specified expression. + :type where: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. + :type maxresults: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FilterBlobSegment, or the result of cls(response) + :rtype: ~azure.storage.blob.models.FilterBlobSegment + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FilterBlobSegment"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + comp = "blobs" + accept = "application/xml" + + # Construct URL + url = self.filter_blobs.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if where is not None: + query_parameters['where'] = self._serialize.query("where", where, 'str') + if marker is not None: + query_parameters['marker'] = self._serialize.query("marker", marker, 'str') + if maxresults is not None: + query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + 
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('FilterBlobSegment', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + filter_blobs.metadata = {'url': '/'} # type: ignore diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/models/__init__.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/models/__init__.py new file mode 100644 index 00000000000..e3307ac818b --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/models/__init__.py @@ -0,0 +1,219 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +try: + from ._models_py3 import AccessPolicy + from ._models_py3 import AppendPositionAccessConditions + from ._models_py3 import ArrowConfiguration + from ._models_py3 import ArrowField + from ._models_py3 import BlobFlatListSegment + from ._models_py3 import BlobHTTPHeaders + from ._models_py3 import BlobHierarchyListSegment + from ._models_py3 import BlobItemInternal + from ._models_py3 import BlobMetadata + from ._models_py3 import BlobName + from ._models_py3 import BlobPrefix + from ._models_py3 import BlobPropertiesInternal + from ._models_py3 import BlobTag + from ._models_py3 import BlobTags + from ._models_py3 import Block + from ._models_py3 import BlockList + from ._models_py3 import BlockLookupList + from ._models_py3 import ClearRange + from ._models_py3 import ContainerCpkScopeInfo + from ._models_py3 import ContainerItem + from ._models_py3 import ContainerProperties + from ._models_py3 import CorsRule + from ._models_py3 import CpkInfo + from ._models_py3 import CpkScopeInfo + from ._models_py3 import DelimitedTextConfiguration + from ._models_py3 import FilterBlobItem + from ._models_py3 import FilterBlobSegment + from ._models_py3 import GeoReplication + from ._models_py3 import JsonTextConfiguration + from ._models_py3 import KeyInfo + from ._models_py3 import LeaseAccessConditions + from ._models_py3 import ListBlobsFlatSegmentResponse + from ._models_py3 import ListBlobsHierarchySegmentResponse + from ._models_py3 import ListContainersSegmentResponse + from ._models_py3 import Logging + from ._models_py3 import Metrics + from ._models_py3 import ModifiedAccessConditions + from ._models_py3 import PageList + from ._models_py3 import PageRange + from ._models_py3 import QueryFormat + from ._models_py3 import QueryRequest + from ._models_py3 import QuerySerialization + from ._models_py3 import RetentionPolicy + from ._models_py3 import SequenceNumberAccessConditions + from ._models_py3 import SignedIdentifier + from ._models_py3 import SourceModifiedAccessConditions + from ._models_py3 import StaticWebsite + from ._models_py3 import StorageError + from ._models_py3 import StorageServiceProperties + from ._models_py3 import StorageServiceStats + from ._models_py3 import UserDelegationKey +except (SyntaxError, ImportError): + from ._models import AccessPolicy # type: ignore + from ._models import AppendPositionAccessConditions # type: ignore + from ._models import ArrowConfiguration # type: ignore + from ._models import ArrowField # type: ignore + from ._models import BlobFlatListSegment # type: ignore + from ._models import BlobHTTPHeaders # type: ignore + from ._models import BlobHierarchyListSegment # type: ignore + from ._models import BlobItemInternal # type: ignore + from ._models import BlobMetadata # type: ignore + from ._models import BlobName # type: ignore + from ._models import BlobPrefix # type: ignore + from ._models import BlobPropertiesInternal # type: ignore + from ._models import BlobTag # type: ignore + from ._models import BlobTags # type: ignore + from ._models import Block # type: ignore + from ._models import BlockList # type: ignore + from ._models import BlockLookupList # type: ignore + from ._models import ClearRange # type: ignore + from ._models import ContainerCpkScopeInfo # type: ignore + from ._models import ContainerItem # type: ignore + from ._models import ContainerProperties # type: ignore + from ._models import CorsRule # type: ignore + from ._models import 
CpkInfo # type: ignore + from ._models import CpkScopeInfo # type: ignore + from ._models import DelimitedTextConfiguration # type: ignore + from ._models import FilterBlobItem # type: ignore + from ._models import FilterBlobSegment # type: ignore + from ._models import GeoReplication # type: ignore + from ._models import JsonTextConfiguration # type: ignore + from ._models import KeyInfo # type: ignore + from ._models import LeaseAccessConditions # type: ignore + from ._models import ListBlobsFlatSegmentResponse # type: ignore + from ._models import ListBlobsHierarchySegmentResponse # type: ignore + from ._models import ListContainersSegmentResponse # type: ignore + from ._models import Logging # type: ignore + from ._models import Metrics # type: ignore + from ._models import ModifiedAccessConditions # type: ignore + from ._models import PageList # type: ignore + from ._models import PageRange # type: ignore + from ._models import QueryFormat # type: ignore + from ._models import QueryRequest # type: ignore + from ._models import QuerySerialization # type: ignore + from ._models import RetentionPolicy # type: ignore + from ._models import SequenceNumberAccessConditions # type: ignore + from ._models import SignedIdentifier # type: ignore + from ._models import SourceModifiedAccessConditions # type: ignore + from ._models import StaticWebsite # type: ignore + from ._models import StorageError # type: ignore + from ._models import StorageServiceProperties # type: ignore + from ._models import StorageServiceStats # type: ignore + from ._models import UserDelegationKey # type: ignore + +from ._azure_blob_storage_enums import ( + AccessTier, + AccessTierOptional, + AccessTierRequired, + AccountKind, + ArchiveStatus, + BlobExpiryOptions, + BlobImmutabilityPolicyMode, + BlobType, + BlockListType, + CopyStatusType, + DeleteSnapshotsOptionType, + EncryptionAlgorithmType, + GeoReplicationStatusType, + LeaseDurationType, + LeaseStateType, + LeaseStatusType, + ListBlobsIncludeItem, + ListContainersIncludeType, + PremiumPageBlobAccessTier, + PublicAccessType, + QueryFormatType, + RehydratePriority, + SequenceNumberActionType, + SkuName, + StorageErrorCode, +) + +__all__ = [ + 'AccessPolicy', + 'AppendPositionAccessConditions', + 'ArrowConfiguration', + 'ArrowField', + 'BlobFlatListSegment', + 'BlobHTTPHeaders', + 'BlobHierarchyListSegment', + 'BlobItemInternal', + 'BlobMetadata', + 'BlobName', + 'BlobPrefix', + 'BlobPropertiesInternal', + 'BlobTag', + 'BlobTags', + 'Block', + 'BlockList', + 'BlockLookupList', + 'ClearRange', + 'ContainerCpkScopeInfo', + 'ContainerItem', + 'ContainerProperties', + 'CorsRule', + 'CpkInfo', + 'CpkScopeInfo', + 'DelimitedTextConfiguration', + 'FilterBlobItem', + 'FilterBlobSegment', + 'GeoReplication', + 'JsonTextConfiguration', + 'KeyInfo', + 'LeaseAccessConditions', + 'ListBlobsFlatSegmentResponse', + 'ListBlobsHierarchySegmentResponse', + 'ListContainersSegmentResponse', + 'Logging', + 'Metrics', + 'ModifiedAccessConditions', + 'PageList', + 'PageRange', + 'QueryFormat', + 'QueryRequest', + 'QuerySerialization', + 'RetentionPolicy', + 'SequenceNumberAccessConditions', + 'SignedIdentifier', + 'SourceModifiedAccessConditions', + 'StaticWebsite', + 'StorageError', + 'StorageServiceProperties', + 'StorageServiceStats', + 'UserDelegationKey', + 'AccessTier', + 'AccessTierOptional', + 'AccessTierRequired', + 'AccountKind', + 'ArchiveStatus', + 'BlobExpiryOptions', + 'BlobImmutabilityPolicyMode', + 'BlobType', + 'BlockListType', + 'CopyStatusType', + 
'DeleteSnapshotsOptionType', + 'EncryptionAlgorithmType', + 'GeoReplicationStatusType', + 'LeaseDurationType', + 'LeaseStateType', + 'LeaseStatusType', + 'ListBlobsIncludeItem', + 'ListContainersIncludeType', + 'PremiumPageBlobAccessTier', + 'PublicAccessType', + 'QueryFormatType', + 'RehydratePriority', + 'SequenceNumberActionType', + 'SkuName', + 'StorageErrorCode', +] diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/models/_azure_blob_storage_enums.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/models/_azure_blob_storage_enums.py new file mode 100644 index 00000000000..31325457b82 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/models/_azure_blob_storage_enums.py @@ -0,0 +1,346 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum, EnumMeta +from six import with_metaclass + +class _CaseInsensitiveEnumMeta(EnumMeta): + def __getitem__(self, name): + return super().__getitem__(name.upper()) + + def __getattr__(cls, name): + """Return the enum member matching `name` + We use __getattr__ instead of descriptors or inserting into the enum + class' __dict__ in order to support `name` and `value` being both + properties for enum members (which live in the class' __dict__) and + enum members themselves. 
+ """ + try: + return cls._member_map_[name.upper()] + except KeyError: + raise AttributeError(name) + + +class AccessTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + P4 = "P4" + P6 = "P6" + P10 = "P10" + P15 = "P15" + P20 = "P20" + P30 = "P30" + P40 = "P40" + P50 = "P50" + P60 = "P60" + P70 = "P70" + P80 = "P80" + HOT = "Hot" + COOL = "Cool" + ARCHIVE = "Archive" + +class AccessTierOptional(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + P4 = "P4" + P6 = "P6" + P10 = "P10" + P15 = "P15" + P20 = "P20" + P30 = "P30" + P40 = "P40" + P50 = "P50" + P60 = "P60" + P70 = "P70" + P80 = "P80" + HOT = "Hot" + COOL = "Cool" + ARCHIVE = "Archive" + +class AccessTierRequired(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + P4 = "P4" + P6 = "P6" + P10 = "P10" + P15 = "P15" + P20 = "P20" + P30 = "P30" + P40 = "P40" + P50 = "P50" + P60 = "P60" + P70 = "P70" + P80 = "P80" + HOT = "Hot" + COOL = "Cool" + ARCHIVE = "Archive" + +class AccountKind(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + STORAGE = "Storage" + BLOB_STORAGE = "BlobStorage" + STORAGE_V2 = "StorageV2" + FILE_STORAGE = "FileStorage" + BLOCK_BLOB_STORAGE = "BlockBlobStorage" + +class ArchiveStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + REHYDRATE_PENDING_TO_HOT = "rehydrate-pending-to-hot" + REHYDRATE_PENDING_TO_COOL = "rehydrate-pending-to-cool" + +class BlobExpiryOptions(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + NEVER_EXPIRE = "NeverExpire" + RELATIVE_TO_CREATION = "RelativeToCreation" + RELATIVE_TO_NOW = "RelativeToNow" + ABSOLUTE = "Absolute" + +class BlobImmutabilityPolicyMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + MUTABLE = "Mutable" + UNLOCKED = "Unlocked" + LOCKED = "Locked" + +class BlobType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + BLOCK_BLOB = "BlockBlob" + PAGE_BLOB = "PageBlob" + APPEND_BLOB = "AppendBlob" + +class BlockListType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + COMMITTED = "committed" + UNCOMMITTED = "uncommitted" + ALL = "all" + +class CopyStatusType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + PENDING = "pending" + SUCCESS = "success" + ABORTED = "aborted" + FAILED = "failed" + +class DeleteSnapshotsOptionType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + INCLUDE = "include" + ONLY = "only" + +class EncryptionAlgorithmType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + NONE = "None" + AES256 = "AES256" + +class GeoReplicationStatusType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The status of the secondary location + """ + + LIVE = "live" + BOOTSTRAP = "bootstrap" + UNAVAILABLE = "unavailable" + +class LeaseDurationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + INFINITE = "infinite" + FIXED = "fixed" + +class LeaseStateType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + AVAILABLE = "available" + LEASED = "leased" + EXPIRED = "expired" + BREAKING = "breaking" + BROKEN = "broken" + +class LeaseStatusType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + LOCKED = "locked" + UNLOCKED = "unlocked" + +class ListBlobsIncludeItem(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + COPY = "copy" + DELETED = "deleted" + METADATA = "metadata" + SNAPSHOTS = "snapshots" + UNCOMMITTEDBLOBS = "uncommittedblobs" + VERSIONS = "versions" + TAGS = "tags" + IMMUTABILITYPOLICY = "immutabilitypolicy" + LEGALHOLD = "legalhold" + DELETEDWITHVERSIONS = "deletedwithversions" + +class 
ListContainersIncludeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + METADATA = "metadata" + DELETED = "deleted" + SYSTEM = "system" + +class PremiumPageBlobAccessTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + P4 = "P4" + P6 = "P6" + P10 = "P10" + P15 = "P15" + P20 = "P20" + P30 = "P30" + P40 = "P40" + P50 = "P50" + P60 = "P60" + P70 = "P70" + P80 = "P80" + +class PublicAccessType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + CONTAINER = "container" + BLOB = "blob" + +class QueryFormatType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The quick query format type. + """ + + DELIMITED = "delimited" + JSON = "json" + ARROW = "arrow" + PARQUET = "parquet" + +class RehydratePriority(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """If an object is in rehydrate pending state then this header is returned with priority of + rehydrate. Valid values are High and Standard. + """ + + HIGH = "High" + STANDARD = "Standard" + +class SequenceNumberActionType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + MAX = "max" + UPDATE = "update" + INCREMENT = "increment" + +class SkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + STANDARD_LRS = "Standard_LRS" + STANDARD_GRS = "Standard_GRS" + STANDARD_RAGRS = "Standard_RAGRS" + STANDARD_ZRS = "Standard_ZRS" + PREMIUM_LRS = "Premium_LRS" + +class StorageErrorCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Error codes returned by the service + """ + + ACCOUNT_ALREADY_EXISTS = "AccountAlreadyExists" + ACCOUNT_BEING_CREATED = "AccountBeingCreated" + ACCOUNT_IS_DISABLED = "AccountIsDisabled" + AUTHENTICATION_FAILED = "AuthenticationFailed" + AUTHORIZATION_FAILURE = "AuthorizationFailure" + CONDITION_HEADERS_NOT_SUPPORTED = "ConditionHeadersNotSupported" + CONDITION_NOT_MET = "ConditionNotMet" + EMPTY_METADATA_KEY = "EmptyMetadataKey" + INSUFFICIENT_ACCOUNT_PERMISSIONS = "InsufficientAccountPermissions" + INTERNAL_ERROR = "InternalError" + INVALID_AUTHENTICATION_INFO = "InvalidAuthenticationInfo" + INVALID_HEADER_VALUE = "InvalidHeaderValue" + INVALID_HTTP_VERB = "InvalidHttpVerb" + INVALID_INPUT = "InvalidInput" + INVALID_MD5 = "InvalidMd5" + INVALID_METADATA = "InvalidMetadata" + INVALID_QUERY_PARAMETER_VALUE = "InvalidQueryParameterValue" + INVALID_RANGE = "InvalidRange" + INVALID_RESOURCE_NAME = "InvalidResourceName" + INVALID_URI = "InvalidUri" + INVALID_XML_DOCUMENT = "InvalidXmlDocument" + INVALID_XML_NODE_VALUE = "InvalidXmlNodeValue" + MD5_MISMATCH = "Md5Mismatch" + METADATA_TOO_LARGE = "MetadataTooLarge" + MISSING_CONTENT_LENGTH_HEADER = "MissingContentLengthHeader" + MISSING_REQUIRED_QUERY_PARAMETER = "MissingRequiredQueryParameter" + MISSING_REQUIRED_HEADER = "MissingRequiredHeader" + MISSING_REQUIRED_XML_NODE = "MissingRequiredXmlNode" + MULTIPLE_CONDITION_HEADERS_NOT_SUPPORTED = "MultipleConditionHeadersNotSupported" + OPERATION_TIMED_OUT = "OperationTimedOut" + OUT_OF_RANGE_INPUT = "OutOfRangeInput" + OUT_OF_RANGE_QUERY_PARAMETER_VALUE = "OutOfRangeQueryParameterValue" + REQUEST_BODY_TOO_LARGE = "RequestBodyTooLarge" + RESOURCE_TYPE_MISMATCH = "ResourceTypeMismatch" + REQUEST_URL_FAILED_TO_PARSE = "RequestUrlFailedToParse" + RESOURCE_ALREADY_EXISTS = "ResourceAlreadyExists" + RESOURCE_NOT_FOUND = "ResourceNotFound" + SERVER_BUSY = "ServerBusy" + UNSUPPORTED_HEADER = "UnsupportedHeader" + UNSUPPORTED_XML_NODE = "UnsupportedXmlNode" + UNSUPPORTED_QUERY_PARAMETER = "UnsupportedQueryParameter" + UNSUPPORTED_HTTP_VERB = "UnsupportedHttpVerb" + 
APPEND_POSITION_CONDITION_NOT_MET = "AppendPositionConditionNotMet" + BLOB_ALREADY_EXISTS = "BlobAlreadyExists" + BLOB_IMMUTABLE_DUE_TO_POLICY = "BlobImmutableDueToPolicy" + BLOB_NOT_FOUND = "BlobNotFound" + BLOB_OVERWRITTEN = "BlobOverwritten" + BLOB_TIER_INADEQUATE_FOR_CONTENT_LENGTH = "BlobTierInadequateForContentLength" + BLOB_USES_CUSTOMER_SPECIFIED_ENCRYPTION = "BlobUsesCustomerSpecifiedEncryption" + BLOCK_COUNT_EXCEEDS_LIMIT = "BlockCountExceedsLimit" + BLOCK_LIST_TOO_LONG = "BlockListTooLong" + CANNOT_CHANGE_TO_LOWER_TIER = "CannotChangeToLowerTier" + CANNOT_VERIFY_COPY_SOURCE = "CannotVerifyCopySource" + CONTAINER_ALREADY_EXISTS = "ContainerAlreadyExists" + CONTAINER_BEING_DELETED = "ContainerBeingDeleted" + CONTAINER_DISABLED = "ContainerDisabled" + CONTAINER_NOT_FOUND = "ContainerNotFound" + CONTENT_LENGTH_LARGER_THAN_TIER_LIMIT = "ContentLengthLargerThanTierLimit" + COPY_ACROSS_ACCOUNTS_NOT_SUPPORTED = "CopyAcrossAccountsNotSupported" + COPY_ID_MISMATCH = "CopyIdMismatch" + FEATURE_VERSION_MISMATCH = "FeatureVersionMismatch" + INCREMENTAL_COPY_BLOB_MISMATCH = "IncrementalCopyBlobMismatch" + INCREMENTAL_COPY_OF_ERALIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEralierVersionSnapshotNotAllowed" + INCREMENTAL_COPY_SOURCE_MUST_BE_SNAPSHOT = "IncrementalCopySourceMustBeSnapshot" + INFINITE_LEASE_DURATION_REQUIRED = "InfiniteLeaseDurationRequired" + INVALID_BLOB_OR_BLOCK = "InvalidBlobOrBlock" + INVALID_BLOB_TIER = "InvalidBlobTier" + INVALID_BLOB_TYPE = "InvalidBlobType" + INVALID_BLOCK_ID = "InvalidBlockId" + INVALID_BLOCK_LIST = "InvalidBlockList" + INVALID_OPERATION = "InvalidOperation" + INVALID_PAGE_RANGE = "InvalidPageRange" + INVALID_SOURCE_BLOB_TYPE = "InvalidSourceBlobType" + INVALID_SOURCE_BLOB_URL = "InvalidSourceBlobUrl" + INVALID_VERSION_FOR_PAGE_BLOB_OPERATION = "InvalidVersionForPageBlobOperation" + LEASE_ALREADY_PRESENT = "LeaseAlreadyPresent" + LEASE_ALREADY_BROKEN = "LeaseAlreadyBroken" + LEASE_ID_MISMATCH_WITH_BLOB_OPERATION = "LeaseIdMismatchWithBlobOperation" + LEASE_ID_MISMATCH_WITH_CONTAINER_OPERATION = "LeaseIdMismatchWithContainerOperation" + LEASE_ID_MISMATCH_WITH_LEASE_OPERATION = "LeaseIdMismatchWithLeaseOperation" + LEASE_ID_MISSING = "LeaseIdMissing" + LEASE_IS_BREAKING_AND_CANNOT_BE_ACQUIRED = "LeaseIsBreakingAndCannotBeAcquired" + LEASE_IS_BREAKING_AND_CANNOT_BE_CHANGED = "LeaseIsBreakingAndCannotBeChanged" + LEASE_IS_BROKEN_AND_CANNOT_BE_RENEWED = "LeaseIsBrokenAndCannotBeRenewed" + LEASE_LOST = "LeaseLost" + LEASE_NOT_PRESENT_WITH_BLOB_OPERATION = "LeaseNotPresentWithBlobOperation" + LEASE_NOT_PRESENT_WITH_CONTAINER_OPERATION = "LeaseNotPresentWithContainerOperation" + LEASE_NOT_PRESENT_WITH_LEASE_OPERATION = "LeaseNotPresentWithLeaseOperation" + MAX_BLOB_SIZE_CONDITION_NOT_MET = "MaxBlobSizeConditionNotMet" + NO_AUTHENTICATION_INFORMATION = "NoAuthenticationInformation" + NO_PENDING_COPY_OPERATION = "NoPendingCopyOperation" + OPERATION_NOT_ALLOWED_ON_INCREMENTAL_COPY_BLOB = "OperationNotAllowedOnIncrementalCopyBlob" + PENDING_COPY_OPERATION = "PendingCopyOperation" + PREVIOUS_SNAPSHOT_CANNOT_BE_NEWER = "PreviousSnapshotCannotBeNewer" + PREVIOUS_SNAPSHOT_NOT_FOUND = "PreviousSnapshotNotFound" + PREVIOUS_SNAPSHOT_OPERATION_NOT_SUPPORTED = "PreviousSnapshotOperationNotSupported" + SEQUENCE_NUMBER_CONDITION_NOT_MET = "SequenceNumberConditionNotMet" + SEQUENCE_NUMBER_INCREMENT_TOO_LARGE = "SequenceNumberIncrementTooLarge" + SNAPSHOT_COUNT_EXCEEDED = "SnapshotCountExceeded" + SNAPSHOT_OPERATION_RATE_EXCEEDED = "SnapshotOperationRateExceeded" + 
SNAPSHOTS_PRESENT = "SnapshotsPresent" + SOURCE_CONDITION_NOT_MET = "SourceConditionNotMet" + SYSTEM_IN_USE = "SystemInUse" + TARGET_CONDITION_NOT_MET = "TargetConditionNotMet" + UNAUTHORIZED_BLOB_OVERWRITE = "UnauthorizedBlobOverwrite" + BLOB_BEING_REHYDRATED = "BlobBeingRehydrated" + BLOB_ARCHIVED = "BlobArchived" + BLOB_NOT_ARCHIVED = "BlobNotArchived" + AUTHORIZATION_SOURCE_IP_MISMATCH = "AuthorizationSourceIPMismatch" + AUTHORIZATION_PROTOCOL_MISMATCH = "AuthorizationProtocolMismatch" + AUTHORIZATION_PERMISSION_MISMATCH = "AuthorizationPermissionMismatch" + AUTHORIZATION_SERVICE_MISMATCH = "AuthorizationServiceMismatch" + AUTHORIZATION_RESOURCE_TYPE_MISMATCH = "AuthorizationResourceTypeMismatch" diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/models/_models.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/models/_models.py new file mode 100644 index 00000000000..abf16321bba --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/models/_models.py @@ -0,0 +1,1995 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + + +class AccessPolicy(msrest.serialization.Model): + """An Access policy. + + :param start: the date-time the policy is active. + :type start: str + :param expiry: the date-time the policy expires. + :type expiry: str + :param permission: the permissions for the acl policy. + :type permission: str + """ + + _attribute_map = { + 'start': {'key': 'Start', 'type': 'str'}, + 'expiry': {'key': 'Expiry', 'type': 'str'}, + 'permission': {'key': 'Permission', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AccessPolicy, self).__init__(**kwargs) + self.start = kwargs.get('start', None) + self.expiry = kwargs.get('expiry', None) + self.permission = kwargs.get('permission', None) + + +class AppendPositionAccessConditions(msrest.serialization.Model): + """Parameter group. + + :param max_size: Optional conditional header. The max length in bytes permitted for the append + blob. If the Append Block operation would cause the blob to exceed that limit or if the blob + size is already greater than the value specified in this header, the request will fail with + MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). + :type max_size: long + :param append_position: Optional conditional header, used only for the Append Block operation. + A number indicating the byte offset to compare. Append Block will succeed only if the append + position is equal to this number. If it is not, the request will fail with the + AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). 
+ :type append_position: long + """ + + _attribute_map = { + 'max_size': {'key': 'maxSize', 'type': 'long'}, + 'append_position': {'key': 'appendPosition', 'type': 'long'}, + } + + def __init__( + self, + **kwargs + ): + super(AppendPositionAccessConditions, self).__init__(**kwargs) + self.max_size = kwargs.get('max_size', None) + self.append_position = kwargs.get('append_position', None) + + +class ArrowConfiguration(msrest.serialization.Model): + """Groups the settings used for formatting the response if the response should be Arrow formatted. + + All required parameters must be populated in order to send to Azure. + + :param schema: Required. + :type schema: list[~azure.storage.blob.models.ArrowField] + """ + + _validation = { + 'schema': {'required': True}, + } + + _attribute_map = { + 'schema': {'key': 'Schema', 'type': '[ArrowField]', 'xml': {'name': 'Schema', 'wrapped': True, 'itemsName': 'Field'}}, + } + _xml_map = { + 'name': 'ArrowConfiguration' + } + + def __init__( + self, + **kwargs + ): + super(ArrowConfiguration, self).__init__(**kwargs) + self.schema = kwargs['schema'] + + +class ArrowField(msrest.serialization.Model): + """Groups settings regarding specific field of an arrow schema. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. + :type type: str + :param name: + :type name: str + :param precision: + :type precision: int + :param scale: + :type scale: int + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'Type', 'type': 'str'}, + 'name': {'key': 'Name', 'type': 'str'}, + 'precision': {'key': 'Precision', 'type': 'int'}, + 'scale': {'key': 'Scale', 'type': 'int'}, + } + _xml_map = { + 'name': 'Field' + } + + def __init__( + self, + **kwargs + ): + super(ArrowField, self).__init__(**kwargs) + self.type = kwargs['type'] + self.name = kwargs.get('name', None) + self.precision = kwargs.get('precision', None) + self.scale = kwargs.get('scale', None) + + +class BlobFlatListSegment(msrest.serialization.Model): + """BlobFlatListSegment. + + All required parameters must be populated in order to send to Azure. + + :param blob_items: Required. + :type blob_items: list[~azure.storage.blob.models.BlobItemInternal] + """ + + _validation = { + 'blob_items': {'required': True}, + } + + _attribute_map = { + 'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]'}, + } + _xml_map = { + 'name': 'Blobs' + } + + def __init__( + self, + **kwargs + ): + super(BlobFlatListSegment, self).__init__(**kwargs) + self.blob_items = kwargs['blob_items'] + + +class BlobHierarchyListSegment(msrest.serialization.Model): + """BlobHierarchyListSegment. + + All required parameters must be populated in order to send to Azure. + + :param blob_prefixes: + :type blob_prefixes: list[~azure.storage.blob.models.BlobPrefix] + :param blob_items: Required. 
+ :type blob_items: list[~azure.storage.blob.models.BlobItemInternal] + """ + + _validation = { + 'blob_items': {'required': True}, + } + + _attribute_map = { + 'blob_prefixes': {'key': 'BlobPrefixes', 'type': '[BlobPrefix]', 'xml': {'name': 'BlobPrefix'}}, + 'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]', 'xml': {'name': 'Blob', 'itemsName': 'Blob'}}, + } + _xml_map = { + 'name': 'Blobs' + } + + def __init__( + self, + **kwargs + ): + super(BlobHierarchyListSegment, self).__init__(**kwargs) + self.blob_prefixes = kwargs.get('blob_prefixes', None) + self.blob_items = kwargs['blob_items'] + + +class BlobHTTPHeaders(msrest.serialization.Model): + """Parameter group. + + :param blob_cache_control: Optional. Sets the blob's cache control. If specified, this property + is stored with the blob and returned with a read request. + :type blob_cache_control: str + :param blob_content_type: Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. + :type blob_content_type: str + :param blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is not + validated, as the hashes for the individual blocks were validated when each was uploaded. + :type blob_content_md5: bytearray + :param blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. + :type blob_content_encoding: str + :param blob_content_language: Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. + :type blob_content_language: str + :param blob_content_disposition: Optional. Sets the blob's Content-Disposition header. + :type blob_content_disposition: str + """ + + _attribute_map = { + 'blob_cache_control': {'key': 'blobCacheControl', 'type': 'str'}, + 'blob_content_type': {'key': 'blobContentType', 'type': 'str'}, + 'blob_content_md5': {'key': 'blobContentMD5', 'type': 'bytearray'}, + 'blob_content_encoding': {'key': 'blobContentEncoding', 'type': 'str'}, + 'blob_content_language': {'key': 'blobContentLanguage', 'type': 'str'}, + 'blob_content_disposition': {'key': 'blobContentDisposition', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobHTTPHeaders, self).__init__(**kwargs) + self.blob_cache_control = kwargs.get('blob_cache_control', None) + self.blob_content_type = kwargs.get('blob_content_type', None) + self.blob_content_md5 = kwargs.get('blob_content_md5', None) + self.blob_content_encoding = kwargs.get('blob_content_encoding', None) + self.blob_content_language = kwargs.get('blob_content_language', None) + self.blob_content_disposition = kwargs.get('blob_content_disposition', None) + + +class BlobItemInternal(msrest.serialization.Model): + """An Azure Storage blob. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. + :type name: ~azure.storage.blob.models.BlobName + :param deleted: Required. + :type deleted: bool + :param snapshot: Required. + :type snapshot: str + :param version_id: + :type version_id: str + :param is_current_version: + :type is_current_version: bool + :param properties: Required. Properties of a blob. + :type properties: ~azure.storage.blob.models.BlobPropertiesInternal + :param metadata: + :type metadata: ~azure.storage.blob.models.BlobMetadata + :param blob_tags: Blob tags. 
+ :type blob_tags: ~azure.storage.blob.models.BlobTags + :param has_versions_only: + :type has_versions_only: bool + :param object_replication_metadata: Dictionary of :code:``. + :type object_replication_metadata: dict[str, str] + """ + + _validation = { + 'name': {'required': True}, + 'deleted': {'required': True}, + 'snapshot': {'required': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'Name', 'type': 'BlobName'}, + 'deleted': {'key': 'Deleted', 'type': 'bool'}, + 'snapshot': {'key': 'Snapshot', 'type': 'str'}, + 'version_id': {'key': 'VersionId', 'type': 'str'}, + 'is_current_version': {'key': 'IsCurrentVersion', 'type': 'bool'}, + 'properties': {'key': 'Properties', 'type': 'BlobPropertiesInternal'}, + 'metadata': {'key': 'Metadata', 'type': 'BlobMetadata'}, + 'blob_tags': {'key': 'BlobTags', 'type': 'BlobTags'}, + 'has_versions_only': {'key': 'HasVersionsOnly', 'type': 'bool'}, + 'object_replication_metadata': {'key': 'OrMetadata', 'type': '{str}'}, + } + _xml_map = { + 'name': 'Blob' + } + + def __init__( + self, + **kwargs + ): + super(BlobItemInternal, self).__init__(**kwargs) + self.name = kwargs['name'] + self.deleted = kwargs['deleted'] + self.snapshot = kwargs['snapshot'] + self.version_id = kwargs.get('version_id', None) + self.is_current_version = kwargs.get('is_current_version', None) + self.properties = kwargs['properties'] + self.metadata = kwargs.get('metadata', None) + self.blob_tags = kwargs.get('blob_tags', None) + self.has_versions_only = kwargs.get('has_versions_only', None) + self.object_replication_metadata = kwargs.get('object_replication_metadata', None) + + +class BlobMetadata(msrest.serialization.Model): + """BlobMetadata. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, str] + :param encrypted: + :type encrypted: str + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{str}'}, + 'encrypted': {'key': 'Encrypted', 'type': 'str', 'xml': {'attr': True}}, + } + _xml_map = { + 'name': 'Metadata' + } + + def __init__( + self, + **kwargs + ): + super(BlobMetadata, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.encrypted = kwargs.get('encrypted', None) + + +class BlobName(msrest.serialization.Model): + """BlobName. + + :param encoded: Indicates if the blob name is encoded. + :type encoded: bool + :param content: The name of the blob. + :type content: str + """ + + _attribute_map = { + 'encoded': {'key': 'Encoded', 'type': 'bool', 'xml': {'name': 'Encoded', 'attr': True}}, + 'content': {'key': 'content', 'type': 'str', 'xml': {'text': True}}, + } + + def __init__( + self, + **kwargs + ): + super(BlobName, self).__init__(**kwargs) + self.encoded = kwargs.get('encoded', None) + self.content = kwargs.get('content', None) + + +class BlobPrefix(msrest.serialization.Model): + """BlobPrefix. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. + :type name: ~azure.storage.blob.models.BlobName + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'Name', 'type': 'BlobName'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobPrefix, self).__init__(**kwargs) + self.name = kwargs['name'] + + +class BlobPropertiesInternal(msrest.serialization.Model): + """Properties of a blob. + + All required parameters must be populated in order to send to Azure. 
+ + :param creation_time: + :type creation_time: ~datetime.datetime + :param last_modified: Required. + :type last_modified: ~datetime.datetime + :param etag: Required. + :type etag: str + :param content_length: Size in bytes. + :type content_length: long + :param content_type: + :type content_type: str + :param content_encoding: + :type content_encoding: str + :param content_language: + :type content_language: str + :param content_md5: + :type content_md5: bytearray + :param content_disposition: + :type content_disposition: str + :param cache_control: + :type cache_control: str + :param blob_sequence_number: + :type blob_sequence_number: long + :param blob_type: Possible values include: "BlockBlob", "PageBlob", "AppendBlob". + :type blob_type: str or ~azure.storage.blob.models.BlobType + :param lease_status: Possible values include: "locked", "unlocked". + :type lease_status: str or ~azure.storage.blob.models.LeaseStatusType + :param lease_state: Possible values include: "available", "leased", "expired", "breaking", + "broken". + :type lease_state: str or ~azure.storage.blob.models.LeaseStateType + :param lease_duration: Possible values include: "infinite", "fixed". + :type lease_duration: str or ~azure.storage.blob.models.LeaseDurationType + :param copy_id: + :type copy_id: str + :param copy_status: Possible values include: "pending", "success", "aborted", "failed". + :type copy_status: str or ~azure.storage.blob.models.CopyStatusType + :param copy_source: + :type copy_source: str + :param copy_progress: + :type copy_progress: str + :param copy_completion_time: + :type copy_completion_time: ~datetime.datetime + :param copy_status_description: + :type copy_status_description: str + :param server_encrypted: + :type server_encrypted: bool + :param incremental_copy: + :type incremental_copy: bool + :param destination_snapshot: + :type destination_snapshot: str + :param deleted_time: + :type deleted_time: ~datetime.datetime + :param remaining_retention_days: + :type remaining_retention_days: int + :param access_tier: Possible values include: "P4", "P6", "P10", "P15", "P20", "P30", "P40", + "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive". + :type access_tier: str or ~azure.storage.blob.models.AccessTier + :param access_tier_inferred: + :type access_tier_inferred: bool + :param archive_status: Possible values include: "rehydrate-pending-to-hot", + "rehydrate-pending-to-cool". + :type archive_status: str or ~azure.storage.blob.models.ArchiveStatus + :param customer_provided_key_sha256: + :type customer_provided_key_sha256: str + :param encryption_scope: The name of the encryption scope under which the blob is encrypted. + :type encryption_scope: str + :param access_tier_change_time: + :type access_tier_change_time: ~datetime.datetime + :param tag_count: + :type tag_count: int + :param expires_on: + :type expires_on: ~datetime.datetime + :param is_sealed: + :type is_sealed: bool + :param rehydrate_priority: If an object is in rehydrate pending state then this header is + returned with priority of rehydrate. Valid values are High and Standard. Possible values + include: "High", "Standard". + :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority + :param last_accessed_on: + :type last_accessed_on: ~datetime.datetime + :param immutability_policy_expires_on: + :type immutability_policy_expires_on: ~datetime.datetime + :param immutability_policy_mode: Possible values include: "Mutable", "Unlocked", "Locked". 
+ :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: + :type legal_hold: bool + """ + + _validation = { + 'last_modified': {'required': True}, + 'etag': {'required': True}, + } + + _attribute_map = { + 'creation_time': {'key': 'Creation-Time', 'type': 'rfc-1123'}, + 'last_modified': {'key': 'Last-Modified', 'type': 'rfc-1123'}, + 'etag': {'key': 'Etag', 'type': 'str'}, + 'content_length': {'key': 'Content-Length', 'type': 'long'}, + 'content_type': {'key': 'Content-Type', 'type': 'str'}, + 'content_encoding': {'key': 'Content-Encoding', 'type': 'str'}, + 'content_language': {'key': 'Content-Language', 'type': 'str'}, + 'content_md5': {'key': 'Content-MD5', 'type': 'bytearray'}, + 'content_disposition': {'key': 'Content-Disposition', 'type': 'str'}, + 'cache_control': {'key': 'Cache-Control', 'type': 'str'}, + 'blob_sequence_number': {'key': 'x-ms-blob-sequence-number', 'type': 'long'}, + 'blob_type': {'key': 'BlobType', 'type': 'str'}, + 'lease_status': {'key': 'LeaseStatus', 'type': 'str'}, + 'lease_state': {'key': 'LeaseState', 'type': 'str'}, + 'lease_duration': {'key': 'LeaseDuration', 'type': 'str'}, + 'copy_id': {'key': 'CopyId', 'type': 'str'}, + 'copy_status': {'key': 'CopyStatus', 'type': 'str'}, + 'copy_source': {'key': 'CopySource', 'type': 'str'}, + 'copy_progress': {'key': 'CopyProgress', 'type': 'str'}, + 'copy_completion_time': {'key': 'CopyCompletionTime', 'type': 'rfc-1123'}, + 'copy_status_description': {'key': 'CopyStatusDescription', 'type': 'str'}, + 'server_encrypted': {'key': 'ServerEncrypted', 'type': 'bool'}, + 'incremental_copy': {'key': 'IncrementalCopy', 'type': 'bool'}, + 'destination_snapshot': {'key': 'DestinationSnapshot', 'type': 'str'}, + 'deleted_time': {'key': 'DeletedTime', 'type': 'rfc-1123'}, + 'remaining_retention_days': {'key': 'RemainingRetentionDays', 'type': 'int'}, + 'access_tier': {'key': 'AccessTier', 'type': 'str'}, + 'access_tier_inferred': {'key': 'AccessTierInferred', 'type': 'bool'}, + 'archive_status': {'key': 'ArchiveStatus', 'type': 'str'}, + 'customer_provided_key_sha256': {'key': 'CustomerProvidedKeySha256', 'type': 'str'}, + 'encryption_scope': {'key': 'EncryptionScope', 'type': 'str'}, + 'access_tier_change_time': {'key': 'AccessTierChangeTime', 'type': 'rfc-1123'}, + 'tag_count': {'key': 'TagCount', 'type': 'int'}, + 'expires_on': {'key': 'Expiry-Time', 'type': 'rfc-1123'}, + 'is_sealed': {'key': 'Sealed', 'type': 'bool'}, + 'rehydrate_priority': {'key': 'RehydratePriority', 'type': 'str'}, + 'last_accessed_on': {'key': 'LastAccessTime', 'type': 'rfc-1123'}, + 'immutability_policy_expires_on': {'key': 'ImmutabilityPolicyUntilDate', 'type': 'rfc-1123'}, + 'immutability_policy_mode': {'key': 'ImmutabilityPolicyMode', 'type': 'str'}, + 'legal_hold': {'key': 'LegalHold', 'type': 'bool'}, + } + _xml_map = { + 'name': 'Properties' + } + + def __init__( + self, + **kwargs + ): + super(BlobPropertiesInternal, self).__init__(**kwargs) + self.creation_time = kwargs.get('creation_time', None) + self.last_modified = kwargs['last_modified'] + self.etag = kwargs['etag'] + self.content_length = kwargs.get('content_length', None) + self.content_type = kwargs.get('content_type', None) + self.content_encoding = kwargs.get('content_encoding', None) + self.content_language = kwargs.get('content_language', None) + self.content_md5 = kwargs.get('content_md5', None) + self.content_disposition = kwargs.get('content_disposition', None) + self.cache_control = kwargs.get('cache_control', None) + 
self.blob_sequence_number = kwargs.get('blob_sequence_number', None) + self.blob_type = kwargs.get('blob_type', None) + self.lease_status = kwargs.get('lease_status', None) + self.lease_state = kwargs.get('lease_state', None) + self.lease_duration = kwargs.get('lease_duration', None) + self.copy_id = kwargs.get('copy_id', None) + self.copy_status = kwargs.get('copy_status', None) + self.copy_source = kwargs.get('copy_source', None) + self.copy_progress = kwargs.get('copy_progress', None) + self.copy_completion_time = kwargs.get('copy_completion_time', None) + self.copy_status_description = kwargs.get('copy_status_description', None) + self.server_encrypted = kwargs.get('server_encrypted', None) + self.incremental_copy = kwargs.get('incremental_copy', None) + self.destination_snapshot = kwargs.get('destination_snapshot', None) + self.deleted_time = kwargs.get('deleted_time', None) + self.remaining_retention_days = kwargs.get('remaining_retention_days', None) + self.access_tier = kwargs.get('access_tier', None) + self.access_tier_inferred = kwargs.get('access_tier_inferred', None) + self.archive_status = kwargs.get('archive_status', None) + self.customer_provided_key_sha256 = kwargs.get('customer_provided_key_sha256', None) + self.encryption_scope = kwargs.get('encryption_scope', None) + self.access_tier_change_time = kwargs.get('access_tier_change_time', None) + self.tag_count = kwargs.get('tag_count', None) + self.expires_on = kwargs.get('expires_on', None) + self.is_sealed = kwargs.get('is_sealed', None) + self.rehydrate_priority = kwargs.get('rehydrate_priority', None) + self.last_accessed_on = kwargs.get('last_accessed_on', None) + self.immutability_policy_expires_on = kwargs.get('immutability_policy_expires_on', None) + self.immutability_policy_mode = kwargs.get('immutability_policy_mode', None) + self.legal_hold = kwargs.get('legal_hold', None) + + +class BlobTag(msrest.serialization.Model): + """BlobTag. + + All required parameters must be populated in order to send to Azure. + + :param key: Required. + :type key: str + :param value: Required. + :type value: str + """ + + _validation = { + 'key': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'key': {'key': 'Key', 'type': 'str'}, + 'value': {'key': 'Value', 'type': 'str'}, + } + _xml_map = { + 'name': 'Tag' + } + + def __init__( + self, + **kwargs + ): + super(BlobTag, self).__init__(**kwargs) + self.key = kwargs['key'] + self.value = kwargs['value'] + + +class BlobTags(msrest.serialization.Model): + """Blob tags. + + All required parameters must be populated in order to send to Azure. + + :param blob_tag_set: Required. + :type blob_tag_set: list[~azure.storage.blob.models.BlobTag] + """ + + _validation = { + 'blob_tag_set': {'required': True}, + } + + _attribute_map = { + 'blob_tag_set': {'key': 'BlobTagSet', 'type': '[BlobTag]', 'xml': {'name': 'TagSet', 'wrapped': True, 'itemsName': 'Tag'}}, + } + _xml_map = { + 'name': 'Tags' + } + + def __init__( + self, + **kwargs + ): + super(BlobTags, self).__init__(**kwargs) + self.blob_tag_set = kwargs['blob_tag_set'] + + +class Block(msrest.serialization.Model): + """Represents a single block in a block blob. It describes the block's ID and size. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The base64 encoded block ID. + :type name: str + :param size: Required. The block size in bytes. 
+ :type size: long + """ + + _validation = { + 'name': {'required': True}, + 'size': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'Name', 'type': 'str'}, + 'size': {'key': 'Size', 'type': 'long'}, + } + + def __init__( + self, + **kwargs + ): + super(Block, self).__init__(**kwargs) + self.name = kwargs['name'] + self.size = kwargs['size'] + + +class BlockList(msrest.serialization.Model): + """BlockList. + + :param committed_blocks: + :type committed_blocks: list[~azure.storage.blob.models.Block] + :param uncommitted_blocks: + :type uncommitted_blocks: list[~azure.storage.blob.models.Block] + """ + + _attribute_map = { + 'committed_blocks': {'key': 'CommittedBlocks', 'type': '[Block]', 'xml': {'wrapped': True}}, + 'uncommitted_blocks': {'key': 'UncommittedBlocks', 'type': '[Block]', 'xml': {'wrapped': True}}, + } + + def __init__( + self, + **kwargs + ): + super(BlockList, self).__init__(**kwargs) + self.committed_blocks = kwargs.get('committed_blocks', None) + self.uncommitted_blocks = kwargs.get('uncommitted_blocks', None) + + +class BlockLookupList(msrest.serialization.Model): + """BlockLookupList. + + :param committed: + :type committed: list[str] + :param uncommitted: + :type uncommitted: list[str] + :param latest: + :type latest: list[str] + """ + + _attribute_map = { + 'committed': {'key': 'Committed', 'type': '[str]', 'xml': {'itemsName': 'Committed'}}, + 'uncommitted': {'key': 'Uncommitted', 'type': '[str]', 'xml': {'itemsName': 'Uncommitted'}}, + 'latest': {'key': 'Latest', 'type': '[str]', 'xml': {'itemsName': 'Latest'}}, + } + _xml_map = { + 'name': 'BlockList' + } + + def __init__( + self, + **kwargs + ): + super(BlockLookupList, self).__init__(**kwargs) + self.committed = kwargs.get('committed', None) + self.uncommitted = kwargs.get('uncommitted', None) + self.latest = kwargs.get('latest', None) + + +class ClearRange(msrest.serialization.Model): + """ClearRange. + + All required parameters must be populated in order to send to Azure. + + :param start: Required. + :type start: long + :param end: Required. + :type end: long + """ + + _validation = { + 'start': {'required': True}, + 'end': {'required': True}, + } + + _attribute_map = { + 'start': {'key': 'Start', 'type': 'long', 'xml': {'name': 'Start'}}, + 'end': {'key': 'End', 'type': 'long', 'xml': {'name': 'End'}}, + } + _xml_map = { + 'name': 'ClearRange' + } + + def __init__( + self, + **kwargs + ): + super(ClearRange, self).__init__(**kwargs) + self.start = kwargs['start'] + self.end = kwargs['end'] + + +class ContainerCpkScopeInfo(msrest.serialization.Model): + """Parameter group. + + :param default_encryption_scope: Optional. Version 2019-07-07 and later. Specifies the + default encryption scope to set on the container and use for all future writes. + :type default_encryption_scope: str + :param prevent_encryption_scope_override: Optional. Version 2019-07-07 and newer. If true, + prevents any request from specifying a different encryption scope than the scope set on the + container. 
+ :type prevent_encryption_scope_override: bool + """ + + _attribute_map = { + 'default_encryption_scope': {'key': 'DefaultEncryptionScope', 'type': 'str'}, + 'prevent_encryption_scope_override': {'key': 'PreventEncryptionScopeOverride', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(ContainerCpkScopeInfo, self).__init__(**kwargs) + self.default_encryption_scope = kwargs.get('default_encryption_scope', None) + self.prevent_encryption_scope_override = kwargs.get('prevent_encryption_scope_override', None) + + +class ContainerItem(msrest.serialization.Model): + """An Azure Storage container. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. + :type name: str + :param deleted: + :type deleted: bool + :param version: + :type version: str + :param properties: Required. Properties of a container. + :type properties: ~azure.storage.blob.models.ContainerProperties + :param metadata: Dictionary of :code:``. + :type metadata: dict[str, str] + """ + + _validation = { + 'name': {'required': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'Name', 'type': 'str'}, + 'deleted': {'key': 'Deleted', 'type': 'bool'}, + 'version': {'key': 'Version', 'type': 'str'}, + 'properties': {'key': 'Properties', 'type': 'ContainerProperties'}, + 'metadata': {'key': 'Metadata', 'type': '{str}'}, + } + _xml_map = { + 'name': 'Container' + } + + def __init__( + self, + **kwargs + ): + super(ContainerItem, self).__init__(**kwargs) + self.name = kwargs['name'] + self.deleted = kwargs.get('deleted', None) + self.version = kwargs.get('version', None) + self.properties = kwargs['properties'] + self.metadata = kwargs.get('metadata', None) + + +class ContainerProperties(msrest.serialization.Model): + """Properties of a container. + + All required parameters must be populated in order to send to Azure. + + :param last_modified: Required. + :type last_modified: ~datetime.datetime + :param etag: Required. + :type etag: str + :param lease_status: Possible values include: "locked", "unlocked". + :type lease_status: str or ~azure.storage.blob.models.LeaseStatusType + :param lease_state: Possible values include: "available", "leased", "expired", "breaking", + "broken". + :type lease_state: str or ~azure.storage.blob.models.LeaseStateType + :param lease_duration: Possible values include: "infinite", "fixed". + :type lease_duration: str or ~azure.storage.blob.models.LeaseDurationType + :param public_access: Possible values include: "container", "blob". + :type public_access: str or ~azure.storage.blob.models.PublicAccessType + :param has_immutability_policy: + :type has_immutability_policy: bool + :param has_legal_hold: + :type has_legal_hold: bool + :param default_encryption_scope: + :type default_encryption_scope: str + :param prevent_encryption_scope_override: + :type prevent_encryption_scope_override: bool + :param deleted_time: + :type deleted_time: ~datetime.datetime + :param remaining_retention_days: + :type remaining_retention_days: int + :param is_immutable_storage_with_versioning_enabled: Indicates if version level worm is enabled + on this container. 
+ :type is_immutable_storage_with_versioning_enabled: bool
+ """
+
+ _validation = {
+ 'last_modified': {'required': True},
+ 'etag': {'required': True},
+ }
+
+ _attribute_map = {
+ 'last_modified': {'key': 'Last-Modified', 'type': 'rfc-1123'},
+ 'etag': {'key': 'Etag', 'type': 'str'},
+ 'lease_status': {'key': 'LeaseStatus', 'type': 'str'},
+ 'lease_state': {'key': 'LeaseState', 'type': 'str'},
+ 'lease_duration': {'key': 'LeaseDuration', 'type': 'str'},
+ 'public_access': {'key': 'PublicAccess', 'type': 'str'},
+ 'has_immutability_policy': {'key': 'HasImmutabilityPolicy', 'type': 'bool'},
+ 'has_legal_hold': {'key': 'HasLegalHold', 'type': 'bool'},
+ 'default_encryption_scope': {'key': 'DefaultEncryptionScope', 'type': 'str'},
+ 'prevent_encryption_scope_override': {'key': 'DenyEncryptionScopeOverride', 'type': 'bool'},
+ 'deleted_time': {'key': 'DeletedTime', 'type': 'rfc-1123'},
+ 'remaining_retention_days': {'key': 'RemainingRetentionDays', 'type': 'int'},
+ 'is_immutable_storage_with_versioning_enabled': {'key': 'ImmutableStorageWithVersioningEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ContainerProperties, self).__init__(**kwargs)
+ self.last_modified = kwargs['last_modified']
+ self.etag = kwargs['etag']
+ self.lease_status = kwargs.get('lease_status', None)
+ self.lease_state = kwargs.get('lease_state', None)
+ self.lease_duration = kwargs.get('lease_duration', None)
+ self.public_access = kwargs.get('public_access', None)
+ self.has_immutability_policy = kwargs.get('has_immutability_policy', None)
+ self.has_legal_hold = kwargs.get('has_legal_hold', None)
+ self.default_encryption_scope = kwargs.get('default_encryption_scope', None)
+ self.prevent_encryption_scope_override = kwargs.get('prevent_encryption_scope_override', None)
+ self.deleted_time = kwargs.get('deleted_time', None)
+ self.remaining_retention_days = kwargs.get('remaining_retention_days', None)
+ self.is_immutable_storage_with_versioning_enabled = kwargs.get('is_immutable_storage_with_versioning_enabled', None)
+
+
+class CorsRule(msrest.serialization.Model):
+ """CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param allowed_origins: Required. The origin domains that are permitted to make a request
+ against the storage service via CORS. The origin domain is the domain from which the request
+ originates. Note that the origin must be an exact case-sensitive match with the origin that the
+ user agent sends to the service. You can also use the wildcard character '*' to allow all origin
+ domains to make requests via CORS.
+ :type allowed_origins: str
+ :param allowed_methods: Required. The methods (HTTP request verbs) that the origin domain may
+ use for a CORS request. (comma separated).
+ :type allowed_methods: str
+ :param allowed_headers: Required. The request headers that the origin domain may specify on the
+ CORS request.
+ :type allowed_headers: str
+ :param exposed_headers: Required. The response headers that may be sent in the response to the
+ CORS request and exposed by the browser to the request issuer.
+ :type exposed_headers: str
+ :param max_age_in_seconds: Required. 
The maximum amount of time that a browser should cache the
+ preflight OPTIONS request.
+ :type max_age_in_seconds: int
+ """
+
+ _validation = {
+ 'allowed_origins': {'required': True},
+ 'allowed_methods': {'required': True},
+ 'allowed_headers': {'required': True},
+ 'exposed_headers': {'required': True},
+ 'max_age_in_seconds': {'required': True, 'minimum': 0},
+ }
+
+ _attribute_map = {
+ 'allowed_origins': {'key': 'AllowedOrigins', 'type': 'str'},
+ 'allowed_methods': {'key': 'AllowedMethods', 'type': 'str'},
+ 'allowed_headers': {'key': 'AllowedHeaders', 'type': 'str'},
+ 'exposed_headers': {'key': 'ExposedHeaders', 'type': 'str'},
+ 'max_age_in_seconds': {'key': 'MaxAgeInSeconds', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CorsRule, self).__init__(**kwargs)
+ self.allowed_origins = kwargs['allowed_origins']
+ self.allowed_methods = kwargs['allowed_methods']
+ self.allowed_headers = kwargs['allowed_headers']
+ self.exposed_headers = kwargs['exposed_headers']
+ self.max_age_in_seconds = kwargs['max_age_in_seconds']
+
+
+class CpkInfo(msrest.serialization.Model):
+ """Parameter group.
+
+ :param encryption_key: Optional. Specifies the encryption key to use to encrypt the data
+ provided in the request. If not specified, encryption is performed with the root account
+ encryption key. For more information, see Encryption at Rest for Azure Storage Services.
+ :type encryption_key: str
+ :param encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be provided
+ if the x-ms-encryption-key header is provided.
+ :type encryption_key_sha256: str
+ :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently,
+ the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is
+ provided. Possible values include: "None", "AES256".
+ :type encryption_algorithm: str or ~azure.storage.blob.models.EncryptionAlgorithmType
+ """
+
+ _attribute_map = {
+ 'encryption_key': {'key': 'encryptionKey', 'type': 'str'},
+ 'encryption_key_sha256': {'key': 'encryptionKeySha256', 'type': 'str'},
+ 'encryption_algorithm': {'key': 'encryptionAlgorithm', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CpkInfo, self).__init__(**kwargs)
+ self.encryption_key = kwargs.get('encryption_key', None)
+ self.encryption_key_sha256 = kwargs.get('encryption_key_sha256', None)
+ self.encryption_algorithm = kwargs.get('encryption_algorithm', None)
+
+
+class CpkScopeInfo(msrest.serialization.Model):
+ """Parameter group.
+
+ :param encryption_scope: Optional. Version 2019-07-07 and later. Specifies the name of the
+ encryption scope to use to encrypt the data provided in the request. If not specified,
+ encryption is performed with the default account encryption scope. For more information, see
+ Encryption at Rest for Azure Storage Services.
+ :type encryption_scope: str
+ """
+
+ _attribute_map = {
+ 'encryption_scope': {'key': 'encryptionScope', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CpkScopeInfo, self).__init__(**kwargs)
+ self.encryption_scope = kwargs.get('encryption_scope', None)
+
+
+class DelimitedTextConfiguration(msrest.serialization.Model):
+ """Groups the settings used for interpreting the blob data if the blob is delimited text formatted.
+
+ :param column_separator: The string used to separate columns.
+ :type column_separator: str
+ :param field_quote: The string used to quote a specific field. 
+ :type field_quote: str + :param record_separator: The string used to separate records. + :type record_separator: str + :param escape_char: The string used as an escape character. + :type escape_char: str + :param headers_present: Represents whether the data has headers. + :type headers_present: bool + """ + + _attribute_map = { + 'column_separator': {'key': 'ColumnSeparator', 'type': 'str', 'xml': {'name': 'ColumnSeparator'}}, + 'field_quote': {'key': 'FieldQuote', 'type': 'str', 'xml': {'name': 'FieldQuote'}}, + 'record_separator': {'key': 'RecordSeparator', 'type': 'str', 'xml': {'name': 'RecordSeparator'}}, + 'escape_char': {'key': 'EscapeChar', 'type': 'str', 'xml': {'name': 'EscapeChar'}}, + 'headers_present': {'key': 'HeadersPresent', 'type': 'bool', 'xml': {'name': 'HasHeaders'}}, + } + _xml_map = { + 'name': 'DelimitedTextConfiguration' + } + + def __init__( + self, + **kwargs + ): + super(DelimitedTextConfiguration, self).__init__(**kwargs) + self.column_separator = kwargs.get('column_separator', None) + self.field_quote = kwargs.get('field_quote', None) + self.record_separator = kwargs.get('record_separator', None) + self.escape_char = kwargs.get('escape_char', None) + self.headers_present = kwargs.get('headers_present', None) + + +class FilterBlobItem(msrest.serialization.Model): + """Blob info from a Filter Blobs API call. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. + :type name: str + :param container_name: Required. + :type container_name: str + :param tags: A set of tags. Blob tags. + :type tags: ~azure.storage.blob.models.BlobTags + """ + + _validation = { + 'name': {'required': True}, + 'container_name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'Name', 'type': 'str'}, + 'container_name': {'key': 'ContainerName', 'type': 'str'}, + 'tags': {'key': 'Tags', 'type': 'BlobTags'}, + } + _xml_map = { + 'name': 'Blob' + } + + def __init__( + self, + **kwargs + ): + super(FilterBlobItem, self).__init__(**kwargs) + self.name = kwargs['name'] + self.container_name = kwargs['container_name'] + self.tags = kwargs.get('tags', None) + + +class FilterBlobSegment(msrest.serialization.Model): + """The result of a Filter Blobs API call. + + All required parameters must be populated in order to send to Azure. + + :param service_endpoint: Required. + :type service_endpoint: str + :param where: Required. + :type where: str + :param blobs: Required. + :type blobs: list[~azure.storage.blob.models.FilterBlobItem] + :param next_marker: + :type next_marker: str + """ + + _validation = { + 'service_endpoint': {'required': True}, + 'where': {'required': True}, + 'blobs': {'required': True}, + } + + _attribute_map = { + 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, + 'where': {'key': 'Where', 'type': 'str'}, + 'blobs': {'key': 'Blobs', 'type': '[FilterBlobItem]', 'xml': {'name': 'Blobs', 'wrapped': True, 'itemsName': 'Blob'}}, + 'next_marker': {'key': 'NextMarker', 'type': 'str'}, + } + _xml_map = { + 'name': 'EnumerationResults' + } + + def __init__( + self, + **kwargs + ): + super(FilterBlobSegment, self).__init__(**kwargs) + self.service_endpoint = kwargs['service_endpoint'] + self.where = kwargs['where'] + self.blobs = kwargs['blobs'] + self.next_marker = kwargs.get('next_marker', None) + + +class GeoReplication(msrest.serialization.Model): + """Geo-Replication information for the Secondary Storage Service. + + All required parameters must be populated in order to send to Azure. 
+ + :param status: Required. The status of the secondary location. Possible values include: "live", + "bootstrap", "unavailable". + :type status: str or ~azure.storage.blob.models.GeoReplicationStatusType + :param last_sync_time: Required. A GMT date/time value, to the second. All primary writes + preceding this value are guaranteed to be available for read operations at the secondary. + Primary writes after this point in time may or may not be available for reads. + :type last_sync_time: ~datetime.datetime + """ + + _validation = { + 'status': {'required': True}, + 'last_sync_time': {'required': True}, + } + + _attribute_map = { + 'status': {'key': 'Status', 'type': 'str'}, + 'last_sync_time': {'key': 'LastSyncTime', 'type': 'rfc-1123'}, + } + + def __init__( + self, + **kwargs + ): + super(GeoReplication, self).__init__(**kwargs) + self.status = kwargs['status'] + self.last_sync_time = kwargs['last_sync_time'] + + +class JsonTextConfiguration(msrest.serialization.Model): + """json text configuration. + + :param record_separator: The string used to separate records. + :type record_separator: str + """ + + _attribute_map = { + 'record_separator': {'key': 'RecordSeparator', 'type': 'str', 'xml': {'name': 'RecordSeparator'}}, + } + _xml_map = { + 'name': 'JsonTextConfiguration' + } + + def __init__( + self, + **kwargs + ): + super(JsonTextConfiguration, self).__init__(**kwargs) + self.record_separator = kwargs.get('record_separator', None) + + +class KeyInfo(msrest.serialization.Model): + """Key information. + + All required parameters must be populated in order to send to Azure. + + :param start: Required. The date-time the key is active in ISO 8601 UTC time. + :type start: str + :param expiry: Required. The date-time the key expires in ISO 8601 UTC time. + :type expiry: str + """ + + _validation = { + 'start': {'required': True}, + 'expiry': {'required': True}, + } + + _attribute_map = { + 'start': {'key': 'Start', 'type': 'str'}, + 'expiry': {'key': 'Expiry', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(KeyInfo, self).__init__(**kwargs) + self.start = kwargs['start'] + self.expiry = kwargs['expiry'] + + +class LeaseAccessConditions(msrest.serialization.Model): + """Parameter group. + + :param lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. + :type lease_id: str + """ + + _attribute_map = { + 'lease_id': {'key': 'leaseId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(LeaseAccessConditions, self).__init__(**kwargs) + self.lease_id = kwargs.get('lease_id', None) + + +class ListBlobsFlatSegmentResponse(msrest.serialization.Model): + """An enumeration of blobs. + + All required parameters must be populated in order to send to Azure. + + :param service_endpoint: Required. + :type service_endpoint: str + :param container_name: Required. + :type container_name: str + :param prefix: + :type prefix: str + :param marker: + :type marker: str + :param max_results: + :type max_results: int + :param segment: Required. 
+ :type segment: ~azure.storage.blob.models.BlobFlatListSegment + :param next_marker: + :type next_marker: str + """ + + _validation = { + 'service_endpoint': {'required': True}, + 'container_name': {'required': True}, + 'segment': {'required': True}, + } + + _attribute_map = { + 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, + 'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'attr': True}}, + 'prefix': {'key': 'Prefix', 'type': 'str'}, + 'marker': {'key': 'Marker', 'type': 'str'}, + 'max_results': {'key': 'MaxResults', 'type': 'int'}, + 'segment': {'key': 'Segment', 'type': 'BlobFlatListSegment'}, + 'next_marker': {'key': 'NextMarker', 'type': 'str'}, + } + _xml_map = { + 'name': 'EnumerationResults' + } + + def __init__( + self, + **kwargs + ): + super(ListBlobsFlatSegmentResponse, self).__init__(**kwargs) + self.service_endpoint = kwargs['service_endpoint'] + self.container_name = kwargs['container_name'] + self.prefix = kwargs.get('prefix', None) + self.marker = kwargs.get('marker', None) + self.max_results = kwargs.get('max_results', None) + self.segment = kwargs['segment'] + self.next_marker = kwargs.get('next_marker', None) + + +class ListBlobsHierarchySegmentResponse(msrest.serialization.Model): + """An enumeration of blobs. + + All required parameters must be populated in order to send to Azure. + + :param service_endpoint: Required. + :type service_endpoint: str + :param container_name: Required. + :type container_name: str + :param prefix: + :type prefix: str + :param marker: + :type marker: str + :param max_results: + :type max_results: int + :param delimiter: + :type delimiter: str + :param segment: Required. + :type segment: ~azure.storage.blob.models.BlobHierarchyListSegment + :param next_marker: + :type next_marker: str + """ + + _validation = { + 'service_endpoint': {'required': True}, + 'container_name': {'required': True}, + 'segment': {'required': True}, + } + + _attribute_map = { + 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, + 'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'attr': True}}, + 'prefix': {'key': 'Prefix', 'type': 'str'}, + 'marker': {'key': 'Marker', 'type': 'str'}, + 'max_results': {'key': 'MaxResults', 'type': 'int'}, + 'delimiter': {'key': 'Delimiter', 'type': 'str'}, + 'segment': {'key': 'Segment', 'type': 'BlobHierarchyListSegment'}, + 'next_marker': {'key': 'NextMarker', 'type': 'str'}, + } + _xml_map = { + 'name': 'EnumerationResults' + } + + def __init__( + self, + **kwargs + ): + super(ListBlobsHierarchySegmentResponse, self).__init__(**kwargs) + self.service_endpoint = kwargs['service_endpoint'] + self.container_name = kwargs['container_name'] + self.prefix = kwargs.get('prefix', None) + self.marker = kwargs.get('marker', None) + self.max_results = kwargs.get('max_results', None) + self.delimiter = kwargs.get('delimiter', None) + self.segment = kwargs['segment'] + self.next_marker = kwargs.get('next_marker', None) + + +class ListContainersSegmentResponse(msrest.serialization.Model): + """An enumeration of containers. + + All required parameters must be populated in order to send to Azure. + + :param service_endpoint: Required. + :type service_endpoint: str + :param prefix: + :type prefix: str + :param marker: + :type marker: str + :param max_results: + :type max_results: int + :param container_items: Required. 
+ :type container_items: list[~azure.storage.blob.models.ContainerItem] + :param next_marker: + :type next_marker: str + """ + + _validation = { + 'service_endpoint': {'required': True}, + 'container_items': {'required': True}, + } + + _attribute_map = { + 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, + 'prefix': {'key': 'Prefix', 'type': 'str'}, + 'marker': {'key': 'Marker', 'type': 'str'}, + 'max_results': {'key': 'MaxResults', 'type': 'int'}, + 'container_items': {'key': 'ContainerItems', 'type': '[ContainerItem]', 'xml': {'name': 'Containers', 'wrapped': True, 'itemsName': 'Container'}}, + 'next_marker': {'key': 'NextMarker', 'type': 'str'}, + } + _xml_map = { + 'name': 'EnumerationResults' + } + + def __init__( + self, + **kwargs + ): + super(ListContainersSegmentResponse, self).__init__(**kwargs) + self.service_endpoint = kwargs['service_endpoint'] + self.prefix = kwargs.get('prefix', None) + self.marker = kwargs.get('marker', None) + self.max_results = kwargs.get('max_results', None) + self.container_items = kwargs['container_items'] + self.next_marker = kwargs.get('next_marker', None) + + +class Logging(msrest.serialization.Model): + """Azure Analytics Logging settings. + + All required parameters must be populated in order to send to Azure. + + :param version: Required. The version of Storage Analytics to configure. + :type version: str + :param delete: Required. Indicates whether all delete requests should be logged. + :type delete: bool + :param read: Required. Indicates whether all read requests should be logged. + :type read: bool + :param write: Required. Indicates whether all write requests should be logged. + :type write: bool + :param retention_policy: Required. the retention policy which determines how long the + associated data should persist. + :type retention_policy: ~azure.storage.blob.models.RetentionPolicy + """ + + _validation = { + 'version': {'required': True}, + 'delete': {'required': True}, + 'read': {'required': True}, + 'write': {'required': True}, + 'retention_policy': {'required': True}, + } + + _attribute_map = { + 'version': {'key': 'Version', 'type': 'str'}, + 'delete': {'key': 'Delete', 'type': 'bool'}, + 'read': {'key': 'Read', 'type': 'bool'}, + 'write': {'key': 'Write', 'type': 'bool'}, + 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'}, + } + + def __init__( + self, + **kwargs + ): + super(Logging, self).__init__(**kwargs) + self.version = kwargs['version'] + self.delete = kwargs['delete'] + self.read = kwargs['read'] + self.write = kwargs['write'] + self.retention_policy = kwargs['retention_policy'] + + +class Metrics(msrest.serialization.Model): + """a summary of request statistics grouped by API in hour or minute aggregates for blobs. + + All required parameters must be populated in order to send to Azure. + + :param version: The version of Storage Analytics to configure. + :type version: str + :param enabled: Required. Indicates whether metrics are enabled for the Blob service. + :type enabled: bool + :param include_apis: Indicates whether metrics should generate summary statistics for called + API operations. + :type include_apis: bool + :param retention_policy: the retention policy which determines how long the associated data + should persist. 
+ :type retention_policy: ~azure.storage.blob.models.RetentionPolicy + """ + + _validation = { + 'enabled': {'required': True}, + } + + _attribute_map = { + 'version': {'key': 'Version', 'type': 'str'}, + 'enabled': {'key': 'Enabled', 'type': 'bool'}, + 'include_apis': {'key': 'IncludeAPIs', 'type': 'bool'}, + 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'}, + } + + def __init__( + self, + **kwargs + ): + super(Metrics, self).__init__(**kwargs) + self.version = kwargs.get('version', None) + self.enabled = kwargs['enabled'] + self.include_apis = kwargs.get('include_apis', None) + self.retention_policy = kwargs.get('retention_policy', None) + + +class ModifiedAccessConditions(msrest.serialization.Model): + """Parameter group. + + :param if_modified_since: Specify this header value to operate only on a blob if it has been + modified since the specified date/time. + :type if_modified_since: ~datetime.datetime + :param if_unmodified_since: Specify this header value to operate only on a blob if it has not + been modified since the specified date/time. + :type if_unmodified_since: ~datetime.datetime + :param if_match: Specify an ETag value to operate only on blobs with a matching value. + :type if_match: str + :param if_none_match: Specify an ETag value to operate only on blobs without a matching value. + :type if_none_match: str + :param if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. + :type if_tags: str + """ + + _attribute_map = { + 'if_modified_since': {'key': 'ifModifiedSince', 'type': 'rfc-1123'}, + 'if_unmodified_since': {'key': 'ifUnmodifiedSince', 'type': 'rfc-1123'}, + 'if_match': {'key': 'ifMatch', 'type': 'str'}, + 'if_none_match': {'key': 'ifNoneMatch', 'type': 'str'}, + 'if_tags': {'key': 'ifTags', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ModifiedAccessConditions, self).__init__(**kwargs) + self.if_modified_since = kwargs.get('if_modified_since', None) + self.if_unmodified_since = kwargs.get('if_unmodified_since', None) + self.if_match = kwargs.get('if_match', None) + self.if_none_match = kwargs.get('if_none_match', None) + self.if_tags = kwargs.get('if_tags', None) + + +class PageList(msrest.serialization.Model): + """the list of pages. + + :param page_range: + :type page_range: list[~azure.storage.blob.models.PageRange] + :param clear_range: + :type clear_range: list[~azure.storage.blob.models.ClearRange] + """ + + _attribute_map = { + 'page_range': {'key': 'PageRange', 'type': '[PageRange]'}, + 'clear_range': {'key': 'ClearRange', 'type': '[ClearRange]'}, + } + + def __init__( + self, + **kwargs + ): + super(PageList, self).__init__(**kwargs) + self.page_range = kwargs.get('page_range', None) + self.clear_range = kwargs.get('clear_range', None) + + +class PageRange(msrest.serialization.Model): + """PageRange. + + All required parameters must be populated in order to send to Azure. + + :param start: Required. + :type start: long + :param end: Required. + :type end: long + """ + + _validation = { + 'start': {'required': True}, + 'end': {'required': True}, + } + + _attribute_map = { + 'start': {'key': 'Start', 'type': 'long', 'xml': {'name': 'Start'}}, + 'end': {'key': 'End', 'type': 'long', 'xml': {'name': 'End'}}, + } + _xml_map = { + 'name': 'PageRange' + } + + def __init__( + self, + **kwargs + ): + super(PageRange, self).__init__(**kwargs) + self.start = kwargs['start'] + self.end = kwargs['end'] + + +class QueryFormat(msrest.serialization.Model): + """QueryFormat. 
+ + All required parameters must be populated in order to send to Azure. + + :param type: Required. The quick query format type. Possible values include: "delimited", + "json", "arrow", "parquet". + :type type: str or ~azure.storage.blob.models.QueryFormatType + :param delimited_text_configuration: Groups the settings used for interpreting the blob data if + the blob is delimited text formatted. + :type delimited_text_configuration: ~azure.storage.blob.models.DelimitedTextConfiguration + :param json_text_configuration: json text configuration. + :type json_text_configuration: ~azure.storage.blob.models.JsonTextConfiguration + :param arrow_configuration: Groups the settings used for formatting the response if the + response should be Arrow formatted. + :type arrow_configuration: ~azure.storage.blob.models.ArrowConfiguration + :param parquet_text_configuration: Any object. + :type parquet_text_configuration: any + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'Type', 'type': 'str', 'xml': {'name': 'Type'}}, + 'delimited_text_configuration': {'key': 'DelimitedTextConfiguration', 'type': 'DelimitedTextConfiguration'}, + 'json_text_configuration': {'key': 'JsonTextConfiguration', 'type': 'JsonTextConfiguration'}, + 'arrow_configuration': {'key': 'ArrowConfiguration', 'type': 'ArrowConfiguration'}, + 'parquet_text_configuration': {'key': 'ParquetTextConfiguration', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(QueryFormat, self).__init__(**kwargs) + self.type = kwargs['type'] + self.delimited_text_configuration = kwargs.get('delimited_text_configuration', None) + self.json_text_configuration = kwargs.get('json_text_configuration', None) + self.arrow_configuration = kwargs.get('arrow_configuration', None) + self.parquet_text_configuration = kwargs.get('parquet_text_configuration', None) + + +class QueryRequest(msrest.serialization.Model): + """Groups the set of query request settings. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar query_type: Required. The type of the provided query expression. Has constant value: + "SQL". + :vartype query_type: str + :param expression: Required. The query expression in SQL. The maximum size of the query + expression is 256KiB. + :type expression: str + :param input_serialization: + :type input_serialization: ~azure.storage.blob.models.QuerySerialization + :param output_serialization: + :type output_serialization: ~azure.storage.blob.models.QuerySerialization + """ + + _validation = { + 'query_type': {'required': True, 'constant': True}, + 'expression': {'required': True}, + } + + _attribute_map = { + 'query_type': {'key': 'QueryType', 'type': 'str', 'xml': {'name': 'QueryType'}}, + 'expression': {'key': 'Expression', 'type': 'str', 'xml': {'name': 'Expression'}}, + 'input_serialization': {'key': 'InputSerialization', 'type': 'QuerySerialization'}, + 'output_serialization': {'key': 'OutputSerialization', 'type': 'QuerySerialization'}, + } + _xml_map = { + 'name': 'QueryRequest' + } + + query_type = "SQL" + + def __init__( + self, + **kwargs + ): + super(QueryRequest, self).__init__(**kwargs) + self.expression = kwargs['expression'] + self.input_serialization = kwargs.get('input_serialization', None) + self.output_serialization = kwargs.get('output_serialization', None) + + +class QuerySerialization(msrest.serialization.Model): + """QuerySerialization. 
+ + All required parameters must be populated in order to send to Azure. + + :param format: Required. + :type format: ~azure.storage.blob.models.QueryFormat + """ + + _validation = { + 'format': {'required': True}, + } + + _attribute_map = { + 'format': {'key': 'Format', 'type': 'QueryFormat'}, + } + + def __init__( + self, + **kwargs + ): + super(QuerySerialization, self).__init__(**kwargs) + self.format = kwargs['format'] + + +class RetentionPolicy(msrest.serialization.Model): + """the retention policy which determines how long the associated data should persist. + + All required parameters must be populated in order to send to Azure. + + :param enabled: Required. Indicates whether a retention policy is enabled for the storage + service. + :type enabled: bool + :param days: Indicates the number of days that metrics or logging or soft-deleted data should + be retained. All data older than this value will be deleted. + :type days: int + :param allow_permanent_delete: Indicates whether permanent delete is allowed on this storage + account. + :type allow_permanent_delete: bool + """ + + _validation = { + 'enabled': {'required': True}, + 'days': {'minimum': 1}, + } + + _attribute_map = { + 'enabled': {'key': 'Enabled', 'type': 'bool'}, + 'days': {'key': 'Days', 'type': 'int'}, + 'allow_permanent_delete': {'key': 'AllowPermanentDelete', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(RetentionPolicy, self).__init__(**kwargs) + self.enabled = kwargs['enabled'] + self.days = kwargs.get('days', None) + self.allow_permanent_delete = kwargs.get('allow_permanent_delete', None) + + +class SequenceNumberAccessConditions(msrest.serialization.Model): + """Parameter group. + + :param if_sequence_number_less_than_or_equal_to: Specify this header value to operate only on a + blob if it has a sequence number less than or equal to the specified. + :type if_sequence_number_less_than_or_equal_to: long + :param if_sequence_number_less_than: Specify this header value to operate only on a blob if it + has a sequence number less than the specified. + :type if_sequence_number_less_than: long + :param if_sequence_number_equal_to: Specify this header value to operate only on a blob if it + has the specified sequence number. + :type if_sequence_number_equal_to: long + """ + + _attribute_map = { + 'if_sequence_number_less_than_or_equal_to': {'key': 'ifSequenceNumberLessThanOrEqualTo', 'type': 'long'}, + 'if_sequence_number_less_than': {'key': 'ifSequenceNumberLessThan', 'type': 'long'}, + 'if_sequence_number_equal_to': {'key': 'ifSequenceNumberEqualTo', 'type': 'long'}, + } + + def __init__( + self, + **kwargs + ): + super(SequenceNumberAccessConditions, self).__init__(**kwargs) + self.if_sequence_number_less_than_or_equal_to = kwargs.get('if_sequence_number_less_than_or_equal_to', None) + self.if_sequence_number_less_than = kwargs.get('if_sequence_number_less_than', None) + self.if_sequence_number_equal_to = kwargs.get('if_sequence_number_equal_to', None) + + +class SignedIdentifier(msrest.serialization.Model): + """signed identifier. + + All required parameters must be populated in order to send to Azure. + + :param id: Required. a unique id. + :type id: str + :param access_policy: An Access policy. 
+ :type access_policy: ~azure.storage.blob.models.AccessPolicy + """ + + _validation = { + 'id': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'Id', 'type': 'str'}, + 'access_policy': {'key': 'AccessPolicy', 'type': 'AccessPolicy'}, + } + _xml_map = { + 'name': 'SignedIdentifier' + } + + def __init__( + self, + **kwargs + ): + super(SignedIdentifier, self).__init__(**kwargs) + self.id = kwargs['id'] + self.access_policy = kwargs.get('access_policy', None) + + +class SourceModifiedAccessConditions(msrest.serialization.Model): + """Parameter group. + + :param source_if_modified_since: Specify this header value to operate only on a blob if it has + been modified since the specified date/time. + :type source_if_modified_since: ~datetime.datetime + :param source_if_unmodified_since: Specify this header value to operate only on a blob if it + has not been modified since the specified date/time. + :type source_if_unmodified_since: ~datetime.datetime + :param source_if_match: Specify an ETag value to operate only on blobs with a matching value. + :type source_if_match: str + :param source_if_none_match: Specify an ETag value to operate only on blobs without a matching + value. + :type source_if_none_match: str + :param source_if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. + :type source_if_tags: str + """ + + _attribute_map = { + 'source_if_modified_since': {'key': 'sourceIfModifiedSince', 'type': 'rfc-1123'}, + 'source_if_unmodified_since': {'key': 'sourceIfUnmodifiedSince', 'type': 'rfc-1123'}, + 'source_if_match': {'key': 'sourceIfMatch', 'type': 'str'}, + 'source_if_none_match': {'key': 'sourceIfNoneMatch', 'type': 'str'}, + 'source_if_tags': {'key': 'sourceIfTags', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SourceModifiedAccessConditions, self).__init__(**kwargs) + self.source_if_modified_since = kwargs.get('source_if_modified_since', None) + self.source_if_unmodified_since = kwargs.get('source_if_unmodified_since', None) + self.source_if_match = kwargs.get('source_if_match', None) + self.source_if_none_match = kwargs.get('source_if_none_match', None) + self.source_if_tags = kwargs.get('source_if_tags', None) + + +class StaticWebsite(msrest.serialization.Model): + """The properties that enable an account to host a static website. + + All required parameters must be populated in order to send to Azure. + + :param enabled: Required. Indicates whether this account is hosting a static website. + :type enabled: bool + :param index_document: The default name of the index page under each directory. + :type index_document: str + :param error_document404_path: The absolute path of the custom 404 page. + :type error_document404_path: str + :param default_index_document_path: Absolute path of the default index page. 
+ :type default_index_document_path: str + """ + + _validation = { + 'enabled': {'required': True}, + } + + _attribute_map = { + 'enabled': {'key': 'Enabled', 'type': 'bool'}, + 'index_document': {'key': 'IndexDocument', 'type': 'str'}, + 'error_document404_path': {'key': 'ErrorDocument404Path', 'type': 'str'}, + 'default_index_document_path': {'key': 'DefaultIndexDocumentPath', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(StaticWebsite, self).__init__(**kwargs) + self.enabled = kwargs['enabled'] + self.index_document = kwargs.get('index_document', None) + self.error_document404_path = kwargs.get('error_document404_path', None) + self.default_index_document_path = kwargs.get('default_index_document_path', None) + + +class StorageError(msrest.serialization.Model): + """StorageError. + + :param message: + :type message: str + """ + + _attribute_map = { + 'message': {'key': 'Message', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageError, self).__init__(**kwargs) + self.message = kwargs.get('message', None) + + +class StorageServiceProperties(msrest.serialization.Model): + """Storage Service Properties. + + :param logging: Azure Analytics Logging settings. + :type logging: ~azure.storage.blob.models.Logging + :param hour_metrics: a summary of request statistics grouped by API in hour or minute + aggregates for blobs. + :type hour_metrics: ~azure.storage.blob.models.Metrics + :param minute_metrics: a summary of request statistics grouped by API in hour or minute + aggregates for blobs. + :type minute_metrics: ~azure.storage.blob.models.Metrics + :param cors: The set of CORS rules. + :type cors: list[~azure.storage.blob.models.CorsRule] + :param default_service_version: The default version to use for requests to the Blob service if + an incoming request's version is not specified. Possible values include version 2008-10-27 and + all more recent versions. + :type default_service_version: str + :param delete_retention_policy: the retention policy which determines how long the associated + data should persist. + :type delete_retention_policy: ~azure.storage.blob.models.RetentionPolicy + :param static_website: The properties that enable an account to host a static website. + :type static_website: ~azure.storage.blob.models.StaticWebsite + """ + + _attribute_map = { + 'logging': {'key': 'Logging', 'type': 'Logging'}, + 'hour_metrics': {'key': 'HourMetrics', 'type': 'Metrics'}, + 'minute_metrics': {'key': 'MinuteMetrics', 'type': 'Metrics'}, + 'cors': {'key': 'Cors', 'type': '[CorsRule]', 'xml': {'wrapped': True}}, + 'default_service_version': {'key': 'DefaultServiceVersion', 'type': 'str'}, + 'delete_retention_policy': {'key': 'DeleteRetentionPolicy', 'type': 'RetentionPolicy'}, + 'static_website': {'key': 'StaticWebsite', 'type': 'StaticWebsite'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageServiceProperties, self).__init__(**kwargs) + self.logging = kwargs.get('logging', None) + self.hour_metrics = kwargs.get('hour_metrics', None) + self.minute_metrics = kwargs.get('minute_metrics', None) + self.cors = kwargs.get('cors', None) + self.default_service_version = kwargs.get('default_service_version', None) + self.delete_retention_policy = kwargs.get('delete_retention_policy', None) + self.static_website = kwargs.get('static_website', None) + + +class StorageServiceStats(msrest.serialization.Model): + """Stats for the storage service. + + :param geo_replication: Geo-Replication information for the Secondary Storage Service. 
+ :type geo_replication: ~azure.storage.blob.models.GeoReplication + """ + + _attribute_map = { + 'geo_replication': {'key': 'GeoReplication', 'type': 'GeoReplication'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageServiceStats, self).__init__(**kwargs) + self.geo_replication = kwargs.get('geo_replication', None) + + +class UserDelegationKey(msrest.serialization.Model): + """A user delegation key. + + All required parameters must be populated in order to send to Azure. + + :param signed_oid: Required. The Azure Active Directory object ID in GUID format. + :type signed_oid: str + :param signed_tid: Required. The Azure Active Directory tenant ID in GUID format. + :type signed_tid: str + :param signed_start: Required. The date-time the key is active. + :type signed_start: ~datetime.datetime + :param signed_expiry: Required. The date-time the key expires. + :type signed_expiry: ~datetime.datetime + :param signed_service: Required. Abbreviation of the Azure Storage service that accepts the + key. + :type signed_service: str + :param signed_version: Required. The service version that created the key. + :type signed_version: str + :param value: Required. The key as a base64 string. + :type value: str + """ + + _validation = { + 'signed_oid': {'required': True}, + 'signed_tid': {'required': True}, + 'signed_start': {'required': True}, + 'signed_expiry': {'required': True}, + 'signed_service': {'required': True}, + 'signed_version': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'signed_oid': {'key': 'SignedOid', 'type': 'str'}, + 'signed_tid': {'key': 'SignedTid', 'type': 'str'}, + 'signed_start': {'key': 'SignedStart', 'type': 'iso-8601'}, + 'signed_expiry': {'key': 'SignedExpiry', 'type': 'iso-8601'}, + 'signed_service': {'key': 'SignedService', 'type': 'str'}, + 'signed_version': {'key': 'SignedVersion', 'type': 'str'}, + 'value': {'key': 'Value', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(UserDelegationKey, self).__init__(**kwargs) + self.signed_oid = kwargs['signed_oid'] + self.signed_tid = kwargs['signed_tid'] + self.signed_start = kwargs['signed_start'] + self.signed_expiry = kwargs['signed_expiry'] + self.signed_service = kwargs['signed_service'] + self.signed_version = kwargs['signed_version'] + self.value = kwargs['value'] diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/models/_models_py3.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/models/_models_py3.py new file mode 100644 index 00000000000..e51ab85be0c --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/models/_models_py3.py @@ -0,0 +1,2265 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +import datetime +from typing import Any, Dict, List, Optional, Union + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + +from ._azure_blob_storage_enums import * + + +class AccessPolicy(msrest.serialization.Model): + """An Access policy. + + :param start: the date-time the policy is active. + :type start: str + :param expiry: the date-time the policy expires. + :type expiry: str + :param permission: the permissions for the acl policy. + :type permission: str + """ + + _attribute_map = { + 'start': {'key': 'Start', 'type': 'str'}, + 'expiry': {'key': 'Expiry', 'type': 'str'}, + 'permission': {'key': 'Permission', 'type': 'str'}, + } + + def __init__( + self, + *, + start: Optional[str] = None, + expiry: Optional[str] = None, + permission: Optional[str] = None, + **kwargs + ): + super(AccessPolicy, self).__init__(**kwargs) + self.start = start + self.expiry = expiry + self.permission = permission + + +class AppendPositionAccessConditions(msrest.serialization.Model): + """Parameter group. + + :param max_size: Optional conditional header. The max length in bytes permitted for the append + blob. If the Append Block operation would cause the blob to exceed that limit or if the blob + size is already greater than the value specified in this header, the request will fail with + MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). + :type max_size: long + :param append_position: Optional conditional header, used only for the Append Block operation. + A number indicating the byte offset to compare. Append Block will succeed only if the append + position is equal to this number. If it is not, the request will fail with the + AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). + :type append_position: long + """ + + _attribute_map = { + 'max_size': {'key': 'maxSize', 'type': 'long'}, + 'append_position': {'key': 'appendPosition', 'type': 'long'}, + } + + def __init__( + self, + *, + max_size: Optional[int] = None, + append_position: Optional[int] = None, + **kwargs + ): + super(AppendPositionAccessConditions, self).__init__(**kwargs) + self.max_size = max_size + self.append_position = append_position + + +class ArrowConfiguration(msrest.serialization.Model): + """Groups the settings used for formatting the response if the response should be Arrow formatted. + + All required parameters must be populated in order to send to Azure. + + :param schema: Required. + :type schema: list[~azure.storage.blob.models.ArrowField] + """ + + _validation = { + 'schema': {'required': True}, + } + + _attribute_map = { + 'schema': {'key': 'Schema', 'type': '[ArrowField]', 'xml': {'name': 'Schema', 'wrapped': True, 'itemsName': 'Field'}}, + } + _xml_map = { + 'name': 'ArrowConfiguration' + } + + def __init__( + self, + *, + schema: List["ArrowField"], + **kwargs + ): + super(ArrowConfiguration, self).__init__(**kwargs) + self.schema = schema + + +class ArrowField(msrest.serialization.Model): + """Groups settings regarding specific field of an arrow schema. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. 
+ :type type: str + :param name: + :type name: str + :param precision: + :type precision: int + :param scale: + :type scale: int + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'Type', 'type': 'str'}, + 'name': {'key': 'Name', 'type': 'str'}, + 'precision': {'key': 'Precision', 'type': 'int'}, + 'scale': {'key': 'Scale', 'type': 'int'}, + } + _xml_map = { + 'name': 'Field' + } + + def __init__( + self, + *, + type: str, + name: Optional[str] = None, + precision: Optional[int] = None, + scale: Optional[int] = None, + **kwargs + ): + super(ArrowField, self).__init__(**kwargs) + self.type = type + self.name = name + self.precision = precision + self.scale = scale + + +class BlobFlatListSegment(msrest.serialization.Model): + """BlobFlatListSegment. + + All required parameters must be populated in order to send to Azure. + + :param blob_items: Required. + :type blob_items: list[~azure.storage.blob.models.BlobItemInternal] + """ + + _validation = { + 'blob_items': {'required': True}, + } + + _attribute_map = { + 'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]'}, + } + _xml_map = { + 'name': 'Blobs' + } + + def __init__( + self, + *, + blob_items: List["BlobItemInternal"], + **kwargs + ): + super(BlobFlatListSegment, self).__init__(**kwargs) + self.blob_items = blob_items + + +class BlobHierarchyListSegment(msrest.serialization.Model): + """BlobHierarchyListSegment. + + All required parameters must be populated in order to send to Azure. + + :param blob_prefixes: + :type blob_prefixes: list[~azure.storage.blob.models.BlobPrefix] + :param blob_items: Required. + :type blob_items: list[~azure.storage.blob.models.BlobItemInternal] + """ + + _validation = { + 'blob_items': {'required': True}, + } + + _attribute_map = { + 'blob_prefixes': {'key': 'BlobPrefixes', 'type': '[BlobPrefix]', 'xml': {'name': 'BlobPrefix'}}, + 'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]', 'xml': {'name': 'Blob', 'itemsName': 'Blob'}}, + } + _xml_map = { + 'name': 'Blobs' + } + + def __init__( + self, + *, + blob_items: List["BlobItemInternal"], + blob_prefixes: Optional[List["BlobPrefix"]] = None, + **kwargs + ): + super(BlobHierarchyListSegment, self).__init__(**kwargs) + self.blob_prefixes = blob_prefixes + self.blob_items = blob_items + + +class BlobHTTPHeaders(msrest.serialization.Model): + """Parameter group. + + :param blob_cache_control: Optional. Sets the blob's cache control. If specified, this property + is stored with the blob and returned with a read request. + :type blob_cache_control: str + :param blob_content_type: Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. + :type blob_content_type: str + :param blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is not + validated, as the hashes for the individual blocks were validated when each was uploaded. + :type blob_content_md5: bytearray + :param blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. + :type blob_content_encoding: str + :param blob_content_language: Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. + :type blob_content_language: str + :param blob_content_disposition: Optional. Sets the blob's Content-Disposition header. 
+    :type blob_content_disposition: str
+    """
+
+    _attribute_map = {
+        'blob_cache_control': {'key': 'blobCacheControl', 'type': 'str'},
+        'blob_content_type': {'key': 'blobContentType', 'type': 'str'},
+        'blob_content_md5': {'key': 'blobContentMD5', 'type': 'bytearray'},
+        'blob_content_encoding': {'key': 'blobContentEncoding', 'type': 'str'},
+        'blob_content_language': {'key': 'blobContentLanguage', 'type': 'str'},
+        'blob_content_disposition': {'key': 'blobContentDisposition', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        blob_cache_control: Optional[str] = None,
+        blob_content_type: Optional[str] = None,
+        blob_content_md5: Optional[bytearray] = None,
+        blob_content_encoding: Optional[str] = None,
+        blob_content_language: Optional[str] = None,
+        blob_content_disposition: Optional[str] = None,
+        **kwargs
+    ):
+        super(BlobHTTPHeaders, self).__init__(**kwargs)
+        self.blob_cache_control = blob_cache_control
+        self.blob_content_type = blob_content_type
+        self.blob_content_md5 = blob_content_md5
+        self.blob_content_encoding = blob_content_encoding
+        self.blob_content_language = blob_content_language
+        self.blob_content_disposition = blob_content_disposition
+
+
+class BlobItemInternal(msrest.serialization.Model):
+    """An Azure Storage blob.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param name: Required.
+    :type name: ~azure.storage.blob.models.BlobName
+    :param deleted: Required.
+    :type deleted: bool
+    :param snapshot: Required.
+    :type snapshot: str
+    :param version_id:
+    :type version_id: str
+    :param is_current_version:
+    :type is_current_version: bool
+    :param properties: Required. Properties of a blob.
+    :type properties: ~azure.storage.blob.models.BlobPropertiesInternal
+    :param metadata:
+    :type metadata: ~azure.storage.blob.models.BlobMetadata
+    :param blob_tags: Blob tags.
+    :type blob_tags: ~azure.storage.blob.models.BlobTags
+    :param has_versions_only:
+    :type has_versions_only: bool
+    :param object_replication_metadata: Dictionary of :code:`<string>`.
+ :type object_replication_metadata: dict[str, str] + """ + + _validation = { + 'name': {'required': True}, + 'deleted': {'required': True}, + 'snapshot': {'required': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'Name', 'type': 'BlobName'}, + 'deleted': {'key': 'Deleted', 'type': 'bool'}, + 'snapshot': {'key': 'Snapshot', 'type': 'str'}, + 'version_id': {'key': 'VersionId', 'type': 'str'}, + 'is_current_version': {'key': 'IsCurrentVersion', 'type': 'bool'}, + 'properties': {'key': 'Properties', 'type': 'BlobPropertiesInternal'}, + 'metadata': {'key': 'Metadata', 'type': 'BlobMetadata'}, + 'blob_tags': {'key': 'BlobTags', 'type': 'BlobTags'}, + 'has_versions_only': {'key': 'HasVersionsOnly', 'type': 'bool'}, + 'object_replication_metadata': {'key': 'OrMetadata', 'type': '{str}'}, + } + _xml_map = { + 'name': 'Blob' + } + + def __init__( + self, + *, + name: "BlobName", + deleted: bool, + snapshot: str, + properties: "BlobPropertiesInternal", + version_id: Optional[str] = None, + is_current_version: Optional[bool] = None, + metadata: Optional["BlobMetadata"] = None, + blob_tags: Optional["BlobTags"] = None, + has_versions_only: Optional[bool] = None, + object_replication_metadata: Optional[Dict[str, str]] = None, + **kwargs + ): + super(BlobItemInternal, self).__init__(**kwargs) + self.name = name + self.deleted = deleted + self.snapshot = snapshot + self.version_id = version_id + self.is_current_version = is_current_version + self.properties = properties + self.metadata = metadata + self.blob_tags = blob_tags + self.has_versions_only = has_versions_only + self.object_replication_metadata = object_replication_metadata + + +class BlobMetadata(msrest.serialization.Model): + """BlobMetadata. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, str] + :param encrypted: + :type encrypted: str + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{str}'}, + 'encrypted': {'key': 'Encrypted', 'type': 'str', 'xml': {'attr': True}}, + } + _xml_map = { + 'name': 'Metadata' + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, str]] = None, + encrypted: Optional[str] = None, + **kwargs + ): + super(BlobMetadata, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.encrypted = encrypted + + +class BlobName(msrest.serialization.Model): + """BlobName. + + :param encoded: Indicates if the blob name is encoded. + :type encoded: bool + :param content: The name of the blob. + :type content: str + """ + + _attribute_map = { + 'encoded': {'key': 'Encoded', 'type': 'bool', 'xml': {'name': 'Encoded', 'attr': True}}, + 'content': {'key': 'content', 'type': 'str', 'xml': {'text': True}}, + } + + def __init__( + self, + *, + encoded: Optional[bool] = None, + content: Optional[str] = None, + **kwargs + ): + super(BlobName, self).__init__(**kwargs) + self.encoded = encoded + self.content = content + + +class BlobPrefix(msrest.serialization.Model): + """BlobPrefix. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. 
+ :type name: ~azure.storage.blob.models.BlobName + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'Name', 'type': 'BlobName'}, + } + + def __init__( + self, + *, + name: "BlobName", + **kwargs + ): + super(BlobPrefix, self).__init__(**kwargs) + self.name = name + + +class BlobPropertiesInternal(msrest.serialization.Model): + """Properties of a blob. + + All required parameters must be populated in order to send to Azure. + + :param creation_time: + :type creation_time: ~datetime.datetime + :param last_modified: Required. + :type last_modified: ~datetime.datetime + :param etag: Required. + :type etag: str + :param content_length: Size in bytes. + :type content_length: long + :param content_type: + :type content_type: str + :param content_encoding: + :type content_encoding: str + :param content_language: + :type content_language: str + :param content_md5: + :type content_md5: bytearray + :param content_disposition: + :type content_disposition: str + :param cache_control: + :type cache_control: str + :param blob_sequence_number: + :type blob_sequence_number: long + :param blob_type: Possible values include: "BlockBlob", "PageBlob", "AppendBlob". + :type blob_type: str or ~azure.storage.blob.models.BlobType + :param lease_status: Possible values include: "locked", "unlocked". + :type lease_status: str or ~azure.storage.blob.models.LeaseStatusType + :param lease_state: Possible values include: "available", "leased", "expired", "breaking", + "broken". + :type lease_state: str or ~azure.storage.blob.models.LeaseStateType + :param lease_duration: Possible values include: "infinite", "fixed". + :type lease_duration: str or ~azure.storage.blob.models.LeaseDurationType + :param copy_id: + :type copy_id: str + :param copy_status: Possible values include: "pending", "success", "aborted", "failed". + :type copy_status: str or ~azure.storage.blob.models.CopyStatusType + :param copy_source: + :type copy_source: str + :param copy_progress: + :type copy_progress: str + :param copy_completion_time: + :type copy_completion_time: ~datetime.datetime + :param copy_status_description: + :type copy_status_description: str + :param server_encrypted: + :type server_encrypted: bool + :param incremental_copy: + :type incremental_copy: bool + :param destination_snapshot: + :type destination_snapshot: str + :param deleted_time: + :type deleted_time: ~datetime.datetime + :param remaining_retention_days: + :type remaining_retention_days: int + :param access_tier: Possible values include: "P4", "P6", "P10", "P15", "P20", "P30", "P40", + "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive". + :type access_tier: str or ~azure.storage.blob.models.AccessTier + :param access_tier_inferred: + :type access_tier_inferred: bool + :param archive_status: Possible values include: "rehydrate-pending-to-hot", + "rehydrate-pending-to-cool". + :type archive_status: str or ~azure.storage.blob.models.ArchiveStatus + :param customer_provided_key_sha256: + :type customer_provided_key_sha256: str + :param encryption_scope: The name of the encryption scope under which the blob is encrypted. + :type encryption_scope: str + :param access_tier_change_time: + :type access_tier_change_time: ~datetime.datetime + :param tag_count: + :type tag_count: int + :param expires_on: + :type expires_on: ~datetime.datetime + :param is_sealed: + :type is_sealed: bool + :param rehydrate_priority: If an object is in rehydrate pending state then this header is + returned with priority of rehydrate. 
Valid values are High and Standard. Possible values + include: "High", "Standard". + :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority + :param last_accessed_on: + :type last_accessed_on: ~datetime.datetime + :param immutability_policy_expires_on: + :type immutability_policy_expires_on: ~datetime.datetime + :param immutability_policy_mode: Possible values include: "Mutable", "Unlocked", "Locked". + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: + :type legal_hold: bool + """ + + _validation = { + 'last_modified': {'required': True}, + 'etag': {'required': True}, + } + + _attribute_map = { + 'creation_time': {'key': 'Creation-Time', 'type': 'rfc-1123'}, + 'last_modified': {'key': 'Last-Modified', 'type': 'rfc-1123'}, + 'etag': {'key': 'Etag', 'type': 'str'}, + 'content_length': {'key': 'Content-Length', 'type': 'long'}, + 'content_type': {'key': 'Content-Type', 'type': 'str'}, + 'content_encoding': {'key': 'Content-Encoding', 'type': 'str'}, + 'content_language': {'key': 'Content-Language', 'type': 'str'}, + 'content_md5': {'key': 'Content-MD5', 'type': 'bytearray'}, + 'content_disposition': {'key': 'Content-Disposition', 'type': 'str'}, + 'cache_control': {'key': 'Cache-Control', 'type': 'str'}, + 'blob_sequence_number': {'key': 'x-ms-blob-sequence-number', 'type': 'long'}, + 'blob_type': {'key': 'BlobType', 'type': 'str'}, + 'lease_status': {'key': 'LeaseStatus', 'type': 'str'}, + 'lease_state': {'key': 'LeaseState', 'type': 'str'}, + 'lease_duration': {'key': 'LeaseDuration', 'type': 'str'}, + 'copy_id': {'key': 'CopyId', 'type': 'str'}, + 'copy_status': {'key': 'CopyStatus', 'type': 'str'}, + 'copy_source': {'key': 'CopySource', 'type': 'str'}, + 'copy_progress': {'key': 'CopyProgress', 'type': 'str'}, + 'copy_completion_time': {'key': 'CopyCompletionTime', 'type': 'rfc-1123'}, + 'copy_status_description': {'key': 'CopyStatusDescription', 'type': 'str'}, + 'server_encrypted': {'key': 'ServerEncrypted', 'type': 'bool'}, + 'incremental_copy': {'key': 'IncrementalCopy', 'type': 'bool'}, + 'destination_snapshot': {'key': 'DestinationSnapshot', 'type': 'str'}, + 'deleted_time': {'key': 'DeletedTime', 'type': 'rfc-1123'}, + 'remaining_retention_days': {'key': 'RemainingRetentionDays', 'type': 'int'}, + 'access_tier': {'key': 'AccessTier', 'type': 'str'}, + 'access_tier_inferred': {'key': 'AccessTierInferred', 'type': 'bool'}, + 'archive_status': {'key': 'ArchiveStatus', 'type': 'str'}, + 'customer_provided_key_sha256': {'key': 'CustomerProvidedKeySha256', 'type': 'str'}, + 'encryption_scope': {'key': 'EncryptionScope', 'type': 'str'}, + 'access_tier_change_time': {'key': 'AccessTierChangeTime', 'type': 'rfc-1123'}, + 'tag_count': {'key': 'TagCount', 'type': 'int'}, + 'expires_on': {'key': 'Expiry-Time', 'type': 'rfc-1123'}, + 'is_sealed': {'key': 'Sealed', 'type': 'bool'}, + 'rehydrate_priority': {'key': 'RehydratePriority', 'type': 'str'}, + 'last_accessed_on': {'key': 'LastAccessTime', 'type': 'rfc-1123'}, + 'immutability_policy_expires_on': {'key': 'ImmutabilityPolicyUntilDate', 'type': 'rfc-1123'}, + 'immutability_policy_mode': {'key': 'ImmutabilityPolicyMode', 'type': 'str'}, + 'legal_hold': {'key': 'LegalHold', 'type': 'bool'}, + } + _xml_map = { + 'name': 'Properties' + } + + def __init__( + self, + *, + last_modified: datetime.datetime, + etag: str, + creation_time: Optional[datetime.datetime] = None, + content_length: Optional[int] = None, + content_type: Optional[str] = None, + 
content_encoding: Optional[str] = None, + content_language: Optional[str] = None, + content_md5: Optional[bytearray] = None, + content_disposition: Optional[str] = None, + cache_control: Optional[str] = None, + blob_sequence_number: Optional[int] = None, + blob_type: Optional[Union[str, "BlobType"]] = None, + lease_status: Optional[Union[str, "LeaseStatusType"]] = None, + lease_state: Optional[Union[str, "LeaseStateType"]] = None, + lease_duration: Optional[Union[str, "LeaseDurationType"]] = None, + copy_id: Optional[str] = None, + copy_status: Optional[Union[str, "CopyStatusType"]] = None, + copy_source: Optional[str] = None, + copy_progress: Optional[str] = None, + copy_completion_time: Optional[datetime.datetime] = None, + copy_status_description: Optional[str] = None, + server_encrypted: Optional[bool] = None, + incremental_copy: Optional[bool] = None, + destination_snapshot: Optional[str] = None, + deleted_time: Optional[datetime.datetime] = None, + remaining_retention_days: Optional[int] = None, + access_tier: Optional[Union[str, "AccessTier"]] = None, + access_tier_inferred: Optional[bool] = None, + archive_status: Optional[Union[str, "ArchiveStatus"]] = None, + customer_provided_key_sha256: Optional[str] = None, + encryption_scope: Optional[str] = None, + access_tier_change_time: Optional[datetime.datetime] = None, + tag_count: Optional[int] = None, + expires_on: Optional[datetime.datetime] = None, + is_sealed: Optional[bool] = None, + rehydrate_priority: Optional[Union[str, "RehydratePriority"]] = None, + last_accessed_on: Optional[datetime.datetime] = None, + immutability_policy_expires_on: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, "BlobImmutabilityPolicyMode"]] = None, + legal_hold: Optional[bool] = None, + **kwargs + ): + super(BlobPropertiesInternal, self).__init__(**kwargs) + self.creation_time = creation_time + self.last_modified = last_modified + self.etag = etag + self.content_length = content_length + self.content_type = content_type + self.content_encoding = content_encoding + self.content_language = content_language + self.content_md5 = content_md5 + self.content_disposition = content_disposition + self.cache_control = cache_control + self.blob_sequence_number = blob_sequence_number + self.blob_type = blob_type + self.lease_status = lease_status + self.lease_state = lease_state + self.lease_duration = lease_duration + self.copy_id = copy_id + self.copy_status = copy_status + self.copy_source = copy_source + self.copy_progress = copy_progress + self.copy_completion_time = copy_completion_time + self.copy_status_description = copy_status_description + self.server_encrypted = server_encrypted + self.incremental_copy = incremental_copy + self.destination_snapshot = destination_snapshot + self.deleted_time = deleted_time + self.remaining_retention_days = remaining_retention_days + self.access_tier = access_tier + self.access_tier_inferred = access_tier_inferred + self.archive_status = archive_status + self.customer_provided_key_sha256 = customer_provided_key_sha256 + self.encryption_scope = encryption_scope + self.access_tier_change_time = access_tier_change_time + self.tag_count = tag_count + self.expires_on = expires_on + self.is_sealed = is_sealed + self.rehydrate_priority = rehydrate_priority + self.last_accessed_on = last_accessed_on + self.immutability_policy_expires_on = immutability_policy_expires_on + self.immutability_policy_mode = immutability_policy_mode + self.legal_hold = legal_hold + + +class 
BlobTag(msrest.serialization.Model): + """BlobTag. + + All required parameters must be populated in order to send to Azure. + + :param key: Required. + :type key: str + :param value: Required. + :type value: str + """ + + _validation = { + 'key': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'key': {'key': 'Key', 'type': 'str'}, + 'value': {'key': 'Value', 'type': 'str'}, + } + _xml_map = { + 'name': 'Tag' + } + + def __init__( + self, + *, + key: str, + value: str, + **kwargs + ): + super(BlobTag, self).__init__(**kwargs) + self.key = key + self.value = value + + +class BlobTags(msrest.serialization.Model): + """Blob tags. + + All required parameters must be populated in order to send to Azure. + + :param blob_tag_set: Required. + :type blob_tag_set: list[~azure.storage.blob.models.BlobTag] + """ + + _validation = { + 'blob_tag_set': {'required': True}, + } + + _attribute_map = { + 'blob_tag_set': {'key': 'BlobTagSet', 'type': '[BlobTag]', 'xml': {'name': 'TagSet', 'wrapped': True, 'itemsName': 'Tag'}}, + } + _xml_map = { + 'name': 'Tags' + } + + def __init__( + self, + *, + blob_tag_set: List["BlobTag"], + **kwargs + ): + super(BlobTags, self).__init__(**kwargs) + self.blob_tag_set = blob_tag_set + + +class Block(msrest.serialization.Model): + """Represents a single block in a block blob. It describes the block's ID and size. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The base64 encoded block ID. + :type name: str + :param size: Required. The block size in bytes. + :type size: long + """ + + _validation = { + 'name': {'required': True}, + 'size': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'Name', 'type': 'str'}, + 'size': {'key': 'Size', 'type': 'long'}, + } + + def __init__( + self, + *, + name: str, + size: int, + **kwargs + ): + super(Block, self).__init__(**kwargs) + self.name = name + self.size = size + + +class BlockList(msrest.serialization.Model): + """BlockList. + + :param committed_blocks: + :type committed_blocks: list[~azure.storage.blob.models.Block] + :param uncommitted_blocks: + :type uncommitted_blocks: list[~azure.storage.blob.models.Block] + """ + + _attribute_map = { + 'committed_blocks': {'key': 'CommittedBlocks', 'type': '[Block]', 'xml': {'wrapped': True}}, + 'uncommitted_blocks': {'key': 'UncommittedBlocks', 'type': '[Block]', 'xml': {'wrapped': True}}, + } + + def __init__( + self, + *, + committed_blocks: Optional[List["Block"]] = None, + uncommitted_blocks: Optional[List["Block"]] = None, + **kwargs + ): + super(BlockList, self).__init__(**kwargs) + self.committed_blocks = committed_blocks + self.uncommitted_blocks = uncommitted_blocks + + +class BlockLookupList(msrest.serialization.Model): + """BlockLookupList. 
+
+    :param committed:
+    :type committed: list[str]
+    :param uncommitted:
+    :type uncommitted: list[str]
+    :param latest:
+    :type latest: list[str]
+    """
+
+    _attribute_map = {
+        'committed': {'key': 'Committed', 'type': '[str]', 'xml': {'itemsName': 'Committed'}},
+        'uncommitted': {'key': 'Uncommitted', 'type': '[str]', 'xml': {'itemsName': 'Uncommitted'}},
+        'latest': {'key': 'Latest', 'type': '[str]', 'xml': {'itemsName': 'Latest'}},
+    }
+    _xml_map = {
+        'name': 'BlockList'
+    }
+
+    def __init__(
+        self,
+        *,
+        committed: Optional[List[str]] = None,
+        uncommitted: Optional[List[str]] = None,
+        latest: Optional[List[str]] = None,
+        **kwargs
+    ):
+        super(BlockLookupList, self).__init__(**kwargs)
+        self.committed = committed
+        self.uncommitted = uncommitted
+        self.latest = latest
+
+
+class ClearRange(msrest.serialization.Model):
+    """ClearRange.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param start: Required.
+    :type start: long
+    :param end: Required.
+    :type end: long
+    """
+
+    _validation = {
+        'start': {'required': True},
+        'end': {'required': True},
+    }
+
+    _attribute_map = {
+        'start': {'key': 'Start', 'type': 'long', 'xml': {'name': 'Start'}},
+        'end': {'key': 'End', 'type': 'long', 'xml': {'name': 'End'}},
+    }
+    _xml_map = {
+        'name': 'ClearRange'
+    }
+
+    def __init__(
+        self,
+        *,
+        start: int,
+        end: int,
+        **kwargs
+    ):
+        super(ClearRange, self).__init__(**kwargs)
+        self.start = start
+        self.end = end
+
+
+class ContainerCpkScopeInfo(msrest.serialization.Model):
+    """Parameter group.
+
+    :param default_encryption_scope: Optional. Version 2019-07-07 and later. Specifies the
+     default encryption scope to set on the container and use for all future writes.
+    :type default_encryption_scope: str
+    :param prevent_encryption_scope_override: Optional. Version 2019-07-07 and newer. If true,
+     prevents any request from specifying a different encryption scope than the scope set on the
+     container.
+    :type prevent_encryption_scope_override: bool
+    """
+
+    _attribute_map = {
+        'default_encryption_scope': {'key': 'DefaultEncryptionScope', 'type': 'str'},
+        'prevent_encryption_scope_override': {'key': 'PreventEncryptionScopeOverride', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        *,
+        default_encryption_scope: Optional[str] = None,
+        prevent_encryption_scope_override: Optional[bool] = None,
+        **kwargs
+    ):
+        super(ContainerCpkScopeInfo, self).__init__(**kwargs)
+        self.default_encryption_scope = default_encryption_scope
+        self.prevent_encryption_scope_override = prevent_encryption_scope_override
+
+
+class ContainerItem(msrest.serialization.Model):
+    """An Azure Storage container.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param name: Required.
+    :type name: str
+    :param deleted:
+    :type deleted: bool
+    :param version:
+    :type version: str
+    :param properties: Required. Properties of a container.
+    :type properties: ~azure.storage.blob.models.ContainerProperties
+    :param metadata: Dictionary of :code:`<string>`.
+ :type metadata: dict[str, str] + """ + + _validation = { + 'name': {'required': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'Name', 'type': 'str'}, + 'deleted': {'key': 'Deleted', 'type': 'bool'}, + 'version': {'key': 'Version', 'type': 'str'}, + 'properties': {'key': 'Properties', 'type': 'ContainerProperties'}, + 'metadata': {'key': 'Metadata', 'type': '{str}'}, + } + _xml_map = { + 'name': 'Container' + } + + def __init__( + self, + *, + name: str, + properties: "ContainerProperties", + deleted: Optional[bool] = None, + version: Optional[str] = None, + metadata: Optional[Dict[str, str]] = None, + **kwargs + ): + super(ContainerItem, self).__init__(**kwargs) + self.name = name + self.deleted = deleted + self.version = version + self.properties = properties + self.metadata = metadata + + +class ContainerProperties(msrest.serialization.Model): + """Properties of a container. + + All required parameters must be populated in order to send to Azure. + + :param last_modified: Required. + :type last_modified: ~datetime.datetime + :param etag: Required. + :type etag: str + :param lease_status: Possible values include: "locked", "unlocked". + :type lease_status: str or ~azure.storage.blob.models.LeaseStatusType + :param lease_state: Possible values include: "available", "leased", "expired", "breaking", + "broken". + :type lease_state: str or ~azure.storage.blob.models.LeaseStateType + :param lease_duration: Possible values include: "infinite", "fixed". + :type lease_duration: str or ~azure.storage.blob.models.LeaseDurationType + :param public_access: Possible values include: "container", "blob". + :type public_access: str or ~azure.storage.blob.models.PublicAccessType + :param has_immutability_policy: + :type has_immutability_policy: bool + :param has_legal_hold: + :type has_legal_hold: bool + :param default_encryption_scope: + :type default_encryption_scope: str + :param prevent_encryption_scope_override: + :type prevent_encryption_scope_override: bool + :param deleted_time: + :type deleted_time: ~datetime.datetime + :param remaining_retention_days: + :type remaining_retention_days: int + :param is_immutable_storage_with_versioning_enabled: Indicates if version level worm is enabled + on this container. 
+ :type is_immutable_storage_with_versioning_enabled: bool + """ + + _validation = { + 'last_modified': {'required': True}, + 'etag': {'required': True}, + } + + _attribute_map = { + 'last_modified': {'key': 'Last-Modified', 'type': 'rfc-1123'}, + 'etag': {'key': 'Etag', 'type': 'str'}, + 'lease_status': {'key': 'LeaseStatus', 'type': 'str'}, + 'lease_state': {'key': 'LeaseState', 'type': 'str'}, + 'lease_duration': {'key': 'LeaseDuration', 'type': 'str'}, + 'public_access': {'key': 'PublicAccess', 'type': 'str'}, + 'has_immutability_policy': {'key': 'HasImmutabilityPolicy', 'type': 'bool'}, + 'has_legal_hold': {'key': 'HasLegalHold', 'type': 'bool'}, + 'default_encryption_scope': {'key': 'DefaultEncryptionScope', 'type': 'str'}, + 'prevent_encryption_scope_override': {'key': 'DenyEncryptionScopeOverride', 'type': 'bool'}, + 'deleted_time': {'key': 'DeletedTime', 'type': 'rfc-1123'}, + 'remaining_retention_days': {'key': 'RemainingRetentionDays', 'type': 'int'}, + 'is_immutable_storage_with_versioning_enabled': {'key': 'ImmutableStorageWithVersioningEnabled', 'type': 'bool'}, + } + + def __init__( + self, + *, + last_modified: datetime.datetime, + etag: str, + lease_status: Optional[Union[str, "LeaseStatusType"]] = None, + lease_state: Optional[Union[str, "LeaseStateType"]] = None, + lease_duration: Optional[Union[str, "LeaseDurationType"]] = None, + public_access: Optional[Union[str, "PublicAccessType"]] = None, + has_immutability_policy: Optional[bool] = None, + has_legal_hold: Optional[bool] = None, + default_encryption_scope: Optional[str] = None, + prevent_encryption_scope_override: Optional[bool] = None, + deleted_time: Optional[datetime.datetime] = None, + remaining_retention_days: Optional[int] = None, + is_immutable_storage_with_versioning_enabled: Optional[bool] = None, + **kwargs + ): + super(ContainerProperties, self).__init__(**kwargs) + self.last_modified = last_modified + self.etag = etag + self.lease_status = lease_status + self.lease_state = lease_state + self.lease_duration = lease_duration + self.public_access = public_access + self.has_immutability_policy = has_immutability_policy + self.has_legal_hold = has_legal_hold + self.default_encryption_scope = default_encryption_scope + self.prevent_encryption_scope_override = prevent_encryption_scope_override + self.deleted_time = deleted_time + self.remaining_retention_days = remaining_retention_days + self.is_immutable_storage_with_versioning_enabled = is_immutable_storage_with_versioning_enabled + + +class CorsRule(msrest.serialization.Model): + """CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain. + + All required parameters must be populated in order to send to Azure. + + :param allowed_origins: Required. The origin domains that are permitted to make a request + against the storage service via CORS. The origin domain is the domain from which the request + originates. Note that the origin must be an exact case-sensitive match with the origin that the + user age sends to the service. You can also use the wildcard character '*' to allow all origin + domains to make requests via CORS. + :type allowed_origins: str + :param allowed_methods: Required. 
The methods (HTTP request verbs) that the origin domain may + use for a CORS request. (comma separated). + :type allowed_methods: str + :param allowed_headers: Required. the request headers that the origin domain may specify on the + CORS request. + :type allowed_headers: str + :param exposed_headers: Required. The response headers that may be sent in the response to the + CORS request and exposed by the browser to the request issuer. + :type exposed_headers: str + :param max_age_in_seconds: Required. The maximum amount time that a browser should cache the + preflight OPTIONS request. + :type max_age_in_seconds: int + """ + + _validation = { + 'allowed_origins': {'required': True}, + 'allowed_methods': {'required': True}, + 'allowed_headers': {'required': True}, + 'exposed_headers': {'required': True}, + 'max_age_in_seconds': {'required': True, 'minimum': 0}, + } + + _attribute_map = { + 'allowed_origins': {'key': 'AllowedOrigins', 'type': 'str'}, + 'allowed_methods': {'key': 'AllowedMethods', 'type': 'str'}, + 'allowed_headers': {'key': 'AllowedHeaders', 'type': 'str'}, + 'exposed_headers': {'key': 'ExposedHeaders', 'type': 'str'}, + 'max_age_in_seconds': {'key': 'MaxAgeInSeconds', 'type': 'int'}, + } + + def __init__( + self, + *, + allowed_origins: str, + allowed_methods: str, + allowed_headers: str, + exposed_headers: str, + max_age_in_seconds: int, + **kwargs + ): + super(CorsRule, self).__init__(**kwargs) + self.allowed_origins = allowed_origins + self.allowed_methods = allowed_methods + self.allowed_headers = allowed_headers + self.exposed_headers = exposed_headers + self.max_age_in_seconds = max_age_in_seconds + + +class CpkInfo(msrest.serialization.Model): + """Parameter group. + + :param encryption_key: Optional. Specifies the encryption key to use to encrypt the data + provided in the request. If not specified, encryption is performed with the root account + encryption key. For more information, see Encryption at Rest for Azure Storage Services. + :type encryption_key: str + :param encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be provided + if the x-ms-encryption-key header is provided. + :type encryption_key_sha256: str + :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, + the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is + provided. Possible values include: "None", "AES256". + :type encryption_algorithm: str or ~azure.storage.blob.models.EncryptionAlgorithmType + """ + + _attribute_map = { + 'encryption_key': {'key': 'encryptionKey', 'type': 'str'}, + 'encryption_key_sha256': {'key': 'encryptionKeySha256', 'type': 'str'}, + 'encryption_algorithm': {'key': 'encryptionAlgorithm', 'type': 'str'}, + } + + def __init__( + self, + *, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, "EncryptionAlgorithmType"]] = None, + **kwargs + ): + super(CpkInfo, self).__init__(**kwargs) + self.encryption_key = encryption_key + self.encryption_key_sha256 = encryption_key_sha256 + self.encryption_algorithm = encryption_algorithm + + +class CpkScopeInfo(msrest.serialization.Model): + """Parameter group. + + :param encryption_scope: Optional. Version 2019-07-07 and later. Specifies the name of the + encryption scope to use to encrypt the data provided in the request. If not specified, + encryption is performed with the default account encryption scope. 
For more information, see + Encryption at Rest for Azure Storage Services. + :type encryption_scope: str + """ + + _attribute_map = { + 'encryption_scope': {'key': 'encryptionScope', 'type': 'str'}, + } + + def __init__( + self, + *, + encryption_scope: Optional[str] = None, + **kwargs + ): + super(CpkScopeInfo, self).__init__(**kwargs) + self.encryption_scope = encryption_scope + + +class DelimitedTextConfiguration(msrest.serialization.Model): + """Groups the settings used for interpreting the blob data if the blob is delimited text formatted. + + :param column_separator: The string used to separate columns. + :type column_separator: str + :param field_quote: The string used to quote a specific field. + :type field_quote: str + :param record_separator: The string used to separate records. + :type record_separator: str + :param escape_char: The string used as an escape character. + :type escape_char: str + :param headers_present: Represents whether the data has headers. + :type headers_present: bool + """ + + _attribute_map = { + 'column_separator': {'key': 'ColumnSeparator', 'type': 'str', 'xml': {'name': 'ColumnSeparator'}}, + 'field_quote': {'key': 'FieldQuote', 'type': 'str', 'xml': {'name': 'FieldQuote'}}, + 'record_separator': {'key': 'RecordSeparator', 'type': 'str', 'xml': {'name': 'RecordSeparator'}}, + 'escape_char': {'key': 'EscapeChar', 'type': 'str', 'xml': {'name': 'EscapeChar'}}, + 'headers_present': {'key': 'HeadersPresent', 'type': 'bool', 'xml': {'name': 'HasHeaders'}}, + } + _xml_map = { + 'name': 'DelimitedTextConfiguration' + } + + def __init__( + self, + *, + column_separator: Optional[str] = None, + field_quote: Optional[str] = None, + record_separator: Optional[str] = None, + escape_char: Optional[str] = None, + headers_present: Optional[bool] = None, + **kwargs + ): + super(DelimitedTextConfiguration, self).__init__(**kwargs) + self.column_separator = column_separator + self.field_quote = field_quote + self.record_separator = record_separator + self.escape_char = escape_char + self.headers_present = headers_present + + +class FilterBlobItem(msrest.serialization.Model): + """Blob info from a Filter Blobs API call. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. + :type name: str + :param container_name: Required. + :type container_name: str + :param tags: A set of tags. Blob tags. + :type tags: ~azure.storage.blob.models.BlobTags + """ + + _validation = { + 'name': {'required': True}, + 'container_name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'Name', 'type': 'str'}, + 'container_name': {'key': 'ContainerName', 'type': 'str'}, + 'tags': {'key': 'Tags', 'type': 'BlobTags'}, + } + _xml_map = { + 'name': 'Blob' + } + + def __init__( + self, + *, + name: str, + container_name: str, + tags: Optional["BlobTags"] = None, + **kwargs + ): + super(FilterBlobItem, self).__init__(**kwargs) + self.name = name + self.container_name = container_name + self.tags = tags + + +class FilterBlobSegment(msrest.serialization.Model): + """The result of a Filter Blobs API call. + + All required parameters must be populated in order to send to Azure. + + :param service_endpoint: Required. + :type service_endpoint: str + :param where: Required. + :type where: str + :param blobs: Required. 
+ :type blobs: list[~azure.storage.blob.models.FilterBlobItem] + :param next_marker: + :type next_marker: str + """ + + _validation = { + 'service_endpoint': {'required': True}, + 'where': {'required': True}, + 'blobs': {'required': True}, + } + + _attribute_map = { + 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, + 'where': {'key': 'Where', 'type': 'str'}, + 'blobs': {'key': 'Blobs', 'type': '[FilterBlobItem]', 'xml': {'name': 'Blobs', 'wrapped': True, 'itemsName': 'Blob'}}, + 'next_marker': {'key': 'NextMarker', 'type': 'str'}, + } + _xml_map = { + 'name': 'EnumerationResults' + } + + def __init__( + self, + *, + service_endpoint: str, + where: str, + blobs: List["FilterBlobItem"], + next_marker: Optional[str] = None, + **kwargs + ): + super(FilterBlobSegment, self).__init__(**kwargs) + self.service_endpoint = service_endpoint + self.where = where + self.blobs = blobs + self.next_marker = next_marker + + +class GeoReplication(msrest.serialization.Model): + """Geo-Replication information for the Secondary Storage Service. + + All required parameters must be populated in order to send to Azure. + + :param status: Required. The status of the secondary location. Possible values include: "live", + "bootstrap", "unavailable". + :type status: str or ~azure.storage.blob.models.GeoReplicationStatusType + :param last_sync_time: Required. A GMT date/time value, to the second. All primary writes + preceding this value are guaranteed to be available for read operations at the secondary. + Primary writes after this point in time may or may not be available for reads. + :type last_sync_time: ~datetime.datetime + """ + + _validation = { + 'status': {'required': True}, + 'last_sync_time': {'required': True}, + } + + _attribute_map = { + 'status': {'key': 'Status', 'type': 'str'}, + 'last_sync_time': {'key': 'LastSyncTime', 'type': 'rfc-1123'}, + } + + def __init__( + self, + *, + status: Union[str, "GeoReplicationStatusType"], + last_sync_time: datetime.datetime, + **kwargs + ): + super(GeoReplication, self).__init__(**kwargs) + self.status = status + self.last_sync_time = last_sync_time + + +class JsonTextConfiguration(msrest.serialization.Model): + """json text configuration. + + :param record_separator: The string used to separate records. + :type record_separator: str + """ + + _attribute_map = { + 'record_separator': {'key': 'RecordSeparator', 'type': 'str', 'xml': {'name': 'RecordSeparator'}}, + } + _xml_map = { + 'name': 'JsonTextConfiguration' + } + + def __init__( + self, + *, + record_separator: Optional[str] = None, + **kwargs + ): + super(JsonTextConfiguration, self).__init__(**kwargs) + self.record_separator = record_separator + + +class KeyInfo(msrest.serialization.Model): + """Key information. + + All required parameters must be populated in order to send to Azure. + + :param start: Required. The date-time the key is active in ISO 8601 UTC time. + :type start: str + :param expiry: Required. The date-time the key expires in ISO 8601 UTC time. + :type expiry: str + """ + + _validation = { + 'start': {'required': True}, + 'expiry': {'required': True}, + } + + _attribute_map = { + 'start': {'key': 'Start', 'type': 'str'}, + 'expiry': {'key': 'Expiry', 'type': 'str'}, + } + + def __init__( + self, + *, + start: str, + expiry: str, + **kwargs + ): + super(KeyInfo, self).__init__(**kwargs) + self.start = start + self.expiry = expiry + + +class LeaseAccessConditions(msrest.serialization.Model): + """Parameter group. 
+ + :param lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. + :type lease_id: str + """ + + _attribute_map = { + 'lease_id': {'key': 'leaseId', 'type': 'str'}, + } + + def __init__( + self, + *, + lease_id: Optional[str] = None, + **kwargs + ): + super(LeaseAccessConditions, self).__init__(**kwargs) + self.lease_id = lease_id + + +class ListBlobsFlatSegmentResponse(msrest.serialization.Model): + """An enumeration of blobs. + + All required parameters must be populated in order to send to Azure. + + :param service_endpoint: Required. + :type service_endpoint: str + :param container_name: Required. + :type container_name: str + :param prefix: + :type prefix: str + :param marker: + :type marker: str + :param max_results: + :type max_results: int + :param segment: Required. + :type segment: ~azure.storage.blob.models.BlobFlatListSegment + :param next_marker: + :type next_marker: str + """ + + _validation = { + 'service_endpoint': {'required': True}, + 'container_name': {'required': True}, + 'segment': {'required': True}, + } + + _attribute_map = { + 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, + 'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'attr': True}}, + 'prefix': {'key': 'Prefix', 'type': 'str'}, + 'marker': {'key': 'Marker', 'type': 'str'}, + 'max_results': {'key': 'MaxResults', 'type': 'int'}, + 'segment': {'key': 'Segment', 'type': 'BlobFlatListSegment'}, + 'next_marker': {'key': 'NextMarker', 'type': 'str'}, + } + _xml_map = { + 'name': 'EnumerationResults' + } + + def __init__( + self, + *, + service_endpoint: str, + container_name: str, + segment: "BlobFlatListSegment", + prefix: Optional[str] = None, + marker: Optional[str] = None, + max_results: Optional[int] = None, + next_marker: Optional[str] = None, + **kwargs + ): + super(ListBlobsFlatSegmentResponse, self).__init__(**kwargs) + self.service_endpoint = service_endpoint + self.container_name = container_name + self.prefix = prefix + self.marker = marker + self.max_results = max_results + self.segment = segment + self.next_marker = next_marker + + +class ListBlobsHierarchySegmentResponse(msrest.serialization.Model): + """An enumeration of blobs. + + All required parameters must be populated in order to send to Azure. + + :param service_endpoint: Required. + :type service_endpoint: str + :param container_name: Required. + :type container_name: str + :param prefix: + :type prefix: str + :param marker: + :type marker: str + :param max_results: + :type max_results: int + :param delimiter: + :type delimiter: str + :param segment: Required. 
+ :type segment: ~azure.storage.blob.models.BlobHierarchyListSegment + :param next_marker: + :type next_marker: str + """ + + _validation = { + 'service_endpoint': {'required': True}, + 'container_name': {'required': True}, + 'segment': {'required': True}, + } + + _attribute_map = { + 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, + 'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'attr': True}}, + 'prefix': {'key': 'Prefix', 'type': 'str'}, + 'marker': {'key': 'Marker', 'type': 'str'}, + 'max_results': {'key': 'MaxResults', 'type': 'int'}, + 'delimiter': {'key': 'Delimiter', 'type': 'str'}, + 'segment': {'key': 'Segment', 'type': 'BlobHierarchyListSegment'}, + 'next_marker': {'key': 'NextMarker', 'type': 'str'}, + } + _xml_map = { + 'name': 'EnumerationResults' + } + + def __init__( + self, + *, + service_endpoint: str, + container_name: str, + segment: "BlobHierarchyListSegment", + prefix: Optional[str] = None, + marker: Optional[str] = None, + max_results: Optional[int] = None, + delimiter: Optional[str] = None, + next_marker: Optional[str] = None, + **kwargs + ): + super(ListBlobsHierarchySegmentResponse, self).__init__(**kwargs) + self.service_endpoint = service_endpoint + self.container_name = container_name + self.prefix = prefix + self.marker = marker + self.max_results = max_results + self.delimiter = delimiter + self.segment = segment + self.next_marker = next_marker + + +class ListContainersSegmentResponse(msrest.serialization.Model): + """An enumeration of containers. + + All required parameters must be populated in order to send to Azure. + + :param service_endpoint: Required. + :type service_endpoint: str + :param prefix: + :type prefix: str + :param marker: + :type marker: str + :param max_results: + :type max_results: int + :param container_items: Required. + :type container_items: list[~azure.storage.blob.models.ContainerItem] + :param next_marker: + :type next_marker: str + """ + + _validation = { + 'service_endpoint': {'required': True}, + 'container_items': {'required': True}, + } + + _attribute_map = { + 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, + 'prefix': {'key': 'Prefix', 'type': 'str'}, + 'marker': {'key': 'Marker', 'type': 'str'}, + 'max_results': {'key': 'MaxResults', 'type': 'int'}, + 'container_items': {'key': 'ContainerItems', 'type': '[ContainerItem]', 'xml': {'name': 'Containers', 'wrapped': True, 'itemsName': 'Container'}}, + 'next_marker': {'key': 'NextMarker', 'type': 'str'}, + } + _xml_map = { + 'name': 'EnumerationResults' + } + + def __init__( + self, + *, + service_endpoint: str, + container_items: List["ContainerItem"], + prefix: Optional[str] = None, + marker: Optional[str] = None, + max_results: Optional[int] = None, + next_marker: Optional[str] = None, + **kwargs + ): + super(ListContainersSegmentResponse, self).__init__(**kwargs) + self.service_endpoint = service_endpoint + self.prefix = prefix + self.marker = marker + self.max_results = max_results + self.container_items = container_items + self.next_marker = next_marker + + +class Logging(msrest.serialization.Model): + """Azure Analytics Logging settings. + + All required parameters must be populated in order to send to Azure. + + :param version: Required. The version of Storage Analytics to configure. + :type version: str + :param delete: Required. Indicates whether all delete requests should be logged. + :type delete: bool + :param read: Required. 
Indicates whether all read requests should be logged. + :type read: bool + :param write: Required. Indicates whether all write requests should be logged. + :type write: bool + :param retention_policy: Required. the retention policy which determines how long the + associated data should persist. + :type retention_policy: ~azure.storage.blob.models.RetentionPolicy + """ + + _validation = { + 'version': {'required': True}, + 'delete': {'required': True}, + 'read': {'required': True}, + 'write': {'required': True}, + 'retention_policy': {'required': True}, + } + + _attribute_map = { + 'version': {'key': 'Version', 'type': 'str'}, + 'delete': {'key': 'Delete', 'type': 'bool'}, + 'read': {'key': 'Read', 'type': 'bool'}, + 'write': {'key': 'Write', 'type': 'bool'}, + 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'}, + } + + def __init__( + self, + *, + version: str, + delete: bool, + read: bool, + write: bool, + retention_policy: "RetentionPolicy", + **kwargs + ): + super(Logging, self).__init__(**kwargs) + self.version = version + self.delete = delete + self.read = read + self.write = write + self.retention_policy = retention_policy + + +class Metrics(msrest.serialization.Model): + """a summary of request statistics grouped by API in hour or minute aggregates for blobs. + + All required parameters must be populated in order to send to Azure. + + :param version: The version of Storage Analytics to configure. + :type version: str + :param enabled: Required. Indicates whether metrics are enabled for the Blob service. + :type enabled: bool + :param include_apis: Indicates whether metrics should generate summary statistics for called + API operations. + :type include_apis: bool + :param retention_policy: the retention policy which determines how long the associated data + should persist. + :type retention_policy: ~azure.storage.blob.models.RetentionPolicy + """ + + _validation = { + 'enabled': {'required': True}, + } + + _attribute_map = { + 'version': {'key': 'Version', 'type': 'str'}, + 'enabled': {'key': 'Enabled', 'type': 'bool'}, + 'include_apis': {'key': 'IncludeAPIs', 'type': 'bool'}, + 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'}, + } + + def __init__( + self, + *, + enabled: bool, + version: Optional[str] = None, + include_apis: Optional[bool] = None, + retention_policy: Optional["RetentionPolicy"] = None, + **kwargs + ): + super(Metrics, self).__init__(**kwargs) + self.version = version + self.enabled = enabled + self.include_apis = include_apis + self.retention_policy = retention_policy + + +class ModifiedAccessConditions(msrest.serialization.Model): + """Parameter group. + + :param if_modified_since: Specify this header value to operate only on a blob if it has been + modified since the specified date/time. + :type if_modified_since: ~datetime.datetime + :param if_unmodified_since: Specify this header value to operate only on a blob if it has not + been modified since the specified date/time. + :type if_unmodified_since: ~datetime.datetime + :param if_match: Specify an ETag value to operate only on blobs with a matching value. + :type if_match: str + :param if_none_match: Specify an ETag value to operate only on blobs without a matching value. + :type if_none_match: str + :param if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. 
+ :type if_tags: str + """ + + _attribute_map = { + 'if_modified_since': {'key': 'ifModifiedSince', 'type': 'rfc-1123'}, + 'if_unmodified_since': {'key': 'ifUnmodifiedSince', 'type': 'rfc-1123'}, + 'if_match': {'key': 'ifMatch', 'type': 'str'}, + 'if_none_match': {'key': 'ifNoneMatch', 'type': 'str'}, + 'if_tags': {'key': 'ifTags', 'type': 'str'}, + } + + def __init__( + self, + *, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + **kwargs + ): + super(ModifiedAccessConditions, self).__init__(**kwargs) + self.if_modified_since = if_modified_since + self.if_unmodified_since = if_unmodified_since + self.if_match = if_match + self.if_none_match = if_none_match + self.if_tags = if_tags + + +class PageList(msrest.serialization.Model): + """the list of pages. + + :param page_range: + :type page_range: list[~azure.storage.blob.models.PageRange] + :param clear_range: + :type clear_range: list[~azure.storage.blob.models.ClearRange] + """ + + _attribute_map = { + 'page_range': {'key': 'PageRange', 'type': '[PageRange]'}, + 'clear_range': {'key': 'ClearRange', 'type': '[ClearRange]'}, + } + + def __init__( + self, + *, + page_range: Optional[List["PageRange"]] = None, + clear_range: Optional[List["ClearRange"]] = None, + **kwargs + ): + super(PageList, self).__init__(**kwargs) + self.page_range = page_range + self.clear_range = clear_range + + +class PageRange(msrest.serialization.Model): + """PageRange. + + All required parameters must be populated in order to send to Azure. + + :param start: Required. + :type start: long + :param end: Required. + :type end: long + """ + + _validation = { + 'start': {'required': True}, + 'end': {'required': True}, + } + + _attribute_map = { + 'start': {'key': 'Start', 'type': 'long', 'xml': {'name': 'Start'}}, + 'end': {'key': 'End', 'type': 'long', 'xml': {'name': 'End'}}, + } + _xml_map = { + 'name': 'PageRange' + } + + def __init__( + self, + *, + start: int, + end: int, + **kwargs + ): + super(PageRange, self).__init__(**kwargs) + self.start = start + self.end = end + + +class QueryFormat(msrest.serialization.Model): + """QueryFormat. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The quick query format type. Possible values include: "delimited", + "json", "arrow", "parquet". + :type type: str or ~azure.storage.blob.models.QueryFormatType + :param delimited_text_configuration: Groups the settings used for interpreting the blob data if + the blob is delimited text formatted. + :type delimited_text_configuration: ~azure.storage.blob.models.DelimitedTextConfiguration + :param json_text_configuration: json text configuration. + :type json_text_configuration: ~azure.storage.blob.models.JsonTextConfiguration + :param arrow_configuration: Groups the settings used for formatting the response if the + response should be Arrow formatted. + :type arrow_configuration: ~azure.storage.blob.models.ArrowConfiguration + :param parquet_text_configuration: Any object. 
+ :type parquet_text_configuration: any + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'Type', 'type': 'str', 'xml': {'name': 'Type'}}, + 'delimited_text_configuration': {'key': 'DelimitedTextConfiguration', 'type': 'DelimitedTextConfiguration'}, + 'json_text_configuration': {'key': 'JsonTextConfiguration', 'type': 'JsonTextConfiguration'}, + 'arrow_configuration': {'key': 'ArrowConfiguration', 'type': 'ArrowConfiguration'}, + 'parquet_text_configuration': {'key': 'ParquetTextConfiguration', 'type': 'object'}, + } + + def __init__( + self, + *, + type: Union[str, "QueryFormatType"], + delimited_text_configuration: Optional["DelimitedTextConfiguration"] = None, + json_text_configuration: Optional["JsonTextConfiguration"] = None, + arrow_configuration: Optional["ArrowConfiguration"] = None, + parquet_text_configuration: Optional[Any] = None, + **kwargs + ): + super(QueryFormat, self).__init__(**kwargs) + self.type = type + self.delimited_text_configuration = delimited_text_configuration + self.json_text_configuration = json_text_configuration + self.arrow_configuration = arrow_configuration + self.parquet_text_configuration = parquet_text_configuration + + +class QueryRequest(msrest.serialization.Model): + """Groups the set of query request settings. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar query_type: Required. The type of the provided query expression. Has constant value: + "SQL". + :vartype query_type: str + :param expression: Required. The query expression in SQL. The maximum size of the query + expression is 256KiB. + :type expression: str + :param input_serialization: + :type input_serialization: ~azure.storage.blob.models.QuerySerialization + :param output_serialization: + :type output_serialization: ~azure.storage.blob.models.QuerySerialization + """ + + _validation = { + 'query_type': {'required': True, 'constant': True}, + 'expression': {'required': True}, + } + + _attribute_map = { + 'query_type': {'key': 'QueryType', 'type': 'str', 'xml': {'name': 'QueryType'}}, + 'expression': {'key': 'Expression', 'type': 'str', 'xml': {'name': 'Expression'}}, + 'input_serialization': {'key': 'InputSerialization', 'type': 'QuerySerialization'}, + 'output_serialization': {'key': 'OutputSerialization', 'type': 'QuerySerialization'}, + } + _xml_map = { + 'name': 'QueryRequest' + } + + query_type = "SQL" + + def __init__( + self, + *, + expression: str, + input_serialization: Optional["QuerySerialization"] = None, + output_serialization: Optional["QuerySerialization"] = None, + **kwargs + ): + super(QueryRequest, self).__init__(**kwargs) + self.expression = expression + self.input_serialization = input_serialization + self.output_serialization = output_serialization + + +class QuerySerialization(msrest.serialization.Model): + """QuerySerialization. + + All required parameters must be populated in order to send to Azure. + + :param format: Required. 
+ :type format: ~azure.storage.blob.models.QueryFormat + """ + + _validation = { + 'format': {'required': True}, + } + + _attribute_map = { + 'format': {'key': 'Format', 'type': 'QueryFormat'}, + } + + def __init__( + self, + *, + format: "QueryFormat", + **kwargs + ): + super(QuerySerialization, self).__init__(**kwargs) + self.format = format + + +class RetentionPolicy(msrest.serialization.Model): + """the retention policy which determines how long the associated data should persist. + + All required parameters must be populated in order to send to Azure. + + :param enabled: Required. Indicates whether a retention policy is enabled for the storage + service. + :type enabled: bool + :param days: Indicates the number of days that metrics or logging or soft-deleted data should + be retained. All data older than this value will be deleted. + :type days: int + :param allow_permanent_delete: Indicates whether permanent delete is allowed on this storage + account. + :type allow_permanent_delete: bool + """ + + _validation = { + 'enabled': {'required': True}, + 'days': {'minimum': 1}, + } + + _attribute_map = { + 'enabled': {'key': 'Enabled', 'type': 'bool'}, + 'days': {'key': 'Days', 'type': 'int'}, + 'allow_permanent_delete': {'key': 'AllowPermanentDelete', 'type': 'bool'}, + } + + def __init__( + self, + *, + enabled: bool, + days: Optional[int] = None, + allow_permanent_delete: Optional[bool] = None, + **kwargs + ): + super(RetentionPolicy, self).__init__(**kwargs) + self.enabled = enabled + self.days = days + self.allow_permanent_delete = allow_permanent_delete + + +class SequenceNumberAccessConditions(msrest.serialization.Model): + """Parameter group. + + :param if_sequence_number_less_than_or_equal_to: Specify this header value to operate only on a + blob if it has a sequence number less than or equal to the specified. + :type if_sequence_number_less_than_or_equal_to: long + :param if_sequence_number_less_than: Specify this header value to operate only on a blob if it + has a sequence number less than the specified. + :type if_sequence_number_less_than: long + :param if_sequence_number_equal_to: Specify this header value to operate only on a blob if it + has the specified sequence number. + :type if_sequence_number_equal_to: long + """ + + _attribute_map = { + 'if_sequence_number_less_than_or_equal_to': {'key': 'ifSequenceNumberLessThanOrEqualTo', 'type': 'long'}, + 'if_sequence_number_less_than': {'key': 'ifSequenceNumberLessThan', 'type': 'long'}, + 'if_sequence_number_equal_to': {'key': 'ifSequenceNumberEqualTo', 'type': 'long'}, + } + + def __init__( + self, + *, + if_sequence_number_less_than_or_equal_to: Optional[int] = None, + if_sequence_number_less_than: Optional[int] = None, + if_sequence_number_equal_to: Optional[int] = None, + **kwargs + ): + super(SequenceNumberAccessConditions, self).__init__(**kwargs) + self.if_sequence_number_less_than_or_equal_to = if_sequence_number_less_than_or_equal_to + self.if_sequence_number_less_than = if_sequence_number_less_than + self.if_sequence_number_equal_to = if_sequence_number_equal_to + + +class SignedIdentifier(msrest.serialization.Model): + """signed identifier. + + All required parameters must be populated in order to send to Azure. + + :param id: Required. a unique id. + :type id: str + :param access_policy: An Access policy. 
+ :type access_policy: ~azure.storage.blob.models.AccessPolicy + """ + + _validation = { + 'id': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'Id', 'type': 'str'}, + 'access_policy': {'key': 'AccessPolicy', 'type': 'AccessPolicy'}, + } + _xml_map = { + 'name': 'SignedIdentifier' + } + + def __init__( + self, + *, + id: str, + access_policy: Optional["AccessPolicy"] = None, + **kwargs + ): + super(SignedIdentifier, self).__init__(**kwargs) + self.id = id + self.access_policy = access_policy + + +class SourceModifiedAccessConditions(msrest.serialization.Model): + """Parameter group. + + :param source_if_modified_since: Specify this header value to operate only on a blob if it has + been modified since the specified date/time. + :type source_if_modified_since: ~datetime.datetime + :param source_if_unmodified_since: Specify this header value to operate only on a blob if it + has not been modified since the specified date/time. + :type source_if_unmodified_since: ~datetime.datetime + :param source_if_match: Specify an ETag value to operate only on blobs with a matching value. + :type source_if_match: str + :param source_if_none_match: Specify an ETag value to operate only on blobs without a matching + value. + :type source_if_none_match: str + :param source_if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. + :type source_if_tags: str + """ + + _attribute_map = { + 'source_if_modified_since': {'key': 'sourceIfModifiedSince', 'type': 'rfc-1123'}, + 'source_if_unmodified_since': {'key': 'sourceIfUnmodifiedSince', 'type': 'rfc-1123'}, + 'source_if_match': {'key': 'sourceIfMatch', 'type': 'str'}, + 'source_if_none_match': {'key': 'sourceIfNoneMatch', 'type': 'str'}, + 'source_if_tags': {'key': 'sourceIfTags', 'type': 'str'}, + } + + def __init__( + self, + *, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + source_if_tags: Optional[str] = None, + **kwargs + ): + super(SourceModifiedAccessConditions, self).__init__(**kwargs) + self.source_if_modified_since = source_if_modified_since + self.source_if_unmodified_since = source_if_unmodified_since + self.source_if_match = source_if_match + self.source_if_none_match = source_if_none_match + self.source_if_tags = source_if_tags + + +class StaticWebsite(msrest.serialization.Model): + """The properties that enable an account to host a static website. + + All required parameters must be populated in order to send to Azure. + + :param enabled: Required. Indicates whether this account is hosting a static website. + :type enabled: bool + :param index_document: The default name of the index page under each directory. + :type index_document: str + :param error_document404_path: The absolute path of the custom 404 page. + :type error_document404_path: str + :param default_index_document_path: Absolute path of the default index page. 
+ :type default_index_document_path: str + """ + + _validation = { + 'enabled': {'required': True}, + } + + _attribute_map = { + 'enabled': {'key': 'Enabled', 'type': 'bool'}, + 'index_document': {'key': 'IndexDocument', 'type': 'str'}, + 'error_document404_path': {'key': 'ErrorDocument404Path', 'type': 'str'}, + 'default_index_document_path': {'key': 'DefaultIndexDocumentPath', 'type': 'str'}, + } + + def __init__( + self, + *, + enabled: bool, + index_document: Optional[str] = None, + error_document404_path: Optional[str] = None, + default_index_document_path: Optional[str] = None, + **kwargs + ): + super(StaticWebsite, self).__init__(**kwargs) + self.enabled = enabled + self.index_document = index_document + self.error_document404_path = error_document404_path + self.default_index_document_path = default_index_document_path + + +class StorageError(msrest.serialization.Model): + """StorageError. + + :param message: + :type message: str + """ + + _attribute_map = { + 'message': {'key': 'Message', 'type': 'str'}, + } + + def __init__( + self, + *, + message: Optional[str] = None, + **kwargs + ): + super(StorageError, self).__init__(**kwargs) + self.message = message + + +class StorageServiceProperties(msrest.serialization.Model): + """Storage Service Properties. + + :param logging: Azure Analytics Logging settings. + :type logging: ~azure.storage.blob.models.Logging + :param hour_metrics: a summary of request statistics grouped by API in hour or minute + aggregates for blobs. + :type hour_metrics: ~azure.storage.blob.models.Metrics + :param minute_metrics: a summary of request statistics grouped by API in hour or minute + aggregates for blobs. + :type minute_metrics: ~azure.storage.blob.models.Metrics + :param cors: The set of CORS rules. + :type cors: list[~azure.storage.blob.models.CorsRule] + :param default_service_version: The default version to use for requests to the Blob service if + an incoming request's version is not specified. Possible values include version 2008-10-27 and + all more recent versions. + :type default_service_version: str + :param delete_retention_policy: the retention policy which determines how long the associated + data should persist. + :type delete_retention_policy: ~azure.storage.blob.models.RetentionPolicy + :param static_website: The properties that enable an account to host a static website. 
+ :type static_website: ~azure.storage.blob.models.StaticWebsite + """ + + _attribute_map = { + 'logging': {'key': 'Logging', 'type': 'Logging'}, + 'hour_metrics': {'key': 'HourMetrics', 'type': 'Metrics'}, + 'minute_metrics': {'key': 'MinuteMetrics', 'type': 'Metrics'}, + 'cors': {'key': 'Cors', 'type': '[CorsRule]', 'xml': {'wrapped': True}}, + 'default_service_version': {'key': 'DefaultServiceVersion', 'type': 'str'}, + 'delete_retention_policy': {'key': 'DeleteRetentionPolicy', 'type': 'RetentionPolicy'}, + 'static_website': {'key': 'StaticWebsite', 'type': 'StaticWebsite'}, + } + + def __init__( + self, + *, + logging: Optional["Logging"] = None, + hour_metrics: Optional["Metrics"] = None, + minute_metrics: Optional["Metrics"] = None, + cors: Optional[List["CorsRule"]] = None, + default_service_version: Optional[str] = None, + delete_retention_policy: Optional["RetentionPolicy"] = None, + static_website: Optional["StaticWebsite"] = None, + **kwargs + ): + super(StorageServiceProperties, self).__init__(**kwargs) + self.logging = logging + self.hour_metrics = hour_metrics + self.minute_metrics = minute_metrics + self.cors = cors + self.default_service_version = default_service_version + self.delete_retention_policy = delete_retention_policy + self.static_website = static_website + + +class StorageServiceStats(msrest.serialization.Model): + """Stats for the storage service. + + :param geo_replication: Geo-Replication information for the Secondary Storage Service. + :type geo_replication: ~azure.storage.blob.models.GeoReplication + """ + + _attribute_map = { + 'geo_replication': {'key': 'GeoReplication', 'type': 'GeoReplication'}, + } + + def __init__( + self, + *, + geo_replication: Optional["GeoReplication"] = None, + **kwargs + ): + super(StorageServiceStats, self).__init__(**kwargs) + self.geo_replication = geo_replication + + +class UserDelegationKey(msrest.serialization.Model): + """A user delegation key. + + All required parameters must be populated in order to send to Azure. + + :param signed_oid: Required. The Azure Active Directory object ID in GUID format. + :type signed_oid: str + :param signed_tid: Required. The Azure Active Directory tenant ID in GUID format. + :type signed_tid: str + :param signed_start: Required. The date-time the key is active. + :type signed_start: ~datetime.datetime + :param signed_expiry: Required. The date-time the key expires. + :type signed_expiry: ~datetime.datetime + :param signed_service: Required. Abbreviation of the Azure Storage service that accepts the + key. + :type signed_service: str + :param signed_version: Required. The service version that created the key. + :type signed_version: str + :param value: Required. The key as a base64 string. 
+ :type value: str + """ + + _validation = { + 'signed_oid': {'required': True}, + 'signed_tid': {'required': True}, + 'signed_start': {'required': True}, + 'signed_expiry': {'required': True}, + 'signed_service': {'required': True}, + 'signed_version': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'signed_oid': {'key': 'SignedOid', 'type': 'str'}, + 'signed_tid': {'key': 'SignedTid', 'type': 'str'}, + 'signed_start': {'key': 'SignedStart', 'type': 'iso-8601'}, + 'signed_expiry': {'key': 'SignedExpiry', 'type': 'iso-8601'}, + 'signed_service': {'key': 'SignedService', 'type': 'str'}, + 'signed_version': {'key': 'SignedVersion', 'type': 'str'}, + 'value': {'key': 'Value', 'type': 'str'}, + } + + def __init__( + self, + *, + signed_oid: str, + signed_tid: str, + signed_start: datetime.datetime, + signed_expiry: datetime.datetime, + signed_service: str, + signed_version: str, + value: str, + **kwargs + ): + super(UserDelegationKey, self).__init__(**kwargs) + self.signed_oid = signed_oid + self.signed_tid = signed_tid + self.signed_start = signed_start + self.signed_expiry = signed_expiry + self.signed_service = signed_service + self.signed_version = signed_version + self.value = value diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/__init__.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/__init__.py new file mode 100644 index 00000000000..902269d05ed --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/__init__.py @@ -0,0 +1,23 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._service_operations import ServiceOperations +from ._container_operations import ContainerOperations +from ._blob_operations import BlobOperations +from ._page_blob_operations import PageBlobOperations +from ._append_blob_operations import AppendBlobOperations +from ._block_blob_operations import BlockBlobOperations + +__all__ = [ + 'ServiceOperations', + 'ContainerOperations', + 'BlobOperations', + 'PageBlobOperations', + 'AppendBlobOperations', + 'BlockBlobOperations', +] diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_append_blob_operations.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_append_blob_operations.py new file mode 100644 index 00000000000..b38af4b94f1 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_append_blob_operations.py @@ -0,0 +1,734 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import datetime +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class AppendBlobOperations(object): + """AppendBlobOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.storage.blob.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def create( + self, + content_length, # type: int + timeout=None, # type: Optional[int] + metadata=None, # type: Optional[str] + request_id_parameter=None, # type: Optional[str] + blob_tags_string=None, # type: Optional[str] + immutability_policy_expiry=None, # type: Optional[datetime.datetime] + immutability_policy_mode=None, # type: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] + legal_hold=None, # type: Optional[bool] + blob_http_headers=None, # type: Optional["_models.BlobHTTPHeaders"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Create Append Blob operation creates a new append blob. + + :param content_length: The length of the request. + :type content_length: long + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. + :type metadata: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. 
+ :type request_id_parameter: str + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :type blob_tags_string: str + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. + :type legal_hold: bool + :param blob_http_headers: Parameter group. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _blob_content_type = None + _blob_content_encoding = None + _blob_content_language = None + _blob_content_md5 = None + _blob_cache_control = None + _lease_id = None + _blob_content_disposition = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if blob_http_headers is not None: + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + blob_type = "AppendBlob" + accept = "application/xml" + + # Construct URL + url = self.create.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: 
Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + if _blob_content_type is not None: + header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') + if _blob_content_encoding is not None: + header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') + if _blob_content_language is not None: + header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') + if _blob_content_md5 is not None: + header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') + if _blob_cache_control is not None: + header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _blob_content_disposition is not None: + header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if blob_tags_string is not None: + header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') + if immutability_policy_expiry is not None: + header_parameters['x-ms-immutability-policy-until-date'] = self._serialize.header("immutability_policy_expiry", 
immutability_policy_expiry, 'rfc-1123') + if immutability_policy_mode is not None: + header_parameters['x-ms-immutability-policy-mode'] = self._serialize.header("immutability_policy_mode", immutability_policy_mode, 'str') + if legal_hold is not None: + header_parameters['x-ms-legal-hold'] = self._serialize.header("legal_hold", legal_hold, 'bool') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + create.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def append_block( + self, + content_length, # type: int + body, # type: IO + timeout=None, # type: Optional[int] + transactional_content_md5=None, # type: Optional[bytearray] + transactional_content_crc64=None, # type: Optional[bytearray] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + append_position_access_conditions=None, # type: Optional["_models.AppendPositionAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Append Block operation commits a new block of data to the end of an existing append blob. + The Append Block operation is permitted only if the blob was created with x-ms-blob-type set to + AppendBlob. Append Block is supported only on version 2015-02-21 version or later. + + :param content_length: The length of the request. + :type content_length: long + :param body: Initial data. + :type body: IO + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. 
+ :type timeout: int + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. + :type transactional_content_md5: bytearray + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. + :type transactional_content_crc64: bytearray + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param append_position_access_conditions: Parameter group. + :type append_position_access_conditions: ~azure.storage.blob.models.AppendPositionAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _max_size = None + _append_position = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if append_position_access_conditions is not None: + _max_size = append_position_access_conditions.max_size + _append_position = append_position_access_conditions.append_position + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "appendblock" + content_type = kwargs.pop("content_type", "application/octet-stream") + accept = "application/xml" + + # Construct URL + url = self.append_block.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + 
header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + if transactional_content_md5 is not None: + header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') + if transactional_content_crc64 is not None: + header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _max_size is not None: + header_parameters['x-ms-blob-condition-maxsize'] = self._serialize.header("max_size", _max_size, 'long') + if _append_position is not None: + header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("append_position", _append_position, 'long') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content_kwargs['stream_content'] = body + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) 
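+        # Remaining service headers: content CRC64, request tracing ids, the blob's new
+        # append offset and committed block count, and server-side encryption details.
+        # They reach the caller only through the optional ``cls`` callback below.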
+ response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-append-offset']=self._deserialize('str', response.headers.get('x-ms-blob-append-offset')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + append_block.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def append_block_from_url( + self, + source_url, # type: str + content_length, # type: int + source_range=None, # type: Optional[str] + source_content_md5=None, # type: Optional[bytearray] + source_contentcrc64=None, # type: Optional[bytearray] + timeout=None, # type: Optional[int] + transactional_content_md5=None, # type: Optional[bytearray] + request_id_parameter=None, # type: Optional[str] + copy_source_authorization=None, # type: Optional[str] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + append_position_access_conditions=None, # type: Optional["_models.AppendPositionAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Append Block operation commits a new block of data to the end of an existing append blob + where the contents are read from a source url. The Append Block operation is permitted only if + the blob was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on + version 2015-02-21 version or later. + + :param source_url: Specify a URL to the copy source. + :type source_url: str + :param content_length: The length of the request. + :type content_length: long + :param source_range: Bytes of source data in the specified range. + :type source_range: str + :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read + from the copy source. + :type source_content_md5: bytearray + :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be + read from the copy source. + :type source_contentcrc64: bytearray + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. 
+ :type transactional_content_md5: bytearray + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. + :type copy_source_authorization: str + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param append_position_access_conditions: Parameter group. + :type append_position_access_conditions: ~azure.storage.blob.models.AppendPositionAccessConditions + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param source_modified_access_conditions: Parameter group. + :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _lease_id = None + _max_size = None + _append_position = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + if append_position_access_conditions is not None: + _max_size = append_position_access_conditions.max_size + _append_position = append_position_access_conditions.append_position + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + comp = "appendblock" + accept = "application/xml" + + # Construct URL + url = self.append_block_from_url.metadata['url'] # type: ignore + path_format_arguments = { + 'url': 
self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-copy-source'] = self._serialize.header("source_url", source_url, 'str') + if source_range is not None: + header_parameters['x-ms-source-range'] = self._serialize.header("source_range", source_range, 'str') + if source_content_md5 is not None: + header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') + if source_contentcrc64 is not None: + header_parameters['x-ms-source-content-crc64'] = self._serialize.header("source_contentcrc64", source_contentcrc64, 'bytearray') + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + if transactional_content_md5 is not None: + header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _max_size is not None: + header_parameters['x-ms-blob-condition-maxsize'] = self._serialize.header("max_size", _max_size, 'long') + if _append_position is not None: + header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("append_position", _append_position, 'long') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + if _source_if_modified_since is not None: + header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') + if _source_if_unmodified_since is not None: + header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') + if _source_if_match is not None: + header_parameters['x-ms-source-if-match'] = 
self._serialize.header("source_if_match", _source_if_match, 'str') + if _source_if_none_match is not None: + header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if copy_source_authorization is not None: + header_parameters['x-ms-copy-source-authorization'] = self._serialize.header("copy_source_authorization", copy_source_authorization, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-append-offset']=self._deserialize('str', response.headers.get('x-ms-blob-append-offset')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + + if cls: + return cls(pipeline_response, None, response_headers) + + append_block_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def seal( + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + append_position_access_conditions=None, # type: Optional["_models.AppendPositionAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Seal operation seals the Append Blob to make it read-only. Seal is supported only on + version 2019-12-12 version or later. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. 
+ :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param append_position_access_conditions: Parameter group. + :type append_position_access_conditions: ~azure.storage.blob.models.AppendPositionAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _append_position = None + if append_position_access_conditions is not None: + _append_position = append_position_access_conditions.append_position + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + comp = "seal" + accept = "application/xml" + + # Construct URL + url = self.seal.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _append_position is not None: + header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("append_position", _append_position, 'long') + header_parameters['Accept'] = 
self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) + + if cls: + return cls(pipeline_response, None, response_headers) + + seal.metadata = {'url': '/{containerName}/{blob}'} # type: ignore diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_blob_operations.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_blob_operations.py new file mode 100644 index 00000000000..e041221239f --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_blob_operations.py @@ -0,0 +1,3036 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import datetime +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class BlobOperations(object): + """BlobOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.storage.blob.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. 
+ """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def download( + self, + snapshot=None, # type: Optional[str] + version_id=None, # type: Optional[str] + timeout=None, # type: Optional[int] + range=None, # type: Optional[str] + range_get_content_md5=None, # type: Optional[bool] + range_get_content_crc64=None, # type: Optional[bool] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> IO + """The Download operation reads or downloads a blob from the system, including its metadata and + properties. You can also call Download to read a snapshot. + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + :type version_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param range: Return only the bytes of the blob in the specified range. + :type range: str + :param range_get_content_md5: When set to true and specified together with the Range, the + service returns the MD5 hash for the range, as long as the range is less than or equal to 4 MB + in size. + :type range_get_content_md5: bool + :param range_get_content_crc64: When set to true and specified together with the Range, the + service returns the CRC64 hash for the range, as long as the range is less than or equal to 4 + MB in size. + :type range_get_content_crc64: bool + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IO, or the result of cls(response) + :rtype: IO + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[IO] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + accept = "application/xml" + + # Construct URL + url = self.download.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if snapshot is not None: + query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') + if version_id is not None: + query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if range is not None: + header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if range_get_content_md5 is not None: + header_parameters['x-ms-range-get-content-md5'] = self._serialize.header("range_get_content_md5", range_get_content_md5, 'bool') + if range_get_content_crc64 is not None: + header_parameters['x-ms-range-get-content-crc64'] = self._serialize.header("range_get_content_crc64", range_get_content_crc64, 'bool') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + 
header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=True, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 206]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + if response.status_code == 200: + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) + response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id')) + response_headers['x-ms-or']=self._deserialize('str', response.headers.get('x-ms-or')) + response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) + response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) + response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) + response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) + response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) + response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) + response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) + 
response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) + response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['x-ms-is-current-version']=self._deserialize('bool', response.headers.get('x-ms-is-current-version')) + response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) + response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count')) + response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) + response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time')) + response_headers['x-ms-immutability-policy-until-date']=self._deserialize('rfc-1123', response.headers.get('x-ms-immutability-policy-until-date')) + response_headers['x-ms-immutability-policy-mode']=self._deserialize('str', response.headers.get('x-ms-immutability-policy-mode')) + response_headers['x-ms-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-legal-hold')) + deserialized = response.stream_download(self._client._pipeline) + + if response.status_code == 206: + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) + response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id')) + response_headers['x-ms-or']=self._deserialize('str', response.headers.get('x-ms-or')) + response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) + response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) + response_headers['Content-Disposition']=self._deserialize('str', 
response.headers.get('Content-Disposition')) + response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) + response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) + response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) + response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) + response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['x-ms-is-current-version']=self._deserialize('bool', response.headers.get('x-ms-is-current-version')) + response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) + response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count')) + response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) + response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time')) + response_headers['x-ms-immutability-policy-until-date']=self._deserialize('rfc-1123', response.headers.get('x-ms-immutability-policy-until-date')) + response_headers['x-ms-immutability-policy-mode']=self._deserialize('str', response.headers.get('x-ms-immutability-policy-mode')) + response_headers['x-ms-legal-hold']=self._deserialize('bool', 
response.headers.get('x-ms-legal-hold')) + deserialized = response.stream_download(self._client._pipeline) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + download.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def get_properties( + self, + snapshot=None, # type: Optional[str] + version_id=None, # type: Optional[str] + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Get Properties operation returns all user-defined metadata, standard HTTP properties, and + system properties for the blob. It does not return the content of the blob. + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + :type version_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + accept = "application/xml" + + # Construct URL + url = self.get_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if snapshot is not None: + query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') + if version_id is not None: + query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if 
request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.head(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-creation-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-creation-time')) + response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) + response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id')) + response_headers['x-ms-or']=self._deserialize('str', response.headers.get('x-ms-or')) + response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) + response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) + response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) + response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers['x-ms-incremental-copy']=self._deserialize('bool', response.headers.get('x-ms-incremental-copy')) + response_headers['x-ms-copy-destination-snapshot']=self._deserialize('str', response.headers.get('x-ms-copy-destination-snapshot')) + response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) + response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) + response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) + response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) + response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) + response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) + response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + 
response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers['x-ms-access-tier']=self._deserialize('str', response.headers.get('x-ms-access-tier')) + response_headers['x-ms-access-tier-inferred']=self._deserialize('bool', response.headers.get('x-ms-access-tier-inferred')) + response_headers['x-ms-archive-status']=self._deserialize('str', response.headers.get('x-ms-archive-status')) + response_headers['x-ms-access-tier-change-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-access-tier-change-time')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['x-ms-is-current-version']=self._deserialize('bool', response.headers.get('x-ms-is-current-version')) + response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count')) + response_headers['x-ms-expiry-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-expiry-time')) + response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) + response_headers['x-ms-rehydrate-priority']=self._deserialize('str', response.headers.get('x-ms-rehydrate-priority')) + response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time')) + response_headers['x-ms-immutability-policy-until-date']=self._deserialize('rfc-1123', response.headers.get('x-ms-immutability-policy-until-date')) + response_headers['x-ms-immutability-policy-mode']=self._deserialize('str', response.headers.get('x-ms-immutability-policy-mode')) + response_headers['x-ms-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-legal-hold')) + + if cls: + return cls(pipeline_response, None, response_headers) + + get_properties.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def delete( + self, + snapshot=None, # type: Optional[str] + version_id=None, # type: Optional[str] + timeout=None, # type: Optional[int] + delete_snapshots=None, # type: Optional[Union[str, "_models.DeleteSnapshotsOptionType"]] + request_id_parameter=None, # type: Optional[str] + blob_delete_type="Permanent", # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """If the storage account's soft delete feature is disabled then, when a blob is deleted, it is + permanently removed from the storage account. 
If the storage account's soft delete feature is + enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible + immediately. However, the blob service retains the blob or snapshot for the number of days + specified by the DeleteRetentionPolicy section of [Storage service properties] + (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's + data is permanently removed from the storage account. Note that you continue to be charged for + the soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and + specify the "include=deleted" query parameter to discover which blobs and snapshots have been + soft deleted. You can then use the Undelete Blob API to restore a soft-deleted blob. All other + operations on a soft-deleted blob or snapshot cause the service to return an HTTP status code + of 404 (ResourceNotFound). + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + :type version_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param delete_snapshots: Required if the blob has associated snapshots. Specify one of the + following two options: include: Delete the base blob and all of its snapshots. only: Delete + only the blob's snapshots and not the blob itself. + :type delete_snapshots: str or ~azure.storage.blob.models.DeleteSnapshotsOptionType + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param blob_delete_type: Optional. Only possible value is 'permanent', which specifies to + permanently delete a blob if blob soft delete is enabled. + :type blob_delete_type: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + accept = "application/xml" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if snapshot is not None: + query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') + if version_id is not None: + query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if blob_delete_type is not None: + query_parameters['deletetype'] = self._serialize.query("blob_delete_type", blob_delete_type, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if delete_snapshots is not None: + header_parameters['x-ms-delete-snapshots'] = self._serialize.header("delete_snapshots", delete_snapshots, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: 
+ map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + delete.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def undelete( + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> None + """Undelete a blob that was previously soft deleted. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + comp = "undelete" + accept = "application/xml" + + # Construct URL + url = self.undelete.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + 
response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + undelete.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def set_expiry( + self, + expiry_options, # type: Union[str, "_models.BlobExpiryOptions"] + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + expires_on=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> None + """Sets the time a blob will expire and be deleted. + + :param expiry_options: Required. Indicates mode of the expiry time. + :type expiry_options: str or ~azure.storage.blob.models.BlobExpiryOptions + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param expires_on: The time to set the blob to expiry. + :type expires_on: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + comp = "expiry" + accept = "application/xml" + + # Construct URL + url = self.set_expiry.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['x-ms-expiry-option'] = self._serialize.header("expiry_options", expiry_options, 'str') + if expires_on is not None: + header_parameters['x-ms-expiry-time'] = self._serialize.header("expires_on", expires_on, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + 
response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_expiry.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def set_http_headers( + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + blob_http_headers=None, # type: Optional["_models.BlobHTTPHeaders"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Set HTTP Headers operation sets system properties on the blob. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param blob_http_headers: Parameter group. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _blob_cache_control = None + _blob_content_type = None + _blob_content_md5 = None + _blob_content_encoding = None + _blob_content_language = None + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _blob_content_disposition = None + if blob_http_headers is not None: + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_disposition = blob_http_headers.blob_content_disposition + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "properties" + accept = "application/xml" + + # Construct URL + url = self.set_http_headers.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _blob_cache_control is not None: + header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') + if _blob_content_type is not None: + header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') + if _blob_content_md5 is not None: + header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') + if _blob_content_encoding is not None: + header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') + if _blob_content_language is not None: + header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = 
self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + if _blob_content_disposition is not None: + header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_http_headers.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def set_immutability_policy( + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + immutability_policy_expiry=None, # type: Optional[datetime.datetime] + immutability_policy_mode=None, # type: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Set Immutability Policy operation sets the immutability policy on the blob. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. 
+ :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_unmodified_since = modified_access_conditions.if_unmodified_since + comp = "immutabilityPolicies" + accept = "application/xml" + + # Construct URL + url = self.set_immutability_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if immutability_policy_expiry is not None: + header_parameters['x-ms-immutability-policy-until-date'] = self._serialize.header("immutability_policy_expiry", immutability_policy_expiry, 'rfc-1123') + if immutability_policy_mode is not None: + header_parameters['x-ms-immutability-policy-mode'] = self._serialize.header("immutability_policy_mode", immutability_policy_mode, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-immutability-policy-until-date']=self._deserialize('rfc-1123', response.headers.get('x-ms-immutability-policy-until-date')) + response_headers['x-ms-immutability-policy-mode']=self._deserialize('str', 
response.headers.get('x-ms-immutability-policy-mode')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_immutability_policy.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def delete_immutability_policy( + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> None + """The Delete Immutability Policy operation deletes the immutability policy on the blob. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + comp = "immutabilityPolicies" + accept = "application/xml" + + # Construct URL + url = self.delete_immutability_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + delete_immutability_policy.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def set_legal_hold( + self, + legal_hold, # type: bool + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs 
# type: Any + ): + # type: (...) -> None + """The Set Legal Hold operation sets a legal hold on the blob. + + :param legal_hold: Specified if a legal hold should be set on the blob. + :type legal_hold: bool + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + comp = "legalhold" + accept = "application/xml" + + # Construct URL + url = self.set_legal_hold.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['x-ms-legal-hold'] = self._serialize.header("legal_hold", legal_hold, 'bool') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-legal-hold')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_legal_hold.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def set_metadata( + self, + timeout=None, # type: Optional[int] + metadata=None, # type: Optional[str] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: 
Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or + more name-value pairs. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. + :type metadata: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "metadata" + accept = "application/xml" + + # Construct URL + url = self.set_metadata.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + 
query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + 
response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_metadata.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def acquire_lease( + self, + timeout=None, # type: Optional[int] + duration=None, # type: Optional[int] + proposed_lease_id=None, # type: Optional[str] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a + lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease + duration cannot be changed using renew or change. + :type duration: int + :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns + 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid + Constructor (String) for a list of valid GUID string formats. + :type proposed_lease_id: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "lease" + action = "acquire" + accept = "application/xml" + + # Construct URL + url = self.acquire_lease.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') + if duration is not None: + header_parameters['x-ms-lease-duration'] = self._serialize.header("duration", duration, 'int') + if proposed_lease_id is not None: + header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + 
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + acquire_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def release_lease( + self, + lease_id, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "lease" + action = "release" + accept = "application/xml" + + # Construct URL + url = self.release_lease.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') + header_parameters['x-ms-lease-id'] = 
self._serialize.header("lease_id", lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + release_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def renew_lease( + self, + lease_id, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "lease" + action = "renew" + accept = "application/xml" + + # Construct URL + url = self.renew_lease.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + 
response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + renew_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def change_lease( + self, + lease_id, # type: str + proposed_lease_id, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns + 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid + Constructor (String) for a list of valid GUID string formats. + :type proposed_lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "lease" + action = "change" + accept = "application/xml" + + # Construct URL + url = self.change_lease.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') + header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', 
response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + change_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def break_lease( + self, + timeout=None, # type: Optional[int] + break_period=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param break_period: For a break operation, proposed duration the lease should continue before + it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter + than the time remaining on the lease. If longer, the time remaining on the lease is used. A new + lease will not be available before the break period has expired, but the lease may be held for + longer than the break period. If this header does not appear with a break operation, a + fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease + breaks immediately. + :type break_period: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "lease" + action = "break" + accept = "application/xml" + + # Construct URL + url = self.break_lease.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') + if break_period is not None: + header_parameters['x-ms-lease-break-period'] = self._serialize.header("break_period", break_period, 'int') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', 
response.headers.get('Last-Modified')) + response_headers['x-ms-lease-time']=self._deserialize('int', response.headers.get('x-ms-lease-time')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + break_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def create_snapshot( + self, + timeout=None, # type: Optional[int] + metadata=None, # type: Optional[str] + request_id_parameter=None, # type: Optional[str] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Create Snapshot operation creates a read-only snapshot of a blob. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. + :type metadata: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param lease_access_conditions: Parameter group. 
+ :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _lease_id = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "snapshot" + accept = "application/xml" + + # Construct URL + url = self.create_snapshot.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + if 
_lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-snapshot']=self._deserialize('str', response.headers.get('x-ms-snapshot')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + + if cls: + return cls(pipeline_response, None, response_headers) + + create_snapshot.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def start_copy_from_url( + self, + copy_source, # type: str + timeout=None, # type: Optional[int] + metadata=None, # type: Optional[str] + tier=None, # type: Optional[Union[str, "_models.AccessTierOptional"]] + rehydrate_priority=None, # type: Optional[Union[str, "_models.RehydratePriority"]] + request_id_parameter=None, # type: Optional[str] + blob_tags_string=None, # type: Optional[str] + seal_blob=None, # type: Optional[bool] + immutability_policy_expiry=None, # type: Optional[datetime.datetime] + immutability_policy_mode=None, # type: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] + legal_hold=None, # type: Optional[bool] + source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Start Copy From URL operation copies a blob or an internet resource to a new blob. + + :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of + up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it + would appear in a request URI. The source blob must either be public or must be authenticated + via a shared access signature. + :type copy_source: str + :param timeout: The timeout parameter is expressed in seconds. 
For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. + :type metadata: str + :param tier: Optional. Indicates the tier to be set on the blob. + :type tier: str or ~azure.storage.blob.models.AccessTierOptional + :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived + blob. + :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :type blob_tags_string: str + :param seal_blob: Overrides the sealed state of the destination blob. Service version + 2019-12-12 and newer. + :type seal_blob: bool + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. + :type legal_hold: bool + :param source_modified_access_conditions: Parameter group. + :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param lease_access_conditions: Parameter group. 
+ :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + _source_if_tags = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _lease_id = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + _source_if_tags = source_modified_access_conditions.source_if_tags + accept = "application/xml" + + # Construct URL + url = self.start_copy_from_url.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if tier is not None: + header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') + if rehydrate_priority is not None: + header_parameters['x-ms-rehydrate-priority'] = self._serialize.header("rehydrate_priority", rehydrate_priority, 'str') + if _source_if_modified_since is not None: + header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') + if _source_if_unmodified_since is not None: + header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') + if _source_if_match is not None: + header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') + if _source_if_none_match is not None: + header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') + if _source_if_tags is not None: + header_parameters['x-ms-source-if-tags'] = self._serialize.header("source_if_tags", _source_if_tags, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = 
self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if blob_tags_string is not None: + header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') + if seal_blob is not None: + header_parameters['x-ms-seal-blob'] = self._serialize.header("seal_blob", seal_blob, 'bool') + if immutability_policy_expiry is not None: + header_parameters['x-ms-immutability-policy-until-date'] = self._serialize.header("immutability_policy_expiry", immutability_policy_expiry, 'rfc-1123') + if immutability_policy_mode is not None: + header_parameters['x-ms-immutability-policy-mode'] = self._serialize.header("immutability_policy_mode", immutability_policy_mode, 'str') + if legal_hold is not None: + header_parameters['x-ms-legal-hold'] = self._serialize.header("legal_hold", legal_hold, 'bool') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + + if cls: + return cls(pipeline_response, None, response_headers) + + start_copy_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def copy_from_url( + self, + copy_source, # type: 
str + timeout=None, # type: Optional[int] + metadata=None, # type: Optional[str] + tier=None, # type: Optional[Union[str, "_models.AccessTierOptional"]] + request_id_parameter=None, # type: Optional[str] + source_content_md5=None, # type: Optional[bytearray] + blob_tags_string=None, # type: Optional[str] + immutability_policy_expiry=None, # type: Optional[datetime.datetime] + immutability_policy_mode=None, # type: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] + legal_hold=None, # type: Optional[bool] + copy_source_authorization=None, # type: Optional[str] + source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Copy From URL operation copies a blob or an internet resource to a new blob. It will not + return a response until the copy is complete. + + :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of + up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it + would appear in a request URI. The source blob must either be public or must be authenticated + via a shared access signature. + :type copy_source: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. + :type metadata: str + :param tier: Optional. Indicates the tier to be set on the blob. + :type tier: str or ~azure.storage.blob.models.AccessTierOptional + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read + from the copy source. + :type source_content_md5: bytearray + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :type blob_tags_string: str + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. + :type legal_hold: bool + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. 
+ :type copy_source_authorization: str + :param source_modified_access_conditions: Parameter group. + :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _lease_id = None + _encryption_scope = None + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + x_ms_requires_sync = "true" + accept = "application/xml" + + # Construct URL + url = self.copy_from_url.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-requires-sync'] = self._serialize.header("x_ms_requires_sync", x_ms_requires_sync, 'str') + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if tier is not None: + header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') + if _source_if_modified_since is not None: + header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') + if _source_if_unmodified_since is not None: + header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') + if _source_if_match is not None: + 
header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') + if _source_if_none_match is not None: + header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if source_content_md5 is not None: + header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') + if blob_tags_string is not None: + header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') + if immutability_policy_expiry is not None: + header_parameters['x-ms-immutability-policy-until-date'] = self._serialize.header("immutability_policy_expiry", immutability_policy_expiry, 'rfc-1123') + if immutability_policy_mode is not None: + header_parameters['x-ms-immutability-policy-mode'] = self._serialize.header("immutability_policy_mode", immutability_policy_mode, 'str') + if legal_hold is not None: + header_parameters['x-ms-legal-hold'] = self._serialize.header("legal_hold", legal_hold, 'bool') + if copy_source_authorization is not None: + header_parameters['x-ms-copy-source-authorization'] = self._serialize.header("copy_source_authorization", copy_source_authorization, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', 
response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + copy_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def abort_copy_from_url( + self, + copy_id, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a + destination blob with zero length and full metadata. + + :param copy_id: The copy identifier provided in the x-ms-copy-id header of the original Copy + Blob operation. + :type copy_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. 
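For context, the synchronous copy_from_url operation generated above is normally reached through the public azure-storage-blob BlobClient rather than invoked directly. A minimal sketch, assuming a reachable account and a readable source URL; the connection string, container, blob names, and SAS token are placeholders, and the requires_sync keyword is assumed to route to this synchronous path:

from azure.storage.blob import BlobClient

# Placeholder connection details -- substitute real values.
dest = BlobClient.from_connection_string(
    "<connection-string>", container_name="demo", blob_name="copy-target")

# requires_sync corresponds to the x-ms-requires-sync header set by
# copy_from_url; the call does not return until the copy has completed.
result = dest.start_copy_from_url(
    "https://<account>.blob.core.windows.net/src/source-blob?<sas>",
    requires_sync=True)
print(result["copy_status"])  # "success" once the synchronous copy finishes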
+ :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + comp = "copy" + copy_action_abort_constant = "abort" + accept = "application/xml" + + # Construct URL + url = self.abort_copy_from_url.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + query_parameters['copyid'] = self._serialize.query("copy_id", copy_id, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-copy-action'] = self._serialize.header("copy_action_abort_constant", copy_action_abort_constant, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + abort_copy_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def set_tier( + self, + tier, # type: Union[str, "_models.AccessTierRequired"] + snapshot=None, # type: Optional[str] + version_id=None, # type: Optional[str] + timeout=None, # type: Optional[int] + rehydrate_priority=None, # type: Optional[Union[str, "_models.RehydratePriority"]] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + 
**kwargs # type: Any + ): + # type: (...) -> None + """The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a + premium storage account and on a block blob in a blob storage account (locally redundant + storage only). A premium page blob's tier determines the allowed size, IOPS, and bandwidth of + the blob. A block blob's tier determines Hot/Cool/Archive storage type. This operation does not + update the blob's ETag. + + :param tier: Indicates the tier to be set on the blob. + :type tier: str or ~azure.storage.blob.models.AccessTierRequired + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + :type version_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived + blob. + :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. 
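The abort_copy_from_url operation defined just above terminates a pending asynchronous copy and leaves a zero-length destination blob. A minimal sketch through the public client, with placeholder names; the source must be large enough for the copy to still be pending when the abort is issued:

from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="demo", blob_name="copy-target")

# Kick off an asynchronous copy, then cancel it with the returned copy id,
# which maps to the copyid query parameter used by abort_copy_from_url.
props = blob.start_copy_from_url(
    "https://<account>.blob.core.windows.net/src/large-blob?<sas>")
blob.abort_copy(props["copy_id"])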
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_tags = modified_access_conditions.if_tags + comp = "tier" + accept = "application/xml" + + # Construct URL + url = self.set_tier.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if snapshot is not None: + query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') + if version_id is not None: + query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') + if rehydrate_priority is not None: + header_parameters['x-ms-rehydrate-priority'] = self._serialize.header("rehydrate_priority", rehydrate_priority, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + if response.status_code == 200: + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + if response.status_code == 202: + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + 
response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_tier.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def get_account_info( + self, + **kwargs # type: Any + ): + # type: (...) -> None + """Returns the sku name and account kind. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "account" + comp = "properties" + accept = "application/xml" + + # Construct URL + url = self.get_account_info.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) + response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) + + if cls: + return cls(pipeline_response, None, response_headers) + + get_account_info.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def query( + self, + snapshot=None, # type: Optional[str] + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + query_request=None, # type: Optional["_models.QueryRequest"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> IO + """The Query operation enables users to select/project on blob data by providing simple query + expressions. 
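Both set_tier and get_account_info above are thin wrappers over single requests, and the public client exposes them as set_standard_blob_tier and get_account_information. A minimal sketch with placeholder names; the tier value is only an example:

from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="demo", blob_name="data.bin")

# Drives the set_tier operation: a 200 response means the tier changed
# immediately, a 202 means the transition (e.g. rehydration) is pending.
blob.set_standard_blob_tier("Cool")

# Surfaces the x-ms-sku-name and x-ms-account-kind headers returned by
# get_account_info.
info = blob.get_account_information()
print(info["sku_name"], info["account_kind"])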
+ + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. + :type snapshot: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param query_request: the query request. + :type query_request: ~azure.storage.blob.models.QueryRequest + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IO, or the result of cls(response) + :rtype: IO + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[IO] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "query" + content_type = kwargs.pop("content_type", "application/xml") + accept = "application/xml" + + # Construct URL + url = self.query.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if snapshot is not None: + query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = 
self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if query_request is not None: + body_content = self._serialize.body(query_request, 'QueryRequest', is_xml=True) + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=True, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 206]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + if response.status_code == 200: + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) + response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) + response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) + response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) + response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) + 
response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) + response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) + response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) + response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) + response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) + deserialized = response.stream_download(self._client._pipeline) + + if response.status_code == 206: + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) + response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) + response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) + response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) + response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-blob-type']=self._deserialize('str', 
response.headers.get('x-ms-blob-type')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) + response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) + response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) + response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) + response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) + deserialized = response.stream_download(self._client._pipeline) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + query.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def get_tags( + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + snapshot=None, # type: Optional[str] + version_id=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> "_models.BlobTags" + """The Get Tags operation enables users to get the tags associated with a blob. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. 
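The query operation above posts an XML QueryRequest body (comp=query) and streams the filtered result back to the caller. A minimal sketch of the corresponding public-client call, assuming a CSV-shaped blob; the blob name and expression are placeholders:

from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="demo", blob_name="rows.csv")

# query_blob wraps the query operation: the SQL-like expression is sent in
# the request body and the matching rows are streamed in the response.
reader = blob.query_blob("SELECT * FROM BlobStorage WHERE _1 > 100")
print(reader.readall())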
+ :type request_id_parameter: str + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + :type version_id: str + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BlobTags, or the result of cls(response) + :rtype: ~azure.storage.blob.models.BlobTags + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobTags"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_tags = None + _lease_id = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_tags = modified_access_conditions.if_tags + comp = "tags" + accept = "application/xml" + + # Construct URL + url = self.get_tags.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if snapshot is not None: + query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') + if version_id is not None: + query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + 
response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('BlobTags', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_tags.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def set_tags( + self, + timeout=None, # type: Optional[int] + version_id=None, # type: Optional[str] + transactional_content_md5=None, # type: Optional[bytearray] + transactional_content_crc64=None, # type: Optional[bytearray] + request_id_parameter=None, # type: Optional[str] + tags=None, # type: Optional["_models.BlobTags"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Set Tags operation enables users to set tags on a blob. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + :type version_id: str + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. + :type transactional_content_md5: bytearray + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. + :type transactional_content_crc64: bytearray + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param tags: Blob tags. + :type tags: ~azure.storage.blob.models.BlobTags + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param lease_access_conditions: Parameter group. 
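The get_tags operation above, together with the neighbouring set_tags operation, round-trips the BlobTags XML body (comp=tags). A minimal sketch via the public client, where tags are passed as a plain dict; the names and values are placeholders:

from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="demo", blob_name="report.json")

# set_blob_tags serializes the dict into the BlobTags body used by set_tags;
# get_blob_tags deserializes the same shape coming back from get_tags.
blob.set_blob_tags({"project": "storage-blob-preview", "stage": "test"})
print(blob.get_blob_tags())  # {'project': 'storage-blob-preview', 'stage': 'test'}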
+ :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_tags = None + _lease_id = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_tags = modified_access_conditions.if_tags + comp = "tags" + content_type = kwargs.pop("content_type", "application/xml") + accept = "application/xml" + + # Construct URL + url = self.set_tags.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if version_id is not None: + query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if transactional_content_md5 is not None: + header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') + if transactional_content_crc64 is not None: + header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if tags is not None: + body_content = self._serialize.body(tags, 'BlobTags', is_xml=True) + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + 
response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_tags.metadata = {'url': '/{containerName}/{blob}'} # type: ignore diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_block_blob_operations.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_block_blob_operations.py new file mode 100644 index 00000000000..3cbe55e0be5 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_block_blob_operations.py @@ -0,0 +1,1148 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import datetime +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class BlockBlobOperations(object): + """BlockBlobOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.storage.blob.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. 
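This new file adds the BlockBlobOperations group for the 2021-04-10 service version. As its docstring notes, the class is not instantiated directly; the public BlobClient builds a generated client and attaches the group to it. A rough sketch of that relationship, assuming the vendored copy keeps the same internal attribute names as the upstream azure-storage-blob package (an assumption, since those names are private):

from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="demo", blob_name="block.bin")

# _client is the generated AzureBlobStorage client; its block_blob attribute
# is an instance of a BlockBlobOperations class like the one defined here.
print(type(blob._client.block_blob).__name__)  # expected: "BlockBlobOperations"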
+ """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def upload( + self, + content_length, # type: int + body, # type: IO + timeout=None, # type: Optional[int] + transactional_content_md5=None, # type: Optional[bytearray] + metadata=None, # type: Optional[str] + tier=None, # type: Optional[Union[str, "_models.AccessTierOptional"]] + request_id_parameter=None, # type: Optional[str] + blob_tags_string=None, # type: Optional[str] + immutability_policy_expiry=None, # type: Optional[datetime.datetime] + immutability_policy_mode=None, # type: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] + legal_hold=None, # type: Optional[bool] + blob_http_headers=None, # type: Optional["_models.BlobHTTPHeaders"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Upload Block Blob operation updates the content of an existing block blob. Updating an + existing block blob overwrites any existing metadata on the blob. Partial updates are not + supported with Put Blob; the content of the existing blob is overwritten with the content of + the new blob. To perform a partial update of the content of a block blob, use the Put Block + List operation. + + :param content_length: The length of the request. + :type content_length: long + :param body: Initial data. + :type body: IO + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. + :type transactional_content_md5: bytearray + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. + :type metadata: str + :param tier: Optional. Indicates the tier to be set on the blob. + :type tier: str or ~azure.storage.blob.models.AccessTierOptional + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :type blob_tags_string: str + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. 
+ :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. + :type legal_hold: bool + :param blob_http_headers: Parameter group. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _blob_content_type = None + _blob_content_encoding = None + _blob_content_language = None + _blob_content_md5 = None + _blob_cache_control = None + _lease_id = None + _blob_content_disposition = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if blob_http_headers is not None: + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + blob_type = "BlockBlob" + content_type = kwargs.pop("content_type", "application/octet-stream") + accept = "application/xml" + + # Construct URL + url = self.upload.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') + if transactional_content_md5 is not None: + 
header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + if _blob_content_type is not None: + header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') + if _blob_content_encoding is not None: + header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') + if _blob_content_language is not None: + header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') + if _blob_content_md5 is not None: + header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') + if _blob_cache_control is not None: + header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _blob_content_disposition is not None: + header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if tier is not None: + header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if blob_tags_string is not None: + header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') + if immutability_policy_expiry is not None: + header_parameters['x-ms-immutability-policy-until-date'] = self._serialize.header("immutability_policy_expiry", immutability_policy_expiry, 'rfc-1123') + if immutability_policy_mode is not 
None: + header_parameters['x-ms-immutability-policy-mode'] = self._serialize.header("immutability_policy_mode", immutability_policy_mode, 'str') + if legal_hold is not None: + header_parameters['x-ms-legal-hold'] = self._serialize.header("legal_hold", legal_hold, 'bool') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content_kwargs['stream_content'] = body + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + upload.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def put_blob_from_url( + self, + content_length, # type: int + copy_source, # type: str + timeout=None, # type: Optional[int] + transactional_content_md5=None, # type: Optional[bytearray] + metadata=None, # type: Optional[str] + tier=None, # type: Optional[Union[str, "_models.AccessTierOptional"]] + request_id_parameter=None, # type: Optional[str] + source_content_md5=None, # type: Optional[bytearray] + blob_tags_string=None, # type: Optional[str] + copy_source_blob_properties=None, # type: Optional[bool] + copy_source_authorization=None, # type: Optional[str] + blob_http_headers=None, # type: Optional["_models.BlobHTTPHeaders"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) 
-> None + """The Put Blob from URL operation creates a new Block Blob where the contents of the blob are + read from a given URL. This API is supported beginning with the 2020-04-08 version. Partial + updates are not supported with Put Blob from URL; the content of an existing blob is + overwritten with the content of the new blob. To perform partial updates to a block blob’s + contents using a source URL, use the Put Block from URL API in conjunction with Put Block List. + + :param content_length: The length of the request. + :type content_length: long + :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of + up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it + would appear in a request URI. The source blob must either be public or must be authenticated + via a shared access signature. + :type copy_source: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. + :type transactional_content_md5: bytearray + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. + :type metadata: str + :param tier: Optional. Indicates the tier to be set on the blob. + :type tier: str or ~azure.storage.blob.models.AccessTierOptional + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read + from the copy source. + :type source_content_md5: bytearray + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :type blob_tags_string: str + :param copy_source_blob_properties: Optional, default is true. Indicates if properties from + the source blob should be copied. + :type copy_source_blob_properties: bool + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. + :type copy_source_authorization: str + :param blob_http_headers: Parameter group. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param source_modified_access_conditions: Parameter group. 
+ :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _blob_content_type = None + _blob_content_encoding = None + _blob_content_language = None + _blob_content_md5 = None + _blob_cache_control = None + _lease_id = None + _blob_content_disposition = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + _source_if_tags = None + if blob_http_headers is not None: + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + _source_if_tags = source_modified_access_conditions.source_if_tags + blob_type = "BlockBlob" + accept = "application/xml" + + # Construct URL + url = self.put_blob_from_url.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') + if transactional_content_md5 is not None: + header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') + 
header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + if _blob_content_type is not None: + header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') + if _blob_content_encoding is not None: + header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') + if _blob_content_language is not None: + header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') + if _blob_content_md5 is not None: + header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') + if _blob_cache_control is not None: + header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _blob_content_disposition is not None: + header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if tier is not None: + header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + if _source_if_modified_since is not None: + header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') + if _source_if_unmodified_since is not None: + header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') + if _source_if_match is not None: + header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') + if _source_if_none_match is not None: + header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') + if _source_if_tags is not None: + header_parameters['x-ms-source-if-tags'] = 
self._serialize.header("source_if_tags", _source_if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if source_content_md5 is not None: + header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') + if blob_tags_string is not None: + header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') + header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') + if copy_source_blob_properties is not None: + header_parameters['x-ms-copy-source-blob-properties'] = self._serialize.header("copy_source_blob_properties", copy_source_blob_properties, 'bool') + if copy_source_authorization is not None: + header_parameters['x-ms-copy-source-authorization'] = self._serialize.header("copy_source_authorization", copy_source_authorization, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + put_blob_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def stage_block( + self, + block_id, # type: str + content_length, # type: int + body, # type: IO + transactional_content_md5=None, # type: Optional[bytearray] + transactional_content_crc64=None, # type: Optional[bytearray] + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + **kwargs # 
type: Any + ): + # type: (...) -> None + """The Stage Block operation creates a new block to be committed as part of a blob. + + :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the + string must be less than or equal to 64 bytes in size. For a given blob, the length of the + value specified for the blockid parameter must be the same size for each block. + :type block_id: str + :param content_length: The length of the request. + :type content_length: long + :param body: Initial data. + :type body: IO + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. + :type transactional_content_md5: bytearray + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. + :type transactional_content_crc64: bytearray + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + comp = "block" + content_type = kwargs.pop("content_type", "application/octet-stream") + accept = "application/xml" + + # Construct URL + url = self.stage_block.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + query_parameters['blockid'] = self._serialize.query("block_id", block_id, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + if transactional_content_md5 is not None: + header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", 
transactional_content_md5, 'bytearray') + if transactional_content_crc64 is not None: + header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content_kwargs['stream_content'] = body + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + stage_block.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def stage_block_from_url( + self, + block_id, # type: str + content_length, # type: int + source_url, # type: str + source_range=None, # type: Optional[str] + source_content_md5=None, # type: Optional[bytearray] + source_contentcrc64=None, # type: Optional[bytearray] + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: 
Optional[str] + copy_source_authorization=None, # type: Optional[str] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Stage Block operation creates a new block to be committed as part of a blob where the + contents are read from a URL. + + :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the + string must be less than or equal to 64 bytes in size. For a given blob, the length of the + value specified for the blockid parameter must be the same size for each block. + :type block_id: str + :param content_length: The length of the request. + :type content_length: long + :param source_url: Specify a URL to the copy source. + :type source_url: str + :param source_range: Bytes of source data in the specified range. + :type source_range: str + :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read + from the copy source. + :type source_content_md5: bytearray + :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be + read from the copy source. + :type source_contentcrc64: bytearray + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. + :type copy_source_authorization: str + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param source_modified_access_conditions: Parameter group. 
+ :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _lease_id = None + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + comp = "block" + accept = "application/xml" + + # Construct URL + url = self.stage_block_from_url.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + query_parameters['blockid'] = self._serialize.query("block_id", block_id, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + header_parameters['x-ms-copy-source'] = self._serialize.header("source_url", source_url, 'str') + if source_range is not None: + header_parameters['x-ms-source-range'] = self._serialize.header("source_range", source_range, 'str') + if source_content_md5 is not None: + header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') + if source_contentcrc64 is not None: + header_parameters['x-ms-source-content-crc64'] = self._serialize.header("source_contentcrc64", source_contentcrc64, 'bytearray') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", 
_encryption_scope, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _source_if_modified_since is not None: + header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') + if _source_if_unmodified_since is not None: + header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') + if _source_if_match is not None: + header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') + if _source_if_none_match is not None: + header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if copy_source_authorization is not None: + header_parameters['x-ms-copy-source-authorization'] = self._serialize.header("copy_source_authorization", copy_source_authorization, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + stage_block_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def commit_block_list( + self, + blocks, # type: "_models.BlockLookupList" + timeout=None, # type: Optional[int] + transactional_content_md5=None, # type: Optional[bytearray] + transactional_content_crc64=None, # type: Optional[bytearray] + metadata=None, # type: Optional[str] + tier=None, # type: Optional[Union[str, "_models.AccessTierOptional"]] + request_id_parameter=None, # type: Optional[str] + blob_tags_string=None, # type: Optional[str] + immutability_policy_expiry=None, # type: 
Optional[datetime.datetime] + immutability_policy_mode=None, # type: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] + legal_hold=None, # type: Optional[bool] + blob_http_headers=None, # type: Optional["_models.BlobHTTPHeaders"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Commit Block List operation writes a blob by specifying the list of block IDs that make up + the blob. In order to be written as part of a blob, a block must have been successfully written + to the server in a prior Put Block operation. You can call Put Block List to update a blob by + uploading only those blocks that have changed, then committing the new and existing blocks + together. You can do this by specifying whether to commit a block from the committed block list + or from the uncommitted block list, or to commit the most recently uploaded version of the + block, whichever list it may belong to. + + :param blocks: Blob Blocks. + :type blocks: ~azure.storage.blob.models.BlockLookupList + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. + :type transactional_content_md5: bytearray + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. + :type transactional_content_crc64: bytearray + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. + :type metadata: str + :param tier: Optional. Indicates the tier to be set on the blob. + :type tier: str or ~azure.storage.blob.models.AccessTierOptional + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :type blob_tags_string: str + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. + :type legal_hold: bool + :param blob_http_headers: Parameter group. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. 
+ :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _blob_cache_control = None + _blob_content_type = None + _blob_content_encoding = None + _blob_content_language = None + _blob_content_md5 = None + _lease_id = None + _blob_content_disposition = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if blob_http_headers is not None: + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_content_disposition = blob_http_headers.blob_content_disposition + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "blocklist" + content_type = kwargs.pop("content_type", "application/xml") + accept = "application/xml" + + # Construct URL + url = self.commit_block_list.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _blob_cache_control is not None: + header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') + if _blob_content_type is not None: + header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') + if _blob_content_encoding is not None: + header_parameters['x-ms-blob-content-encoding'] = 
self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') + if _blob_content_language is not None: + header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') + if _blob_content_md5 is not None: + header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') + if transactional_content_md5 is not None: + header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') + if transactional_content_crc64 is not None: + header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _blob_content_disposition is not None: + header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if tier is not None: + header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if blob_tags_string is not None: + header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') + if immutability_policy_expiry is not None: + header_parameters['x-ms-immutability-policy-until-date'] = self._serialize.header("immutability_policy_expiry", immutability_policy_expiry, 'rfc-1123') + if immutability_policy_mode is not None: + header_parameters['x-ms-immutability-policy-mode'] = self._serialize.header("immutability_policy_mode", immutability_policy_mode, 'str') + if legal_hold is not None: + header_parameters['x-ms-legal-hold'] = self._serialize.header("legal_hold", legal_hold, 'bool') + 
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(blocks, 'BlockLookupList', is_xml=True) + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + commit_block_list.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def get_block_list( + self, + snapshot=None, # type: Optional[str] + list_type="committed", # type: Union[str, "_models.BlockListType"] + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> "_models.BlockList" + """The Get Block List operation retrieves the list of blocks that have been uploaded as part of a + block blob. + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. + :type snapshot: str + :param list_type: Specifies whether to return the list of committed blocks, the list of + uncommitted blocks, or both lists together. + :type list_type: str or ~azure.storage.blob.models.BlockListType + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. 
+ :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BlockList, or the result of cls(response) + :rtype: ~azure.storage.blob.models.BlockList + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlockList"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_tags = modified_access_conditions.if_tags + comp = "blocklist" + accept = "application/xml" + + # Construct URL + url = self.get_block_list.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if snapshot is not None: + query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') + query_parameters['blocklisttype'] = self._serialize.query("list_type", list_type, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['x-ms-blob-content-length']=self._deserialize('long', 
response.headers.get('x-ms-blob-content-length')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('BlockList', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_block_list.metadata = {'url': '/{containerName}/{blob}'} # type: ignore diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_container_operations.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_container_operations.py new file mode 100644 index 00000000000..c9f8080eebf --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_container_operations.py @@ -0,0 +1,1770 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import datetime +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, IO, List, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class ContainerOperations(object): + """ContainerOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.storage.blob.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def create( + self, + timeout=None, # type: Optional[int] + metadata=None, # type: Optional[str] + access=None, # type: Optional[Union[str, "_models.PublicAccessType"]] + request_id_parameter=None, # type: Optional[str] + container_cpk_scope_info=None, # type: Optional["_models.ContainerCpkScopeInfo"] + **kwargs # type: Any + ): + # type: (...) -> None + """creates a new container under the specified account. 
If the container with the same name + already exists, the operation fails. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. + :type metadata: str + :param access: Specifies whether data in the container may be accessed publicly and the level + of access. + :type access: str or ~azure.storage.blob.models.PublicAccessType + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param container_cpk_scope_info: Parameter group. + :type container_cpk_scope_info: ~azure.storage.blob.models.ContainerCpkScopeInfo + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _default_encryption_scope = None + _prevent_encryption_scope_override = None + if container_cpk_scope_info is not None: + _default_encryption_scope = container_cpk_scope_info.default_encryption_scope + _prevent_encryption_scope_override = container_cpk_scope_info.prevent_encryption_scope_override + restype = "container" + accept = "application/xml" + + # Construct URL + url = self.create.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if access is not None: + header_parameters['x-ms-blob-public-access'] = self._serialize.header("access", access, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if _default_encryption_scope is not None: + header_parameters['x-ms-default-encryption-scope'] = self._serialize.header("default_encryption_scope", _default_encryption_scope, 'str') + if _prevent_encryption_scope_override is not None: + 
header_parameters['x-ms-deny-encryption-scope-override'] = self._serialize.header("prevent_encryption_scope_override", _prevent_encryption_scope_override, 'bool') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + create.metadata = {'url': '/{containerName}'} # type: ignore + + def get_properties( + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """returns all user-defined metadata and system properties for the specified container. The data + returned does not include the container's list of blobs. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. 
+ :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + restype = "container" + accept = "application/xml" + + # Construct URL + url = self.get_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) + response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) + response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-public-access']=self._deserialize('str', response.headers.get('x-ms-blob-public-access')) + response_headers['x-ms-has-immutability-policy']=self._deserialize('bool', response.headers.get('x-ms-has-immutability-policy')) + 
response_headers['x-ms-has-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-has-legal-hold')) + response_headers['x-ms-default-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-default-encryption-scope')) + response_headers['x-ms-deny-encryption-scope-override']=self._deserialize('bool', response.headers.get('x-ms-deny-encryption-scope-override')) + response_headers['x-ms-immutable-storage-with-versioning-enabled']=self._deserialize('bool', response.headers.get('x-ms-immutable-storage-with-versioning-enabled')) + + if cls: + return cls(pipeline_response, None, response_headers) + + get_properties.metadata = {'url': '/{containerName}'} # type: ignore + + def delete( + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """operation marks the specified container for deletion. The container and any blobs contained + within it are later deleted during garbage collection. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + restype = "container" + accept = "application/xml" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 
'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + delete.metadata = {'url': '/{containerName}'} # type: ignore + + def set_metadata( + self, + timeout=None, # type: Optional[int] + metadata=None, # type: Optional[str] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """operation sets one or more user-defined name-value pairs for the specified container. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. + :type metadata: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_modified_since = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + restype = "container" + comp = "metadata" + accept = "application/xml" + + # Construct URL + url = self.set_metadata.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_metadata.metadata = {'url': '/{containerName}'} # type: ignore + + def get_access_policy( + self, + timeout=None, # type: 
Optional[int] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> List["_models.SignedIdentifier"] + """gets the permissions for the specified container. The permissions indicate whether container + data may be accessed publicly. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: list of SignedIdentifier, or the result of cls(response) + :rtype: list[~azure.storage.blob.models.SignedIdentifier] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[List["_models.SignedIdentifier"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + restype = "container" + comp = "acl" + accept = "application/xml" + + # Construct URL + url = self.get_access_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-blob-public-access']=self._deserialize('str', response.headers.get('x-ms-blob-public-access')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + 
response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('[SignedIdentifier]', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_access_policy.metadata = {'url': '/{containerName}'} # type: ignore + + def set_access_policy( + self, + timeout=None, # type: Optional[int] + access=None, # type: Optional[Union[str, "_models.PublicAccessType"]] + request_id_parameter=None, # type: Optional[str] + container_acl=None, # type: Optional[List["_models.SignedIdentifier"]] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """sets the permissions for the specified container. The permissions indicate whether blobs in a + container may be accessed publicly. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param access: Specifies whether data in the container may be accessed publicly and the level + of access. + :type access: str or ~azure.storage.blob.models.PublicAccessType + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param container_acl: the acls for the container. + :type container_acl: list[~azure.storage.blob.models.SignedIdentifier] + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + restype = "container" + comp = "acl" + content_type = kwargs.pop("content_type", "application/xml") + accept = "application/xml" + + # Construct URL + url = self.set_access_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if access is not None: + header_parameters['x-ms-blob-public-access'] = self._serialize.header("access", access, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + serialization_ctxt = {'xml': {'name': 'SignedIdentifiers', 'wrapped': True, 'itemsName': 'SignedIdentifier'}} + if container_acl is not None: + body_content = self._serialize.body(container_acl, '[SignedIdentifier]', is_xml=True, serialization_ctxt=serialization_ctxt) + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise 
HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_access_policy.metadata = {'url': '/{containerName}'} # type: ignore + + def restore( + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + deleted_container_name=None, # type: Optional[str] + deleted_container_version=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> None + """Restores a previously-deleted container. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param deleted_container_name: Optional. Version 2019-12-12 and later. Specifies the name of + the deleted container to restore. + :type deleted_container_name: str + :param deleted_container_version: Optional. Version 2019-12-12 and later. Specifies the + version of the deleted container to restore. 
+ :type deleted_container_version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "container" + comp = "undelete" + accept = "application/xml" + + # Construct URL + url = self.restore.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if deleted_container_name is not None: + header_parameters['x-ms-deleted-container-name'] = self._serialize.header("deleted_container_name", deleted_container_name, 'str') + if deleted_container_version is not None: + header_parameters['x-ms-deleted-container-version'] = self._serialize.header("deleted_container_version", deleted_container_version, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + restore.metadata = {'url': '/{containerName}'} # type: ignore + + def rename( + self, + source_container_name, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + source_lease_id=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> None + """Renames an existing container. + + :param source_container_name: Required. Specifies the name of the container to rename. + :type source_container_name: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. 
+ :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param source_lease_id: A lease ID for the source path. If specified, the source path must have + an active lease and the lease ID must match. + :type source_lease_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "container" + comp = "rename" + accept = "application/xml" + + # Construct URL + url = self.rename.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['x-ms-source-container-name'] = self._serialize.header("source_container_name", source_container_name, 'str') + if source_lease_id is not None: + header_parameters['x-ms-source-lease-id'] = self._serialize.header("source_lease_id", source_lease_id, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + rename.metadata = {'url': '/{containerName}'} # type: ignore + + def submit_batch( + self, + content_length, # type: int + multipart_content_type, # type: str + body, # type: IO + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) 
-> IO + """The Batch operation allows multiple API calls to be embedded into a single HTTP request. + + :param content_length: The length of the request. + :type content_length: long + :param multipart_content_type: Required. The value of this header must be multipart/mixed with + a batch boundary. Example header value: multipart/mixed; boundary=batch_:code:``. + :type multipart_content_type: str + :param body: Initial data. + :type body: IO + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IO, or the result of cls(response) + :rtype: IO + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[IO] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "container" + comp = "batch" + content_type = kwargs.pop("content_type", "application/xml") + accept = "application/xml" + + # Construct URL + url = self.submit_batch.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + header_parameters['Content-Type'] = self._serialize.header("multipart_content_type", multipart_content_type, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(body, 'IO', is_xml=True) + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=True, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + 
response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+        deserialized = response.stream_download(self._client._pipeline)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    submit_batch.metadata = {'url': '/{containerName}'} # type: ignore
+
+    def filter_blobs(
+        self,
+        timeout=None, # type: Optional[int]
+        request_id_parameter=None, # type: Optional[str]
+        where=None, # type: Optional[str]
+        marker=None, # type: Optional[str]
+        maxresults=None, # type: Optional[int]
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "_models.FilterBlobSegment"
+        """The Filter Blobs operation enables callers to list blobs in a container whose tags match a
+        given search expression. Filter blobs searches within the given container.
+
+        :param timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting
+         Timeouts for Blob Service Operations.`.
+        :type timeout: int
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+         limit that is recorded in the analytics logs when storage analytics logging is enabled.
+        :type request_id_parameter: str
+        :param where: Filters the results to return only blobs whose tags match the
+         specified expression.
+        :type where: str
+        :param marker: A string value that identifies the portion of the list of containers to be
+         returned with the next listing operation. The operation returns the NextMarker value within the
+         response body if the listing operation did not return all containers remaining to be listed
+         with the current page. The NextMarker value can be used as the value for the marker parameter
+         in a subsequent call to request the next page of list items. The marker value is opaque to the
+         client.
+        :type marker: str
+        :param maxresults: Specifies the maximum number of containers to return. If the request does
+         not specify maxresults, or specifies a value greater than 5000, the server will return up to
+         5000 items. Note that if the listing operation crosses a partition boundary, then the service
+         will return a continuation token for retrieving the remainder of the results. For this reason,
+         it is possible that the service will return fewer results than specified by maxresults, or than
+         the default of 5000.
+ :type maxresults: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FilterBlobSegment, or the result of cls(response) + :rtype: ~azure.storage.blob.models.FilterBlobSegment + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FilterBlobSegment"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "container" + comp = "blobs" + accept = "application/xml" + + # Construct URL + url = self.filter_blobs.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if where is not None: + query_parameters['where'] = self._serialize.query("where", where, 'str') + if marker is not None: + query_parameters['marker'] = self._serialize.query("marker", marker, 'str') + if maxresults is not None: + query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('FilterBlobSegment', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + filter_blobs.metadata = {'url': '/{containerName}'} # type: ignore + + def acquire_lease( + self, + timeout=None, # type: Optional[int] + duration=None, # type: Optional[int] + proposed_lease_id=None, # type: Optional[str] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """[Update] establishes and manages a lock on a container for delete operations. 
The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a + lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease + duration cannot be changed using renew or change. + :type duration: int + :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns + 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid + Constructor (String) for a list of valid GUID string formats. + :type proposed_lease_id: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + comp = "lease" + restype = "container" + action = "acquire" + accept = "application/xml" + + # Construct URL + url = self.acquire_lease.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') + if duration is not None: + header_parameters['x-ms-lease-duration'] = self._serialize.header("duration", duration, 'int') + if proposed_lease_id is not None: + header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + 
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + acquire_lease.metadata = {'url': '/{containerName}'} # type: ignore + + def release_lease( + self, + lease_id, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + comp = "lease" + restype = "container" + action = "release" + accept = "application/xml" + + # Construct URL + url = self.release_lease.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + 
release_lease.metadata = {'url': '/{containerName}'} # type: ignore + + def renew_lease( + self, + lease_id, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + comp = "lease" + restype = "container" + action = "renew" + accept = "application/xml" + + # Construct URL + url = self.renew_lease.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, 
stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + renew_lease.metadata = {'url': '/{containerName}'} # type: ignore + + def break_lease( + self, + timeout=None, # type: Optional[int] + break_period=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param break_period: For a break operation, proposed duration the lease should continue before + it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter + than the time remaining on the lease. If longer, the time remaining on the lease is used. A new + lease will not be available before the break period has expired, but the lease may be held for + longer than the break period. If this header does not appear with a break operation, a + fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease + breaks immediately. + :type break_period: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + comp = "lease" + restype = "container" + action = "break" + accept = "application/xml" + + # Construct URL + url = self.break_lease.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') + if break_period is not None: + header_parameters['x-ms-lease-break-period'] = self._serialize.header("break_period", break_period, 'int') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-time']=self._deserialize('int', response.headers.get('x-ms-lease-time')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + 
response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + break_lease.metadata = {'url': '/{containerName}'} # type: ignore + + def change_lease( + self, + lease_id, # type: str + proposed_lease_id, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns + 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid + Constructor (String) for a list of valid GUID string formats. + :type proposed_lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + comp = "lease" + restype = "container" + action = "change" + accept = "application/xml" + + # Construct URL + url = self.change_lease.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') + header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + 
header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + change_lease.metadata = {'url': '/{containerName}'} # type: ignore + + def list_blob_flat_segment( + self, + prefix=None, # type: Optional[str] + marker=None, # type: Optional[str] + maxresults=None, # type: Optional[int] + include=None, # type: Optional[List[Union[str, "_models.ListBlobsIncludeItem"]]] + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "_models.ListBlobsFlatSegmentResponse" + """[Update] The List Blobs operation returns a list of the blobs under the specified container. + + :param prefix: Filters the results to return only containers whose name begins with the + specified prefix. + :type prefix: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. 
+ :type maxresults: int + :param include: Include this parameter to specify one or more datasets to include in the + response. + :type include: list[str or ~azure.storage.blob.models.ListBlobsIncludeItem] + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListBlobsFlatSegmentResponse, or the result of cls(response) + :rtype: ~azure.storage.blob.models.ListBlobsFlatSegmentResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListBlobsFlatSegmentResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "container" + comp = "list" + accept = "application/xml" + + # Construct URL + url = self.list_blob_flat_segment.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if prefix is not None: + query_parameters['prefix'] = self._serialize.query("prefix", prefix, 'str') + if marker is not None: + query_parameters['marker'] = self._serialize.query("marker", marker, 'str') + if maxresults is not None: + query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) + if include is not None: + query_parameters['include'] = self._serialize.query("include", include, '[str]', div=',') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + 
response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('ListBlobsFlatSegmentResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + list_blob_flat_segment.metadata = {'url': '/{containerName}'} # type: ignore + + def list_blob_hierarchy_segment( + self, + delimiter, # type: str + prefix=None, # type: Optional[str] + marker=None, # type: Optional[str] + maxresults=None, # type: Optional[int] + include=None, # type: Optional[List[Union[str, "_models.ListBlobsIncludeItem"]]] + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "_models.ListBlobsHierarchySegmentResponse" + """[Update] The List Blobs operation returns a list of the blobs under the specified container. + + :param delimiter: When the request includes this parameter, the operation returns a BlobPrefix + element in the response body that acts as a placeholder for all blobs whose names begin with + the same substring up to the appearance of the delimiter character. The delimiter may be a + single character or a string. + :type delimiter: str + :param prefix: Filters the results to return only containers whose name begins with the + specified prefix. + :type prefix: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. + :type maxresults: int + :param include: Include this parameter to specify one or more datasets to include in the + response. + :type include: list[str or ~azure.storage.blob.models.ListBlobsIncludeItem] + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. 
+ :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListBlobsHierarchySegmentResponse, or the result of cls(response) + :rtype: ~azure.storage.blob.models.ListBlobsHierarchySegmentResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListBlobsHierarchySegmentResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "container" + comp = "list" + accept = "application/xml" + + # Construct URL + url = self.list_blob_hierarchy_segment.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if prefix is not None: + query_parameters['prefix'] = self._serialize.query("prefix", prefix, 'str') + query_parameters['delimiter'] = self._serialize.query("delimiter", delimiter, 'str') + if marker is not None: + query_parameters['marker'] = self._serialize.query("marker", marker, 'str') + if maxresults is not None: + query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) + if include is not None: + query_parameters['include'] = self._serialize.query("include", include, '[str]', div=',') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('ListBlobsHierarchySegmentResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + list_blob_hierarchy_segment.metadata = {'url': '/{containerName}'} # type: ignore + + def get_account_info( + self, + 
**kwargs # type: Any + ): + # type: (...) -> None + """Returns the sku name and account kind. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "account" + comp = "properties" + accept = "application/xml" + + # Construct URL + url = self.get_account_info.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) + response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) + + if cls: + return cls(pipeline_response, None, response_headers) + + get_account_info.metadata = {'url': '/{containerName}'} # type: ignore diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_page_blob_operations.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_page_blob_operations.py new file mode 100644 index 00000000000..c953df2b9a7 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_page_blob_operations.py @@ -0,0 +1,1437 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import datetime +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class PageBlobOperations(object): + """PageBlobOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.storage.blob.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def create( + self, + content_length, # type: int + blob_content_length, # type: int + timeout=None, # type: Optional[int] + tier=None, # type: Optional[Union[str, "_models.PremiumPageBlobAccessTier"]] + metadata=None, # type: Optional[str] + blob_sequence_number=0, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + blob_tags_string=None, # type: Optional[str] + immutability_policy_expiry=None, # type: Optional[datetime.datetime] + immutability_policy_mode=None, # type: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] + legal_hold=None, # type: Optional[bool] + blob_http_headers=None, # type: Optional["_models.BlobHTTPHeaders"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Create operation creates a new page blob. + + :param content_length: The length of the request. + :type content_length: long + :param blob_content_length: This header specifies the maximum size for the page blob, up to 1 + TB. The page blob size must be aligned to a 512-byte boundary. + :type blob_content_length: long + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param tier: Optional. Indicates the tier to be set on the page blob. + :type tier: str or ~azure.storage.blob.models.PremiumPageBlobAccessTier + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. 
Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. + :type metadata: str + :param blob_sequence_number: Set for page blobs only. The sequence number is a user-controlled + value that you can use to track requests. The value of the sequence number must be between 0 + and 2^63 - 1. + :type blob_sequence_number: long + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :type blob_tags_string: str + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. + :type legal_hold: bool + :param blob_http_headers: Parameter group. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _blob_content_type = None + _blob_content_encoding = None + _blob_content_language = None + _blob_content_md5 = None + _blob_cache_control = None + _lease_id = None + _blob_content_disposition = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if blob_http_headers is not None: + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = 
modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + blob_type = "PageBlob" + accept = "application/xml" + + # Construct URL + url = self.create.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + if tier is not None: + header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') + if _blob_content_type is not None: + header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') + if _blob_content_encoding is not None: + header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') + if _blob_content_language is not None: + header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') + if _blob_content_md5 is not None: + header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') + if _blob_cache_control is not None: + header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') + if metadata is not None: + header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _blob_content_disposition is not None: + header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = 
self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-blob-content-length'] = self._serialize.header("blob_content_length", blob_content_length, 'long') + if blob_sequence_number is not None: + header_parameters['x-ms-blob-sequence-number'] = self._serialize.header("blob_sequence_number", blob_sequence_number, 'long') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if blob_tags_string is not None: + header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') + if immutability_policy_expiry is not None: + header_parameters['x-ms-immutability-policy-until-date'] = self._serialize.header("immutability_policy_expiry", immutability_policy_expiry, 'rfc-1123') + if immutability_policy_mode is not None: + header_parameters['x-ms-immutability-policy-mode'] = self._serialize.header("immutability_policy_mode", immutability_policy_mode, 'str') + if legal_hold is not None: + header_parameters['x-ms-legal-hold'] = self._serialize.header("legal_hold", legal_hold, 'bool') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + create.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def upload_pages( + self, + content_length, # type: int + body, # type: IO + transactional_content_md5=None, # type: Optional[bytearray] + transactional_content_crc64=None, # type: Optional[bytearray] + timeout=None, # type: Optional[int] + range=None, # type: 
Optional[str] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + sequence_number_access_conditions=None, # type: Optional["_models.SequenceNumberAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Upload Pages operation writes a range of pages to a page blob. + + :param content_length: The length of the request. + :type content_length: long + :param body: Initial data. + :type body: IO + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. + :type transactional_content_md5: bytearray + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. + :type transactional_content_crc64: bytearray + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param range: Return only the bytes of the blob in the specified range. + :type range: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param sequence_number_access_conditions: Parameter group. + :type sequence_number_access_conditions: ~azure.storage.blob.models.SequenceNumberAccessConditions + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_sequence_number_less_than_or_equal_to = None + _if_sequence_number_less_than = None + _if_sequence_number_equal_to = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if sequence_number_access_conditions is not None: + _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to + _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than + _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to + comp = "page" + page_write = "update" + content_type = kwargs.pop("content_type", "application/octet-stream") + accept = "application/xml" + + # Construct URL + url = self.upload_pages.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-page-write'] = self._serialize.header("page_write", page_write, 'str') + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + if transactional_content_md5 is not None: + header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') + if transactional_content_crc64 is not None: + header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') + if range is not None: + header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _encryption_key is not None: + 
header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if _if_sequence_number_less_than_or_equal_to is not None: + header_parameters['x-ms-if-sequence-number-le'] = self._serialize.header("if_sequence_number_less_than_or_equal_to", _if_sequence_number_less_than_or_equal_to, 'long') + if _if_sequence_number_less_than is not None: + header_parameters['x-ms-if-sequence-number-lt'] = self._serialize.header("if_sequence_number_less_than", _if_sequence_number_less_than, 'long') + if _if_sequence_number_equal_to is not None: + header_parameters['x-ms-if-sequence-number-eq'] = self._serialize.header("if_sequence_number_equal_to", _if_sequence_number_equal_to, 'long') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content_kwargs['stream_content'] = body + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + 
response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + upload_pages.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def clear_pages( + self, + content_length, # type: int + timeout=None, # type: Optional[int] + range=None, # type: Optional[str] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + sequence_number_access_conditions=None, # type: Optional["_models.SequenceNumberAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Clear Pages operation clears a set of pages from a page blob. + + :param content_length: The length of the request. + :type content_length: long + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param range: Return only the bytes of the blob in the specified range. + :type range: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param sequence_number_access_conditions: Parameter group. + :type sequence_number_access_conditions: ~azure.storage.blob.models.SequenceNumberAccessConditions + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_sequence_number_less_than_or_equal_to = None + _if_sequence_number_less_than = None + _if_sequence_number_equal_to = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if sequence_number_access_conditions is not None: + _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to + _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than + _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to + comp = "page" + page_write = "clear" + accept = "application/xml" + + # Construct URL + url = self.clear_pages.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-page-write'] = self._serialize.header("page_write", page_write, 'str') + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + if range is not None: + header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if 
_encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if _if_sequence_number_less_than_or_equal_to is not None: + header_parameters['x-ms-if-sequence-number-le'] = self._serialize.header("if_sequence_number_less_than_or_equal_to", _if_sequence_number_less_than_or_equal_to, 'long') + if _if_sequence_number_less_than is not None: + header_parameters['x-ms-if-sequence-number-lt'] = self._serialize.header("if_sequence_number_less_than", _if_sequence_number_less_than, 'long') + if _if_sequence_number_equal_to is not None: + header_parameters['x-ms-if-sequence-number-eq'] = self._serialize.header("if_sequence_number_equal_to", _if_sequence_number_equal_to, 'long') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + clear_pages.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def upload_pages_from_url( + self, + source_url, # type: str + source_range, # type: str + content_length, # type: int + range, # type: str + source_content_md5=None, # 
type: Optional[bytearray] + source_contentcrc64=None, # type: Optional[bytearray] + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + copy_source_authorization=None, # type: Optional[str] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + sequence_number_access_conditions=None, # type: Optional["_models.SequenceNumberAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Upload Pages operation writes a range of pages to a page blob where the contents are read + from a URL. + + :param source_url: Specify a URL to the copy source. + :type source_url: str + :param source_range: Bytes of source data in the specified range. The length of this range + should match the ContentLength header and x-ms-range/Range destination range header. + :type source_range: str + :param content_length: The length of the request. + :type content_length: long + :param range: The range of bytes to which the source range would be written. The range should + be 512 aligned and range-end is required. + :type range: str + :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read + from the copy source. + :type source_content_md5: bytearray + :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be + read from the copy source. + :type source_contentcrc64: bytearray + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. + :type copy_source_authorization: str + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param sequence_number_access_conditions: Parameter group. + :type sequence_number_access_conditions: ~azure.storage.blob.models.SequenceNumberAccessConditions + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param source_modified_access_conditions: Parameter group. 
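The clear_pages and upload_pages_from_url operations here are the generated bindings for the x-ms-page-write: clear/update REST calls. Callers normally reach them through the public azure.storage.blob BlobClient rather than invoking the generated layer directly; the following is only a minimal sketch of that public surface, with the connection string, container, blob name and SAS token as placeholders.

    from azure.storage.blob import BlobClient

    blob = BlobClient.from_connection_string(
        "<connection-string>", container_name="pages", blob_name="disk.vhd")

    # Write a 512-byte-aligned range from a readable source URL into this page blob
    # (maps to upload_pages_from_url, i.e. x-ms-page-write: update).
    blob.upload_pages_from_url(
        source_url="https://source.blob.core.windows.net/pages/src.vhd?<sas>",
        offset=0, length=4096, source_offset=0)

    # Zero the same range again (maps to clear_pages, i.e. x-ms-page-write: clear).
    blob.clear_page(offset=0, length=4096)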
+ :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _lease_id = None + _if_sequence_number_less_than_or_equal_to = None + _if_sequence_number_less_than = None + _if_sequence_number_equal_to = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if sequence_number_access_conditions is not None: + _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to + _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than + _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + comp = "page" + page_write = "update" + accept = "application/xml" + + # Construct URL + url = self.upload_pages_from_url.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-page-write'] = self._serialize.header("page_write", page_write, 'str') + header_parameters['x-ms-copy-source'] = self._serialize.header("source_url", source_url, 'str') + header_parameters['x-ms-source-range'] = self._serialize.header("source_range", source_range, 'str') + if source_content_md5 is not None: + header_parameters['x-ms-source-content-md5'] = 
self._serialize.header("source_content_md5", source_content_md5, 'bytearray') + if source_contentcrc64 is not None: + header_parameters['x-ms-source-content-crc64'] = self._serialize.header("source_contentcrc64", source_contentcrc64, 'bytearray') + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _if_sequence_number_less_than_or_equal_to is not None: + header_parameters['x-ms-if-sequence-number-le'] = self._serialize.header("if_sequence_number_less_than_or_equal_to", _if_sequence_number_less_than_or_equal_to, 'long') + if _if_sequence_number_less_than is not None: + header_parameters['x-ms-if-sequence-number-lt'] = self._serialize.header("if_sequence_number_less_than", _if_sequence_number_less_than, 'long') + if _if_sequence_number_equal_to is not None: + header_parameters['x-ms-if-sequence-number-eq'] = self._serialize.header("if_sequence_number_equal_to", _if_sequence_number_equal_to, 'long') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + if _source_if_modified_since is not None: + header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') + if _source_if_unmodified_since is not None: + header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') + if _source_if_match is not None: + header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') + if _source_if_none_match is not None: + header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + if copy_source_authorization is not None: + 
header_parameters['x-ms-copy-source-authorization'] = self._serialize.header("copy_source_authorization", copy_source_authorization, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + if cls: + return cls(pipeline_response, None, response_headers) + + upload_pages_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def get_page_ranges( + self, + snapshot=None, # type: Optional[str] + timeout=None, # type: Optional[int] + range=None, # type: Optional[str] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> "_models.PageList" + """The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot + of a page blob. + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. + :type snapshot: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param range: Return only the bytes of the blob in the specified range. + :type range: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. 
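get_page_ranges returns a PageList of the valid (non-cleared) ranges of the page blob. A minimal sketch of the equivalent call through the public BlobClient follows, reusing the hypothetical blob client from the sketch above.

    # Assumes the `blob` BlobClient from the earlier sketch.
    ranges, cleared = blob.get_page_ranges()        # GET ?comp=pagelist
    for r in ranges:
        print("valid pages:", r["start"], "-", r["end"])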
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PageList, or the result of cls(response) + :rtype: ~azure.storage.blob.models.PageList + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PageList"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "pagelist" + accept = "application/xml" + + # Construct URL + url = self.get_page_ranges.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if snapshot is not None: + query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if range is not None: + header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('PageList', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_page_ranges.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def get_page_ranges_diff( + self, + snapshot=None, # type: Optional[str] + timeout=None, # type: Optional[int] + prevsnapshot=None, # type: Optional[str] + prev_snapshot_url=None, # type: Optional[str] + range=None, # type: Optional[str] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> "_models.PageList" + """The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that + were changed between target blob and previous snapshot. + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. + :type snapshot: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param prevsnapshot: Optional in version 2015-07-08 and newer. The prevsnapshot parameter is a + DateTime value that specifies that the response will contain only pages that were changed + between target blob and previous snapshot. Changed pages include both updated and cleared + pages. The target blob may be a snapshot, as long as the snapshot specified by prevsnapshot is + the older of the two. Note that incremental snapshots are currently supported only for blobs + created on or after January 1, 2016. + :type prevsnapshot: str + :param prev_snapshot_url: Optional. This header is only supported in service versions + 2019-04-19 and after and specifies the URL of a previous snapshot of the target blob. The + response will only contain pages that were changed between the target blob and its previous + snapshot. + :type prev_snapshot_url: str + :param range: Return only the bytes of the blob in the specified range. + :type range: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. 
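get_page_ranges_diff reports only the pages that changed relative to an earlier snapshot (the prevsnapshot query parameter or the x-ms-previous-snapshot-url header). A sketch of how the public client exposes this, again assuming the blob client from the earlier example:

    # Assumes the `blob` BlobClient from the earlier sketch.
    snap = blob.create_snapshot()                   # baseline snapshot

    # ... further page writes happen here ...

    # Only ranges changed since `snap` are returned; cleared ranges come back separately.
    changed, cleared = blob.get_page_ranges(previous_snapshot_diff=snap["snapshot"])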
+ :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PageList, or the result of cls(response) + :rtype: ~azure.storage.blob.models.PageList + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PageList"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "pagelist" + accept = "application/xml" + + # Construct URL + url = self.get_page_ranges_diff.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if snapshot is not None: + query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if prevsnapshot is not None: + query_parameters['prevsnapshot'] = self._serialize.query("prevsnapshot", prevsnapshot, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if prev_snapshot_url is not None: + header_parameters['x-ms-previous-snapshot-url'] = self._serialize.header("prev_snapshot_url", prev_snapshot_url, 'str') + if range is not None: + header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = 
self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('PageList', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_page_ranges_diff.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def resize( + self, + blob_content_length, # type: int + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """Resize the Blob. + + :param blob_content_length: This header specifies the maximum size for the page blob, up to 1 + TB. The page blob size must be aligned to a 512-byte boundary. + :type blob_content_length: long + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "properties" + accept = "application/xml" + + # Construct URL + url = self.resize.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _encryption_key is not None: + header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') + if _encryption_key_sha256 is not None: + header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') + if _encryption_algorithm is not None: + header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", _encryption_algorithm, 'str') + if _encryption_scope is not None: + header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') 
+ header_parameters['x-ms-blob-content-length'] = self._serialize.header("blob_content_length", blob_content_length, 'long') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + resize.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def update_sequence_number( + self, + sequence_number_action, # type: Union[str, "_models.SequenceNumberActionType"] + timeout=None, # type: Optional[int] + blob_sequence_number=0, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """Update the sequence number of the blob. + + :param sequence_number_action: Required if the x-ms-blob-sequence-number header is set for the + request. This property applies to page blobs only. This property indicates how the service + should modify the blob's sequence number. + :type sequence_number_action: str or ~azure.storage.blob.models.SequenceNumberActionType + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param blob_sequence_number: Set for page blobs only. The sequence number is a user-controlled + value that you can use to track requests. The value of the sequence number must be between 0 + and 2^63 - 1. + :type blob_sequence_number: long + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. 
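update_sequence_number drives the x-ms-sequence-number-action header; the resulting sequence number is what the x-ms-if-sequence-number-* preconditions in the page operations above test against. A sketch via the public client, assuming the same hypothetical blob instance:

    # Assumes the `blob` BlobClient from the earlier sketch.
    blob.set_sequence_number("update", sequence_number=7)   # x-ms-sequence-number-action: update
    blob.set_sequence_number("increment")                   # service bumps the value by one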
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "properties" + accept = "application/xml" + + # Construct URL + url = self.update_sequence_number.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-sequence-number-action'] = self._serialize.header("sequence_number_action", sequence_number_action, 'str') + if blob_sequence_number is not None: + header_parameters['x-ms-blob-sequence-number'] = self._serialize.header("blob_sequence_number", blob_sequence_number, 'long') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + update_sequence_number.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + + def copy_incremental( + self, + copy_source, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Copy Incremental operation copies a snapshot of the source page blob to a destination page + blob. The snapshot is copied such that only the differential changes between the previously + copied snapshot are transferred to the destination. The copied snapshots are complete copies of + the original snapshot and can be read or copied from as usual. This API is supported since REST + version 2016-05-31. + + :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of + up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it + would appear in a request URI. The source blob must either be public or must be authenticated + via a shared access signature. + :type copy_source: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. 
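copy_incremental copies only the pages that differ from the previously copied snapshot into a destination page blob. Through the public client this is exposed as start_copy_from_url with incremental_copy=True; the snapshot URL and SAS token below are placeholders:

    # Assumes the `blob` BlobClient from the earlier sketch as the destination.
    source_snapshot_url = (
        "https://source.blob.core.windows.net/pages/src.vhd"
        "?snapshot=2021-04-10T00:00:00.0000000Z&<sas>")
    copy = blob.start_copy_from_url(source_snapshot_url, incremental_copy=True)
    print(copy["copy_status"])                      # maps to the x-ms-copy-status header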
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + comp = "incrementalcopy" + accept = "application/xml" + + # Construct URL + url = self.copy_incremental.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_tags is not None: + header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') + header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', 
response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + + if cls: + return cls(pipeline_response, None, response_headers) + + copy_incremental.metadata = {'url': '/{containerName}/{blob}'} # type: ignore diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_service_operations.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_service_operations.py new file mode 100644 index 00000000000..63628418ddd --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_generated/operations/_service_operations.py @@ -0,0 +1,710 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, IO, List, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class ServiceOperations(object): + """ServiceOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.storage.blob.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def set_properties( + self, + storage_service_properties, # type: "_models.StorageServiceProperties" + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> None + """Sets properties for a storage account's Blob service endpoint, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param storage_service_properties: The StorageService properties. 
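ServiceOperations.set_properties and get_properties round-trip the account-level StorageServiceProperties document (restype=service&comp=properties). A minimal sketch of the same pair through the public BlobServiceClient, with the account URL and key as placeholders:

    from azure.storage.blob import BlobServiceClient, RetentionPolicy

    service = BlobServiceClient(
        account_url="https://<account>.blob.core.windows.net", credential="<account-key>")

    props = service.get_service_properties()        # GET  ?restype=service&comp=properties
    service.set_service_properties(                 # PUT  same resource, with an XML body
        delete_retention_policy=RetentionPolicy(enabled=True, days=7))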
+ :type storage_service_properties: ~azure.storage.blob.models.StorageServiceProperties + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "service" + comp = "properties" + content_type = kwargs.pop("content_type", "application/xml") + accept = "application/xml" + + # Construct URL + url = self.set_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(storage_service_properties, 'StorageServiceProperties', is_xml=True) + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_properties.metadata = {'url': '/'} # type: ignore + + def get_properties( + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) 
-> "_models.StorageServiceProperties" + """gets the properties of a storage account's Blob service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageServiceProperties, or the result of cls(response) + :rtype: ~azure.storage.blob.models.StorageServiceProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageServiceProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "service" + comp = "properties" + accept = "application/xml" + + # Construct URL + url = self.get_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + deserialized = self._deserialize('StorageServiceProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_properties.metadata = {'url': '/'} # type: ignore + + def get_statistics( + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "_models.StorageServiceStats" + """Retrieves statistics related to replication for the Blob service. 
It is only available on the + secondary location endpoint when read-access geo-redundant replication is enabled for the + storage account. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageServiceStats, or the result of cls(response) + :rtype: ~azure.storage.blob.models.StorageServiceStats + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageServiceStats"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "service" + comp = "stats" + accept = "application/xml" + + # Construct URL + url = self.get_statistics.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('StorageServiceStats', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_statistics.metadata = {'url': '/'} # type: ignore + + def list_containers_segment( + self, + prefix=None, # type: Optional[str] + marker=None, # type: Optional[str] + maxresults=None, # type: Optional[int] + include=None, # type: Optional[List[Union[str, "_models.ListContainersIncludeType"]]] + timeout=None, # type: 
Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "_models.ListContainersSegmentResponse" + """The List Containers Segment operation returns a list of the containers under the specified + account. + + :param prefix: Filters the results to return only containers whose name begins with the + specified prefix. + :type prefix: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. + :type maxresults: int + :param include: Include this parameter to specify that the container's metadata be returned as + part of the response body. + :type include: list[str or ~azure.storage.blob.models.ListContainersIncludeType] + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. 
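A minimal sketch of how this paging contract is typically driven from the public client surface. The high-level BlobServiceClient.list_containers wrapper, the account URL, credential, and name prefix below are assumptions for illustration and are not part of this hunk:

    from azure.storage.blob import BlobServiceClient

    # Placeholder endpoint and credential -- substitute real values.
    service = BlobServiceClient("https://<account>.blob.core.windows.net", credential="<account-key>")

    # results_per_page maps to maxresults; by_page() resumes from the opaque
    # NextMarker continuation token described above.
    pages = service.list_containers(name_starts_with="logs-", results_per_page=100).by_page()
    for page in pages:
        for container in page:
            print(container.name)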
+ :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListContainersSegmentResponse, or the result of cls(response) + :rtype: ~azure.storage.blob.models.ListContainersSegmentResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListContainersSegmentResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + comp = "list" + accept = "application/xml" + + # Construct URL + url = self.list_containers_segment.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if prefix is not None: + query_parameters['prefix'] = self._serialize.query("prefix", prefix, 'str') + if marker is not None: + query_parameters['marker'] = self._serialize.query("marker", marker, 'str') + if maxresults is not None: + query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) + if include is not None: + query_parameters['include'] = self._serialize.query("include", include, '[str]', div=',') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + deserialized = self._deserialize('ListContainersSegmentResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + list_containers_segment.metadata = {'url': '/'} # type: ignore + + def get_user_delegation_key( + self, + key_info, # type: "_models.KeyInfo" + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "_models.UserDelegationKey" + """Retrieves a user delegation key for the Blob service. This is only a valid operation when using + bearer token authentication. + + :param key_info: Key information. 
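A minimal sketch of requesting a delegation key through the public wrapper. It assumes azure-identity for the bearer-token credential and a placeholder account URL; neither is part of this hunk:

    from datetime import datetime, timedelta
    from azure.identity import DefaultAzureCredential
    from azure.storage.blob import BlobServiceClient

    # User delegation keys require bearer-token (Azure AD) authentication, as noted above.
    service = BlobServiceClient("https://<account>.blob.core.windows.net",
                                credential=DefaultAzureCredential())

    # The public wrapper builds the KeyInfo body (start/expiry) sent with this operation.
    delegation_key = service.get_user_delegation_key(
        key_start_time=datetime.utcnow(),
        key_expiry_time=datetime.utcnow() + timedelta(hours=1))
    print(delegation_key.signed_expiry)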
+ :type key_info: ~azure.storage.blob.models.KeyInfo + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: UserDelegationKey, or the result of cls(response) + :rtype: ~azure.storage.blob.models.UserDelegationKey + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.UserDelegationKey"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "service" + comp = "userdelegationkey" + content_type = kwargs.pop("content_type", "application/xml") + accept = "application/xml" + + # Construct URL + url = self.get_user_delegation_key.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(key_info, 'KeyInfo', is_xml=True) + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('UserDelegationKey', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_user_delegation_key.metadata = {'url': '/'} # 
type: ignore + + def get_account_info( + self, + **kwargs # type: Any + ): + # type: (...) -> None + """Returns the sku name and account kind. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "account" + comp = "properties" + accept = "application/xml" + + # Construct URL + url = self.get_account_info.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) + response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) + response_headers['x-ms-is-hns-enabled']=self._deserialize('bool', response.headers.get('x-ms-is-hns-enabled')) + + if cls: + return cls(pipeline_response, None, response_headers) + + get_account_info.metadata = {'url': '/'} # type: ignore + + def submit_batch( + self, + content_length, # type: int + multipart_content_type, # type: str + body, # type: IO + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> IO + """The Batch operation allows multiple API calls to be embedded into a single HTTP request. + + :param content_length: The length of the request. + :type content_length: long + :param multipart_content_type: Required. The value of this header must be multipart/mixed with + a batch boundary. Example header value: multipart/mixed; boundary=batch_:code:``. + :type multipart_content_type: str + :param body: Initial data. + :type body: IO + :param timeout: The timeout parameter is expressed in seconds. 
For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IO, or the result of cls(response) + :rtype: IO + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[IO] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + comp = "batch" + content_type = kwargs.pop("content_type", "application/xml") + accept = "application/xml" + + # Construct URL + url = self.submit_batch.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') + header_parameters['Content-Type'] = self._serialize.header("multipart_content_type", multipart_content_type, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(body, 'IO', is_xml=True) + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=True, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + deserialized = response.stream_download(self._client._pipeline) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + submit_batch.metadata = {'url': '/'} # type: ignore + + def filter_blobs( + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + where=None, # type: Optional[str] + marker=None, # type: Optional[str] + maxresults=None, # type: 
Optional[int]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.FilterBlobSegment"
+ """The Filter Blobs operation enables callers to list blobs across all containers whose tags match
+ a given search expression. Filter blobs searches across all containers within a storage
+ account but can be scoped within the expression to a single container.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`Setting
+ Timeouts for Blob Service Operations.`.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled.
+ :type request_id_parameter: str
+ :param where: Filters the results to return only blobs whose tags match the
+ specified expression.
+ :type where: str
+ :param marker: A string value that identifies the portion of the list of containers to be
+ returned with the next listing operation. The operation returns the NextMarker value within the
+ response body if the listing operation did not return all containers remaining to be listed
+ with the current page. The NextMarker value can be used as the value for the marker parameter
+ in a subsequent call to request the next page of list items. The marker value is opaque to the
+ client.
+ :type marker: str
+ :param maxresults: Specifies the maximum number of containers to return. If the request does
+ not specify maxresults, or specifies a value greater than 5000, the server will return up to
+ 5000 items. Note that if the listing operation crosses a partition boundary, then the service
+ will return a continuation token for retrieving the remainder of the results. For this reason,
+ it is possible that the service will return fewer results than specified by maxresults, or than
+ the default of 5000.
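A minimal sketch of the same operation driven through the public find_blobs_by_tags wrapper, assuming a placeholder account and key (not part of this hunk); the where expression uses the blob tag filter syntax and can be scoped to one container with @container:

    from azure.storage.blob import BlobServiceClient

    service = BlobServiceClient("https://<account>.blob.core.windows.net", credential="<account-key>")

    # Returns FilteredBlob items (name, container_name, tags) across the account,
    # scoped here to a single container.
    filter_expr = "\"project\" = 'contoso' AND @container = 'images'"
    for blob in service.find_blobs_by_tags(filter_expr):
        print(blob.name, blob.tags)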
+ :type maxresults: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FilterBlobSegment, or the result of cls(response) + :rtype: ~azure.storage.blob.models.FilterBlobSegment + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FilterBlobSegment"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + comp = "blobs" + accept = "application/xml" + + # Construct URL + url = self.filter_blobs.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if where is not None: + query_parameters['where'] = self._serialize.query("where", where, 'str') + if marker is not None: + query_parameters['marker'] = self._serialize.query("marker", marker, 'str') + if maxresults is not None: + query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('FilterBlobSegment', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + filter_blobs.metadata = {'url': '/'} # type: ignore diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_lease.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_lease.py new file mode 100644 index 00000000000..d495d6e2dfb --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_lease.py @@ -0,0 +1,331 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import uuid + +from typing import ( # pylint: disable=unused-import + Union, Optional, Any, TypeVar, TYPE_CHECKING +) + +from azure.core.exceptions import HttpResponseError +from azure.core.tracing.decorator import distributed_trace + +from ._shared.response_handlers import return_response_headers, process_storage_error +from ._serialize import get_modify_conditions + +if TYPE_CHECKING: + from datetime import datetime + + BlobClient = TypeVar("BlobClient") + ContainerClient = TypeVar("ContainerClient") + + +class BlobLeaseClient(object): + """Creates a new BlobLeaseClient. + + This client provides lease operations on a BlobClient or ContainerClient. + + :ivar str id: + The ID of the lease currently being maintained. This will be `None` if no + lease has yet been acquired. + :ivar str etag: + The ETag of the lease currently being maintained. This will be `None` if no + lease has yet been acquired or modified. + :ivar ~datetime.datetime last_modified: + The last modified timestamp of the lease currently being maintained. + This will be `None` if no lease has yet been acquired or modified. + + :param client: + The client of the blob or container to lease. + :type client: ~azure.storage.blob.BlobClient or + ~azure.storage.blob.ContainerClient + :param str lease_id: + A string representing the lease ID of an existing lease. This value does not + need to be specified in order to acquire a new lease, or break one. + """ + def __init__( + self, client, lease_id=None + ): # pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs + # type: (Union[BlobClient, ContainerClient], Optional[str]) -> None + self.id = lease_id or str(uuid.uuid4()) + self.last_modified = None + self.etag = None + if hasattr(client, 'blob_name'): + self._client = client._client.blob # type: ignore # pylint: disable=protected-access + elif hasattr(client, 'container_name'): + self._client = client._client.container # type: ignore # pylint: disable=protected-access + else: + raise TypeError("Lease must use either BlobClient or ContainerClient.") + + def __enter__(self): + return self + + def __exit__(self, *args): + self.release() + + @distributed_trace + def acquire(self, lease_duration=-1, **kwargs): + # type: (int, **Any) -> None + """Requests a new lease. + + If the container does not have an active lease, the Blob service creates a + lease on the container and returns a new lease ID. + + :param int lease_duration: + Specifies the duration of the lease, in seconds, or negative one + (-1) for a lease that never expires. A non-infinite lease can be + between 15 and 60 seconds. A lease duration cannot be changed + using renew or change. Default is -1 (infinite lease). + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. 
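A minimal sketch of the lease lifecycle this class implements, assuming the public BlobClient surface and placeholder connection details (not part of this hunk); the with block relies on the __enter__/__exit__ pair defined above to release the lease when it ends:

    from azure.storage.blob import BlobClient, BlobLeaseClient

    # Placeholder connection string, container, and blob name.
    blob = BlobClient.from_connection_string("<connection-string>", "mycontainer", "report.csv")

    lease = BlobLeaseClient(blob)
    lease.acquire(lease_duration=15)  # finite lease: 15-60 seconds, or -1 for infinite
    with lease:
        # Writes against a leased blob must present the active lease.
        blob.upload_blob(b"new contents", overwrite=True, lease=lease)
    # __exit__ released the lease here.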
+ If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: None + """ + mod_conditions = get_modify_conditions(kwargs) + try: + response = self._client.acquire_lease( + timeout=kwargs.pop('timeout', None), + duration=lease_duration, + proposed_lease_id=self.id, + modified_access_conditions=mod_conditions, + cls=return_response_headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + self.id = response.get('lease_id') # type: str + self.last_modified = response.get('last_modified') # type: datetime + self.etag = response.get('etag') # type: str + + @distributed_trace + def renew(self, **kwargs): + # type: (Any) -> None + """Renews the lease. + + The lease can be renewed if the lease ID specified in the + lease client matches that associated with the container or blob. Note that + the lease may be renewed even if it has expired as long as the container + or blob has not been leased again since the expiration of that lease. When you + renew a lease, the lease duration clock resets. + + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. 
+ :return: None + """ + mod_conditions = get_modify_conditions(kwargs) + try: + response = self._client.renew_lease( + lease_id=self.id, + timeout=kwargs.pop('timeout', None), + modified_access_conditions=mod_conditions, + cls=return_response_headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + self.etag = response.get('etag') # type: str + self.id = response.get('lease_id') # type: str + self.last_modified = response.get('last_modified') # type: datetime + + @distributed_trace + def release(self, **kwargs): + # type: (Any) -> None + """Release the lease. + + The lease may be released if the client lease id specified matches + that associated with the container or blob. Releasing the lease allows another client + to immediately acquire the lease for the container or blob as soon as the release is complete. + + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :return: None + """ + mod_conditions = get_modify_conditions(kwargs) + try: + response = self._client.release_lease( + lease_id=self.id, + timeout=kwargs.pop('timeout', None), + modified_access_conditions=mod_conditions, + cls=return_response_headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + self.etag = response.get('etag') # type: str + self.id = response.get('lease_id') # type: str + self.last_modified = response.get('last_modified') # type: datetime + + @distributed_trace + def change(self, proposed_lease_id, **kwargs): + # type: (str, Any) -> None + """Change the lease ID of an active lease. + + :param str proposed_lease_id: + Proposed lease ID, in a GUID string format. The Blob service returns 400 + (Invalid request) if the proposed lease ID is not in the correct format. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. 
+ If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :return: None + """ + mod_conditions = get_modify_conditions(kwargs) + try: + response = self._client.change_lease( + lease_id=self.id, + proposed_lease_id=proposed_lease_id, + timeout=kwargs.pop('timeout', None), + modified_access_conditions=mod_conditions, + cls=return_response_headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + self.etag = response.get('etag') # type: str + self.id = response.get('lease_id') # type: str + self.last_modified = response.get('last_modified') # type: datetime + + @distributed_trace + def break_lease(self, lease_break_period=None, **kwargs): + # type: (Optional[int], Any) -> int + """Break the lease, if the container or blob has an active lease. + + Once a lease is broken, it cannot be renewed. Any authorized request can break the lease; + the request is not required to specify a matching lease ID. When a lease + is broken, the lease break period is allowed to elapse, during which time + no lease operation except break and release can be performed on the container or blob. + When a lease is successfully broken, the response indicates the interval + in seconds until a new lease can be acquired. + + :param int lease_break_period: + This is the proposed duration of seconds that the lease + should continue before it is broken, between 0 and 60 seconds. This + break period is only used if it is shorter than the time remaining + on the lease. If longer, the time remaining on the lease is used. + A new lease will not be available before the break period has + expired, but the lease may be held for longer than the break + period. If this header does not appear with a break + operation, a fixed-duration lease breaks after the remaining lease + period elapses, and an infinite lease breaks immediately. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. 
``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :return: Approximate time remaining in the lease period, in seconds. + :rtype: int + """ + mod_conditions = get_modify_conditions(kwargs) + try: + response = self._client.break_lease( + timeout=kwargs.pop('timeout', None), + break_period=lease_break_period, + modified_access_conditions=mod_conditions, + cls=return_response_headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + return response.get('lease_time') # type: ignore diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_list_blobs_helper.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_list_blobs_helper.py new file mode 100644 index 00000000000..faf9433c6fb --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_list_blobs_helper.py @@ -0,0 +1,244 @@ +# pylint: disable=too-many-lines +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +try: + from urllib.parse import unquote +except ImportError: + from urllib import unquote +from azure.core.paging import PageIterator, ItemPaged +from azure.core.exceptions import HttpResponseError +from ._deserialize import get_blob_properties_from_generated_code, parse_tags +from ._generated.models import BlobItemInternal, BlobPrefix as GenBlobPrefix, FilterBlobItem +from ._models import BlobProperties, FilteredBlob +from ._shared.models import DictMixin +from ._shared.response_handlers import return_context_and_deserialized, process_storage_error + + +class BlobPropertiesPaged(PageIterator): + """An Iterable of Blob properties. + + :ivar str service_endpoint: The service URL. + :ivar str prefix: A blob name prefix being used to filter the list. + :ivar str marker: The continuation token of the current page of results. + :ivar int results_per_page: The maximum number of results retrieved per API call. + :ivar str continuation_token: The continuation token to retrieve the next page of results. + :ivar str location_mode: The location mode being used to list results. The available + options include "primary" and "secondary". + :ivar current_page: The current page of listed results. + :vartype current_page: list(~azure.storage.blob.BlobProperties) + :ivar str container: The container that the blobs are listed from. + :ivar str delimiter: A delimiting character used for hierarchy listing. + + :param callable command: Function to retrieve the next page of items. + :param str container: The name of the container. + :param str prefix: Filters the results to return only blobs whose names + begin with the specified prefix. + :param int results_per_page: The maximum number of blobs to retrieve per + call. + :param str continuation_token: An opaque continuation token. + :param str delimiter: + Used to capture blobs whose names begin with the same substring up to + the appearance of the delimiter character. The delimiter may be a single + character or a string. + :param location_mode: Specifies the location the request should be sent to. 
+ This mode only applies for RA-GRS accounts which allow secondary read access. + Options include 'primary' or 'secondary'. + """ + def __init__( + self, command, + container=None, + prefix=None, + results_per_page=None, + continuation_token=None, + delimiter=None, + location_mode=None): + super(BlobPropertiesPaged, self).__init__( + get_next=self._get_next_cb, + extract_data=self._extract_data_cb, + continuation_token=continuation_token or "" + ) + self._command = command + self.service_endpoint = None + self.prefix = prefix + self.marker = None + self.results_per_page = results_per_page + self.container = container + self.delimiter = delimiter + self.current_page = None + self.location_mode = location_mode + + def _get_next_cb(self, continuation_token): + try: + return self._command( + prefix=self.prefix, + marker=continuation_token or None, + maxresults=self.results_per_page, + cls=return_context_and_deserialized, + use_location=self.location_mode) + except HttpResponseError as error: + process_storage_error(error) + + def _extract_data_cb(self, get_next_return): + self.location_mode, self._response = get_next_return + self.service_endpoint = self._response.service_endpoint + self.prefix = self._response.prefix + self.marker = self._response.marker + self.results_per_page = self._response.max_results + self.container = self._response.container_name + self.current_page = [self._build_item(item) for item in self._response.segment.blob_items] + + return self._response.next_marker or None, self.current_page + + def _build_item(self, item): + if isinstance(item, BlobProperties): + return item + if isinstance(item, BlobItemInternal): + blob = get_blob_properties_from_generated_code(item) # pylint: disable=protected-access + blob.container = self.container + return blob + return item + + +class BlobPrefixPaged(BlobPropertiesPaged): + def __init__(self, *args, **kwargs): + super(BlobPrefixPaged, self).__init__(*args, **kwargs) + self.name = self.prefix + + def _extract_data_cb(self, get_next_return): + continuation_token, _ = super(BlobPrefixPaged, self)._extract_data_cb(get_next_return) + self.current_page = self._response.segment.blob_prefixes + self._response.segment.blob_items + self.current_page = [self._build_item(item) for item in self.current_page] + self.delimiter = self._response.delimiter + + return continuation_token, self.current_page + + def _build_item(self, item): + item = super(BlobPrefixPaged, self)._build_item(item) + if isinstance(item, GenBlobPrefix): + if item.name.encoded: + name = unquote(item.name.content) + else: + name = item.name.content + return BlobPrefix( + self._command, + container=self.container, + prefix=name, + results_per_page=self.results_per_page, + location_mode=self.location_mode) + return item + + +class BlobPrefix(ItemPaged, DictMixin): + """An Iterable of Blob properties. + + Returned from walk_blobs when a delimiter is used. + Can be thought of as a virtual blob directory. + + :ivar str name: The prefix, or "directory name" of the blob. + :ivar str service_endpoint: The service URL. + :ivar str prefix: A blob name prefix being used to filter the list. + :ivar str marker: The continuation token of the current page of results. + :ivar int results_per_page: The maximum number of results retrieved per API call. + :ivar str next_marker: The continuation token to retrieve the next page of results. + :ivar str location_mode: The location mode being used to list results. The available + options include "primary" and "secondary". 
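A minimal sketch of the hierarchy listing that produces these BlobPrefix items, assuming the public ContainerClient.walk_blobs wrapper and placeholder names (not part of this hunk):

    from azure.storage.blob import ContainerClient, BlobPrefix

    container = ContainerClient.from_connection_string("<connection-string>", "mycontainer")

    # walk_blobs drives BlobPrefixPaged; BlobPrefix items are virtual directories
    # and can themselves be iterated to descend one level at a time.
    for item in container.walk_blobs(delimiter="/"):
        if isinstance(item, BlobPrefix):
            print("virtual directory:", item.name)
        else:
            print("blob:", item.name)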
+ :ivar current_page: The current page of listed results. + :vartype current_page: list(~azure.storage.blob.BlobProperties) + :ivar str container: The container that the blobs are listed from. + :ivar str delimiter: A delimiting character used for hierarchy listing. + + :param callable command: Function to retrieve the next page of items. + :param str prefix: Filters the results to return only blobs whose names + begin with the specified prefix. + :param int results_per_page: The maximum number of blobs to retrieve per + call. + :param str marker: An opaque continuation token. + :param str delimiter: + Used to capture blobs whose names begin with the same substring up to + the appearance of the delimiter character. The delimiter may be a single + character or a string. + :param location_mode: Specifies the location the request should be sent to. + This mode only applies for RA-GRS accounts which allow secondary read access. + Options include 'primary' or 'secondary'. + """ + def __init__(self, *args, **kwargs): + super(BlobPrefix, self).__init__(*args, page_iterator_class=BlobPrefixPaged, **kwargs) + self.name = kwargs.get('prefix') + self.prefix = kwargs.get('prefix') + self.results_per_page = kwargs.get('results_per_page') + self.container = kwargs.get('container') + self.delimiter = kwargs.get('delimiter') + self.location_mode = kwargs.get('location_mode') + + +class FilteredBlobPaged(PageIterator): + """An Iterable of Blob properties. + + :ivar str service_endpoint: The service URL. + :ivar str prefix: A blob name prefix being used to filter the list. + :ivar str marker: The continuation token of the current page of results. + :ivar int results_per_page: The maximum number of results retrieved per API call. + :ivar str continuation_token: The continuation token to retrieve the next page of results. + :ivar str location_mode: The location mode being used to list results. The available + options include "primary" and "secondary". + :ivar current_page: The current page of listed results. + :vartype current_page: list(~azure.storage.blob.FilteredBlob) + :ivar str container: The container that the blobs are listed from. + + :param callable command: Function to retrieve the next page of items. + :param str container: The name of the container. + :param int results_per_page: The maximum number of blobs to retrieve per + call. + :param str continuation_token: An opaque continuation token. + :param location_mode: Specifies the location the request should be sent to. + This mode only applies for RA-GRS accounts which allow secondary read access. + Options include 'primary' or 'secondary'. 
+ """ + def __init__( + self, command, + container=None, + results_per_page=None, + continuation_token=None, + location_mode=None): + super(FilteredBlobPaged, self).__init__( + get_next=self._get_next_cb, + extract_data=self._extract_data_cb, + continuation_token=continuation_token or "" + ) + self._command = command + self.service_endpoint = None + self.marker = continuation_token + self.results_per_page = results_per_page + self.container = container + self.current_page = None + self.location_mode = location_mode + + def _get_next_cb(self, continuation_token): + try: + return self._command( + marker=continuation_token or None, + maxresults=self.results_per_page, + cls=return_context_and_deserialized, + use_location=self.location_mode) + except HttpResponseError as error: + process_storage_error(error) + + def _extract_data_cb(self, get_next_return): + self.location_mode, self._response = get_next_return + self.service_endpoint = self._response.service_endpoint + self.marker = self._response.next_marker + self.current_page = [self._build_item(item) for item in self._response.blobs] + + return self._response.next_marker or None, self.current_page + + @staticmethod + def _build_item(item): + if isinstance(item, FilterBlobItem): + tags = parse_tags(item.tags) + blob = FilteredBlob(name=item.name, container_name=item.container_name, tags=tags) + return blob + return item diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_models.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_models.py new file mode 100644 index 00000000000..c67806d1f25 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_models.py @@ -0,0 +1,1259 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# pylint: disable=too-few-public-methods, too-many-instance-attributes +# pylint: disable=super-init-not-called, too-many-lines + +from enum import Enum + +from azure.core.paging import PageIterator +from azure.core.exceptions import HttpResponseError +from ._generated.models import ArrowField + +from ._shared import decode_base64_to_bytes +from ._shared.response_handlers import return_context_and_deserialized, process_storage_error +from ._shared.models import DictMixin, get_enum_value +from ._generated.models import Logging as GeneratedLogging +from ._generated.models import Metrics as GeneratedMetrics +from ._generated.models import RetentionPolicy as GeneratedRetentionPolicy +from ._generated.models import StaticWebsite as GeneratedStaticWebsite +from ._generated.models import CorsRule as GeneratedCorsRule +from ._generated.models import AccessPolicy as GenAccessPolicy + + +class BlobType(str, Enum): + + BlockBlob = "BlockBlob" + PageBlob = "PageBlob" + AppendBlob = "AppendBlob" + + +class BlockState(str, Enum): + """Block blob block types.""" + + Committed = 'Committed' #: Committed blocks. + Latest = 'Latest' #: Latest blocks. + Uncommitted = 'Uncommitted' #: Uncommitted blocks. + + +class StandardBlobTier(str, Enum): + """ + Specifies the blob tier to set the blob to. This is only applicable for + block blobs on standard storage accounts. 
+ """ + + Archive = 'Archive' #: Archive + Cool = 'Cool' #: Cool + Hot = 'Hot' #: Hot + + +class PremiumPageBlobTier(str, Enum): + """ + Specifies the page blob tier to set the blob to. This is only applicable to page + blobs on premium storage accounts. Please take a look at: + https://docs.microsoft.com/en-us/azure/storage/storage-premium-storage#scalability-and-performance-targets + for detailed information on the corresponding IOPS and throughput per PageBlobTier. + """ + + P4 = 'P4' #: P4 Tier + P6 = 'P6' #: P6 Tier + P10 = 'P10' #: P10 Tier + P20 = 'P20' #: P20 Tier + P30 = 'P30' #: P30 Tier + P40 = 'P40' #: P40 Tier + P50 = 'P50' #: P50 Tier + P60 = 'P60' #: P60 Tier + + +class QuickQueryDialect(str, Enum): + """Specifies the quick query input/output dialect.""" + + DelimitedText = 'DelimitedTextDialect' + DelimitedJson = 'DelimitedJsonDialect' + Parquet = 'ParquetDialect' + + +class SequenceNumberAction(str, Enum): + """Sequence number actions.""" + + Increment = 'increment' + """ + Increments the value of the sequence number by 1. If specifying this option, + do not include the x-ms-blob-sequence-number header. + """ + + Max = 'max' + """ + Sets the sequence number to be the higher of the value included with the + request and the value currently stored for the blob. + """ + + Update = 'update' + """Sets the sequence number to the value included with the request.""" + + +class PublicAccess(str, Enum): + """ + Specifies whether data in the container may be accessed publicly and the level of access. + """ + + OFF = 'off' + """ + Specifies that there is no public read access for both the container and blobs within the container. + Clients cannot enumerate the containers within the storage account as well as the blobs within the container. + """ + + Blob = 'blob' + """ + Specifies public read access for blobs. Blob data within this container can be read + via anonymous request, but container data is not available. Clients cannot enumerate + blobs within the container via anonymous request. + """ + + Container = 'container' + """ + Specifies full public read access for container and blob data. Clients can enumerate + blobs within the container via anonymous request, but cannot enumerate containers + within the storage account. + """ + + +class BlobImmutabilityPolicyMode(str, Enum): + """ + Specifies the immutability policy mode to set on the blob. + "Mutable" can only be returned by service, don't set to "Mutable". + """ + + Unlocked = "Unlocked" + Locked = "Locked" + Mutable = "Mutable" + + +class BlobAnalyticsLogging(GeneratedLogging): + """Azure Analytics Logging settings. + + :keyword str version: + The version of Storage Analytics to configure. The default value is 1.0. + :keyword bool delete: + Indicates whether all delete requests should be logged. The default value is `False`. + :keyword bool read: + Indicates whether all read requests should be logged. The default value is `False`. + :keyword bool write: + Indicates whether all write requests should be logged. The default value is `False`. + :keyword ~azure.storage.blob.RetentionPolicy retention_policy: + Determines how long the associated data should persist. If not specified the retention + policy will be disabled by default. 
+ """ + + def __init__(self, **kwargs): + self.version = kwargs.get('version', u'1.0') + self.delete = kwargs.get('delete', False) + self.read = kwargs.get('read', False) + self.write = kwargs.get('write', False) + self.retention_policy = kwargs.get('retention_policy') or RetentionPolicy() + + @classmethod + def _from_generated(cls, generated): + if not generated: + return cls() + return cls( + version=generated.version, + delete=generated.delete, + read=generated.read, + write=generated.write, + retention_policy=RetentionPolicy._from_generated(generated.retention_policy) # pylint: disable=protected-access + ) + + +class Metrics(GeneratedMetrics): + """A summary of request statistics grouped by API in hour or minute aggregates + for blobs. + + :keyword str version: + The version of Storage Analytics to configure. The default value is 1.0. + :keyword bool enabled: + Indicates whether metrics are enabled for the Blob service. + The default value is `False`. + :keyword bool include_apis: + Indicates whether metrics should generate summary statistics for called API operations. + :keyword ~azure.storage.blob.RetentionPolicy retention_policy: + Determines how long the associated data should persist. If not specified the retention + policy will be disabled by default. + """ + + def __init__(self, **kwargs): + self.version = kwargs.get('version', u'1.0') + self.enabled = kwargs.get('enabled', False) + self.include_apis = kwargs.get('include_apis') + self.retention_policy = kwargs.get('retention_policy') or RetentionPolicy() + + @classmethod + def _from_generated(cls, generated): + if not generated: + return cls() + return cls( + version=generated.version, + enabled=generated.enabled, + include_apis=generated.include_apis, + retention_policy=RetentionPolicy._from_generated(generated.retention_policy) # pylint: disable=protected-access + ) + + +class RetentionPolicy(GeneratedRetentionPolicy): + """The retention policy which determines how long the associated data should + persist. + + :param bool enabled: + Indicates whether a retention policy is enabled for the storage service. + The default value is False. + :param int days: + Indicates the number of days that metrics or logging or + soft-deleted data should be retained. All data older than this value will + be deleted. If enabled=True, the number of days must be specified. + """ + + def __init__(self, enabled=False, days=None): + super(RetentionPolicy, self).__init__(enabled=enabled, days=days, allow_permanent_delete=None) + if self.enabled and (self.days is None): + raise ValueError("If policy is enabled, 'days' must be specified.") + + @classmethod + def _from_generated(cls, generated): + if not generated: + return cls() + return cls( + enabled=generated.enabled, + days=generated.days, + ) + + +class StaticWebsite(GeneratedStaticWebsite): + """The properties that enable an account to host a static website. + + :keyword bool enabled: + Indicates whether this account is hosting a static website. + The default value is `False`. + :keyword str index_document: + The default name of the index page under each directory. + :keyword str error_document404_path: + The absolute path of the custom 404 page. + :keyword str default_index_document_path: + Absolute path of the default index page. 
+ """ + + def __init__(self, **kwargs): + self.enabled = kwargs.get('enabled', False) + if self.enabled: + self.index_document = kwargs.get('index_document') + self.error_document404_path = kwargs.get('error_document404_path') + self.default_index_document_path = kwargs.get('default_index_document_path') + else: + self.index_document = None + self.error_document404_path = None + self.default_index_document_path = None + + @classmethod + def _from_generated(cls, generated): + if not generated: + return cls() + return cls( + enabled=generated.enabled, + index_document=generated.index_document, + error_document404_path=generated.error_document404_path, + default_index_document_path=generated.default_index_document_path + ) + + +class CorsRule(GeneratedCorsRule): + """CORS is an HTTP feature that enables a web application running under one + domain to access resources in another domain. Web browsers implement a + security restriction known as same-origin policy that prevents a web page + from calling APIs in a different domain; CORS provides a secure way to + allow one domain (the origin domain) to call APIs in another domain. + + :param list(str) allowed_origins: + A list of origin domains that will be allowed via CORS, or "*" to allow + all domains. The list of must contain at least one entry. Limited to 64 + origin domains. Each allowed origin can have up to 256 characters. + :param list(str) allowed_methods: + A list of HTTP methods that are allowed to be executed by the origin. + The list of must contain at least one entry. For Azure Storage, + permitted methods are DELETE, GET, HEAD, MERGE, POST, OPTIONS or PUT. + :keyword list(str) allowed_headers: + Defaults to an empty list. A list of headers allowed to be part of + the cross-origin request. Limited to 64 defined headers and 2 prefixed + headers. Each header can be up to 256 characters. + :keyword list(str) exposed_headers: + Defaults to an empty list. A list of response headers to expose to CORS + clients. Limited to 64 defined headers and two prefixed headers. Each + header can be up to 256 characters. + :keyword int max_age_in_seconds: + The number of seconds that the client/browser should cache a + preflight response. + """ + + def __init__(self, allowed_origins, allowed_methods, **kwargs): + self.allowed_origins = ','.join(allowed_origins) + self.allowed_methods = ','.join(allowed_methods) + self.allowed_headers = ','.join(kwargs.get('allowed_headers', [])) + self.exposed_headers = ','.join(kwargs.get('exposed_headers', [])) + self.max_age_in_seconds = kwargs.get('max_age_in_seconds', 0) + + @classmethod + def _from_generated(cls, generated): + return cls( + [generated.allowed_origins], + [generated.allowed_methods], + allowed_headers=[generated.allowed_headers], + exposed_headers=[generated.exposed_headers], + max_age_in_seconds=generated.max_age_in_seconds, + ) + + +class ContainerProperties(DictMixin): + """Blob container's properties class. + + Returned ``ContainerProperties`` instances expose these values through a + dictionary interface, for example: ``container_props["last_modified"]``. + Additionally, the container name is available as ``container_props["name"]``. + + :ivar str name: + Name of the container. + :ivar ~datetime.datetime last_modified: + A datetime object representing the last time the container was modified. + :ivar str etag: + The ETag contains a value that you can use to perform operations + conditionally. 
+ :ivar ~azure.storage.blob.LeaseProperties lease: + Stores all the lease information for the container. + :ivar str public_access: Specifies whether data in the container may be accessed + publicly and the level of access. + :ivar bool has_immutability_policy: + Represents whether the container has an immutability policy. + :ivar bool has_legal_hold: + Represents whether the container has a legal hold. + :ivar bool immutable_storage_with_versioning_enabled: + Represents whether immutable storage with versioning enabled on the container. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :ivar dict metadata: A dict with name-value pairs to associate with the + container as metadata. + :ivar ~azure.storage.blob.ContainerEncryptionScope encryption_scope: + The default encryption scope configuration for the container. + :ivar bool deleted: + Whether this container was deleted. + :ivar str version: + The version of a deleted container. + """ + + def __init__(self, **kwargs): + self.name = None + self.last_modified = kwargs.get('Last-Modified') + self.etag = kwargs.get('ETag') + self.lease = LeaseProperties(**kwargs) + self.public_access = kwargs.get('x-ms-blob-public-access') + self.has_immutability_policy = kwargs.get('x-ms-has-immutability-policy') + self.deleted = None + self.version = None + self.has_legal_hold = kwargs.get('x-ms-has-legal-hold') + self.metadata = kwargs.get('metadata') + self.encryption_scope = None + self.immutable_storage_with_versioning_enabled = kwargs.get('x-ms-immutable-storage-with-versioning-enabled') + default_encryption_scope = kwargs.get('x-ms-default-encryption-scope') + if default_encryption_scope: + self.encryption_scope = ContainerEncryptionScope( + default_encryption_scope=default_encryption_scope, + prevent_encryption_scope_override=kwargs.get('x-ms-deny-encryption-scope-override', False) + ) + + @classmethod + def _from_generated(cls, generated): + props = cls() + props.name = generated.name + props.last_modified = generated.properties.last_modified + props.etag = generated.properties.etag + props.lease = LeaseProperties._from_generated(generated) # pylint: disable=protected-access + props.public_access = generated.properties.public_access + props.has_immutability_policy = generated.properties.has_immutability_policy + props.immutable_storage_with_versioning_enabled = \ + generated.properties.is_immutable_storage_with_versioning_enabled + props.deleted = generated.deleted + props.version = generated.version + props.has_legal_hold = generated.properties.has_legal_hold + props.metadata = generated.metadata + props.encryption_scope = ContainerEncryptionScope._from_generated(generated) #pylint: disable=protected-access + return props + + +class ContainerPropertiesPaged(PageIterator): + """An Iterable of Container properties. + + :ivar str service_endpoint: The service URL. + :ivar str prefix: A container name prefix being used to filter the list. + :ivar str marker: The continuation token of the current page of results. + :ivar int results_per_page: The maximum number of results retrieved per API call. + :ivar str continuation_token: The continuation token to retrieve the next page of results. + :ivar str location_mode: The location mode being used to list results. The available + options include "primary" and "secondary". + :ivar current_page: The current page of listed results. + :vartype current_page: list(~azure.storage.blob.ContainerProperties) + + :param callable command: Function to retrieve the next page of items. 
+ :param str prefix: Filters the results to return only containers whose names + begin with the specified prefix. + :param int results_per_page: The maximum number of container names to retrieve per + call. + :param str continuation_token: An opaque continuation token. + """ + def __init__(self, command, prefix=None, results_per_page=None, continuation_token=None): + super(ContainerPropertiesPaged, self).__init__( + get_next=self._get_next_cb, + extract_data=self._extract_data_cb, + continuation_token=continuation_token or "" + ) + self._command = command + self.service_endpoint = None + self.prefix = prefix + self.marker = None + self.results_per_page = results_per_page + self.location_mode = None + self.current_page = [] + + def _get_next_cb(self, continuation_token): + try: + return self._command( + marker=continuation_token or None, + maxresults=self.results_per_page, + cls=return_context_and_deserialized, + use_location=self.location_mode) + except HttpResponseError as error: + process_storage_error(error) + + def _extract_data_cb(self, get_next_return): + self.location_mode, self._response = get_next_return + self.service_endpoint = self._response.service_endpoint + self.prefix = self._response.prefix + self.marker = self._response.marker + self.results_per_page = self._response.max_results + self.current_page = [self._build_item(item) for item in self._response.container_items] + + return self._response.next_marker or None, self.current_page + + @staticmethod + def _build_item(item): + return ContainerProperties._from_generated(item) # pylint: disable=protected-access + + +class ImmutabilityPolicy(DictMixin): + """Optional parameters for setting the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword ~datetime.datetime expiry_time: + Specifies the date time when the blob's immutability policy is set to expire. + :keyword str or ~azure.storage.blob.BlobImmutabilityPolicyMode policy_mode: + Specifies the immutability policy mode to set on the blob. + Possible values to set include: "Locked", "Unlocked". + "Mutable" can only be returned by service, don't set to "Mutable". + """ + + def __init__(self, **kwargs): + self.expiry_time = kwargs.pop('expiry_time', None) + self.policy_mode = kwargs.pop('policy_mode', None) + + @classmethod + def _from_generated(cls, generated): + immutability_policy = cls() + immutability_policy.expiry_time = generated.properties.immutability_policy_expires_on + immutability_policy.policy_mode = generated.properties.immutability_policy_mode + return immutability_policy + + +class BlobProperties(DictMixin): + """ + Blob Properties. + + :ivar str name: + The name of the blob. + :ivar str container: + The container in which the blob resides. + :ivar str snapshot: + Datetime value that uniquely identifies the blob snapshot. + :ivar ~azure.storage.blob.BlobType blob_type: + String indicating this blob's type. + :ivar dict metadata: + Name-value pairs associated with the blob as metadata. + :ivar ~datetime.datetime last_modified: + A datetime object representing the last time the blob was modified. + :ivar str etag: + The ETag contains a value that you can use to perform operations + conditionally. + :ivar int size: + The size of the content returned. If the entire blob was requested, + the length of blob in bytes. If a subset of the blob was requested, the + length of the returned subset.
+ :ivar str content_range: + Indicates the range of bytes returned in the event that the client + requested a subset of the blob. + :ivar int append_blob_committed_block_count: + (For Append Blobs) Number of committed blocks in the blob. + :ivar bool is_append_blob_sealed: + Indicate if the append blob is sealed or not. + + .. versionadded:: 12.4.0 + + :ivar int page_blob_sequence_number: + (For Page Blobs) Sequence number for page blob used for coordinating + concurrent writes. + :ivar bool server_encrypted: + Set to true if the blob is encrypted on the server. + :ivar ~azure.storage.blob.CopyProperties copy: + Stores all the copy properties for the blob. + :ivar ~azure.storage.blob.ContentSettings content_settings: + Stores all the content settings for the blob. + :ivar ~azure.storage.blob.LeaseProperties lease: + Stores all the lease information for the blob. + :ivar ~azure.storage.blob.StandardBlobTier blob_tier: + Indicates the access tier of the blob. The hot tier is optimized + for storing data that is accessed frequently. The cool storage tier + is optimized for storing data that is infrequently accessed and stored + for at least a month. The archive tier is optimized for storing + data that is rarely accessed and stored for at least six months + with flexible latency requirements. + :ivar str rehydrate_priority: + Indicates the priority with which to rehydrate an archived blob + :ivar ~datetime.datetime blob_tier_change_time: + Indicates when the access tier was last changed. + :ivar bool blob_tier_inferred: + Indicates whether the access tier was inferred by the service. + If false, it indicates that the tier was set explicitly. + :ivar bool deleted: + Whether this blob was deleted. + :ivar ~datetime.datetime deleted_time: + A datetime object representing the time at which the blob was deleted. + :ivar int remaining_retention_days: + The number of days that the blob will be retained before being permanently deleted by the service. + :ivar ~datetime.datetime creation_time: + Indicates when the blob was created, in UTC. + :ivar str archive_status: + Archive status of blob. + :ivar str encryption_key_sha256: + The SHA-256 hash of the provided encryption key. + :ivar str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + :ivar bool request_server_encrypted: + Whether this blob is encrypted. + :ivar list(~azure.storage.blob.ObjectReplicationPolicy) object_replication_source_properties: + Only present for blobs that have policy ids and rule ids applied to them. + + .. versionadded:: 12.4.0 + + :ivar str object_replication_destination_policy: + Represents the Object Replication Policy Id that created this blob. + + .. versionadded:: 12.4.0 + + :ivar ~datetime.datetime last_accessed_on: + Indicates when the last Read/Write operation was performed on a Blob. + + .. versionadded:: 12.6.0 + + :ivar int tag_count: + Tags count on this blob. + + .. versionadded:: 12.4.0 + + :ivar dict(str, str) tags: + Key value pair of tags on this blob. + + .. versionadded:: 12.4.0 + :ivar bool has_versions_only: + A true value indicates the root blob is deleted + + .. 
versionadded:: 12.10.0 + + :ivar ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :ivar bool has_legal_hold: + Specified if a legal hold should be set on the blob. + Currently this parameter of upload_blob() API is for BlockBlob only. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + """ + + def __init__(self, **kwargs): + self.name = kwargs.get('name') + self.container = None + self.snapshot = kwargs.get('x-ms-snapshot') + self.version_id = kwargs.get('x-ms-version-id') + self.is_current_version = kwargs.get('x-ms-is-current-version') + self.blob_type = BlobType(kwargs['x-ms-blob-type']) if kwargs.get('x-ms-blob-type') else None + self.metadata = kwargs.get('metadata') + self.encrypted_metadata = kwargs.get('encrypted_metadata') + self.last_modified = kwargs.get('Last-Modified') + self.etag = kwargs.get('ETag') + self.size = kwargs.get('Content-Length') + self.content_range = kwargs.get('Content-Range') + self.append_blob_committed_block_count = kwargs.get('x-ms-blob-committed-block-count') + self.is_append_blob_sealed = kwargs.get('x-ms-blob-sealed') + self.page_blob_sequence_number = kwargs.get('x-ms-blob-sequence-number') + self.server_encrypted = kwargs.get('x-ms-server-encrypted') + self.copy = CopyProperties(**kwargs) + self.content_settings = ContentSettings(**kwargs) + self.lease = LeaseProperties(**kwargs) + self.blob_tier = kwargs.get('x-ms-access-tier') + self.rehydrate_priority = kwargs.get('x-ms-rehydrate-priority') + self.blob_tier_change_time = kwargs.get('x-ms-access-tier-change-time') + self.blob_tier_inferred = kwargs.get('x-ms-access-tier-inferred') + self.deleted = False + self.deleted_time = None + self.remaining_retention_days = None + self.creation_time = kwargs.get('x-ms-creation-time') + self.archive_status = kwargs.get('x-ms-archive-status') + self.encryption_key_sha256 = kwargs.get('x-ms-encryption-key-sha256') + self.encryption_scope = kwargs.get('x-ms-encryption-scope') + self.request_server_encrypted = kwargs.get('x-ms-server-encrypted') + self.object_replication_source_properties = kwargs.get('object_replication_source_properties') + self.object_replication_destination_policy = kwargs.get('x-ms-or-policy-id') + self.last_accessed_on = kwargs.get('x-ms-last-access-time') + self.tag_count = kwargs.get('x-ms-tag-count') + self.tags = None + self.immutability_policy = ImmutabilityPolicy(expiry_time=kwargs.get('x-ms-immutability-policy-until-date'), + policy_mode=kwargs.get('x-ms-immutability-policy-mode')) + self.has_legal_hold = kwargs.get('x-ms-legal-hold') + self.has_versions_only = None + + +class FilteredBlob(DictMixin): + """Blob info from a Filter Blobs API call. + + :ivar name: Blob name + :type name: str + :ivar container_name: Container name. + :type container_name: str + :ivar tags: Key value pairs of blob tags. + :type tags: Dict[str, str] + """ + def __init__(self, **kwargs): + self.name = kwargs.get('name', None) + self.container_name = kwargs.get('container_name', None) + self.tags = kwargs.get('tags', None) + + +class LeaseProperties(DictMixin): + """Blob Lease Properties. + + :ivar str status: + The lease status of the blob. Possible values: locked|unlocked + :ivar str state: + Lease state of the blob. 
Possible values: available|leased|expired|breaking|broken + :ivar str duration: + When a blob is leased, specifies whether the lease is of infinite or fixed duration. + """ + + def __init__(self, **kwargs): + self.status = get_enum_value(kwargs.get('x-ms-lease-status')) + self.state = get_enum_value(kwargs.get('x-ms-lease-state')) + self.duration = get_enum_value(kwargs.get('x-ms-lease-duration')) + + @classmethod + def _from_generated(cls, generated): + lease = cls() + lease.status = get_enum_value(generated.properties.lease_status) + lease.state = get_enum_value(generated.properties.lease_state) + lease.duration = get_enum_value(generated.properties.lease_duration) + return lease + + +class ContentSettings(DictMixin): + """The content settings of a blob. + + :param str content_type: + The content type specified for the blob. If no content type was + specified, the default content type is application/octet-stream. + :param str content_encoding: + If the content_encoding has previously been set + for the blob, that value is stored. + :param str content_language: + If the content_language has previously been set + for the blob, that value is stored. + :param str content_disposition: + content_disposition conveys additional information about how to + process the response payload, and also can be used to attach + additional metadata. If content_disposition has previously been set + for the blob, that value is stored. + :param str cache_control: + If the cache_control has previously been set for + the blob, that value is stored. + :param bytearray content_md5: + If the content_md5 has been set for the blob, this response + header is stored so that the client can check for message content + integrity. + """ + + def __init__( + self, content_type=None, content_encoding=None, + content_language=None, content_disposition=None, + cache_control=None, content_md5=None, **kwargs): + + self.content_type = content_type or kwargs.get('Content-Type') + self.content_encoding = content_encoding or kwargs.get('Content-Encoding') + self.content_language = content_language or kwargs.get('Content-Language') + self.content_md5 = content_md5 or kwargs.get('Content-MD5') + self.content_disposition = content_disposition or kwargs.get('Content-Disposition') + self.cache_control = cache_control or kwargs.get('Cache-Control') + + @classmethod + def _from_generated(cls, generated): + settings = cls() + settings.content_type = generated.properties.content_type or None + settings.content_encoding = generated.properties.content_encoding or None + settings.content_language = generated.properties.content_language or None + settings.content_md5 = generated.properties.content_md5 or None + settings.content_disposition = generated.properties.content_disposition or None + settings.cache_control = generated.properties.cache_control or None + return settings + + +class CopyProperties(DictMixin): + """Blob Copy Properties. + + These properties will be `None` if this blob has never been the destination + in a Copy Blob operation, or if this blob has been modified after a concluded + Copy Blob operation, for example, using Set Blob Properties, Upload Blob, or Commit Block List. + + :ivar str id: + String identifier for the last attempted Copy Blob operation where this blob + was the destination blob. + :ivar str source: + URL up to 2 KB in length that specifies the source blob used in the last attempted + Copy Blob operation where this blob was the destination blob. 
+ :ivar str status: + State of the copy operation identified by Copy ID, with these values: + success: + Copy completed successfully. + pending: + Copy is in progress. Check copy_status_description if intermittent, + non-fatal errors impede copy progress but don't cause failure. + aborted: + Copy was ended by Abort Copy Blob. + failed: + Copy failed. See copy_status_description for failure details. + :ivar str progress: + Contains the number of bytes copied and the total bytes in the source in the last + attempted Copy Blob operation where this blob was the destination blob. Can show + between 0 and Content-Length bytes copied. + :ivar ~datetime.datetime completion_time: + Conclusion time of the last attempted Copy Blob operation where this blob was the + destination blob. This value can specify the time of a completed, aborted, or + failed copy attempt. + :ivar str status_description: + Only appears when x-ms-copy-status is failed or pending. Describes cause of fatal + or non-fatal copy operation failure. + :ivar bool incremental_copy: + Copies the snapshot of the source page blob to a destination page blob. + The snapshot is copied such that only the differential changes between + the previously copied snapshot are transferred to the destination + :ivar ~datetime.datetime destination_snapshot: + Included if the blob is incremental copy blob or incremental copy snapshot, + if x-ms-copy-status is success. Snapshot time of the last successful + incremental copy snapshot for this blob. + """ + + def __init__(self, **kwargs): + self.id = kwargs.get('x-ms-copy-id') + self.source = kwargs.get('x-ms-copy-source') + self.status = get_enum_value(kwargs.get('x-ms-copy-status')) + self.progress = kwargs.get('x-ms-copy-progress') + self.completion_time = kwargs.get('x-ms-copy-completion_time') + self.status_description = kwargs.get('x-ms-copy-status-description') + self.incremental_copy = kwargs.get('x-ms-incremental-copy') + self.destination_snapshot = kwargs.get('x-ms-copy-destination-snapshot') + + @classmethod + def _from_generated(cls, generated): + copy = cls() + copy.id = generated.properties.copy_id or None + copy.status = get_enum_value(generated.properties.copy_status) or None + copy.source = generated.properties.copy_source or None + copy.progress = generated.properties.copy_progress or None + copy.completion_time = generated.properties.copy_completion_time or None + copy.status_description = generated.properties.copy_status_description or None + copy.incremental_copy = generated.properties.incremental_copy or None + copy.destination_snapshot = generated.properties.destination_snapshot or None + return copy + + +class BlobBlock(DictMixin): + """BlockBlob Block class. + + :param str block_id: + Block id. + :param str state: + Block state. Possible values: committed|uncommitted + :ivar int size: + Block size in bytes. + """ + + def __init__(self, block_id, state=BlockState.Latest): + self.id = block_id + self.state = state + self.size = None + + @classmethod + def _from_generated(cls, generated): + try: + decoded_bytes = decode_base64_to_bytes(generated.name) + block_id = decoded_bytes.decode('utf-8') + # this is to fix a bug. When large blocks are uploaded through upload_blob the block id isn't base64 encoded + # while service expected block id is base64 encoded, so when we get block_id if we cannot base64 decode, it + # means we didn't base64 encode it when stage the block, we want to use the returned block_id directly. 
+ except UnicodeDecodeError: + block_id = generated.name + block = cls(block_id) + block.size = generated.size + return block + + +class PageRange(DictMixin): + """Page Range for page blob. + + :param int start: + Start of page range in bytes. + :param int end: + End of page range in bytes. + """ + + def __init__(self, start=None, end=None): + self.start = start + self.end = end + + +class AccessPolicy(GenAccessPolicy): + """Access Policy class used by the set and get access policy methods in each service. + + A stored access policy can specify the start time, expiry time, and + permissions for the Shared Access Signatures with which it's associated. + Depending on how you want to control access to your resource, you can + specify all of these parameters within the stored access policy, and omit + them from the URL for the Shared Access Signature. Doing so permits you to + modify the associated signature's behavior at any time, as well as to revoke + it. Or you can specify one or more of the access policy parameters within + the stored access policy, and the others on the URL. Finally, you can + specify all of the parameters on the URL. In this case, you can use the + stored access policy to revoke the signature, but not to modify its behavior. + + Together the Shared Access Signature and the stored access policy must + include all fields required to authenticate the signature. If any required + fields are missing, the request will fail. Likewise, if a field is specified + both in the Shared Access Signature URL and in the stored access policy, the + request will fail with status code 400 (Bad Request). + + :param permission: + The permissions associated with the shared access signature. The + user is restricted to operations allowed by the permissions. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has been + specified in an associated stored access policy. + :type permission: str or ~azure.storage.blob.ContainerSasPermissions + :param expiry: + The time at which the shared access signature becomes invalid. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has + been specified in an associated stored access policy. Azure will always + convert values to UTC. If a date is passed in without timezone info, it + is assumed to be UTC. + :type expiry: ~datetime.datetime or str + :param start: + The time at which the shared access signature becomes valid. If + omitted, start time for this call is assumed to be the time when the + storage service receives the request. Azure will always convert values + to UTC. If a date is passed in without timezone info, it is assumed to + be UTC. + :type start: ~datetime.datetime or str + """ + def __init__(self, permission=None, expiry=None, start=None): + self.start = start + self.expiry = expiry + self.permission = permission + + +class ContainerSasPermissions(object): + """ContainerSasPermissions class to be used with the + :func:`~azure.storage.blob.generate_container_sas` function and + for the AccessPolicies used with + :func:`~azure.storage.blob.ContainerClient.set_container_access_policy`. + + :param bool read: + Read the content, properties, metadata or block list of any blob in the + container. Use any blob in the container as the source of a copy operation. + :param bool write: + For any blob in the container, create or write content, properties, + metadata, or block list. 
Snapshot or lease the blob. Resize the blob + (page blob only). Use the blob as the destination of a copy operation + within the same account. Note: You cannot grant permissions to read or + write container properties or metadata, nor to lease a container, with + a container SAS. Use an account SAS instead. + :param bool delete: + Delete any blob in the container. Note: You cannot grant permissions to + delete a container with a container SAS. Use an account SAS instead. + :param bool delete_previous_version: + Delete the previous blob version for the versioning enabled storage account. + :param bool list: + List blobs in the container. + :param bool tag: + Set or get tags on the blobs in the container. + :keyword bool add: + Add a block to an append blob. + :keyword bool create: + Write a new blob, snapshot a blob, or copy a blob to a new blob. + :keyword bool permanent_delete: + To enable permanent delete on the blob is permitted. + :keyword bool filter_by_tags: + To enable finding blobs by tags. + :keyword bool move: + Move a blob or a directory and its contents to a new location. + :keyword bool execute: + Get the system properties and, if the hierarchical namespace is enabled for the storage account, + get the POSIX ACL of a blob. + :keyword bool set_immutability_policy: + To enable operations related to set/delete immutability policy. + To get immutability policy, you just need read permission. + """ + def __init__(self, read=False, write=False, delete=False, + list=False, delete_previous_version=False, tag=False, **kwargs): # pylint: disable=redefined-builtin + self.read = read + self.add = kwargs.pop('add', False) + self.create = kwargs.pop('create', False) + self.write = write + self.delete = delete + self.delete_previous_version = delete_previous_version + self.permanent_delete = kwargs.pop('permanent_delete', False) + self.list = list + self.tag = tag + self.filter_by_tags = kwargs.pop('filter_by_tags', False) + self.move = kwargs.pop('move', False) + self.execute = kwargs.pop('execute', False) + self.set_immutability_policy = kwargs.pop('set_immutability_policy', False) + self._str = (('r' if self.read else '') + + ('a' if self.add else '') + + ('c' if self.create else '') + + ('w' if self.write else '') + + ('d' if self.delete else '') + + ('x' if self.delete_previous_version else '') + + ('y' if self.permanent_delete else '') + + ('l' if self.list else '') + + ('t' if self.tag else '') + + ('f' if self.filter_by_tags else '') + + ('m' if self.move else '') + + ('e' if self.execute else '') + + ('i' if self.set_immutability_policy else '')) + + def __str__(self): + return self._str + + @classmethod + def from_string(cls, permission): + """Create a ContainerSasPermissions from a string. + + To specify read, write, delete, or list permissions you need only to + include the first letter of the word in the string. E.g. For read and + write permissions, you would provide a string "rw". + + :param str permission: The string which dictates the read, write, delete, + and list permissions. 
+ :return: A ContainerSasPermissions object + :rtype: ~azure.storage.blob.ContainerSasPermissions + """ + p_read = 'r' in permission + p_add = 'a' in permission + p_create = 'c' in permission + p_write = 'w' in permission + p_delete = 'd' in permission + p_delete_previous_version = 'x' in permission + p_permanent_delete = 'y' in permission + p_list = 'l' in permission + p_tag = 't' in permission + p_filter_by_tags = 'f' in permission + p_move = 'm' in permission + p_execute = 'e' in permission + p_set_immutability_policy = 'i' in permission + parsed = cls(read=p_read, write=p_write, delete=p_delete, list=p_list, + delete_previous_version=p_delete_previous_version, tag=p_tag, add=p_add, + create=p_create, permanent_delete=p_permanent_delete, filter_by_tags=p_filter_by_tags, + move=p_move, execute=p_execute, set_immutability_policy=p_set_immutability_policy) + + return parsed + + +class BlobSasPermissions(object): + """BlobSasPermissions class to be used with the + :func:`~azure.storage.blob.generate_blob_sas` function. + + :param bool read: + Read the content, properties, metadata and block list. Use the blob as + the source of a copy operation. + :param bool add: + Add a block to an append blob. + :param bool create: + Write a new blob, snapshot a blob, or copy a blob to a new blob. + :param bool write: + Create or write content, properties, metadata, or block list. Snapshot + or lease the blob. Resize the blob (page blob only). Use the blob as the + destination of a copy operation within the same account. + :param bool delete: + Delete the blob. + :param bool delete_previous_version: + Delete the previous blob version for the versioning enabled storage account. + :param bool tag: + Set or get tags on the blob. + :keyword bool permanent_delete: + To enable permanent delete on the blob is permitted. + :keyword bool move: + Move a blob or a directory and its contents to a new location. + :keyword bool execute: + Get the system properties and, if the hierarchical namespace is enabled for the storage account, + get the POSIX ACL of a blob. + :keyword bool set_immutability_policy: + To enable operations related to set/delete immutability policy. + To get immutability policy, you just need read permission. + """ + def __init__(self, read=False, add=False, create=False, write=False, + delete=False, delete_previous_version=False, tag=False, **kwargs): + self.read = read + self.add = add + self.create = create + self.write = write + self.delete = delete + self.delete_previous_version = delete_previous_version + self.permanent_delete = kwargs.pop('permanent_delete', False) + self.tag = tag + self.move = kwargs.pop('move', False) + self.execute = kwargs.pop('execute', False) + self.set_immutability_policy = kwargs.pop('set_immutability_policy', False) + self._str = (('r' if self.read else '') + + ('a' if self.add else '') + + ('c' if self.create else '') + + ('w' if self.write else '') + + ('d' if self.delete else '') + + ('x' if self.delete_previous_version else '') + + ('y' if self.permanent_delete else '') + + ('t' if self.tag else '') + + ('m' if self.move else '') + + ('e' if self.execute else '') + + ('i' if self.set_immutability_policy else '')) + + def __str__(self): + return self._str + + @classmethod + def from_string(cls, permission): + """Create a BlobSasPermissions from a string. + + To specify read, add, create, write, or delete permissions you need only to + include the first letter of the word in the string. E.g. For read and + write permissions, you would provide a string "rw". 
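+
+        A minimal sketch::
+
+            # parse single-letter permission codes back into a permissions object
+            permissions = BlobSasPermissions.from_string("rw")
+            print(permissions)  # prints "rw"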
+ + :param str permission: The string which dictates the read, add, create, + write, or delete permissions. + :return: A BlobSasPermissions object + :rtype: ~azure.storage.blob.BlobSasPermissions + """ + p_read = 'r' in permission + p_add = 'a' in permission + p_create = 'c' in permission + p_write = 'w' in permission + p_delete = 'd' in permission + p_delete_previous_version = 'x' in permission + p_permanent_delete = 'y' in permission + p_tag = 't' in permission + p_move = 'm' in permission + p_execute = 'e' in permission + p_set_immutability_policy = 'i' in permission + + parsed = cls(read=p_read, add=p_add, create=p_create, write=p_write, delete=p_delete, + delete_previous_version=p_delete_previous_version, tag=p_tag, permanent_delete=p_permanent_delete, + move=p_move, execute=p_execute, set_immutability_policy=p_set_immutability_policy) + + return parsed + + +class CustomerProvidedEncryptionKey(object): + """ + All data in Azure Storage is encrypted at-rest using an account-level encryption key. + In versions 2018-06-17 and newer, you can manage the key used to encrypt blob contents + and application metadata per-blob by providing an AES-256 encryption key in requests to the storage service. + + When you use a customer-provided key, Azure Storage does not manage or persist your key. + When writing data to a blob, the provided key is used to encrypt your data before writing it to disk. + A SHA-256 hash of the encryption key is written alongside the blob contents, + and is used to verify that all subsequent operations against the blob use the same encryption key. + This hash cannot be used to retrieve the encryption key or decrypt the contents of the blob. + When reading a blob, the provided key is used to decrypt your data after reading it from disk. + In both cases, the provided encryption key is securely discarded + as soon as the encryption or decryption process completes. + + :param str key_value: + Base64-encoded AES-256 encryption key value. + :param str key_hash: + Base64-encoded SHA256 of the encryption key. + :ivar str algorithm: + Specifies the algorithm to use when encrypting data using the given key. Must be AES256. + """ + def __init__(self, key_value, key_hash): + self.key_value = key_value + self.key_hash = key_hash + self.algorithm = 'AES256' + + +class ContainerEncryptionScope(object): + """The default encryption scope configuration for a container. + + This scope is used implicitly for all future writes within the container, + but can be overridden per blob operation. + + .. versionadded:: 12.2.0 + + :param str default_encryption_scope: + Specifies the default encryption scope to set on the container and use for + all future writes. + :param bool prevent_encryption_scope_override: + If true, prevents any request from specifying a different encryption scope than the scope + set on the container. Default value is false. + """ + + def __init__(self, default_encryption_scope, **kwargs): + self.default_encryption_scope = default_encryption_scope + self.prevent_encryption_scope_override = kwargs.get('prevent_encryption_scope_override', False) + + @classmethod + def _from_generated(cls, generated): + if generated.properties.default_encryption_scope: + scope = cls( + generated.properties.default_encryption_scope, + prevent_encryption_scope_override=generated.properties.prevent_encryption_scope_override or False + ) + return scope + return None + + +class DelimitedJsonDialect(DictMixin): + """Defines the input or output JSON serialization for a blob data query. 
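+
+    For example (an illustrative sketch; ``blob_client`` is assumed to be an existing
+    ``BlobClient`` pointing at CSV data)::
+
+        # 'blob_client' is an assumed, pre-configured BlobClient
+        reader = blob_client.query_blob(
+            "SELECT * from BlobStorage",
+            blob_format=DelimitedTextDialect(has_header=True),
+            output_format=DelimitedJsonDialect(delimiter='\n'))
+        json_lines = reader.readall()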
+ + :keyword str delimiter: The line separator character, default value is '\n' + """ + + def __init__(self, **kwargs): + self.delimiter = kwargs.pop('delimiter', '\n') + + +class DelimitedTextDialect(DictMixin): + """Defines the input or output delimited (CSV) serialization for a blob query request. + + :keyword str delimiter: + Column separator, defaults to ','. + :keyword str quotechar: + Field quote, defaults to '"'. + :keyword str lineterminator: + Record separator, defaults to '\\\\n'. + :keyword str escapechar: + Escape char, defaults to empty. + :keyword bool has_header: + Whether the blob data includes headers in the first line. The default value is False, meaning that the + data will be returned inclusive of the first line. If set to True, the data will be returned exclusive + of the first line. + """ + def __init__(self, **kwargs): + self.delimiter = kwargs.pop('delimiter', ',') + self.quotechar = kwargs.pop('quotechar', '"') + self.lineterminator = kwargs.pop('lineterminator', '\n') + self.escapechar = kwargs.pop('escapechar', "") + self.has_header = kwargs.pop('has_header', False) + + +class ArrowDialect(ArrowField): + """field of an arrow schema. + + All required parameters must be populated in order to send to Azure. + + :param ~azure.storage.blob.ArrowType type: Arrow field type. + :keyword str name: The name of the field. + :keyword int precision: The precision of the field. + :keyword int scale: The scale of the field. + """ + def __init__(self, type, **kwargs): # pylint: disable=redefined-builtin + super(ArrowDialect, self).__init__(type=type, **kwargs) + + +class ArrowType(str, Enum): + + INT64 = "int64" + BOOL = "bool" + TIMESTAMP_MS = "timestamp[ms]" + STRING = "string" + DOUBLE = "double" + DECIMAL = 'decimal' + + +class ObjectReplicationPolicy(DictMixin): + """Policy id and rule ids applied to a blob. + + :ivar str policy_id: + Policy id for the blob. A replication policy gets created (policy id) when creating a source/destination pair. + :ivar list(~azure.storage.blob.ObjectReplicationRule) rules: + Within each policy there may be multiple replication rules. + e.g. rule 1= src/container/.pdf to dst/container2/; rule2 = src/container1/.jpg to dst/container3 + """ + + def __init__(self, **kwargs): + self.policy_id = kwargs.pop('policy_id', None) + self.rules = kwargs.pop('rules', None) + + +class ObjectReplicationRule(DictMixin): + """Policy id and rule ids applied to a blob. + + :ivar str rule_id: + Rule id. + :ivar str status: + The status of the rule. It could be "Complete" or "Failed" + """ + + def __init__(self, **kwargs): + self.rule_id = kwargs.pop('rule_id', None) + self.status = kwargs.pop('status', None) + + +class BlobQueryError(object): + """The error happened during quick query operation. + + :ivar str error: + The name of the error. + :ivar bool is_fatal: + If true, this error prevents further query processing. More result data may be returned, + but there is no guarantee that all of the original data will be processed. + If false, this error does not prevent further query processing. + :ivar str description: + A description of the error. + :ivar int position: + The blob offset at which the error occurred. 
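+
+    Errors of this type are passed to the ``on_error`` callback of a query operation
+    (an illustrative sketch; ``blob_client`` is assumed to be an existing ``BlobClient``)::
+
+        # 'blob_client' is an assumed, pre-configured BlobClient
+        def handle_error(error):
+            # non-fatal errors still allow further records to be processed
+            print("query error:", error.error, "fatal:", error.is_fatal,
+                  "at offset:", error.position)
+
+        reader = blob_client.query_blob("SELECT * from BlobStorage", on_error=handle_error)
+        data = reader.readall()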
+ """ + def __init__(self, error=None, is_fatal=False, description=None, position=None): + self.error = error + self.is_fatal = is_fatal + self.description = description + self.position = position diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_quick_query_helper.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_quick_query_helper.py new file mode 100644 index 00000000000..3164337308c --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_quick_query_helper.py @@ -0,0 +1,195 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +from io import BytesIO +from typing import Union, Iterable, IO # pylint: disable=unused-import + +from ._shared.avro.datafile import DataFileReader +from ._shared.avro.avro_io import DatumReader + + +class BlobQueryReader(object): # pylint: disable=too-many-instance-attributes + """A streaming object to read query results. + + :ivar str name: + The name of the blob being quered. + :ivar str container: + The name of the container where the blob is. + :ivar dict response_headers: + The response_headers of the quick query request. + :ivar bytes record_delimiter: + The delimiter used to separate lines, or records with the data. The `records` + method will return these lines via a generator. + """ + + def __init__( + self, + name=None, + container=None, + errors=None, + record_delimiter='\n', + encoding=None, + headers=None, + response=None, + error_cls=None, + ): + self.name = name + self.container = container + self.response_headers = headers + self.record_delimiter = record_delimiter + self._size = 0 + self._bytes_processed = 0 + self._errors = errors + self._encoding = encoding + self._parsed_results = DataFileReader(QuickQueryStreamer(response), DatumReader()) + self._first_result = self._process_record(next(self._parsed_results)) + self._error_cls = error_cls + + def __len__(self): + return self._size + + def _process_record(self, result): + self._size = result.get('totalBytes', self._size) + self._bytes_processed = result.get('bytesScanned', self._bytes_processed) + if 'data' in result: + return result.get('data') + if 'fatal' in result: + error = self._error_cls( + error=result['name'], + is_fatal=result['fatal'], + description=result['description'], + position=result['position'] + ) + if self._errors: + self._errors(error) + return None + + def _iter_stream(self): + if self._first_result is not None: + yield self._first_result + for next_result in self._parsed_results: + processed_result = self._process_record(next_result) + if processed_result is not None: + yield processed_result + + def readall(self): + # type: () -> Union[bytes, str] + """Return all query results. + + This operation is blocking until all data is downloaded. + If encoding has been configured - this will be used to decode individual + records are they are received. + + :rtype: Union[bytes, str] + """ + stream = BytesIO() + self.readinto(stream) + data = stream.getvalue() + if self._encoding: + return data.decode(self._encoding) + return data + + def readinto(self, stream): + # type: (IO) -> None + """Download the query result to a stream. 
+ + :param stream: + The stream to download to. This can be an open file-handle, + or any writable stream. + :returns: None + """ + for record in self._iter_stream(): + stream.write(record) + + def records(self): + # type: () -> Iterable[Union[bytes, str]] + """Returns a record generator for the query result. + + Records will be returned line by line. + If encoding has been configured - this will be used to decode individual + records as they are received. + + :rtype: Iterable[Union[bytes, str]] + """ + delimiter = self.record_delimiter.encode('utf-8') + for record_chunk in self._iter_stream(): + for record in record_chunk.split(delimiter): + if self._encoding: + yield record.decode(self._encoding) + else: + yield record + + +class QuickQueryStreamer(object): + """ + File-like streaming iterator. + """ + + def __init__(self, generator): + self.generator = generator + self.iterator = iter(generator) + self._buf = b"" + self._point = 0 + self._download_offset = 0 + self._buf_start = 0 + self.file_length = None + + def __len__(self): + return self.file_length + + def __iter__(self): + return self.iterator + + @staticmethod + def seekable(): + return True + + def __next__(self): + next_part = next(self.iterator) + self._download_offset += len(next_part) + return next_part + + next = __next__ # Python 2 compatibility. + + def tell(self): + return self._point + + def seek(self, offset, whence=0): + if whence == 0: + self._point = offset + elif whence == 1: + self._point += offset + else: + raise ValueError("whence must be 0, or 1") + if self._point < 0: + self._point = 0 # XXX is this right? + + def read(self, size): + try: + # keep reading from the generator until the buffer of this stream has enough data to read + while self._point + size > self._download_offset: + self._buf += self.__next__() + except StopIteration: + self.file_length = self._download_offset + + start_point = self._point + + # EOF + self._point = min(self._point + size, self._download_offset) + + relative_start = start_point - self._buf_start + if relative_start < 0: + raise ValueError("Buffer has dumped too much data") + relative_end = relative_start + size + data = self._buf[relative_start: relative_end] + + # dump the extra data in buffer + # buffer start--------------------16bytes----current read position + dumped_size = max(relative_end - 16 - relative_start, 0) + self._buf_start += dumped_size + self._buf = self._buf[dumped_size:] + + return data diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_serialize.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_serialize.py new file mode 100644 index 00000000000..b6399c0cb7d --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_serialize.py @@ -0,0 +1,215 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information.
+# -------------------------------------------------------------------------- +# pylint: disable=no-self-use +from typing import ( # pylint: disable=unused-import + Any, Dict, Optional, Tuple, Union, + TYPE_CHECKING) + +try: + from urllib.parse import quote +except ImportError: + from urllib2 import quote # type: ignore + +from azure.core import MatchConditions + +from ._models import ( + ContainerEncryptionScope, + DelimitedJsonDialect) +from ._generated.models import ( + ModifiedAccessConditions, + SourceModifiedAccessConditions, + CpkScopeInfo, + ContainerCpkScopeInfo, + QueryFormat, + QuerySerialization, + DelimitedTextConfiguration, + JsonTextConfiguration, + ArrowConfiguration, + QueryFormatType, + BlobTag, + BlobTags, LeaseAccessConditions +) + +if TYPE_CHECKING: + from ._lease import BlobLeaseClient + + +_SUPPORTED_API_VERSIONS = [ + '2019-02-02', + '2019-07-07', + '2019-10-10', + '2019-12-12', + '2020-02-10', + '2020-04-08', + '2020-06-12', + '2020-08-04', + '2020-10-02', + '2020-12-06', + '2021-02-12', + '2021-04-10' +] + + +def _get_match_headers(kwargs, match_param, etag_param): + # type: (Dict[str, Any], str, str) -> Tuple(Dict[str, Any], Optional[str], Optional[str]) + if_match = None + if_none_match = None + match_condition = kwargs.pop(match_param, None) + if match_condition == MatchConditions.IfNotModified: + if_match = kwargs.pop(etag_param, None) + if not if_match: + raise ValueError("'{}' specified without '{}'.".format(match_param, etag_param)) + elif match_condition == MatchConditions.IfPresent: + if_match = '*' + elif match_condition == MatchConditions.IfModified: + if_none_match = kwargs.pop(etag_param, None) + if not if_none_match: + raise ValueError("'{}' specified without '{}'.".format(match_param, etag_param)) + elif match_condition == MatchConditions.IfMissing: + if_none_match = '*' + elif match_condition is None: + if kwargs.get(etag_param): + raise ValueError("'{}' specified without '{}'.".format(etag_param, match_param)) + else: + raise TypeError("Invalid match condition: {}".format(match_condition)) + return if_match, if_none_match + + +def get_access_conditions(lease): + # type: (Optional[Union[BlobLeaseClient, str]]) -> Union[LeaseAccessConditions, None] + try: + lease_id = lease.id # type: ignore + except AttributeError: + lease_id = lease # type: ignore + return LeaseAccessConditions(lease_id=lease_id) if lease_id else None + + +def get_modify_conditions(kwargs): + # type: (Dict[str, Any]) -> ModifiedAccessConditions + if_match, if_none_match = _get_match_headers(kwargs, 'match_condition', 'etag') + return ModifiedAccessConditions( + if_modified_since=kwargs.pop('if_modified_since', None), + if_unmodified_since=kwargs.pop('if_unmodified_since', None), + if_match=if_match or kwargs.pop('if_match', None), + if_none_match=if_none_match or kwargs.pop('if_none_match', None), + if_tags=kwargs.pop('if_tags_match_condition', None) + ) + + +def get_source_conditions(kwargs): + # type: (Dict[str, Any]) -> SourceModifiedAccessConditions + if_match, if_none_match = _get_match_headers(kwargs, 'source_match_condition', 'source_etag') + return SourceModifiedAccessConditions( + source_if_modified_since=kwargs.pop('source_if_modified_since', None), + source_if_unmodified_since=kwargs.pop('source_if_unmodified_since', None), + source_if_match=if_match or kwargs.pop('source_if_match', None), + source_if_none_match=if_none_match or kwargs.pop('source_if_none_match', None), + source_if_tags=kwargs.pop('source_if_tags_match_condition', None) + ) + + +def 
get_cpk_scope_info(kwargs): + # type: (Dict[str, Any]) -> CpkScopeInfo + if 'encryption_scope' in kwargs: + return CpkScopeInfo(encryption_scope=kwargs.pop('encryption_scope')) + return None + + +def get_container_cpk_scope_info(kwargs): + # type: (Dict[str, Any]) -> ContainerCpkScopeInfo + encryption_scope = kwargs.pop('container_encryption_scope', None) + if encryption_scope: + if isinstance(encryption_scope, ContainerEncryptionScope): + return ContainerCpkScopeInfo( + default_encryption_scope=encryption_scope.default_encryption_scope, + prevent_encryption_scope_override=encryption_scope.prevent_encryption_scope_override + ) + if isinstance(encryption_scope, dict): + return ContainerCpkScopeInfo( + default_encryption_scope=encryption_scope['default_encryption_scope'], + prevent_encryption_scope_override=encryption_scope.get('prevent_encryption_scope_override') + ) + raise TypeError("Container encryption scope must be dict or type ContainerEncryptionScope.") + return None + + +def get_api_version(kwargs): + # type: (Dict[str, Any]) -> str + api_version = kwargs.get('api_version', None) + if api_version and api_version not in _SUPPORTED_API_VERSIONS: + versions = '\n'.join(_SUPPORTED_API_VERSIONS) + raise ValueError("Unsupported API version '{}'. Please select from:\n{}".format(api_version, versions)) + return api_version or _SUPPORTED_API_VERSIONS[-1] + + +def serialize_blob_tags_header(tags=None): + # type: (Optional[Dict[str, str]]) -> str + if tags is None: + return None + + components = list() + if tags: + for key, value in tags.items(): + components.append(quote(key, safe='.-')) + components.append('=') + components.append(quote(value, safe='.-')) + components.append('&') + + if components: + del components[-1] + + return ''.join(components) + + +def serialize_blob_tags(tags=None): + # type: (Optional[Dict[str, str]]) -> Union[BlobTags, None] + tag_list = list() + if tags: + tag_list = [BlobTag(key=k, value=v) for k, v in tags.items()] + return BlobTags(blob_tag_set=tag_list) + + +def serialize_query_format(formater): + if formater == "ParquetDialect": + qq_format = QueryFormat( + type=QueryFormatType.PARQUET, + parquet_text_configuration=' ' + ) + elif isinstance(formater, DelimitedJsonDialect): + serialization_settings = JsonTextConfiguration( + record_separator=formater.delimiter + ) + qq_format = QueryFormat( + type=QueryFormatType.json, + json_text_configuration=serialization_settings) + elif hasattr(formater, 'quotechar'): # This supports a csv.Dialect as well + try: + headers = formater.has_header + except AttributeError: + headers = False + serialization_settings = DelimitedTextConfiguration( + column_separator=formater.delimiter, + field_quote=formater.quotechar, + record_separator=formater.lineterminator, + escape_char=formater.escapechar, + headers_present=headers + ) + qq_format = QueryFormat( + type=QueryFormatType.delimited, + delimited_text_configuration=serialization_settings + ) + elif isinstance(formater, list): + serialization_settings = ArrowConfiguration( + schema=formater + ) + qq_format = QueryFormat( + type=QueryFormatType.arrow, + arrow_configuration=serialization_settings) + elif not formater: + return None + else: + raise TypeError("Format must be DelimitedTextDialect or DelimitedJsonDialect or ParquetDialect.") + return QuerySerialization(format=qq_format) diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/__init__.py 
b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/__init__.py new file mode 100644 index 00000000000..160f8822382 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/__init__.py @@ -0,0 +1,56 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import base64 +import hashlib +import hmac + +try: + from urllib.parse import quote, unquote +except ImportError: + from urllib2 import quote, unquote # type: ignore + +import six + + +def url_quote(url): + return quote(url) + + +def url_unquote(url): + return unquote(url) + + +def encode_base64(data): + if isinstance(data, six.text_type): + data = data.encode('utf-8') + encoded = base64.b64encode(data) + return encoded.decode('utf-8') + + +def decode_base64_to_bytes(data): + if isinstance(data, six.text_type): + data = data.encode('utf-8') + return base64.b64decode(data) + + +def decode_base64_to_text(data): + decoded_bytes = decode_base64_to_bytes(data) + return decoded_bytes.decode('utf-8') + + +def sign_string(key, string_to_sign, key_is_base64=True): + if key_is_base64: + key = decode_base64_to_bytes(key) + else: + if isinstance(key, six.text_type): + key = key.encode('utf-8') + if isinstance(string_to_sign, six.text_type): + string_to_sign = string_to_sign.encode('utf-8') + signed_hmac_sha256 = hmac.HMAC(key, string_to_sign, hashlib.sha256) + digest = signed_hmac_sha256.digest() + encoded_digest = encode_base64(digest) + return encoded_digest diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/authentication.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/authentication.py new file mode 100644 index 00000000000..adf64c7485b --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/authentication.py @@ -0,0 +1,178 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import logging +import re +import sys + +try: + from urllib.parse import urlparse, unquote +except ImportError: + from urlparse import urlparse # type: ignore + from urllib2 import unquote # type: ignore + +try: + from yarl import URL +except ImportError: + pass + +try: + from azure.core.pipeline.transport import AioHttpTransport +except ImportError: + AioHttpTransport = None + +from azure.core.exceptions import ClientAuthenticationError +from azure.core.pipeline.policies import SansIOHTTPPolicy + +from . 
import sign_string + + +logger = logging.getLogger(__name__) + + + +# wraps a given exception with the desired exception type +def _wrap_exception(ex, desired_type): + msg = "" + if ex.args: + msg = ex.args[0] + if sys.version_info >= (3,): + # Automatic chaining in Python 3 means we keep the trace + return desired_type(msg) + # There isn't a good solution in 2 for keeping the stack trace + # in general, or that will not result in an error in 3 + # However, we can keep the previous error type and message + # TODO: In the future we will log the trace + return desired_type('{}: {}'.format(ex.__class__.__name__, msg)) + + +class AzureSigningError(ClientAuthenticationError): + """ + Represents a fatal error when attempting to sign a request. + In general, the cause of this exception is user error. For example, the given account key is not valid. + Please visit https://docs.microsoft.com/en-us/azure/storage/common/storage-create-storage-account for more info. + """ + + +# pylint: disable=no-self-use +class SharedKeyCredentialPolicy(SansIOHTTPPolicy): + + def __init__(self, account_name, account_key): + self.account_name = account_name + self.account_key = account_key + super(SharedKeyCredentialPolicy, self).__init__() + + @staticmethod + def _get_headers(request, headers_to_sign): + headers = dict((name.lower(), value) for name, value in request.http_request.headers.items() if value) + if 'content-length' in headers and headers['content-length'] == '0': + del headers['content-length'] + return '\n'.join(headers.get(x, '') for x in headers_to_sign) + '\n' + + @staticmethod + def _get_verb(request): + return request.http_request.method + '\n' + + def _get_canonicalized_resource(self, request): + uri_path = urlparse(request.http_request.url).path + try: + if isinstance(request.context.transport, AioHttpTransport) or \ + isinstance(getattr(request.context.transport, "_transport", None), AioHttpTransport) or \ + isinstance(getattr(getattr(request.context.transport, "_transport", None), "_transport", None), + AioHttpTransport): + uri_path = URL(uri_path) + return '/' + self.account_name + str(uri_path) + except TypeError: + pass + return '/' + self.account_name + uri_path + + @staticmethod + def _get_canonicalized_headers(request): + string_to_sign = '' + x_ms_headers = [] + for name, value in request.http_request.headers.items(): + if name.startswith('x-ms-'): + x_ms_headers.append((name.lower(), value)) + x_ms_headers.sort() + for name, value in x_ms_headers: + if value is not None: + string_to_sign += ''.join([name, ':', value, '\n']) + return string_to_sign + + @staticmethod + def _get_canonicalized_resource_query(request): + sorted_queries = list(request.http_request.query.items()) + sorted_queries.sort() + + string_to_sign = '' + for name, value in sorted_queries: + if value is not None: + string_to_sign += '\n' + name.lower() + ':' + unquote(value) + + return string_to_sign + + def _add_authorization_header(self, request, string_to_sign): + try: + signature = sign_string(self.account_key, string_to_sign) + auth_string = 'SharedKey ' + self.account_name + ':' + signature + request.http_request.headers['Authorization'] = auth_string + except Exception as ex: + # Wrap any error that occurred as signing error + # Doing so will clarify/locate the source of problem + raise _wrap_exception(ex, AzureSigningError) + + def on_request(self, request): + string_to_sign = \ + self._get_verb(request) + \ + self._get_headers( + request, + [ + 'content-encoding', 'content-language', 'content-length', + 
'content-md5', 'content-type', 'date', 'if-modified-since', + 'if-match', 'if-none-match', 'if-unmodified-since', 'byte_range' + ] + ) + \ + self._get_canonicalized_headers(request) + \ + self._get_canonicalized_resource(request) + \ + self._get_canonicalized_resource_query(request) + + self._add_authorization_header(request, string_to_sign) + #logger.debug("String_to_sign=%s", string_to_sign) + + +class StorageHttpChallenge(object): + def __init__(self, challenge): + """ Parses an HTTP WWW-Authentication Bearer challenge from the Storage service. """ + if not challenge: + raise ValueError("Challenge cannot be empty") + + self._parameters = {} + self.scheme, trimmed_challenge = challenge.strip().split(" ", 1) + + # name=value pairs either comma or space separated with values possibly being + # enclosed in quotes + for item in re.split('[, ]', trimmed_challenge): + comps = item.split("=") + if len(comps) == 2: + key = comps[0].strip(' "') + value = comps[1].strip(' "') + if key: + self._parameters[key] = value + + # Extract and verify required parameters + self.authorization_uri = self._parameters.get('authorization_uri') + if not self.authorization_uri: + raise ValueError("Authorization Uri not found") + + self.resource_id = self._parameters.get('resource_id') + if not self.resource_id: + raise ValueError("Resource id not found") + + uri_path = urlparse(self.authorization_uri).path.lstrip("/") + self.tenant_id = uri_path.split("/")[0] + + def get_value(self, key): + return self._parameters.get(key) diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/__init__.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/__init__.py new file mode 100644 index 00000000000..5b396cd202e --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/__init__.py @@ -0,0 +1,5 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/avro_io.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/avro_io.py new file mode 100644 index 00000000000..93a5c134849 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/avro_io.py @@ -0,0 +1,464 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +"""Input/output utilities. + +Includes: + - i/o-specific constants + - i/o-specific exceptions + - schema validation + - leaf value encoding and decoding + - datum reader/writer stuff (?) + +Also includes a generic representation for data, which uses the +following mapping: + - Schema records are implemented as dict. + - Schema arrays are implemented as list. + - Schema maps are implemented as dict. + - Schema strings are implemented as unicode. 
+ - Schema bytes are implemented as str.
+ - Schema ints are implemented as int.
+ - Schema longs are implemented as long.
+ - Schema floats are implemented as float.
+ - Schema doubles are implemented as float.
+ - Schema booleans are implemented as bool.
+"""
+
+import json
+import logging
+import struct
+import sys
+
+from ..avro import schema
+
+PY3 = sys.version_info[0] == 3
+
+logger = logging.getLogger(__name__)
+
+# ------------------------------------------------------------------------------
+# Constants
+
+STRUCT_FLOAT = struct.Struct('<f')    # little-endian float
+STRUCT_DOUBLE = struct.Struct('<d')   # little-endian double
+
+
+# ------------------------------------------------------------------------------
+# Exceptions
+
+
+class SchemaResolutionException(schema.AvroException):
+    def __init__(self, fail_msg, writer_schema=None):
+        pretty_writers = json.dumps(json.loads(str(writer_schema)), indent=2)
+        if writer_schema:
+            fail_msg += "\nWriter's Schema: %s" % pretty_writers
+        schema.AvroException.__init__(self, fail_msg)
+
+
+# ------------------------------------------------------------------------------
+# Decoder
+
+
+class BinaryDecoder(object):
+    """Read leaf values."""
+
+    def __init__(self, reader):
+        """
+        reader is a Python object on which we can call read, seek, and tell.
+        """
+        self._reader = reader
+
+    @property
+    def reader(self):
+        """Reports the reader used by this decoder."""
+        return self._reader
+
+    def read(self, n):
+        """Read n bytes.
+
+        Args:
+          n: Number of bytes to read.
+        Returns:
+          The next n bytes from the input.
+        """
+        assert (n >= 0), n
+        input_bytes = self.reader.read(n)
+        if n > 0 and not input_bytes:
+            raise StopIteration
+        assert (len(input_bytes) == n), input_bytes
+        return input_bytes
+
+    @staticmethod
+    def read_null():
+        """
+        null is written as zero bytes
+        """
+        return None
+
+    def read_boolean(self):
+        """
+        a boolean is written as a single byte
+        whose value is either 0 (false) or 1 (true).
+        """
+        b = ord(self.read(1))
+        if b == 1:
+            return True
+        if b == 0:
+            return False
+        fail_msg = "Invalid value for boolean: %s" % b
+        raise schema.AvroException(fail_msg)
+
+    def read_int(self):
+        """
+        int and long values are written using variable-length, zig-zag coding.
+        """
+        return self.read_long()
+
+    def read_long(self):
+        """
+        int and long values are written using variable-length, zig-zag coding.
+        """
+        b = ord(self.read(1))
+        n = b & 0x7F
+        shift = 7
+        while (b & 0x80) != 0:
+            b = ord(self.read(1))
+            n |= (b & 0x7F) << shift
+            shift += 7
+        datum = (n >> 1) ^ -(n & 1)
+        return datum
+
+    def read_float(self):
+        """
+        A float is written as 4 bytes.
+        The float is converted into a 32-bit integer using a method equivalent to
+        Java's floatToIntBits and then encoded in little-endian format.
+        """
+        return STRUCT_FLOAT.unpack(self.read(4))[0]
+
+    def read_double(self):
+        """
+        A double is written as 8 bytes.
+        The double is converted into a 64-bit integer using a method equivalent to
+        Java's doubleToLongBits and then encoded in little-endian format.
+        """
+        return STRUCT_DOUBLE.unpack(self.read(8))[0]
+
+    def read_bytes(self):
+        """
+        Bytes are encoded as a long followed by that many bytes of data.
+        """
+        nbytes = self.read_long()
+        assert (nbytes >= 0), nbytes
+        return self.read(nbytes)
+
+    def read_utf8(self):
+        """
+        A string is encoded as a long followed by
+        that many bytes of UTF-8 encoded character data.
+        """
+        input_bytes = self.read_bytes()
+        if PY3:
+            try:
+                return input_bytes.decode('utf-8')
+            except UnicodeDecodeError as exn:
+                logger.error('Invalid UTF-8 input bytes: %r', input_bytes)
+                raise exn
+        else:
+            # PY2
+            return unicode(input_bytes, "utf-8") # pylint: disable=undefined-variable
+
+    def skip_null(self):
+        pass
+
+    def skip_boolean(self):
+        self.skip(1)
+
+    def skip_int(self):
+        self.skip_long()
+
+    def skip_long(self):
+        b = ord(self.read(1))
+        while (b & 0x80) != 0:
+            b = ord(self.read(1))
+
+    def skip_float(self):
+        self.skip(4)
+
+    def skip_double(self):
+        self.skip(8)
+
+    def skip_bytes(self):
+        self.skip(self.read_long())
+
+    def skip_utf8(self):
+        self.skip_bytes()
+
+    def skip(self, n):
+        self.reader.seek(self.reader.tell() + n)
+
+
+# ------------------------------------------------------------------------------
+# DatumReader
+
+
+class DatumReader(object):
+    """Deserialize Avro-encoded data into a Python data structure."""
+
+    def __init__(self, writer_schema=None):
+        """
+        As defined in the Avro specification, we call the schema encoded
+        in the data the "writer's schema".
+ """ + self._writer_schema = writer_schema + + # read/write properties + def set_writer_schema(self, writer_schema): + self._writer_schema = writer_schema + + writer_schema = property(lambda self: self._writer_schema, + set_writer_schema) + + def read(self, decoder): + return self.read_data(self.writer_schema, decoder) + + def read_data(self, writer_schema, decoder): + # function dispatch for reading data based on type of writer's schema + if writer_schema.type == 'null': + result = decoder.read_null() + elif writer_schema.type == 'boolean': + result = decoder.read_boolean() + elif writer_schema.type == 'string': + result = decoder.read_utf8() + elif writer_schema.type == 'int': + result = decoder.read_int() + elif writer_schema.type == 'long': + result = decoder.read_long() + elif writer_schema.type == 'float': + result = decoder.read_float() + elif writer_schema.type == 'double': + result = decoder.read_double() + elif writer_schema.type == 'bytes': + result = decoder.read_bytes() + elif writer_schema.type == 'fixed': + result = self.read_fixed(writer_schema, decoder) + elif writer_schema.type == 'enum': + result = self.read_enum(writer_schema, decoder) + elif writer_schema.type == 'array': + result = self.read_array(writer_schema, decoder) + elif writer_schema.type == 'map': + result = self.read_map(writer_schema, decoder) + elif writer_schema.type in ['union', 'error_union']: + result = self.read_union(writer_schema, decoder) + elif writer_schema.type in ['record', 'error', 'request']: + result = self.read_record(writer_schema, decoder) + else: + fail_msg = "Cannot read unknown schema type: %s" % writer_schema.type + raise schema.AvroException(fail_msg) + return result + + def skip_data(self, writer_schema, decoder): + if writer_schema.type == 'null': + result = decoder.skip_null() + elif writer_schema.type == 'boolean': + result = decoder.skip_boolean() + elif writer_schema.type == 'string': + result = decoder.skip_utf8() + elif writer_schema.type == 'int': + result = decoder.skip_int() + elif writer_schema.type == 'long': + result = decoder.skip_long() + elif writer_schema.type == 'float': + result = decoder.skip_float() + elif writer_schema.type == 'double': + result = decoder.skip_double() + elif writer_schema.type == 'bytes': + result = decoder.skip_bytes() + elif writer_schema.type == 'fixed': + result = self.skip_fixed(writer_schema, decoder) + elif writer_schema.type == 'enum': + result = self.skip_enum(decoder) + elif writer_schema.type == 'array': + self.skip_array(writer_schema, decoder) + result = None + elif writer_schema.type == 'map': + self.skip_map(writer_schema, decoder) + result = None + elif writer_schema.type in ['union', 'error_union']: + result = self.skip_union(writer_schema, decoder) + elif writer_schema.type in ['record', 'error', 'request']: + self.skip_record(writer_schema, decoder) + result = None + else: + fail_msg = "Unknown schema type: %s" % writer_schema.type + raise schema.AvroException(fail_msg) + return result + + @staticmethod + def read_fixed(writer_schema, decoder): + """ + Fixed instances are encoded using the number of bytes declared + in the schema. + """ + return decoder.read(writer_schema.size) + + @staticmethod + def skip_fixed(writer_schema, decoder): + return decoder.skip(writer_schema.size) + + @staticmethod + def read_enum(writer_schema, decoder): + """ + An enum is encoded by a int, representing the zero-based position + of the symbol in the schema. 
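+
+        For example, with symbols ['A', 'B', 'C'], the symbol 'B' is encoded as the int 1.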
+ """ + # read data + index_of_symbol = decoder.read_int() + if index_of_symbol >= len(writer_schema.symbols): + fail_msg = "Can't access enum index %d for enum with %d symbols" \ + % (index_of_symbol, len(writer_schema.symbols)) + raise SchemaResolutionException(fail_msg, writer_schema) + read_symbol = writer_schema.symbols[index_of_symbol] + return read_symbol + + @staticmethod + def skip_enum(decoder): + return decoder.skip_int() + + def read_array(self, writer_schema, decoder): + """ + Arrays are encoded as a series of blocks. + + Each block consists of a long count value, + followed by that many array items. + A block with count zero indicates the end of the array. + Each item is encoded per the array's item schema. + + If a block's count is negative, + then the count is followed immediately by a long block size, + indicating the number of bytes in the block. + The actual count in this case + is the absolute value of the count written. + """ + read_items = [] + block_count = decoder.read_long() + while block_count != 0: + if block_count < 0: + block_count = -block_count + decoder.read_long() + for _ in range(block_count): + read_items.append(self.read_data(writer_schema.items, decoder)) + block_count = decoder.read_long() + return read_items + + def skip_array(self, writer_schema, decoder): + block_count = decoder.read_long() + while block_count != 0: + if block_count < 0: + block_size = decoder.read_long() + decoder.skip(block_size) + else: + for _ in range(block_count): + self.skip_data(writer_schema.items, decoder) + block_count = decoder.read_long() + + def read_map(self, writer_schema, decoder): + """ + Maps are encoded as a series of blocks. + + Each block consists of a long count value, + followed by that many key/value pairs. + A block with count zero indicates the end of the map. + Each item is encoded per the map's value schema. + + If a block's count is negative, + then the count is followed immediately by a long block size, + indicating the number of bytes in the block. + The actual count in this case + is the absolute value of the count written. + """ + read_items = {} + block_count = decoder.read_long() + while block_count != 0: + if block_count < 0: + block_count = -block_count + decoder.read_long() + for _ in range(block_count): + key = decoder.read_utf8() + read_items[key] = self.read_data(writer_schema.values, decoder) + block_count = decoder.read_long() + return read_items + + def skip_map(self, writer_schema, decoder): + block_count = decoder.read_long() + while block_count != 0: + if block_count < 0: + block_size = decoder.read_long() + decoder.skip(block_size) + else: + for _ in range(block_count): + decoder.skip_utf8() + self.skip_data(writer_schema.values, decoder) + block_count = decoder.read_long() + + def read_union(self, writer_schema, decoder): + """ + A union is encoded by first writing a long value indicating + the zero-based position within the union of the schema of its value. + The value is then encoded per the indicated schema within the union. 
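+
+        For example, for the union ["null", "string"], a null value is encoded as the
+        long 0 (with nothing following), while the string "a" is encoded as the long 1
+        followed by the Avro-encoded string.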
+ """ + # schema resolution + index_of_schema = int(decoder.read_long()) + if index_of_schema >= len(writer_schema.schemas): + fail_msg = "Can't access branch index %d for union with %d branches" \ + % (index_of_schema, len(writer_schema.schemas)) + raise SchemaResolutionException(fail_msg, writer_schema) + selected_writer_schema = writer_schema.schemas[index_of_schema] + + # read data + return self.read_data(selected_writer_schema, decoder) + + def skip_union(self, writer_schema, decoder): + index_of_schema = int(decoder.read_long()) + if index_of_schema >= len(writer_schema.schemas): + fail_msg = "Can't access branch index %d for union with %d branches" \ + % (index_of_schema, len(writer_schema.schemas)) + raise SchemaResolutionException(fail_msg, writer_schema) + return self.skip_data(writer_schema.schemas[index_of_schema], decoder) + + def read_record(self, writer_schema, decoder): + """ + A record is encoded by encoding the values of its fields + in the order that they are declared. In other words, a record + is encoded as just the concatenation of the encodings of its fields. + Field values are encoded per their schema. + + Schema Resolution: + * the ordering of fields may be different: fields are matched by name. + * schemas for fields with the same name in both records are resolved + recursively. + * if the writer's record contains a field with a name not present in the + reader's record, the writer's value for that field is ignored. + * if the reader's record schema has a field that contains a default value, + and writer's schema does not have a field with the same name, then the + reader should use the default value from its field. + * if the reader's record schema has a field with no default value, and + writer's schema does not have a field with the same name, then the + field's value is unset. + """ + # schema resolution + read_record = {} + for field in writer_schema.fields: + field_val = self.read_data(field.type, decoder) + read_record[field.name] = field_val + return read_record + + def skip_record(self, writer_schema, decoder): + for field in writer_schema.fields: + self.skip_data(field.type, decoder) + + +# ------------------------------------------------------------------------------ + +if __name__ == '__main__': + raise Exception('Not a standalone module') diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/avro_io_async.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/avro_io_async.py new file mode 100644 index 00000000000..e9812163795 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/avro_io_async.py @@ -0,0 +1,448 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +"""Input/output utilities. + +Includes: + - i/o-specific constants + - i/o-specific exceptions + - schema validation + - leaf value encoding and decoding + - datum reader/writer stuff (?) + +Also includes a generic representation for data, which uses the +following mapping: + - Schema records are implemented as dict. + - Schema arrays are implemented as list. + - Schema maps are implemented as dict. 
+ - Schema strings are implemented as unicode. + - Schema bytes are implemented as str. + - Schema ints are implemented as int. + - Schema longs are implemented as long. + - Schema floats are implemented as float. + - Schema doubles are implemented as float. + - Schema booleans are implemented as bool. +""" + +import logging +import sys + +from ..avro import schema + +from .avro_io import STRUCT_FLOAT, STRUCT_DOUBLE, SchemaResolutionException + +PY3 = sys.version_info[0] == 3 + +logger = logging.getLogger(__name__) + +# ------------------------------------------------------------------------------ +# Decoder + + +class AsyncBinaryDecoder(object): + """Read leaf values.""" + + def __init__(self, reader): + """ + reader is a Python object on which we can call read, seek, and tell. + """ + self._reader = reader + + @property + def reader(self): + """Reports the reader used by this decoder.""" + return self._reader + + async def read(self, n): + """Read n bytes. + + Args: + n: Number of bytes to read. + Returns: + The next n bytes from the input. + """ + assert (n >= 0), n + input_bytes = await self.reader.read(n) + if n > 0 and not input_bytes: + raise StopAsyncIteration + assert (len(input_bytes) == n), input_bytes + return input_bytes + + @staticmethod + def read_null(): + """ + null is written as zero bytes + """ + return None + + async def read_boolean(self): + """ + a boolean is written as a single byte + whose value is either 0 (false) or 1 (true). + """ + b = ord(await self.read(1)) + if b == 1: + return True + if b == 0: + return False + fail_msg = "Invalid value for boolean: %s" % b + raise schema.AvroException(fail_msg) + + async def read_int(self): + """ + int and long values are written using variable-length, zig-zag coding. + """ + return await self.read_long() + + async def read_long(self): + """ + int and long values are written using variable-length, zig-zag coding. + """ + b = ord(await self.read(1)) + n = b & 0x7F + shift = 7 + while (b & 0x80) != 0: + b = ord(await self.read(1)) + n |= (b & 0x7F) << shift + shift += 7 + datum = (n >> 1) ^ -(n & 1) + return datum + + async def read_float(self): + """ + A float is written as 4 bytes. + The float is converted into a 32-bit integer using a method equivalent to + Java's floatToIntBits and then encoded in little-endian format. + """ + return STRUCT_FLOAT.unpack(await self.read(4))[0] + + async def read_double(self): + """ + A double is written as 8 bytes. + The double is converted into a 64-bit integer using a method equivalent to + Java's doubleToLongBits and then encoded in little-endian format. + """ + return STRUCT_DOUBLE.unpack(await self.read(8))[0] + + async def read_bytes(self): + """ + Bytes are encoded as a long followed by that many bytes of data. + """ + nbytes = await self.read_long() + assert (nbytes >= 0), nbytes + return await self.read(nbytes) + + async def read_utf8(self): + """ + A string is encoded as a long followed by + that many bytes of UTF-8 encoded character data. 
+ """ + input_bytes = await self.read_bytes() + if PY3: + try: + return input_bytes.decode('utf-8') + except UnicodeDecodeError as exn: + logger.error('Invalid UTF-8 input bytes: %r', input_bytes) + raise exn + else: + # PY2 + return unicode(input_bytes, "utf-8") # pylint: disable=undefined-variable + + def skip_null(self): + pass + + async def skip_boolean(self): + await self.skip(1) + + async def skip_int(self): + await self.skip_long() + + async def skip_long(self): + b = ord(await self.read(1)) + while (b & 0x80) != 0: + b = ord(await self.read(1)) + + async def skip_float(self): + await self.skip(4) + + async def skip_double(self): + await self.skip(8) + + async def skip_bytes(self): + await self.skip(await self.read_long()) + + async def skip_utf8(self): + await self.skip_bytes() + + async def skip(self, n): + await self.reader.seek(await self.reader.tell() + n) + + +# ------------------------------------------------------------------------------ +# DatumReader + + +class AsyncDatumReader(object): + """Deserialize Avro-encoded data into a Python data structure.""" + + def __init__(self, writer_schema=None): + """ + As defined in the Avro specification, we call the schema encoded + in the data the "writer's schema", and the schema expected by the + reader the "reader's schema". + """ + self._writer_schema = writer_schema + + # read/write properties + def set_writer_schema(self, writer_schema): + self._writer_schema = writer_schema + + writer_schema = property(lambda self: self._writer_schema, + set_writer_schema) + + async def read(self, decoder): + return await self.read_data(self.writer_schema, decoder) + + async def read_data(self, writer_schema, decoder): + # function dispatch for reading data based on type of writer's schema + if writer_schema.type == 'null': + result = decoder.read_null() + elif writer_schema.type == 'boolean': + result = await decoder.read_boolean() + elif writer_schema.type == 'string': + result = await decoder.read_utf8() + elif writer_schema.type == 'int': + result = await decoder.read_int() + elif writer_schema.type == 'long': + result = await decoder.read_long() + elif writer_schema.type == 'float': + result = await decoder.read_float() + elif writer_schema.type == 'double': + result = await decoder.read_double() + elif writer_schema.type == 'bytes': + result = await decoder.read_bytes() + elif writer_schema.type == 'fixed': + result = await self.read_fixed(writer_schema, decoder) + elif writer_schema.type == 'enum': + result = await self.read_enum(writer_schema, decoder) + elif writer_schema.type == 'array': + result = await self.read_array(writer_schema, decoder) + elif writer_schema.type == 'map': + result = await self.read_map(writer_schema, decoder) + elif writer_schema.type in ['union', 'error_union']: + result = await self.read_union(writer_schema, decoder) + elif writer_schema.type in ['record', 'error', 'request']: + result = await self.read_record(writer_schema, decoder) + else: + fail_msg = "Cannot read unknown schema type: %s" % writer_schema.type + raise schema.AvroException(fail_msg) + return result + + async def skip_data(self, writer_schema, decoder): + if writer_schema.type == 'null': + result = decoder.skip_null() + elif writer_schema.type == 'boolean': + result = await decoder.skip_boolean() + elif writer_schema.type == 'string': + result = await decoder.skip_utf8() + elif writer_schema.type == 'int': + result = await decoder.skip_int() + elif writer_schema.type == 'long': + result = await decoder.skip_long() + elif writer_schema.type == 
'float': + result = await decoder.skip_float() + elif writer_schema.type == 'double': + result = await decoder.skip_double() + elif writer_schema.type == 'bytes': + result = await decoder.skip_bytes() + elif writer_schema.type == 'fixed': + result = await self.skip_fixed(writer_schema, decoder) + elif writer_schema.type == 'enum': + result = await self.skip_enum(decoder) + elif writer_schema.type == 'array': + await self.skip_array(writer_schema, decoder) + result = None + elif writer_schema.type == 'map': + await self.skip_map(writer_schema, decoder) + result = None + elif writer_schema.type in ['union', 'error_union']: + result = await self.skip_union(writer_schema, decoder) + elif writer_schema.type in ['record', 'error', 'request']: + await self.skip_record(writer_schema, decoder) + result = None + else: + fail_msg = "Unknown schema type: %s" % writer_schema.type + raise schema.AvroException(fail_msg) + return result + + @staticmethod + async def read_fixed(writer_schema, decoder): + """ + Fixed instances are encoded using the number of bytes declared + in the schema. + """ + return await decoder.read(writer_schema.size) + + @staticmethod + async def skip_fixed(writer_schema, decoder): + return await decoder.skip(writer_schema.size) + + @staticmethod + async def read_enum(writer_schema, decoder): + """ + An enum is encoded by a int, representing the zero-based position + of the symbol in the schema. + """ + # read data + index_of_symbol = await decoder.read_int() + if index_of_symbol >= len(writer_schema.symbols): + fail_msg = "Can't access enum index %d for enum with %d symbols" \ + % (index_of_symbol, len(writer_schema.symbols)) + raise SchemaResolutionException(fail_msg, writer_schema) + read_symbol = writer_schema.symbols[index_of_symbol] + return read_symbol + + @staticmethod + async def skip_enum(decoder): + return await decoder.skip_int() + + async def read_array(self, writer_schema, decoder): + """ + Arrays are encoded as a series of blocks. + + Each block consists of a long count value, + followed by that many array items. + A block with count zero indicates the end of the array. + Each item is encoded per the array's item schema. + + If a block's count is negative, + then the count is followed immediately by a long block size, + indicating the number of bytes in the block. + The actual count in this case + is the absolute value of the count written. + """ + read_items = [] + block_count = await decoder.read_long() + while block_count != 0: + if block_count < 0: + block_count = -block_count + await decoder.read_long() + for _ in range(block_count): + read_items.append(await self.read_data(writer_schema.items, decoder)) + block_count = await decoder.read_long() + return read_items + + async def skip_array(self, writer_schema, decoder): + block_count = await decoder.read_long() + while block_count != 0: + if block_count < 0: + block_size = await decoder.read_long() + await decoder.skip(block_size) + else: + for _ in range(block_count): + await self.skip_data(writer_schema.items, decoder) + block_count = await decoder.read_long() + + async def read_map(self, writer_schema, decoder): + """ + Maps are encoded as a series of blocks. + + Each block consists of a long count value, + followed by that many key/value pairs. + A block with count zero indicates the end of the map. + Each item is encoded per the map's value schema. + + If a block's count is negative, + then the count is followed immediately by a long block size, + indicating the number of bytes in the block. 
+ The actual count in this case + is the absolute value of the count written. + """ + read_items = {} + block_count = await decoder.read_long() + while block_count != 0: + if block_count < 0: + block_count = -block_count + await decoder.read_long() + for _ in range(block_count): + key = await decoder.read_utf8() + read_items[key] = await self.read_data(writer_schema.values, decoder) + block_count = await decoder.read_long() + return read_items + + async def skip_map(self, writer_schema, decoder): + block_count = await decoder.read_long() + while block_count != 0: + if block_count < 0: + block_size = await decoder.read_long() + await decoder.skip(block_size) + else: + for _ in range(block_count): + await decoder.skip_utf8() + await self.skip_data(writer_schema.values, decoder) + block_count = await decoder.read_long() + + async def read_union(self, writer_schema, decoder): + """ + A union is encoded by first writing a long value indicating + the zero-based position within the union of the schema of its value. + The value is then encoded per the indicated schema within the union. + """ + # schema resolution + index_of_schema = int(await decoder.read_long()) + if index_of_schema >= len(writer_schema.schemas): + fail_msg = "Can't access branch index %d for union with %d branches" \ + % (index_of_schema, len(writer_schema.schemas)) + raise SchemaResolutionException(fail_msg, writer_schema) + selected_writer_schema = writer_schema.schemas[index_of_schema] + + # read data + return await self.read_data(selected_writer_schema, decoder) + + async def skip_union(self, writer_schema, decoder): + index_of_schema = int(await decoder.read_long()) + if index_of_schema >= len(writer_schema.schemas): + fail_msg = "Can't access branch index %d for union with %d branches" \ + % (index_of_schema, len(writer_schema.schemas)) + raise SchemaResolutionException(fail_msg, writer_schema) + return await self.skip_data(writer_schema.schemas[index_of_schema], decoder) + + async def read_record(self, writer_schema, decoder): + """ + A record is encoded by encoding the values of its fields + in the order that they are declared. In other words, a record + is encoded as just the concatenation of the encodings of its fields. + Field values are encoded per their schema. + + Schema Resolution: + * the ordering of fields may be different: fields are matched by name. + * schemas for fields with the same name in both records are resolved + recursively. + * if the writer's record contains a field with a name not present in the + reader's record, the writer's value for that field is ignored. + * if the reader's record schema has a field that contains a default value, + and writer's schema does not have a field with the same name, then the + reader should use the default value from its field. + * if the reader's record schema has a field with no default value, and + writer's schema does not have a field with the same name, then the + field's value is unset. 
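+
+        For example, a record with an int field "a" and a string field "b" holding
+        {"a": 1, "b": "x"} is encoded simply as the int 1 followed by the string "x".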
+ """ + # schema resolution + read_record = {} + for field in writer_schema.fields: + field_val = await self.read_data(field.type, decoder) + read_record[field.name] = field_val + return read_record + + async def skip_record(self, writer_schema, decoder): + for field in writer_schema.fields: + await self.skip_data(field.type, decoder) + + +# ------------------------------------------------------------------------------ + +if __name__ == '__main__': + raise Exception('Not a standalone module') diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/datafile.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/datafile.py new file mode 100644 index 00000000000..df06fe0cfe7 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/datafile.py @@ -0,0 +1,266 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +"""Read/Write Avro File Object Containers.""" + +import io +import logging +import sys +import zlib + +from ..avro import avro_io +from ..avro import schema + +PY3 = sys.version_info[0] == 3 + +logger = logging.getLogger(__name__) + +# ------------------------------------------------------------------------------ +# Constants + +# Version of the container file: +VERSION = 1 + +if PY3: + MAGIC = b'Obj' + bytes([VERSION]) + MAGIC_SIZE = len(MAGIC) +else: + MAGIC = 'Obj' + chr(VERSION) + MAGIC_SIZE = len(MAGIC) + +# Size of the synchronization marker, in number of bytes: +SYNC_SIZE = 16 + +# Schema of the container header: +META_SCHEMA = schema.parse(""" +{ + "type": "record", "name": "org.apache.avro.file.Header", + "fields": [{ + "name": "magic", + "type": {"type": "fixed", "name": "magic", "size": %(magic_size)d} + }, { + "name": "meta", + "type": {"type": "map", "values": "bytes"} + }, { + "name": "sync", + "type": {"type": "fixed", "name": "sync", "size": %(sync_size)d} + }] +} +""" % { + 'magic_size': MAGIC_SIZE, + 'sync_size': SYNC_SIZE, +}) + +# Codecs supported by container files: +VALID_CODECS = frozenset(['null', 'deflate']) + +# Metadata key associated to the schema: +SCHEMA_KEY = "avro.schema" + + +# ------------------------------------------------------------------------------ +# Exceptions + + +class DataFileException(schema.AvroException): + """Problem reading or writing file object containers.""" + +# ------------------------------------------------------------------------------ + + +class DataFileReader(object): # pylint: disable=too-many-instance-attributes + """Read files written by DataFileWriter.""" + + def __init__(self, reader, datum_reader, **kwargs): + """Initializes a new data file reader. + + Args: + reader: Open file to read from. + datum_reader: Avro datum reader. + """ + self._reader = reader + self._raw_decoder = avro_io.BinaryDecoder(reader) + self._header_reader = kwargs.pop('header_reader', None) + self._header_decoder = None if self._header_reader is None else avro_io.BinaryDecoder(self._header_reader) + self._datum_decoder = None # Maybe reset at every block. + self._datum_reader = datum_reader + + # In case self._reader only has partial content(without header). 
+ # seek(0, 0) to make sure read the (partial)content from beginning. + self._reader.seek(0, 0) + + # read the header: magic, meta, sync + self._read_header() + + # ensure codec is valid + avro_codec_raw = self.get_meta('avro.codec') + if avro_codec_raw is None: + self.codec = "null" + else: + self.codec = avro_codec_raw.decode('utf-8') + if self.codec not in VALID_CODECS: + raise DataFileException('Unknown codec: %s.' % self.codec) + + # get ready to read + self._block_count = 0 + + # object_position is to support reading from current position in the future read, + # no need to downloading from the beginning of avro. + if hasattr(self._reader, 'object_position'): + self.reader.track_object_position() + + self._cur_object_index = 0 + # header_reader indicates reader only has partial content. The reader doesn't have block header, + # so we read use the block count stored last time. + # Also ChangeFeed only has codec==null, so use _raw_decoder is good. + if self._header_reader is not None: + self._datum_decoder = self._raw_decoder + + self.datum_reader.writer_schema = ( + schema.parse(self.get_meta(SCHEMA_KEY).decode('utf-8'))) + + def __enter__(self): + return self + + def __exit__(self, data_type, value, traceback): + # Perform a close if there's no exception + if data_type is None: + self.close() + + def __iter__(self): + return self + + # read-only properties + @property + def reader(self): + return self._reader + + @property + def raw_decoder(self): + return self._raw_decoder + + @property + def datum_decoder(self): + return self._datum_decoder + + @property + def datum_reader(self): + return self._datum_reader + + @property + def sync_marker(self): + return self._sync_marker + + @property + def meta(self): + return self._meta + + # read/write properties + @property + def block_count(self): + return self._block_count + + def get_meta(self, key): + """Reports the value of a given metadata key. + + Args: + key: Metadata key (string) to report the value of. + Returns: + Value associated to the metadata key, as bytes. + """ + return self._meta.get(key) + + def _read_header(self): + header_reader = self._header_reader if self._header_reader else self._reader + header_decoder = self._header_decoder if self._header_decoder else self._raw_decoder + + # seek to the beginning of the file to get magic block + header_reader.seek(0, 0) + + # read header into a dict + header = self.datum_reader.read_data(META_SCHEMA, header_decoder) + + # check magic number + if header.get('magic') != MAGIC: + fail_msg = "Not an Avro data file: %s doesn't match %s." \ + % (header.get('magic'), MAGIC) + raise schema.AvroException(fail_msg) + + # set metadata + self._meta = header['meta'] + + # set sync marker + self._sync_marker = header['sync'] + + def _read_block_header(self): + self._block_count = self.raw_decoder.read_long() + if self.codec == "null": + # Skip a long; we don't need to use the length. + self.raw_decoder.skip_long() + self._datum_decoder = self._raw_decoder + elif self.codec == 'deflate': + # Compressed data is stored as (length, data), which + # corresponds to how the "bytes" type is encoded. + data = self.raw_decoder.read_bytes() + # -15 is the log of the window size; negative indicates + # "raw" (no zlib headers) decompression. See zlib.h. 
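+            # Passing wbits=-15 makes zlib expect a raw deflate stream with no zlib
+            # header or checksum, which is how Avro "deflate" blocks are written.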
+ uncompressed = zlib.decompress(data, -15) + self._datum_decoder = avro_io.BinaryDecoder(io.BytesIO(uncompressed)) + else: + raise DataFileException("Unknown codec: %r" % self.codec) + + def _skip_sync(self): + """ + Read the length of the sync marker; if it matches the sync marker, + return True. Otherwise, seek back to where we started and return False. + """ + proposed_sync_marker = self.reader.read(SYNC_SIZE) + if SYNC_SIZE > 0 and not proposed_sync_marker: + raise StopIteration + if proposed_sync_marker != self.sync_marker: + self.reader.seek(-SYNC_SIZE, 1) + + def __next__(self): + """Return the next datum in the file.""" + if self.block_count == 0: + self._skip_sync() + + # object_position is to support reading from current position in the future read, + # no need to downloading from the beginning of avro file with this attr. + if hasattr(self._reader, 'object_position'): + self.reader.track_object_position() + self._cur_object_index = 0 + + self._read_block_header() + + datum = self.datum_reader.read(self.datum_decoder) + self._block_count -= 1 + self._cur_object_index += 1 + + # object_position is to support reading from current position in the future read, + # This will track the index of the next item to be read. + # This will also track the offset before the next sync marker. + if hasattr(self._reader, 'object_position'): + if self.block_count == 0: + # the next event to be read is at index 0 in the new chunk of blocks, + self.reader.track_object_position() + self.reader.set_object_index(0) + else: + self.reader.set_object_index(self._cur_object_index) + + return datum + + # PY2 + def next(self): + return self.__next__() + + def close(self): + """Close this reader.""" + self.reader.close() + + +if __name__ == '__main__': + raise Exception('Not a standalone module') diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/datafile_async.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/datafile_async.py new file mode 100644 index 00000000000..1e9d018228d --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/datafile_async.py @@ -0,0 +1,215 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +"""Read/Write Avro File Object Containers.""" + +import logging +import sys + +from ..avro import avro_io_async +from ..avro import schema +from .datafile import DataFileException +from .datafile import MAGIC, SYNC_SIZE, META_SCHEMA, SCHEMA_KEY + + +PY3 = sys.version_info[0] == 3 + +logger = logging.getLogger(__name__) + +# ------------------------------------------------------------------------------ +# Constants + +# Codecs supported by container files: +VALID_CODECS = frozenset(['null']) + + +class AsyncDataFileReader(object): # pylint: disable=too-many-instance-attributes + """Read files written by DataFileWriter.""" + + def __init__(self, reader, datum_reader, **kwargs): + """Initializes a new data file reader. + + Args: + reader: Open file to read from. + datum_reader: Avro datum reader. 
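+          header_reader: Optional, passed via kwargs; a separate stream containing
+            the Avro header when the main reader holds only partial content.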
+ """ + self._reader = reader + self._raw_decoder = avro_io_async.AsyncBinaryDecoder(reader) + self._header_reader = kwargs.pop('header_reader', None) + self._header_decoder = None if self._header_reader is None else \ + avro_io_async.AsyncBinaryDecoder(self._header_reader) + self._datum_decoder = None # Maybe reset at every block. + self._datum_reader = datum_reader + self.codec = "null" + self._block_count = 0 + self._cur_object_index = 0 + self._meta = None + self._sync_marker = None + + async def init(self): + # In case self._reader only has partial content(without header). + # seek(0, 0) to make sure read the (partial)content from beginning. + await self._reader.seek(0, 0) + + # read the header: magic, meta, sync + await self._read_header() + + # ensure codec is valid + avro_codec_raw = self.get_meta('avro.codec') + if avro_codec_raw is None: + self.codec = "null" + else: + self.codec = avro_codec_raw.decode('utf-8') + if self.codec not in VALID_CODECS: + raise DataFileException('Unknown codec: %s.' % self.codec) + + # get ready to read + self._block_count = 0 + + # object_position is to support reading from current position in the future read, + # no need to downloading from the beginning of avro. + if hasattr(self._reader, 'object_position'): + self.reader.track_object_position() + + # header_reader indicates reader only has partial content. The reader doesn't have block header, + # so we read use the block count stored last time. + # Also ChangeFeed only has codec==null, so use _raw_decoder is good. + if self._header_reader is not None: + self._datum_decoder = self._raw_decoder + self.datum_reader.writer_schema = ( + schema.parse(self.get_meta(SCHEMA_KEY).decode('utf-8'))) + return self + + async def __aenter__(self): + return self + + async def __aexit__(self, data_type, value, traceback): + # Perform a close if there's no exception + if data_type is None: + self.close() + + def __aiter__(self): + return self + + # read-only properties + @property + def reader(self): + return self._reader + + @property + def raw_decoder(self): + return self._raw_decoder + + @property + def datum_decoder(self): + return self._datum_decoder + + @property + def datum_reader(self): + return self._datum_reader + + @property + def sync_marker(self): + return self._sync_marker + + @property + def meta(self): + return self._meta + + # read/write properties + @property + def block_count(self): + return self._block_count + + def get_meta(self, key): + """Reports the value of a given metadata key. + + Args: + key: Metadata key (string) to report the value of. + Returns: + Value associated to the metadata key, as bytes. + """ + return self._meta.get(key) + + async def _read_header(self): + header_reader = self._header_reader if self._header_reader else self._reader + header_decoder = self._header_decoder if self._header_decoder else self._raw_decoder + + # seek to the beginning of the file to get magic block + await header_reader.seek(0, 0) + + # read header into a dict + header = await self.datum_reader.read_data(META_SCHEMA, header_decoder) + + # check magic number + if header.get('magic') != MAGIC: + fail_msg = "Not an Avro data file: %s doesn't match %s." \ + % (header.get('magic'), MAGIC) + raise schema.AvroException(fail_msg) + + # set metadata + self._meta = header['meta'] + + # set sync marker + self._sync_marker = header['sync'] + + async def _read_block_header(self): + self._block_count = await self.raw_decoder.read_long() + if self.codec == "null": + # Skip a long; we don't need to use the length. 
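+            # (That long is the block's size in bytes; with the "null" codec the data
+            # follows uncompressed, so the size is not needed here.)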
+ await self.raw_decoder.skip_long() + self._datum_decoder = self._raw_decoder + else: + raise DataFileException("Unknown codec: %r" % self.codec) + + async def _skip_sync(self): + """ + Read the length of the sync marker; if it matches the sync marker, + return True. Otherwise, seek back to where we started and return False. + """ + proposed_sync_marker = await self.reader.read(SYNC_SIZE) + if SYNC_SIZE > 0 and not proposed_sync_marker: + raise StopAsyncIteration + if proposed_sync_marker != self.sync_marker: + await self.reader.seek(-SYNC_SIZE, 1) + + async def __anext__(self): + """Return the next datum in the file.""" + if self.block_count == 0: + await self._skip_sync() + + # object_position is to support reading from current position in the future read, + # no need to downloading from the beginning of avro file with this attr. + if hasattr(self._reader, 'object_position'): + await self.reader.track_object_position() + self._cur_object_index = 0 + + await self._read_block_header() + + datum = await self.datum_reader.read(self.datum_decoder) + self._block_count -= 1 + self._cur_object_index += 1 + + # object_position is to support reading from current position in the future read, + # This will track the index of the next item to be read. + # This will also track the offset before the next sync marker. + if hasattr(self._reader, 'object_position'): + if self.block_count == 0: + # the next event to be read is at index 0 in the new chunk of blocks, + await self.reader.track_object_position() + await self.reader.set_object_index(0) + else: + await self.reader.set_object_index(self._cur_object_index) + + return datum + + def close(self): + """Close this reader.""" + self.reader.close() + + +if __name__ == '__main__': + raise Exception('Not a standalone module') diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/schema.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/schema.py new file mode 100644 index 00000000000..ffe28530167 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/avro/schema.py @@ -0,0 +1,1221 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# pylint: disable=too-many-lines + +"""Representation of Avro schemas. + +A schema may be one of: + - A record, mapping field names to field value data; + - An error, equivalent to a record; + - An enum, containing one of a small set of symbols; + - An array of values, all of the same schema; + - A map containing string/value pairs, each of a declared schema; + - A union of other schemas; + - A fixed sized binary object; + - A unicode string; + - A sequence of bytes; + - A 32-bit signed int; + - A 64-bit signed long; + - A 32-bit floating-point float; + - A 64-bit floating-point double; + - A boolean; + - Null. 
+""" + +import abc +import json +import logging +import re +import sys +from six import with_metaclass + +PY2 = sys.version_info[0] == 2 + +if PY2: + _str = unicode # pylint: disable=undefined-variable +else: + _str = str + +logger = logging.getLogger(__name__) + +# ------------------------------------------------------------------------------ +# Constants + +# Log level more verbose than DEBUG=10, INFO=20, etc. +DEBUG_VERBOSE = 5 + +NULL = 'null' +BOOLEAN = 'boolean' +STRING = 'string' +BYTES = 'bytes' +INT = 'int' +LONG = 'long' +FLOAT = 'float' +DOUBLE = 'double' +FIXED = 'fixed' +ENUM = 'enum' +RECORD = 'record' +ERROR = 'error' +ARRAY = 'array' +MAP = 'map' +UNION = 'union' + +# Request and error unions are part of Avro protocols: +REQUEST = 'request' +ERROR_UNION = 'error_union' + +PRIMITIVE_TYPES = frozenset([ + NULL, + BOOLEAN, + STRING, + BYTES, + INT, + LONG, + FLOAT, + DOUBLE, +]) + +NAMED_TYPES = frozenset([ + FIXED, + ENUM, + RECORD, + ERROR, +]) + +VALID_TYPES = frozenset.union( + PRIMITIVE_TYPES, + NAMED_TYPES, + [ + ARRAY, + MAP, + UNION, + REQUEST, + ERROR_UNION, + ], +) + +SCHEMA_RESERVED_PROPS = frozenset([ + 'type', + 'name', + 'namespace', + 'fields', # Record + 'items', # Array + 'size', # Fixed + 'symbols', # Enum + 'values', # Map + 'doc', +]) + +FIELD_RESERVED_PROPS = frozenset([ + 'default', + 'name', + 'doc', + 'order', + 'type', +]) + +VALID_FIELD_SORT_ORDERS = frozenset([ + 'ascending', + 'descending', + 'ignore', +]) + + +# ------------------------------------------------------------------------------ +# Exceptions + + +class Error(Exception): + """Base class for errors in this module.""" + + +class AvroException(Error): + """Generic Avro schema error.""" + + +class SchemaParseException(AvroException): + """Error while parsing a JSON schema descriptor.""" + + +class Schema(with_metaclass(abc.ABCMeta, object)): + """Abstract base class for all Schema classes.""" + + def __init__(self, data_type, other_props=None): + """Initializes a new schema object. + + Args: + data_type: Type of the schema to initialize. + other_props: Optional dictionary of additional properties. + """ + if data_type not in VALID_TYPES: + raise SchemaParseException('%r is not a valid Avro type.' % data_type) + + # All properties of this schema, as a map: property name -> property value + self._props = {} + + self._props['type'] = data_type + self._type = data_type + + if other_props: + self._props.update(other_props) + + @property + def namespace(self): + """Returns: the namespace this schema belongs to, if any, or None.""" + return self._props.get('namespace', None) + + @property + def type(self): + """Returns: the type of this schema.""" + return self._type + + @property + def doc(self): + """Returns: the documentation associated to this schema, if any, or None.""" + return self._props.get('doc', None) + + @property + def props(self): + """Reports all the properties of this schema. + + Includes all properties, reserved and non reserved. + JSON properties of this schema are directly generated from this dict. + + Returns: + A dictionary of properties associated to this schema. 
+ """ + return self._props + + @property + def other_props(self): + """Returns: the dictionary of non-reserved properties.""" + return dict(filter_keys_out(items=self._props, keys=SCHEMA_RESERVED_PROPS)) + + def __str__(self): + """Returns: the JSON representation of this schema.""" + return json.dumps(self.to_json(names=None)) + + @abc.abstractmethod + def to_json(self, names): + """Converts the schema object into its AVRO specification representation. + + Schema types that have names (records, enums, and fixed) must + be aware of not re-defining schemas that are already listed + in the parameter names. + """ + raise Exception('Cannot run abstract method.') + + +# ------------------------------------------------------------------------------ + + +_RE_NAME = re.compile(r'[A-Za-z_][A-Za-z0-9_]*') + +_RE_FULL_NAME = re.compile( + r'^' + r'[.]?(?:[A-Za-z_][A-Za-z0-9_]*[.])*' # optional namespace + r'([A-Za-z_][A-Za-z0-9_]*)' # name + r'$' +) + + +class Name(object): + """Representation of an Avro name.""" + + def __init__(self, name, namespace=None): + """Parses an Avro name. + + Args: + name: Avro name to parse (relative or absolute). + namespace: Optional explicit namespace if the name is relative. + """ + # Normalize: namespace is always defined as a string, possibly empty. + if namespace is None: + namespace = '' + + if '.' in name: + # name is absolute, namespace is ignored: + self._fullname = name + + match = _RE_FULL_NAME.match(self._fullname) + if match is None: + raise SchemaParseException( + 'Invalid absolute schema name: %r.' % self._fullname) + + self._name = match.group(1) + self._namespace = self._fullname[:-(len(self._name) + 1)] + + else: + # name is relative, combine with explicit namespace: + self._name = name + self._namespace = namespace + self._fullname = (self._name + if (not self._namespace) else + '%s.%s' % (self._namespace, self._name)) + + # Validate the fullname: + if _RE_FULL_NAME.match(self._fullname) is None: + raise SchemaParseException( + 'Invalid schema name %r infered from name %r and namespace %r.' + % (self._fullname, self._name, self._namespace)) + + def __eq__(self, other): + if not isinstance(other, Name): + return NotImplemented + return self.fullname == other.fullname + + @property + def simple_name(self): + """Returns: the simple name part of this name.""" + return self._name + + @property + def namespace(self): + """Returns: this name's namespace, possible the empty string.""" + return self._namespace + + @property + def fullname(self): + """Returns: the full name.""" + return self._fullname + + +# ------------------------------------------------------------------------------ + + +class Names(object): + """Tracks Avro named schemas and default namespace during parsing.""" + + def __init__(self, default_namespace=None, names=None): + """Initializes a new name tracker. + + Args: + default_namespace: Optional default namespace. + names: Optional initial mapping of known named schemas. + """ + if names is None: + names = {} + self._names = names + self._default_namespace = default_namespace + + @property + def names(self): + """Returns: the mapping of known named schemas.""" + return self._names + + @property + def default_namespace(self): + """Returns: the default namespace, if any, or None.""" + return self._default_namespace + + def new_with_default_namespace(self, namespace): + """Creates a new name tracker from this tracker, but with a new default ns. + + Args: + namespace: New default namespace to use. 
+ Returns: + New name tracker with the specified default namespace. + """ + return Names(names=self._names, default_namespace=namespace) + + def get_name(self, name, namespace=None): + """Resolves the Avro name according to this name tracker's state. + + Args: + name: Name to resolve (absolute or relative). + namespace: Optional explicit namespace. + Returns: + The specified name, resolved according to this tracker. + """ + if namespace is None: + namespace = self._default_namespace + return Name(name=name, namespace=namespace) + + def get_schema(self, name, namespace=None): + """Resolves an Avro schema by name. + + Args: + name: Name (relative or absolute) of the Avro schema to look up. + namespace: Optional explicit namespace. + Returns: + The schema with the specified name, if any, or None. + """ + avro_name = self.get_name(name=name, namespace=namespace) + return self._names.get(avro_name.fullname, None) + + def prune_namespace(self, properties): + """given a properties, return properties with namespace removed if + it matches the own default namespace + """ + if self.default_namespace is None: + # I have no default -- no change + return properties + if 'namespace' not in properties: + # he has no namespace - no change + return properties + if properties['namespace'] != self.default_namespace: + # we're different - leave his stuff alone + return properties + # we each have a namespace and it's redundant. delete his. + prunable = properties.copy() + del prunable['namespace'] + return prunable + + def register(self, schema): + """Registers a new named schema in this tracker. + + Args: + schema: Named Avro schema to register in this tracker. + """ + if schema.fullname in VALID_TYPES: + raise SchemaParseException( + '%s is a reserved type name.' % schema.fullname) + if schema.fullname in self.names: + raise SchemaParseException( + 'Avro name %r already exists.' % schema.fullname) + + logger.log(DEBUG_VERBOSE, 'Register new name for %r', schema.fullname) + self._names[schema.fullname] = schema + + +# ------------------------------------------------------------------------------ + + +class NamedSchema(Schema): + """Abstract base class for named schemas. + + Named schemas are enumerated in NAMED_TYPES. + """ + + def __init__( + self, + data_type, + name=None, + namespace=None, + names=None, + other_props=None, + ): + """Initializes a new named schema object. + + Args: + data_type: Type of the named schema. + name: Name (absolute or relative) of the schema. + namespace: Optional explicit namespace if name is relative. + names: Tracker to resolve and register Avro names. + other_props: Optional map of additional properties of the schema. + """ + assert (data_type in NAMED_TYPES), ('Invalid named type: %r' % data_type) + self._avro_name = names.get_name(name=name, namespace=namespace) + + super(NamedSchema, self).__init__(data_type, other_props) + + names.register(self) + + self._props['name'] = self.name + if self.namespace: + self._props['namespace'] = self.namespace + + @property + def avro_name(self): + """Returns: the Name object describing this schema's name.""" + return self._avro_name + + @property + def name(self): + return self._avro_name.simple_name + + @property + def namespace(self): + return self._avro_name.namespace + + @property + def fullname(self): + return self._avro_name.fullname + + def name_ref(self, names): + """Reports this schema name relative to the specified name tracker. + + Args: + names: Avro name tracker to relativise this schema name against. 
+ Returns: + This schema name, relativised against the specified name tracker. + """ + if self.namespace == names.default_namespace: + return self.name + return self.fullname + + @abc.abstractmethod + def to_json(self, names): + """Converts the schema object into its AVRO specification representation. + + Schema types that have names (records, enums, and fixed) must + be aware of not re-defining schemas that are already listed + in the parameter names. + """ + raise Exception('Cannot run abstract method.') + +# ------------------------------------------------------------------------------ + + +_NO_DEFAULT = object() + + +class Field(object): + """Representation of the schema of a field in a record.""" + + def __init__( + self, + data_type, + name, + index, + has_default, + default=_NO_DEFAULT, + order=None, + doc=None, + other_props=None + ): + """Initializes a new Field object. + + Args: + data_type: Avro schema of the field. + name: Name of the field. + index: 0-based position of the field. + has_default: + default: + order: + doc: + other_props: + """ + if (not isinstance(name, _str)) or (not name): + raise SchemaParseException('Invalid record field name: %r.' % name) + if (order is not None) and (order not in VALID_FIELD_SORT_ORDERS): + raise SchemaParseException('Invalid record field order: %r.' % order) + + # All properties of this record field: + self._props = {} + + self._has_default = has_default + if other_props: + self._props.update(other_props) + + self._index = index + self._type = self._props['type'] = data_type + self._name = self._props['name'] = name + + if has_default: + self._props['default'] = default + + if order is not None: + self._props['order'] = order + + if doc is not None: + self._props['doc'] = doc + + @property + def type(self): + """Returns: the schema of this field.""" + return self._type + + @property + def name(self): + """Returns: this field name.""" + return self._name + + @property + def index(self): + """Returns: the 0-based index of this field in the record.""" + return self._index + + @property + def default(self): + return self._props['default'] + + @property + def has_default(self): + return self._has_default + + @property + def order(self): + return self._props.get('order', None) + + @property + def doc(self): + return self._props.get('doc', None) + + @property + def props(self): + return self._props + + @property + def other_props(self): + return filter_keys_out(items=self._props, keys=FIELD_RESERVED_PROPS) + + def __str__(self): + return json.dumps(self.to_json()) + + def to_json(self, names=None): + if names is None: + names = Names() + to_dump = self.props.copy() + to_dump['type'] = self.type.to_json(names) + return to_dump + + def __eq__(self, that): + to_cmp = json.loads(_str(self)) + return to_cmp == json.loads(_str(that)) + + +# ------------------------------------------------------------------------------ +# Primitive Types + + +class PrimitiveSchema(Schema): + """Schema of a primitive Avro type. + + Valid primitive types are defined in PRIMITIVE_TYPES. + """ + + def __init__(self, data_type, other_props=None): + """Initializes a new schema object for the specified primitive type. + + Args: + data_type: Type of the schema to construct. Must be primitive. + """ + if data_type not in PRIMITIVE_TYPES: + raise AvroException('%r is not a valid primitive type.' 
% data_type) + super(PrimitiveSchema, self).__init__(data_type, other_props=other_props) + + @property + def name(self): + """Returns: the simple name of this schema.""" + # The name of a primitive type is the type itself. + return self.type + + @property + def fullname(self): + """Returns: the fully qualified name of this schema.""" + # The full name is the simple name for primitive schema. + return self.name + + def to_json(self, names=None): + if len(self.props) == 1: + return self.fullname + return self.props + + def __eq__(self, that): + return self.props == that.props + + +# ------------------------------------------------------------------------------ +# Complex Types (non-recursive) + + +class FixedSchema(NamedSchema): + def __init__( + self, + name, + namespace, + size, + names=None, + other_props=None, + ): + # Ensure valid ctor args + if not isinstance(size, int): + fail_msg = 'Fixed Schema requires a valid integer for size property.' + raise AvroException(fail_msg) + + super(FixedSchema, self).__init__( + data_type=FIXED, + name=name, + namespace=namespace, + names=names, + other_props=other_props, + ) + self._props['size'] = size + + @property + def size(self): + """Returns: the size of this fixed schema, in bytes.""" + return self._props['size'] + + def to_json(self, names=None): + if names is None: + names = Names() + if self.fullname in names.names: + return self.name_ref(names) + names.names[self.fullname] = self + return names.prune_namespace(self.props) + + def __eq__(self, that): + return self.props == that.props + + +# ------------------------------------------------------------------------------ + + +class EnumSchema(NamedSchema): + def __init__( + self, + name, + namespace, + symbols, + names=None, + doc=None, + other_props=None, + ): + """Initializes a new enumeration schema object. + + Args: + name: Simple name of this enumeration. + namespace: Optional namespace. + symbols: Ordered list of symbols defined in this enumeration. + names: + doc: + other_props: + """ + symbols = tuple(symbols) + symbol_set = frozenset(symbols) + if (len(symbol_set) != len(symbols) + or not all(map(lambda symbol: isinstance(symbol, _str), symbols))): + raise AvroException( + 'Invalid symbols for enum schema: %r.' % (symbols,)) + + super(EnumSchema, self).__init__( + data_type=ENUM, + name=name, + namespace=namespace, + names=names, + other_props=other_props, + ) + + self._props['symbols'] = symbols + if doc is not None: + self._props['doc'] = doc + + @property + def symbols(self): + """Returns: the symbols defined in this enum.""" + return self._props['symbols'] + + def to_json(self, names=None): + if names is None: + names = Names() + if self.fullname in names.names: + return self.name_ref(names) + names.names[self.fullname] = self + return names.prune_namespace(self.props) + + def __eq__(self, that): + return self.props == that.props + + +# ------------------------------------------------------------------------------ +# Complex Types (recursive) + + +class ArraySchema(Schema): + """Schema of an array.""" + + def __init__(self, items, other_props=None): + """Initializes a new array schema object. + + Args: + items: Avro schema of the array items. 
+ other_props: + """ + super(ArraySchema, self).__init__( + data_type=ARRAY, + other_props=other_props, + ) + self._items_schema = items + self._props['items'] = items + + @property + def items(self): + """Returns: the schema of the items in this array.""" + return self._items_schema + + def to_json(self, names=None): + if names is None: + names = Names() + to_dump = self.props.copy() + item_schema = self.items + to_dump['items'] = item_schema.to_json(names) + return to_dump + + def __eq__(self, that): + to_cmp = json.loads(_str(self)) + return to_cmp == json.loads(_str(that)) + + +# ------------------------------------------------------------------------------ + + +class MapSchema(Schema): + """Schema of a map.""" + + def __init__(self, values, other_props=None): + """Initializes a new map schema object. + + Args: + values: Avro schema of the map values. + other_props: + """ + super(MapSchema, self).__init__( + data_type=MAP, + other_props=other_props, + ) + self._values_schema = values + self._props['values'] = values + + @property + def values(self): + """Returns: the schema of the values in this map.""" + return self._values_schema + + def to_json(self, names=None): + if names is None: + names = Names() + to_dump = self.props.copy() + to_dump['values'] = self.values.to_json(names) + return to_dump + + def __eq__(self, that): + to_cmp = json.loads(_str(self)) + return to_cmp == json.loads(_str(that)) + + +# ------------------------------------------------------------------------------ + + +class UnionSchema(Schema): + """Schema of a union.""" + + def __init__(self, schemas): + """Initializes a new union schema object. + + Args: + schemas: Ordered collection of schema branches in the union. + """ + super(UnionSchema, self).__init__(data_type=UNION) + self._schemas = tuple(schemas) + + # Validate the schema branches: + + # All named schema names are unique: + named_branches = tuple( + filter(lambda schema: schema.type in NAMED_TYPES, self._schemas)) + unique_names = frozenset(map(lambda schema: schema.fullname, named_branches)) + if len(unique_names) != len(named_branches): + raise AvroException( + 'Invalid union branches with duplicate schema name:%s' + % ''.join(map(lambda schema: ('\n\t - %s' % schema), self._schemas))) + + # Types are unique within unnamed schemas, and union is not allowed: + unnamed_branches = tuple( + filter(lambda schema: schema.type not in NAMED_TYPES, self._schemas)) + unique_types = frozenset(map(lambda schema: schema.type, unnamed_branches)) + if UNION in unique_types: + raise AvroException( + 'Invalid union branches contain other unions:%s' + % ''.join(map(lambda schema: ('\n\t - %s' % schema), self._schemas))) + if len(unique_types) != len(unnamed_branches): + raise AvroException( + 'Invalid union branches with duplicate type:%s' + % ''.join(map(lambda schema: ('\n\t - %s' % schema), self._schemas))) + + @property + def schemas(self): + """Returns: the ordered list of schema branches in the union.""" + return self._schemas + + def to_json(self, names=None): + if names is None: + names = Names() + to_dump = [] + for schema in self.schemas: + to_dump.append(schema.to_json(names)) + return to_dump + + def __eq__(self, that): + to_cmp = json.loads(_str(self)) + return to_cmp == json.loads(_str(that)) + + +# ------------------------------------------------------------------------------ + + +class ErrorUnionSchema(UnionSchema): + """Schema representing the declared errors of a protocol message.""" + + def __init__(self, schemas): + """Initializes an error-union 
schema. + + Args: + schema: collection of error schema. + """ + # Prepend "string" to handle system errors + schemas = [PrimitiveSchema(data_type=STRING)] + list(schemas) + super(ErrorUnionSchema, self).__init__(schemas=schemas) + + def to_json(self, names=None): + if names is None: + names = Names() + to_dump = [] + for schema in self.schemas: + # Don't print the system error schema + if schema.type == STRING: + continue + to_dump.append(schema.to_json(names)) + return to_dump + + +# ------------------------------------------------------------------------------ + + +class RecordSchema(NamedSchema): + """Schema of a record.""" + + @staticmethod + def _make_field(index, field_desc, names): + """Builds field schemas from a list of field JSON descriptors. + + Args: + index: 0-based index of the field in the record. + field_desc: JSON descriptors of a record field. + Return: + The field schema. + """ + field_schema = schema_from_json_data( + json_data=field_desc['type'], + names=names, + ) + other_props = ( + dict(filter_keys_out(items=field_desc, keys=FIELD_RESERVED_PROPS))) + return Field( + data_type=field_schema, + name=field_desc['name'], + index=index, + has_default=('default' in field_desc), + default=field_desc.get('default', _NO_DEFAULT), + order=field_desc.get('order', None), + doc=field_desc.get('doc', None), + other_props=other_props, + ) + + @staticmethod + def make_field_list(field_desc_list, names): + """Builds field schemas from a list of field JSON descriptors. + + Guarantees field name unicity. + + Args: + field_desc_list: collection of field JSON descriptors. + names: Avro schema tracker. + Yields + Field schemas. + """ + for index, field_desc in enumerate(field_desc_list): + yield RecordSchema._make_field(index, field_desc, names) + + @staticmethod + def _make_field_map(fields): + """Builds the field map. + + Guarantees field name unicity. + + Args: + fields: iterable of field schema. + Returns: + A map of field schemas, indexed by name. + """ + field_map = {} + for field in fields: + if field.name in field_map: + raise SchemaParseException( + 'Duplicate record field name %r.' % field.name) + field_map[field.name] = field + return field_map + + def __init__( + self, + name, + namespace, + fields=None, + make_fields=None, + names=None, + record_type=RECORD, + doc=None, + other_props=None + ): + """Initializes a new record schema object. + + Args: + name: Name of the record (absolute or relative). + namespace: Optional namespace the record belongs to, if name is relative. + fields: collection of fields to add to this record. + Exactly one of fields or make_fields must be specified. + make_fields: function creating the fields that belong to the record. + The function signature is: make_fields(names) -> ordered field list. + Exactly one of fields or make_fields must be specified. + names: + record_type: Type of the record: one of RECORD, ERROR or REQUEST. + Protocol requests are not named. + doc: + other_props: + """ + if record_type == REQUEST: + # Protocol requests are not named: + super(RecordSchema, self).__init__( + data_type=REQUEST, + other_props=other_props, + ) + elif record_type in [RECORD, ERROR]: + # Register this record name in the tracker: + super(RecordSchema, self).__init__( + data_type=record_type, + name=name, + namespace=namespace, + names=names, + other_props=other_props, + ) + else: + raise SchemaParseException( + 'Invalid record type: %r.' 
% record_type) + + if record_type in [RECORD, ERROR]: + avro_name = names.get_name(name=name, namespace=namespace) + nested_names = names.new_with_default_namespace(namespace=avro_name.namespace) + elif record_type == REQUEST: + # Protocol request has no name: no need to change default namespace: + nested_names = names + + if fields is None: + fields = make_fields(names=nested_names) + else: + assert make_fields is None + self._fields = tuple(fields) + + self._field_map = RecordSchema._make_field_map(self._fields) + + self._props['fields'] = fields + if doc is not None: + self._props['doc'] = doc + + @property + def fields(self): + """Returns: the field schemas, as an ordered tuple.""" + return self._fields + + @property + def field_map(self): + """Returns: a read-only map of the field schemas index by field names.""" + return self._field_map + + def to_json(self, names=None): + if names is None: + names = Names() + # Request records don't have names + if self.type == REQUEST: + return [f.to_json(names) for f in self.fields] + + if self.fullname in names.names: + return self.name_ref(names) + names.names[self.fullname] = self + + to_dump = names.prune_namespace(self.props.copy()) + to_dump['fields'] = [f.to_json(names) for f in self.fields] + return to_dump + + def __eq__(self, that): + to_cmp = json.loads(_str(self)) + return to_cmp == json.loads(_str(that)) + + +# ------------------------------------------------------------------------------ +# Module functions + + +def filter_keys_out(items, keys): + """Filters a collection of (key, value) items. + + Exclude any item whose key belongs to keys. + + Args: + items: Dictionary of items to filter the keys out of. + keys: Keys to filter out. + Yields: + Filtered items. + """ + for key, value in items.items(): + if key in keys: + continue + yield key, value + + +# ------------------------------------------------------------------------------ + + +def _schema_from_json_string(json_string, names): + if json_string in PRIMITIVE_TYPES: + return PrimitiveSchema(data_type=json_string) + + # Look for a known named schema: + schema = names.get_schema(name=json_string) + if schema is None: + raise SchemaParseException( + 'Unknown named schema %r, known names: %r.' 
+ % (json_string, sorted(names.names))) + return schema + + +def _schema_from_json_array(json_array, names): + def MakeSchema(desc): + return schema_from_json_data(json_data=desc, names=names) + + return UnionSchema(map(MakeSchema, json_array)) + + +def _schema_from_json_object(json_object, names): + data_type = json_object.get('type') + if data_type is None: + raise SchemaParseException( + 'Avro schema JSON descriptor has no "type" property: %r' % json_object) + + other_props = dict( + filter_keys_out(items=json_object, keys=SCHEMA_RESERVED_PROPS)) + + if data_type in PRIMITIVE_TYPES: + # FIXME should not ignore other properties + result = PrimitiveSchema(data_type, other_props=other_props) + + elif data_type in NAMED_TYPES: + name = json_object.get('name') + namespace = json_object.get('namespace', names.default_namespace) + if data_type == FIXED: + size = json_object.get('size') + result = FixedSchema(name, namespace, size, names, other_props) + elif data_type == ENUM: + symbols = json_object.get('symbols') + doc = json_object.get('doc') + result = EnumSchema(name, namespace, symbols, names, doc, other_props) + + elif data_type in [RECORD, ERROR]: + field_desc_list = json_object.get('fields', ()) + + def MakeFields(names): + return tuple(RecordSchema.make_field_list(field_desc_list, names)) + + result = RecordSchema( + name=name, + namespace=namespace, + make_fields=MakeFields, + names=names, + record_type=data_type, + doc=json_object.get('doc'), + other_props=other_props, + ) + else: + raise Exception('Internal error: unknown type %r.' % data_type) + + elif data_type in VALID_TYPES: + # Unnamed, non-primitive Avro type: + + if data_type == ARRAY: + items_desc = json_object.get('items') + if items_desc is None: + raise SchemaParseException( + 'Invalid array schema descriptor with no "items" : %r.' + % json_object) + result = ArraySchema( + items=schema_from_json_data(items_desc, names), + other_props=other_props, + ) + + elif data_type == MAP: + values_desc = json_object.get('values') + if values_desc is None: + raise SchemaParseException( + 'Invalid map schema descriptor with no "values" : %r.' + % json_object) + result = MapSchema( + values=schema_from_json_data(values_desc, names=names), + other_props=other_props, + ) + + elif data_type == ERROR_UNION: + error_desc_list = json_object.get('declared_errors') + assert error_desc_list is not None + error_schemas = map( + lambda desc: schema_from_json_data(desc, names=names), + error_desc_list) + result = ErrorUnionSchema(schemas=error_schemas) + + else: + raise Exception('Internal error: unknown type %r.' % data_type) + else: + raise SchemaParseException( + 'Invalid JSON descriptor for an Avro schema: %r' % json_object) + return result + + +# Parsers for the JSON data types: +_JSONDataParserTypeMap = { + _str: _schema_from_json_string, + list: _schema_from_json_array, + dict: _schema_from_json_object, +} + + +def schema_from_json_data(json_data, names=None): + """Builds an Avro Schema from its JSON descriptor. + + Args: + json_data: JSON data representing the descriptor of the Avro schema. + names: Optional tracker for Avro named schemas. + Returns: + The Avro schema parsed from the JSON descriptor. + Raises: + SchemaParseException: if the descriptor is invalid. + """ + if names is None: + names = Names() + + # Select the appropriate parser based on the JSON data type: + parser = _JSONDataParserTypeMap.get(type(json_data)) + if parser is None: + raise SchemaParseException( + 'Invalid JSON descriptor for an Avro schema: %r.' 
% json_data) + return parser(json_data, names=names) + + +# ------------------------------------------------------------------------------ + + +def parse(json_string): + """Constructs a Schema from its JSON descriptor in text form. + + Args: + json_string: String representation of the JSON descriptor of the schema. + Returns: + The parsed schema. + Raises: + SchemaParseException: on JSON parsing error, + or if the JSON descriptor is invalid. + """ + try: + json_data = json.loads(json_string) + except Exception as exn: + raise SchemaParseException( + 'Error parsing schema from JSON: %r. ' + 'Error message: %r.' + % (json_string, exn)) + + # Initialize the names object + names = Names() + + # construct the Avro Schema object + return schema_from_json_data(json_data, names) diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/base_client.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/base_client.py new file mode 100644 index 00000000000..9784a278ab7 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/base_client.py @@ -0,0 +1,463 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import logging +import uuid +from typing import ( # pylint: disable=unused-import + Optional, + Any, + Tuple, +) + +try: + from urllib.parse import parse_qs, quote +except ImportError: + from urlparse import parse_qs # type: ignore + from urllib2 import quote # type: ignore + +import six + +from azure.core.configuration import Configuration +from azure.core.credentials import AzureSasCredential +from azure.core.exceptions import HttpResponseError +from azure.core.pipeline import Pipeline +from azure.core.pipeline.transport import RequestsTransport, HttpTransport +from azure.core.pipeline.policies import ( + AzureSasCredentialPolicy, + ContentDecodePolicy, + DistributedTracingPolicy, + HttpLoggingPolicy, + RedirectPolicy, + ProxyPolicy, + UserAgentPolicy, +) + +from .constants import CONNECTION_TIMEOUT, READ_TIMEOUT, SERVICE_HOST_BASE +from .models import LocationMode +from .authentication import SharedKeyCredentialPolicy +from .shared_access_signature import QueryStringConstants +from .request_handlers import serialize_batch_body, _get_batch_request_delimiter +from .policies import ( + ExponentialRetry, + StorageBearerTokenCredentialPolicy, + StorageContentValidation, + StorageHeadersPolicy, + StorageHosts, + StorageLoggingPolicy, + StorageRequestHook, + StorageResponseHook, + QueueMessagePolicy, +) +from .._version import VERSION +from .response_handlers import process_storage_error, PartialBatchErrorException + + +_LOGGER = logging.getLogger(__name__) +_SERVICE_PARAMS = { + "blob": {"primary": "BLOBENDPOINT", "secondary": "BLOBSECONDARYENDPOINT"}, + "queue": {"primary": "QUEUEENDPOINT", "secondary": "QUEUESECONDARYENDPOINT"}, + "file": {"primary": "FILEENDPOINT", "secondary": "FILESECONDARYENDPOINT"}, + "dfs": {"primary": "BLOBENDPOINT", "secondary": "BLOBENDPOINT"}, +} + + +class StorageAccountHostsMixin(object): # pylint: disable=too-many-instance-attributes + def __init__( + self, + parsed_url, # type: Any + service, # type: str + credential=None, # 
type: Optional[Any] + **kwargs # type: Any + ): + # type: (...) -> None + self._location_mode = kwargs.get("_location_mode", LocationMode.PRIMARY) + self._hosts = kwargs.get("_hosts") + self.scheme = parsed_url.scheme + + if service not in ["blob", "queue", "file-share", "dfs"]: + raise ValueError("Invalid service: {}".format(service)) + service_name = service.split('-')[0] + account = parsed_url.netloc.split(".{}.core.".format(service_name)) + + self.account_name = account[0] if len(account) > 1 else None + if not self.account_name and parsed_url.netloc.startswith("localhost") \ + or parsed_url.netloc.startswith("127.0.0.1"): + self.account_name = parsed_url.path.strip("/") + + self.credential = _format_shared_key_credential(self.account_name, credential) + if self.scheme.lower() != "https" and hasattr(self.credential, "get_token"): + raise ValueError("Token credential is only supported with HTTPS.") + + secondary_hostname = None + if hasattr(self.credential, "account_name"): + self.account_name = self.credential.account_name + secondary_hostname = "{}-secondary.{}.{}".format( + self.credential.account_name, service_name, SERVICE_HOST_BASE) + + if not self._hosts: + if len(account) > 1: + secondary_hostname = parsed_url.netloc.replace(account[0], account[0] + "-secondary") + if kwargs.get("secondary_hostname"): + secondary_hostname = kwargs["secondary_hostname"] + primary_hostname = (parsed_url.netloc + parsed_url.path).rstrip('/') + self._hosts = {LocationMode.PRIMARY: primary_hostname, LocationMode.SECONDARY: secondary_hostname} + + self.require_encryption = kwargs.get("require_encryption", False) + self.key_encryption_key = kwargs.get("key_encryption_key") + self.key_resolver_function = kwargs.get("key_resolver_function") + self._config, self._pipeline = self._create_pipeline(self.credential, storage_sdk=service, **kwargs) + + def __enter__(self): + self._client.__enter__() + return self + + def __exit__(self, *args): + self._client.__exit__(*args) + + def close(self): + """ This method is to close the sockets opened by the client. + It need not be used when using with a context manager. + """ + self._client.close() + + @property + def url(self): + """The full endpoint URL to this entity, including SAS token if used. + + This could be either the primary endpoint, + or the secondary endpoint depending on the current :func:`location_mode`. + """ + return self._format_url(self._hosts[self._location_mode]) + + @property + def primary_endpoint(self): + """The full primary endpoint URL. + + :type: str + """ + return self._format_url(self._hosts[LocationMode.PRIMARY]) + + @property + def primary_hostname(self): + """The hostname of the primary endpoint. + + :type: str + """ + return self._hosts[LocationMode.PRIMARY] + + @property + def secondary_endpoint(self): + """The full secondary endpoint URL if configured. + + If not available a ValueError will be raised. To explicitly specify a secondary hostname, use the optional + `secondary_hostname` keyword argument on instantiation. + + :type: str + :raise ValueError: + """ + if not self._hosts[LocationMode.SECONDARY]: + raise ValueError("No secondary host configured.") + return self._format_url(self._hosts[LocationMode.SECONDARY]) + + @property + def secondary_hostname(self): + """The hostname of the secondary endpoint. + + If not available this will be None. To explicitly specify a secondary hostname, use the optional + `secondary_hostname` keyword argument on instantiation. 
+ + :type: str or None + """ + return self._hosts[LocationMode.SECONDARY] + + @property + def location_mode(self): + """The location mode that the client is currently using. + + By default this will be "primary". Options include "primary" and "secondary". + + :type: str + """ + + return self._location_mode + + @location_mode.setter + def location_mode(self, value): + if self._hosts.get(value): + self._location_mode = value + self._client._config.url = self.url # pylint: disable=protected-access + else: + raise ValueError("No host URL for location mode: {}".format(value)) + + @property + def api_version(self): + """The version of the Storage API used for requests. + + :type: str + """ + return self._client._config.version # pylint: disable=protected-access + + def _format_query_string(self, sas_token, credential, snapshot=None, share_snapshot=None): + query_str = "?" + if snapshot: + query_str += "snapshot={}&".format(self.snapshot) + if share_snapshot: + query_str += "sharesnapshot={}&".format(self.snapshot) + if sas_token and isinstance(credential, AzureSasCredential): + raise ValueError( + "You cannot use AzureSasCredential when the resource URI also contains a Shared Access Signature.") + if is_credential_sastoken(credential): + query_str += credential.lstrip("?") + credential = None + elif sas_token: + query_str += sas_token + return query_str.rstrip("?&"), credential + + def _create_pipeline(self, credential, **kwargs): + # type: (Any, **Any) -> Tuple[Configuration, Pipeline] + self._credential_policy = None + if hasattr(credential, "get_token"): + self._credential_policy = StorageBearerTokenCredentialPolicy(credential) + elif isinstance(credential, SharedKeyCredentialPolicy): + self._credential_policy = credential + elif isinstance(credential, AzureSasCredential): + self._credential_policy = AzureSasCredentialPolicy(credential) + elif credential is not None: + raise TypeError("Unsupported credential: {}".format(credential)) + + config = kwargs.get("_configuration") or create_configuration(**kwargs) + if kwargs.get("_pipeline"): + return config, kwargs["_pipeline"] + config.transport = kwargs.get("transport") # type: ignore + kwargs.setdefault("connection_timeout", CONNECTION_TIMEOUT) + kwargs.setdefault("read_timeout", READ_TIMEOUT) + if not config.transport: + config.transport = RequestsTransport(**kwargs) + policies = [ + QueueMessagePolicy(), + config.proxy_policy, + config.user_agent_policy, + StorageContentValidation(), + ContentDecodePolicy(response_encoding="utf-8"), + RedirectPolicy(**kwargs), + StorageHosts(hosts=self._hosts, **kwargs), + config.retry_policy, + config.headers_policy, + StorageRequestHook(**kwargs), + self._credential_policy, + config.logging_policy, + StorageResponseHook(**kwargs), + DistributedTracingPolicy(**kwargs), + HttpLoggingPolicy(**kwargs) + ] + if kwargs.get("_additional_pipeline_policies"): + policies = policies + kwargs.get("_additional_pipeline_policies") + return config, Pipeline(config.transport, policies=policies) + + def _batch_send( + self, + *reqs, # type: HttpRequest + **kwargs + ): + """Given a series of request, do a Storage batch call. 
+ """ + # Pop it here, so requests doesn't feel bad about additional kwarg + raise_on_any_failure = kwargs.pop("raise_on_any_failure", True) + batch_id = str(uuid.uuid1()) + + request = self._client._client.post( # pylint: disable=protected-access + url='{}://{}/{}?{}comp=batch{}{}'.format( + self.scheme, + self.primary_hostname, + kwargs.pop('path', ""), + kwargs.pop('restype', ""), + kwargs.pop('sas', ""), + kwargs.pop('timeout', "") + ), + headers={ + 'x-ms-version': self.api_version, + "Content-Type": "multipart/mixed; boundary=" + _get_batch_request_delimiter(batch_id, False, False) + } + ) + + policies = [StorageHeadersPolicy()] + if self._credential_policy: + policies.append(self._credential_policy) + + request.set_multipart_mixed( + *reqs, + policies=policies, + enforce_https=False + ) + + Pipeline._prepare_multipart_mixed_request(request) # pylint: disable=protected-access + body = serialize_batch_body(request.multipart_mixed_info[0], batch_id) + request.set_bytes_body(body) + + temp = request.multipart_mixed_info + request.multipart_mixed_info = None + pipeline_response = self._pipeline.run( + request, **kwargs + ) + response = pipeline_response.http_response + request.multipart_mixed_info = temp + + try: + if response.status_code not in [202]: + raise HttpResponseError(response=response) + parts = response.parts() + if raise_on_any_failure: + parts = list(response.parts()) + if any(p for p in parts if not 200 <= p.status_code < 300): + error = PartialBatchErrorException( + message="There is a partial failure in the batch operation.", + response=response, parts=parts + ) + raise error + return iter(parts) + return parts + except HttpResponseError as error: + process_storage_error(error) + +class TransportWrapper(HttpTransport): + """Wrapper class that ensures that an inner client created + by a `get_client` method does not close the outer transport for the parent + when used in a context manager. 
+ """ + def __init__(self, transport): + self._transport = transport + + def send(self, request, **kwargs): + return self._transport.send(request, **kwargs) + + def open(self): + pass + + def close(self): + pass + + def __enter__(self): + pass + + def __exit__(self, *args): # pylint: disable=arguments-differ + pass + + +def _format_shared_key_credential(account_name, credential): + if isinstance(credential, six.string_types): + if not account_name: + raise ValueError("Unable to determine account name for shared key credential.") + credential = {"account_name": account_name, "account_key": credential} + if isinstance(credential, dict): + if "account_name" not in credential: + raise ValueError("Shared key credential missing 'account_name") + if "account_key" not in credential: + raise ValueError("Shared key credential missing 'account_key") + return SharedKeyCredentialPolicy(**credential) + return credential + + +def parse_connection_str(conn_str, credential, service): + conn_str = conn_str.rstrip(";") + conn_settings = [s.split("=", 1) for s in conn_str.split(";")] + if any(len(tup) != 2 for tup in conn_settings): + raise ValueError("Connection string is either blank or malformed.") + conn_settings = dict((key.upper(), val) for key, val in conn_settings) + endpoints = _SERVICE_PARAMS[service] + primary = None + secondary = None + if not credential: + try: + credential = {"account_name": conn_settings["ACCOUNTNAME"], "account_key": conn_settings["ACCOUNTKEY"]} + except KeyError: + credential = conn_settings.get("SHAREDACCESSSIGNATURE") + if endpoints["primary"] in conn_settings: + primary = conn_settings[endpoints["primary"]] + if endpoints["secondary"] in conn_settings: + secondary = conn_settings[endpoints["secondary"]] + else: + if endpoints["secondary"] in conn_settings: + raise ValueError("Connection string specifies only secondary endpoint.") + try: + primary = "{}://{}.{}.{}".format( + conn_settings["DEFAULTENDPOINTSPROTOCOL"], + conn_settings["ACCOUNTNAME"], + service, + conn_settings["ENDPOINTSUFFIX"], + ) + secondary = "{}-secondary.{}.{}".format( + conn_settings["ACCOUNTNAME"], service, conn_settings["ENDPOINTSUFFIX"] + ) + except KeyError: + pass + + if not primary: + try: + primary = "https://{}.{}.{}".format( + conn_settings["ACCOUNTNAME"], service, conn_settings.get("ENDPOINTSUFFIX", SERVICE_HOST_BASE) + ) + except KeyError: + raise ValueError("Connection string missing required connection details.") + if service == "dfs": + primary = primary.replace(".blob.", ".dfs.") + secondary = secondary.replace(".blob.", ".dfs.") + return primary, secondary, credential + + +def create_configuration(**kwargs): + # type: (**Any) -> Configuration + config = Configuration(**kwargs) + config.headers_policy = StorageHeadersPolicy(**kwargs) + config.user_agent_policy = UserAgentPolicy( + sdk_moniker="storage-{}/{}".format(kwargs.pop('storage_sdk'), VERSION), **kwargs) + config.retry_policy = kwargs.get("retry_policy") or ExponentialRetry(**kwargs) + config.logging_policy = StorageLoggingPolicy(**kwargs) + config.proxy_policy = ProxyPolicy(**kwargs) + + # Storage settings + config.max_single_put_size = kwargs.get("max_single_put_size", 64 * 1024 * 1024) + config.copy_polling_interval = 15 + + # Block blob uploads + config.max_block_size = kwargs.get("max_block_size", 4 * 1024 * 1024) + config.min_large_block_upload_threshold = kwargs.get("min_large_block_upload_threshold", 4 * 1024 * 1024 + 1) + config.use_byte_buffer = kwargs.get("use_byte_buffer", False) + + # Page blob uploads + 
config.max_page_size = kwargs.get("max_page_size", 4 * 1024 * 1024) + + # Datalake file uploads + config.min_large_chunk_upload_threshold = kwargs.get("min_large_chunk_upload_threshold", 100 * 1024 * 1024 + 1) + + # Blob downloads + config.max_single_get_size = kwargs.get("max_single_get_size", 32 * 1024 * 1024) + config.max_chunk_get_size = kwargs.get("max_chunk_get_size", 4 * 1024 * 1024) + + # File uploads + config.max_range_size = kwargs.get("max_range_size", 4 * 1024 * 1024) + return config + + +def parse_query(query_str): + sas_values = QueryStringConstants.to_list() + parsed_query = {k: v[0] for k, v in parse_qs(query_str).items()} + sas_params = ["{}={}".format(k, quote(v, safe='')) for k, v in parsed_query.items() if k in sas_values] + sas_token = None + if sas_params: + sas_token = "&".join(sas_params) + + snapshot = parsed_query.get("snapshot") or parsed_query.get("sharesnapshot") + return snapshot, sas_token + + +def is_credential_sastoken(credential): + if not credential or not isinstance(credential, six.string_types): + return False + + sas_values = QueryStringConstants.to_list() + parsed_query = parse_qs(credential.lstrip("?")) + if parsed_query and all([k in sas_values for k in parsed_query.keys()]): + return True + return False diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/base_client_async.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/base_client_async.py new file mode 100644 index 00000000000..d8c5f430398 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/base_client_async.py @@ -0,0 +1,191 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +from typing import ( # pylint: disable=unused-import + Union, Optional, Any, Iterable, Dict, List, Type, Tuple, + TYPE_CHECKING +) +import logging + +from azure.core.credentials import AzureSasCredential +from azure.core.pipeline import AsyncPipeline +from azure.core.async_paging import AsyncList +from azure.core.exceptions import HttpResponseError +from azure.core.pipeline.policies import ( + AsyncRedirectPolicy, + AzureSasCredentialPolicy, + ContentDecodePolicy, + DistributedTracingPolicy, + HttpLoggingPolicy, +) +from azure.core.pipeline.transport import AsyncHttpTransport + +from .constants import CONNECTION_TIMEOUT, READ_TIMEOUT +from .authentication import SharedKeyCredentialPolicy +from .base_client import create_configuration +from .policies import ( + StorageContentValidation, + StorageHeadersPolicy, + StorageHosts, + StorageRequestHook, + QueueMessagePolicy +) +from .policies_async import AsyncStorageBearerTokenCredentialPolicy, AsyncStorageResponseHook + +from .response_handlers import process_storage_error, PartialBatchErrorException + +if TYPE_CHECKING: + from azure.core.pipeline import Pipeline + from azure.core.pipeline.transport import HttpRequest + from azure.core.configuration import Configuration +_LOGGER = logging.getLogger(__name__) + + +class AsyncStorageAccountHostsMixin(object): + + def __enter__(self): + raise TypeError("Async client only supports 'async with'.") + + def __exit__(self, *args): + pass + + async def __aenter__(self): + await self._client.__aenter__() + return self + + async def __aexit__(self, *args): + await self._client.__aexit__(*args) + + async def close(self): + """ This method is to close the sockets opened by the client. + It need not be used when using with a context manager. + """ + await self._client.close() + + def _create_pipeline(self, credential, **kwargs): + # type: (Any, **Any) -> Tuple[Configuration, Pipeline] + self._credential_policy = None + if hasattr(credential, 'get_token'): + self._credential_policy = AsyncStorageBearerTokenCredentialPolicy(credential) + elif isinstance(credential, SharedKeyCredentialPolicy): + self._credential_policy = credential + elif isinstance(credential, AzureSasCredential): + self._credential_policy = AzureSasCredentialPolicy(credential) + elif credential is not None: + raise TypeError("Unsupported credential: {}".format(credential)) + config = kwargs.get('_configuration') or create_configuration(**kwargs) + if kwargs.get('_pipeline'): + return config, kwargs['_pipeline'] + config.transport = kwargs.get('transport') # type: ignore + kwargs.setdefault("connection_timeout", CONNECTION_TIMEOUT) + kwargs.setdefault("read_timeout", READ_TIMEOUT) + if not config.transport: + try: + from azure.core.pipeline.transport import AioHttpTransport + except ImportError: + raise ImportError("Unable to create async transport. 
Please check aiohttp is installed.") + config.transport = AioHttpTransport(**kwargs) + policies = [ + QueueMessagePolicy(), + config.headers_policy, + config.proxy_policy, + config.user_agent_policy, + StorageContentValidation(), + StorageRequestHook(**kwargs), + self._credential_policy, + ContentDecodePolicy(response_encoding="utf-8"), + AsyncRedirectPolicy(**kwargs), + StorageHosts(hosts=self._hosts, **kwargs), # type: ignore + config.retry_policy, + config.logging_policy, + AsyncStorageResponseHook(**kwargs), + DistributedTracingPolicy(**kwargs), + HttpLoggingPolicy(**kwargs), + ] + if kwargs.get("_additional_pipeline_policies"): + policies = policies + kwargs.get("_additional_pipeline_policies") + return config, AsyncPipeline(config.transport, policies=policies) + + async def _batch_send( + self, + *reqs, # type: HttpRequest + **kwargs + ): + """Given a series of request, do a Storage batch call. + """ + # Pop it here, so requests doesn't feel bad about additional kwarg + raise_on_any_failure = kwargs.pop("raise_on_any_failure", True) + request = self._client._client.post( # pylint: disable=protected-access + url='{}://{}/{}?{}comp=batch{}{}'.format( + self.scheme, + self.primary_hostname, + kwargs.pop('path', ""), + kwargs.pop('restype', ""), + kwargs.pop('sas', ""), + kwargs.pop('timeout', "") + ), + headers={ + 'x-ms-version': self.api_version + } + ) + + policies = [StorageHeadersPolicy()] + if self._credential_policy: + policies.append(self._credential_policy) + + request.set_multipart_mixed( + *reqs, + policies=policies, + enforce_https=False + ) + + pipeline_response = await self._pipeline.run( + request, **kwargs + ) + response = pipeline_response.http_response + + try: + if response.status_code not in [202]: + raise HttpResponseError(response=response) + parts = response.parts() # Return an AsyncIterator + if raise_on_any_failure: + parts_list = [] + async for part in parts: + parts_list.append(part) + if any(p for p in parts_list if not 200 <= p.status_code < 300): + error = PartialBatchErrorException( + message="There is a partial failure in the batch operation.", + response=response, parts=parts_list + ) + raise error + return AsyncList(parts_list) + return parts + except HttpResponseError as error: + process_storage_error(error) + + +class AsyncTransportWrapper(AsyncHttpTransport): + """Wrapper class that ensures that an inner client created + by a `get_client` method does not close the outer transport for the parent + when used in a context manager. + """ + def __init__(self, async_transport): + self._transport = async_transport + + async def send(self, request, **kwargs): + return await self._transport.send(request, **kwargs) + + async def open(self): + pass + + async def close(self): + pass + + async def __aenter__(self): + pass + + async def __aexit__(self, *args): # pylint: disable=arguments-differ + pass diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/constants.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/constants.py new file mode 100644 index 00000000000..8a39d934e2a --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/constants.py @@ -0,0 +1,28 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import sys +from .._generated import AzureBlobStorage + + +X_MS_VERSION = AzureBlobStorage(url="get_api_version")._config.version # pylint: disable=protected-access + +# Socket timeout in seconds +CONNECTION_TIMEOUT = 20 +READ_TIMEOUT = 20 + +# for python 3.5+, there was a change to the definition of the socket timeout (as far as socket.sendall is concerned) +# The socket timeout is now the maximum total duration to send all data. +if sys.version_info >= (3, 5): + # the timeout to connect is 20 seconds, and the read timeout is 80000 seconds + # the 80000 seconds was calculated with: + # 4000MB (max block size)/ 50KB/s (an arbitrarily chosen minimum upload speed) + READ_TIMEOUT = 80000 + +DEFAULT_OAUTH_SCOPE = "/.default" +STORAGE_OAUTH_SCOPE = "https://storage.azure.com/.default" + +SERVICE_HOST_BASE = 'core.windows.net' diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/encryption.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/encryption.py new file mode 100644 index 00000000000..62607cc0cf8 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/encryption.py @@ -0,0 +1,542 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import os +from os import urandom +from json import ( + dumps, + loads, +) +from collections import OrderedDict + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.ciphers import Cipher +from cryptography.hazmat.primitives.ciphers.algorithms import AES +from cryptography.hazmat.primitives.ciphers.modes import CBC +from cryptography.hazmat.primitives.padding import PKCS7 + +from azure.core.exceptions import HttpResponseError + +from .._version import VERSION +from . import encode_base64, decode_base64_to_bytes + + +_ENCRYPTION_PROTOCOL_V1 = '1.0' +_ERROR_OBJECT_INVALID = \ + '{0} does not define a complete interface. Value of {1} is either missing or invalid.' + + +def _validate_not_none(param_name, param): + if param is None: + raise ValueError('{0} should not be None.'.format(param_name)) + + +def _validate_key_encryption_key_wrap(kek): + # Note that None is not callable and so will fail the second clause of each check. + if not hasattr(kek, 'wrap_key') or not callable(kek.wrap_key): + raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'wrap_key')) + if not hasattr(kek, 'get_kid') or not callable(kek.get_kid): + raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_kid')) + if not hasattr(kek, 'get_key_wrap_algorithm') or not callable(kek.get_key_wrap_algorithm): + raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_key_wrap_algorithm')) + + +class _EncryptionAlgorithm(object): + ''' + Specifies which client encryption algorithm is used. + ''' + AES_CBC_256 = 'AES_CBC_256' + + +class _WrappedContentKey: + ''' + Represents the envelope key details stored on the service. 
+ ''' + + def __init__(self, algorithm, encrypted_key, key_id): + ''' + :param str algorithm: + The algorithm used for wrapping. + :param bytes encrypted_key: + The encrypted content-encryption-key. + :param str key_id: + The key-encryption-key identifier string. + ''' + + _validate_not_none('algorithm', algorithm) + _validate_not_none('encrypted_key', encrypted_key) + _validate_not_none('key_id', key_id) + + self.algorithm = algorithm + self.encrypted_key = encrypted_key + self.key_id = key_id + + +class _EncryptionAgent: + ''' + Represents the encryption agent stored on the service. + It consists of the encryption protocol version and encryption algorithm used. + ''' + + def __init__(self, encryption_algorithm, protocol): + ''' + :param _EncryptionAlgorithm encryption_algorithm: + The algorithm used for encrypting the message contents. + :param str protocol: + The protocol version used for encryption. + ''' + + _validate_not_none('encryption_algorithm', encryption_algorithm) + _validate_not_none('protocol', protocol) + + self.encryption_algorithm = str(encryption_algorithm) + self.protocol = protocol + + +class _EncryptionData: + ''' + Represents the encryption data that is stored on the service. + ''' + + def __init__(self, content_encryption_IV, encryption_agent, wrapped_content_key, + key_wrapping_metadata): + ''' + :param bytes content_encryption_IV: + The content encryption initialization vector. + :param _EncryptionAgent encryption_agent: + The encryption agent. + :param _WrappedContentKey wrapped_content_key: + An object that stores the wrapping algorithm, the key identifier, + and the encrypted key bytes. + :param dict key_wrapping_metadata: + A dict containing metadata related to the key wrapping. + ''' + + _validate_not_none('content_encryption_IV', content_encryption_IV) + _validate_not_none('encryption_agent', encryption_agent) + _validate_not_none('wrapped_content_key', wrapped_content_key) + + self.content_encryption_IV = content_encryption_IV + self.encryption_agent = encryption_agent + self.wrapped_content_key = wrapped_content_key + self.key_wrapping_metadata = key_wrapping_metadata + + +def _generate_encryption_data_dict(kek, cek, iv): + ''' + Generates and returns the encryption metadata as a dict. + + :param object kek: The key encryption key. See calling functions for more information. + :param bytes cek: The content encryption key. + :param bytes iv: The initialization vector. + :return: A dict containing all the encryption metadata. + :rtype: dict + ''' + # Encrypt the cek. + wrapped_cek = kek.wrap_key(cek) + + # Build the encryption_data dict. + # Use OrderedDict to comply with Java's ordering requirement. 
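+ # Illustrative shape of the metadata assembled below (values abbreviated);
+ # the field names come directly from the assignments that follow:
+ #   {'WrappedContentKey': {'KeyId': ..., 'EncryptedKey': <base64>, 'Algorithm': ...},
+ #    'EncryptionAgent': {'Protocol': '1.0', 'EncryptionAlgorithm': 'AES_CBC_256'},
+ #    'ContentEncryptionIV': <base64>,
+ #    'KeyWrappingMetadata': {'EncryptionLibrary': 'Python <version>'}}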
+ wrapped_content_key = OrderedDict() + wrapped_content_key['KeyId'] = kek.get_kid() + wrapped_content_key['EncryptedKey'] = encode_base64(wrapped_cek) + wrapped_content_key['Algorithm'] = kek.get_key_wrap_algorithm() + + encryption_agent = OrderedDict() + encryption_agent['Protocol'] = _ENCRYPTION_PROTOCOL_V1 + encryption_agent['EncryptionAlgorithm'] = _EncryptionAlgorithm.AES_CBC_256 + + encryption_data_dict = OrderedDict() + encryption_data_dict['WrappedContentKey'] = wrapped_content_key + encryption_data_dict['EncryptionAgent'] = encryption_agent + encryption_data_dict['ContentEncryptionIV'] = encode_base64(iv) + encryption_data_dict['KeyWrappingMetadata'] = {'EncryptionLibrary': 'Python ' + VERSION} + + return encryption_data_dict + + +def _dict_to_encryption_data(encryption_data_dict): + ''' + Converts the specified dictionary to an EncryptionData object for + eventual use in decryption. + + :param dict encryption_data_dict: + The dictionary containing the encryption data. + :return: an _EncryptionData object built from the dictionary. + :rtype: _EncryptionData + ''' + try: + if encryption_data_dict['EncryptionAgent']['Protocol'] != _ENCRYPTION_PROTOCOL_V1: + raise ValueError("Unsupported encryption version.") + except KeyError: + raise ValueError("Unsupported encryption version.") + wrapped_content_key = encryption_data_dict['WrappedContentKey'] + wrapped_content_key = _WrappedContentKey(wrapped_content_key['Algorithm'], + decode_base64_to_bytes(wrapped_content_key['EncryptedKey']), + wrapped_content_key['KeyId']) + + encryption_agent = encryption_data_dict['EncryptionAgent'] + encryption_agent = _EncryptionAgent(encryption_agent['EncryptionAlgorithm'], + encryption_agent['Protocol']) + + if 'KeyWrappingMetadata' in encryption_data_dict: + key_wrapping_metadata = encryption_data_dict['KeyWrappingMetadata'] + else: + key_wrapping_metadata = None + + encryption_data = _EncryptionData(decode_base64_to_bytes(encryption_data_dict['ContentEncryptionIV']), + encryption_agent, + wrapped_content_key, + key_wrapping_metadata) + + return encryption_data + + +def _generate_AES_CBC_cipher(cek, iv): + ''' + Generates and returns an encryption cipher for AES CBC using the given cek and iv. + + :param bytes[] cek: The content encryption key for the cipher. + :param bytes[] iv: The initialization vector for the cipher. + :return: A cipher for encrypting in AES256 CBC. + :rtype: ~cryptography.hazmat.primitives.ciphers.Cipher + ''' + + backend = default_backend() + algorithm = AES(cek) + mode = CBC(iv) + return Cipher(algorithm, mode, backend) + + +def _validate_and_unwrap_cek(encryption_data, key_encryption_key=None, key_resolver=None): + ''' + Extracts and returns the content_encryption_key stored in the encryption_data object + and performs necessary validation on all parameters. + :param _EncryptionData encryption_data: + The encryption metadata of the retrieved value. + :param obj key_encryption_key: + The key_encryption_key used to unwrap the cek. Please refer to high-level service object + instance variables for more details. + :param func key_resolver: + A function used that, given a key_id, will return a key_encryption_key. Please refer + to high-level service object instance variables for more details. + :return: the content_encryption_key stored in the encryption_data object. 
+ :rtype: bytes[] + ''' + + _validate_not_none('content_encryption_IV', encryption_data.content_encryption_IV) + _validate_not_none('encrypted_key', encryption_data.wrapped_content_key.encrypted_key) + + if _ENCRYPTION_PROTOCOL_V1 != encryption_data.encryption_agent.protocol: + raise ValueError('Encryption version is not supported.') + + content_encryption_key = None + + # If the resolver exists, give priority to the key it finds. + if key_resolver is not None: + key_encryption_key = key_resolver(encryption_data.wrapped_content_key.key_id) + + _validate_not_none('key_encryption_key', key_encryption_key) + if not hasattr(key_encryption_key, 'get_kid') or not callable(key_encryption_key.get_kid): + raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_kid')) + if not hasattr(key_encryption_key, 'unwrap_key') or not callable(key_encryption_key.unwrap_key): + raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'unwrap_key')) + if encryption_data.wrapped_content_key.key_id != key_encryption_key.get_kid(): + raise ValueError('Provided or resolved key-encryption-key does not match the id of key used to encrypt.') + # Will throw an exception if the specified algorithm is not supported. + content_encryption_key = key_encryption_key.unwrap_key(encryption_data.wrapped_content_key.encrypted_key, + encryption_data.wrapped_content_key.algorithm) + _validate_not_none('content_encryption_key', content_encryption_key) + + return content_encryption_key + + +def _decrypt_message(message, encryption_data, key_encryption_key=None, resolver=None): + ''' + Decrypts the given ciphertext using AES256 in CBC mode with 128 bit padding. + Unwraps the content-encryption-key using the user-provided or resolved key-encryption-key (kek). + Returns the original plaintex. + + :param str message: + The ciphertext to be decrypted. + :param _EncryptionData encryption_data: + The metadata associated with this ciphertext. + :param object key_encryption_key: + The user-provided key-encryption-key. Must implement the following methods: + unwrap_key(key, algorithm) + - returns the unwrapped form of the specified symmetric key using the string-specified algorithm. + get_kid() + - returns a string key id for this key-encryption-key. + :param function resolver(kid): + The user-provided key resolver. Uses the kid string to return a key-encryption-key + implementing the interface defined above. + :return: The decrypted plaintext. + :rtype: str + ''' + _validate_not_none('message', message) + content_encryption_key = _validate_and_unwrap_cek(encryption_data, key_encryption_key, resolver) + + if _EncryptionAlgorithm.AES_CBC_256 != encryption_data.encryption_agent.encryption_algorithm: + raise ValueError('Specified encryption algorithm is not supported.') + + cipher = _generate_AES_CBC_cipher(content_encryption_key, encryption_data.content_encryption_IV) + + # decrypt data + decrypted_data = message + decryptor = cipher.decryptor() + decrypted_data = (decryptor.update(decrypted_data) + decryptor.finalize()) + + # unpad data + unpadder = PKCS7(128).unpadder() + decrypted_data = (unpadder.update(decrypted_data) + unpadder.finalize()) + + return decrypted_data + + +def encrypt_blob(blob, key_encryption_key): + ''' + Encrypts the given blob using AES256 in CBC mode with 128 bit padding. + Wraps the generated content-encryption-key using the user-provided key-encryption-key (kek). + Returns a json-formatted string containing the encryption metadata. 
This method should + only be used when a blob is small enough for single shot upload. Encrypting larger blobs + is done as a part of the upload_data_chunks method. + + :param bytes blob: + The blob to be encrypted. + :param object key_encryption_key: + The user-provided key-encryption-key. Must implement the following methods: + wrap_key(key)--wraps the specified key using an algorithm of the user's choice. + get_key_wrap_algorithm()--returns the algorithm used to wrap the specified symmetric key. + get_kid()--returns a string key id for this key-encryption-key. + :return: A tuple of json-formatted string containing the encryption metadata and the encrypted blob data. + :rtype: (str, bytes) + ''' + + _validate_not_none('blob', blob) + _validate_not_none('key_encryption_key', key_encryption_key) + _validate_key_encryption_key_wrap(key_encryption_key) + + # AES256 uses 256 bit (32 byte) keys and always with 16 byte blocks + content_encryption_key = urandom(32) + initialization_vector = urandom(16) + + cipher = _generate_AES_CBC_cipher(content_encryption_key, initialization_vector) + + # PKCS7 with 16 byte blocks ensures compatibility with AES. + padder = PKCS7(128).padder() + padded_data = padder.update(blob) + padder.finalize() + + # Encrypt the data. + encryptor = cipher.encryptor() + encrypted_data = encryptor.update(padded_data) + encryptor.finalize() + encryption_data = _generate_encryption_data_dict(key_encryption_key, content_encryption_key, + initialization_vector) + encryption_data['EncryptionMode'] = 'FullBlob' + + return dumps(encryption_data), encrypted_data + + +def generate_blob_encryption_data(key_encryption_key): + ''' + Generates the encryption_metadata for the blob. + + :param bytes key_encryption_key: + The key-encryption-key used to wrap the cek associate with this blob. + :return: A tuple containing the cek and iv for this blob as well as the + serialized encryption metadata for the blob. + :rtype: (bytes, bytes, str) + ''' + encryption_data = None + content_encryption_key = None + initialization_vector = None + if key_encryption_key: + _validate_key_encryption_key_wrap(key_encryption_key) + content_encryption_key = urandom(32) + initialization_vector = urandom(16) + encryption_data = _generate_encryption_data_dict(key_encryption_key, + content_encryption_key, + initialization_vector) + encryption_data['EncryptionMode'] = 'FullBlob' + encryption_data = dumps(encryption_data) + + return content_encryption_key, initialization_vector, encryption_data + + +def decrypt_blob(require_encryption, key_encryption_key, key_resolver, + content, start_offset, end_offset, response_headers): + ''' + Decrypts the given blob contents and returns only the requested range. + + :param bool require_encryption: + Whether or not the calling blob service requires objects to be decrypted. + :param object key_encryption_key: + The user-provided key-encryption-key. Must implement the following methods: + wrap_key(key)--wraps the specified key using an algorithm of the user's choice. + get_key_wrap_algorithm()--returns the algorithm used to wrap the specified symmetric key. + get_kid()--returns a string key id for this key-encryption-key. + :param key_resolver(kid): + The user-provided key resolver. Uses the kid string to return a key-encryption-key + implementing the interface defined above. + :return: The decrypted blob content. 
+    :rtype: bytes
+    '''
+    try:
+        encryption_data = _dict_to_encryption_data(loads(response_headers['x-ms-meta-encryptiondata']))
+    except:  # pylint: disable=bare-except
+        if require_encryption:
+            raise ValueError(
+                'Encryption required, but received data does not contain appropriate metadata.' + \
+                'Data was either not encrypted or metadata has been lost.')
+
+        return content
+
+    if encryption_data.encryption_agent.encryption_algorithm != _EncryptionAlgorithm.AES_CBC_256:
+        raise ValueError('Specified encryption algorithm is not supported.')
+
+    blob_type = response_headers['x-ms-blob-type']
+
+    iv = None
+    unpad = False
+    if 'content-range' in response_headers:
+        content_range = response_headers['content-range']
+        # Format: 'bytes x-y/size'
+
+        # Ignore the word 'bytes'
+        content_range = content_range.split(' ')
+
+        content_range = content_range[1].split('-')
+        content_range = content_range[1].split('/')
+        end_range = int(content_range[0])
+        blob_size = int(content_range[1])
+
+        if start_offset >= 16:
+            iv = content[:16]
+            content = content[16:]
+            start_offset -= 16
+        else:
+            iv = encryption_data.content_encryption_IV
+
+        if end_range == blob_size - 1:
+            unpad = True
+    else:
+        unpad = True
+        iv = encryption_data.content_encryption_IV
+
+    if blob_type == 'PageBlob':
+        unpad = False
+
+    content_encryption_key = _validate_and_unwrap_cek(encryption_data, key_encryption_key, key_resolver)
+    cipher = _generate_AES_CBC_cipher(content_encryption_key, iv)
+    decryptor = cipher.decryptor()
+
+    content = decryptor.update(content) + decryptor.finalize()
+    if unpad:
+        unpadder = PKCS7(128).unpadder()
+        content = unpadder.update(content) + unpadder.finalize()
+
+    return content[start_offset: len(content) - end_offset]
+
+
+def get_blob_encryptor_and_padder(cek, iv, should_pad):
+    encryptor = None
+    padder = None
+
+    if cek is not None and iv is not None:
+        cipher = _generate_AES_CBC_cipher(cek, iv)
+        encryptor = cipher.encryptor()
+        padder = PKCS7(128).padder() if should_pad else None
+
+    return encryptor, padder
+
+
+def encrypt_queue_message(message, key_encryption_key):
+    '''
+    Encrypts the given plain text message using AES256 in CBC mode with 128 bit padding.
+    Wraps the generated content-encryption-key using the user-provided key-encryption-key (kek).
+    Returns a json-formatted string containing the encrypted message and the encryption metadata.
+
+    :param object message:
+        The plain text message to be encrypted.
+    :param object key_encryption_key:
+        The user-provided key-encryption-key. Must implement the following methods:
+        wrap_key(key)--wraps the specified key using an algorithm of the user's choice.
+        get_key_wrap_algorithm()--returns the algorithm used to wrap the specified symmetric key.
+        get_kid()--returns a string key id for this key-encryption-key.
+    :return: A json-formatted string containing the encrypted message and the encryption metadata.
+    :rtype: str
+    '''
+
+    _validate_not_none('message', message)
+    _validate_not_none('key_encryption_key', key_encryption_key)
+    _validate_key_encryption_key_wrap(key_encryption_key)
+
+    # AES256 uses 256 bit (32 byte) keys and always with 16 byte blocks
+    content_encryption_key = os.urandom(32)
+    initialization_vector = os.urandom(16)
+
+    # Queue encoding functions all return unicode strings, and encryption should
+    # operate on binary strings.
+    message = message.encode('utf-8')
+
+    cipher = _generate_AES_CBC_cipher(content_encryption_key, initialization_vector)
+
+    # PKCS7 with 16 byte blocks ensures compatibility with AES.
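+    # For example, a 10-byte message gets six bytes of padding, each with value 0x06,
+    # so the padded plaintext length is a multiple of the 16-byte AES block size.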
+    padder = PKCS7(128).padder()
+    padded_data = padder.update(message) + padder.finalize()
+
+    # Encrypt the data.
+    encryptor = cipher.encryptor()
+    encrypted_data = encryptor.update(padded_data) + encryptor.finalize()
+
+    # Build the dictionary structure.
+    queue_message = {'EncryptedMessageContents': encode_base64(encrypted_data),
+                     'EncryptionData': _generate_encryption_data_dict(key_encryption_key,
+                                                                      content_encryption_key,
+                                                                      initialization_vector)}
+
+    return dumps(queue_message)
+
+
+def decrypt_queue_message(message, response, require_encryption, key_encryption_key, resolver):
+    '''
+    Returns the decrypted message contents from an EncryptedQueueMessage.
+    If no encryption metadata is present, will return the unaltered message.
+    :param str message:
+        The JSON formatted QueueEncryptedMessage contents with all associated metadata.
+    :param bool require_encryption:
+        If set, will enforce that the retrieved messages are encrypted and decrypt them.
+    :param object key_encryption_key:
+        The user-provided key-encryption-key. Must implement the following methods:
+        unwrap_key(key, algorithm)
+            - returns the unwrapped form of the specified symmetric key using the string-specified algorithm.
+        get_kid()
+            - returns a string key id for this key-encryption-key.
+    :param function resolver(kid):
+        The user-provided key resolver. Uses the kid string to return a key-encryption-key
+        implementing the interface defined above.
+    :return: The plain text message from the queue message.
+    :rtype: str
+    '''
+
+    try:
+        message = loads(message)
+
+        encryption_data = _dict_to_encryption_data(message['EncryptionData'])
+        decoded_data = decode_base64_to_bytes(message['EncryptedMessageContents'])
+    except (KeyError, ValueError):
+        # Message was not json formatted and so was not encrypted
+        # or the user provided a json formatted message.
+        if require_encryption:
+            raise ValueError('Message was not encrypted.')
+
+        return message
+    try:
+        return _decrypt_message(decoded_data, encryption_data, key_encryption_key, resolver).decode('utf-8')
+    except Exception as error:
+        raise HttpResponseError(
+            message="Decryption failed.",
+            response=response,
+            error=error)
diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/models.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/models.py
new file mode 100644
index 00000000000..22e7b7522ae
--- /dev/null
+++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/models.py
@@ -0,0 +1,480 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# -------------------------------------------------------------------------- +# pylint: disable=too-many-instance-attributes +from enum import Enum + + +def get_enum_value(value): + if value is None or value in ["None", ""]: + return None + try: + return value.value + except AttributeError: + return value + + +class StorageErrorCode(str, Enum): + + # Generic storage values + account_already_exists = "AccountAlreadyExists" + account_being_created = "AccountBeingCreated" + account_is_disabled = "AccountIsDisabled" + authentication_failed = "AuthenticationFailed" + authorization_failure = "AuthorizationFailure" + no_authentication_information = "NoAuthenticationInformation" + condition_headers_not_supported = "ConditionHeadersNotSupported" + condition_not_met = "ConditionNotMet" + empty_metadata_key = "EmptyMetadataKey" + insufficient_account_permissions = "InsufficientAccountPermissions" + internal_error = "InternalError" + invalid_authentication_info = "InvalidAuthenticationInfo" + invalid_header_value = "InvalidHeaderValue" + invalid_http_verb = "InvalidHttpVerb" + invalid_input = "InvalidInput" + invalid_md5 = "InvalidMd5" + invalid_metadata = "InvalidMetadata" + invalid_query_parameter_value = "InvalidQueryParameterValue" + invalid_range = "InvalidRange" + invalid_resource_name = "InvalidResourceName" + invalid_uri = "InvalidUri" + invalid_xml_document = "InvalidXmlDocument" + invalid_xml_node_value = "InvalidXmlNodeValue" + md5_mismatch = "Md5Mismatch" + metadata_too_large = "MetadataTooLarge" + missing_content_length_header = "MissingContentLengthHeader" + missing_required_query_parameter = "MissingRequiredQueryParameter" + missing_required_header = "MissingRequiredHeader" + missing_required_xml_node = "MissingRequiredXmlNode" + multiple_condition_headers_not_supported = "MultipleConditionHeadersNotSupported" + operation_timed_out = "OperationTimedOut" + out_of_range_input = "OutOfRangeInput" + out_of_range_query_parameter_value = "OutOfRangeQueryParameterValue" + request_body_too_large = "RequestBodyTooLarge" + resource_type_mismatch = "ResourceTypeMismatch" + request_url_failed_to_parse = "RequestUrlFailedToParse" + resource_already_exists = "ResourceAlreadyExists" + resource_not_found = "ResourceNotFound" + server_busy = "ServerBusy" + unsupported_header = "UnsupportedHeader" + unsupported_xml_node = "UnsupportedXmlNode" + unsupported_query_parameter = "UnsupportedQueryParameter" + unsupported_http_verb = "UnsupportedHttpVerb" + + # Blob values + append_position_condition_not_met = "AppendPositionConditionNotMet" + blob_already_exists = "BlobAlreadyExists" + blob_not_found = "BlobNotFound" + blob_overwritten = "BlobOverwritten" + blob_tier_inadequate_for_content_length = "BlobTierInadequateForContentLength" + block_count_exceeds_limit = "BlockCountExceedsLimit" + block_list_too_long = "BlockListTooLong" + cannot_change_to_lower_tier = "CannotChangeToLowerTier" + cannot_verify_copy_source = "CannotVerifyCopySource" + container_already_exists = "ContainerAlreadyExists" + container_being_deleted = "ContainerBeingDeleted" + container_disabled = "ContainerDisabled" + container_not_found = "ContainerNotFound" + content_length_larger_than_tier_limit = "ContentLengthLargerThanTierLimit" + copy_across_accounts_not_supported = "CopyAcrossAccountsNotSupported" + copy_id_mismatch = "CopyIdMismatch" + feature_version_mismatch = "FeatureVersionMismatch" + incremental_copy_blob_mismatch = "IncrementalCopyBlobMismatch" + incremental_copy_of_eralier_version_snapshot_not_allowed = 
"IncrementalCopyOfEralierVersionSnapshotNotAllowed" + incremental_copy_source_must_be_snapshot = "IncrementalCopySourceMustBeSnapshot" + infinite_lease_duration_required = "InfiniteLeaseDurationRequired" + invalid_blob_or_block = "InvalidBlobOrBlock" + invalid_blob_tier = "InvalidBlobTier" + invalid_blob_type = "InvalidBlobType" + invalid_block_id = "InvalidBlockId" + invalid_block_list = "InvalidBlockList" + invalid_operation = "InvalidOperation" + invalid_page_range = "InvalidPageRange" + invalid_source_blob_type = "InvalidSourceBlobType" + invalid_source_blob_url = "InvalidSourceBlobUrl" + invalid_version_for_page_blob_operation = "InvalidVersionForPageBlobOperation" + lease_already_present = "LeaseAlreadyPresent" + lease_already_broken = "LeaseAlreadyBroken" + lease_id_mismatch_with_blob_operation = "LeaseIdMismatchWithBlobOperation" + lease_id_mismatch_with_container_operation = "LeaseIdMismatchWithContainerOperation" + lease_id_mismatch_with_lease_operation = "LeaseIdMismatchWithLeaseOperation" + lease_id_missing = "LeaseIdMissing" + lease_is_breaking_and_cannot_be_acquired = "LeaseIsBreakingAndCannotBeAcquired" + lease_is_breaking_and_cannot_be_changed = "LeaseIsBreakingAndCannotBeChanged" + lease_is_broken_and_cannot_be_renewed = "LeaseIsBrokenAndCannotBeRenewed" + lease_lost = "LeaseLost" + lease_not_present_with_blob_operation = "LeaseNotPresentWithBlobOperation" + lease_not_present_with_container_operation = "LeaseNotPresentWithContainerOperation" + lease_not_present_with_lease_operation = "LeaseNotPresentWithLeaseOperation" + max_blob_size_condition_not_met = "MaxBlobSizeConditionNotMet" + no_pending_copy_operation = "NoPendingCopyOperation" + operation_not_allowed_on_incremental_copy_blob = "OperationNotAllowedOnIncrementalCopyBlob" + pending_copy_operation = "PendingCopyOperation" + previous_snapshot_cannot_be_newer = "PreviousSnapshotCannotBeNewer" + previous_snapshot_not_found = "PreviousSnapshotNotFound" + previous_snapshot_operation_not_supported = "PreviousSnapshotOperationNotSupported" + sequence_number_condition_not_met = "SequenceNumberConditionNotMet" + sequence_number_increment_too_large = "SequenceNumberIncrementTooLarge" + snapshot_count_exceeded = "SnapshotCountExceeded" + snaphot_operation_rate_exceeded = "SnaphotOperationRateExceeded" + snapshots_present = "SnapshotsPresent" + source_condition_not_met = "SourceConditionNotMet" + system_in_use = "SystemInUse" + target_condition_not_met = "TargetConditionNotMet" + unauthorized_blob_overwrite = "UnauthorizedBlobOverwrite" + blob_being_rehydrated = "BlobBeingRehydrated" + blob_archived = "BlobArchived" + blob_not_archived = "BlobNotArchived" + + # Queue values + invalid_marker = "InvalidMarker" + message_not_found = "MessageNotFound" + message_too_large = "MessageTooLarge" + pop_receipt_mismatch = "PopReceiptMismatch" + queue_already_exists = "QueueAlreadyExists" + queue_being_deleted = "QueueBeingDeleted" + queue_disabled = "QueueDisabled" + queue_not_empty = "QueueNotEmpty" + queue_not_found = "QueueNotFound" + + # File values + cannot_delete_file_or_directory = "CannotDeleteFileOrDirectory" + client_cache_flush_delay = "ClientCacheFlushDelay" + delete_pending = "DeletePending" + directory_not_empty = "DirectoryNotEmpty" + file_lock_conflict = "FileLockConflict" + invalid_file_or_directory_path_name = "InvalidFileOrDirectoryPathName" + parent_not_found = "ParentNotFound" + read_only_attribute = "ReadOnlyAttribute" + share_already_exists = "ShareAlreadyExists" + share_being_deleted = "ShareBeingDeleted" + 
share_disabled = "ShareDisabled" + share_not_found = "ShareNotFound" + sharing_violation = "SharingViolation" + share_snapshot_in_progress = "ShareSnapshotInProgress" + share_snapshot_count_exceeded = "ShareSnapshotCountExceeded" + share_snapshot_operation_not_supported = "ShareSnapshotOperationNotSupported" + share_has_snapshots = "ShareHasSnapshots" + container_quota_downgrade_not_allowed = "ContainerQuotaDowngradeNotAllowed" + + # DataLake values + content_length_must_be_zero = 'ContentLengthMustBeZero' + path_already_exists = 'PathAlreadyExists' + invalid_flush_position = 'InvalidFlushPosition' + invalid_property_name = 'InvalidPropertyName' + invalid_source_uri = 'InvalidSourceUri' + unsupported_rest_version = 'UnsupportedRestVersion' + file_system_not_found = 'FilesystemNotFound' + path_not_found = 'PathNotFound' + rename_destination_parent_path_not_found = 'RenameDestinationParentPathNotFound' + source_path_not_found = 'SourcePathNotFound' + destination_path_is_being_deleted = 'DestinationPathIsBeingDeleted' + file_system_already_exists = 'FilesystemAlreadyExists' + file_system_being_deleted = 'FilesystemBeingDeleted' + invalid_destination_path = 'InvalidDestinationPath' + invalid_rename_source_path = 'InvalidRenameSourcePath' + invalid_source_or_destination_resource_type = 'InvalidSourceOrDestinationResourceType' + lease_is_already_broken = 'LeaseIsAlreadyBroken' + lease_name_mismatch = 'LeaseNameMismatch' + path_conflict = 'PathConflict' + source_path_is_being_deleted = 'SourcePathIsBeingDeleted' + + +class DictMixin(object): + + def __setitem__(self, key, item): + self.__dict__[key] = item + + def __getitem__(self, key): + return self.__dict__[key] + + def __repr__(self): + return str(self) + + def __len__(self): + return len(self.keys()) + + def __delitem__(self, key): + self.__dict__[key] = None + + def __eq__(self, other): + """Compare objects by comparing all attributes.""" + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other): + """Compare objects by comparing all attributes.""" + return not self.__eq__(other) + + def __str__(self): + return str({k: v for k, v in self.__dict__.items() if not k.startswith('_')}) + + def has_key(self, k): + return k in self.__dict__ + + def update(self, *args, **kwargs): + return self.__dict__.update(*args, **kwargs) + + def keys(self): + return [k for k in self.__dict__ if not k.startswith('_')] + + def values(self): + return [v for k, v in self.__dict__.items() if not k.startswith('_')] + + def items(self): + return [(k, v) for k, v in self.__dict__.items() if not k.startswith('_')] + + def get(self, key, default=None): + if key in self.__dict__: + return self.__dict__[key] + return default + + +class LocationMode(object): + """ + Specifies the location the request should be sent to. This mode only applies + for RA-GRS accounts which allow secondary read access. All other account types + must use PRIMARY. + """ + + PRIMARY = 'primary' #: Requests should be sent to the primary location. + SECONDARY = 'secondary' #: Requests should be sent to the secondary location, if possible. + + +class ResourceTypes(object): + """ + Specifies the resource types that are accessible with the account SAS. 
+ + :param bool service: + Access to service-level APIs (e.g., Get/Set Service Properties, + Get Service Stats, List Containers/Queues/Shares) + :param bool container: + Access to container-level APIs (e.g., Create/Delete Container, + Create/Delete Queue, Create/Delete Share, + List Blobs/Files and Directories) + :param bool object: + Access to object-level APIs for blobs, queue messages, and + files(e.g. Put Blob, Query Entity, Get Messages, Create File, etc.) + """ + + def __init__(self, service=False, container=False, object=False): # pylint: disable=redefined-builtin + self.service = service + self.container = container + self.object = object + self._str = (('s' if self.service else '') + + ('c' if self.container else '') + + ('o' if self.object else '')) + + def __str__(self): + return self._str + + @classmethod + def from_string(cls, string): + """Create a ResourceTypes from a string. + + To specify service, container, or object you need only to + include the first letter of the word in the string. E.g. service and container, + you would provide a string "sc". + + :param str string: Specify service, container, or object in + in the string with the first letter of the word. + :return: A ResourceTypes object + :rtype: ~azure.storage.blob.ResourceTypes + """ + res_service = 's' in string + res_container = 'c' in string + res_object = 'o' in string + + parsed = cls(res_service, res_container, res_object) + parsed._str = string # pylint: disable = protected-access + return parsed + + +class AccountSasPermissions(object): + """ + :class:`~ResourceTypes` class to be used with generate_account_sas + function and for the AccessPolicies used with set_*_acl. There are two types of + SAS which may be used to grant resource access. One is to grant access to a + specific resource (resource-specific). Another is to grant access to the + entire service for a specific account and allow certain operations based on + perms found here. + + :param bool read: + Valid for all signed resources types (Service, Container, and Object). + Permits read permissions to the specified resource type. + :param bool write: + Valid for all signed resources types (Service, Container, and Object). + Permits write permissions to the specified resource type. + :param bool delete: + Valid for Container and Object resource types, except for queue messages. + :param bool delete_previous_version: + Delete the previous blob version for the versioning enabled storage account. + :param bool list: + Valid for Service and Container resource types only. + :param bool add: + Valid for the following Object resource types only: queue messages, and append blobs. + :param bool create: + Valid for the following Object resource types only: blobs and files. + Users can create new blobs or files, but may not overwrite existing + blobs or files. + :param bool update: + Valid for the following Object resource types only: queue messages. + :param bool process: + Valid for the following Object resource type only: queue messages. + :keyword bool tag: + To enable set or get tags on the blobs in the container. + :keyword bool filter_by_tags: + To enable get blobs by tags, this should be used together with list permission. + :keyword bool set_immutability_policy: + To enable operations related to set/delete immutability policy. + To get immutability policy, you just need read permission. + :keyword bool permanent_delete: + To enable permanent delete on the blob is permitted. + Valid for Object resource type of Blob only. 
+ """ + def __init__(self, read=False, write=False, delete=False, + list=False, # pylint: disable=redefined-builtin + add=False, create=False, update=False, process=False, delete_previous_version=False, **kwargs): + self.read = read + self.write = write + self.delete = delete + self.delete_previous_version = delete_previous_version + self.permanent_delete = kwargs.pop('permanent_delete', False) + self.list = list + self.add = add + self.create = create + self.update = update + self.process = process + self.tag = kwargs.pop('tag', False) + self.filter_by_tags = kwargs.pop('filter_by_tags', False) + self.set_immutability_policy = kwargs.pop('set_immutability_policy', False) + self._str = (('r' if self.read else '') + + ('w' if self.write else '') + + ('d' if self.delete else '') + + ('x' if self.delete_previous_version else '') + + ('y' if self.permanent_delete else '') + + ('l' if self.list else '') + + ('a' if self.add else '') + + ('c' if self.create else '') + + ('u' if self.update else '') + + ('p' if self.process else '') + + ('f' if self.filter_by_tags else '') + + ('t' if self.tag else '') + + ('i' if self.set_immutability_policy else '') + ) + + def __str__(self): + return self._str + + @classmethod + def from_string(cls, permission): + """Create AccountSasPermissions from a string. + + To specify read, write, delete, etc. permissions you need only to + include the first letter of the word in the string. E.g. for read and write + permissions you would provide a string "rw". + + :param str permission: Specify permissions in + the string with the first letter of the word. + :return: An AccountSasPermissions object + :rtype: ~azure.storage.blob.AccountSasPermissions + """ + p_read = 'r' in permission + p_write = 'w' in permission + p_delete = 'd' in permission + p_delete_previous_version = 'x' in permission + p_permanent_delete = 'y' in permission + p_list = 'l' in permission + p_add = 'a' in permission + p_create = 'c' in permission + p_update = 'u' in permission + p_process = 'p' in permission + p_tag = 't' in permission + p_filter_by_tags = 'f' in permission + p_set_immutability_policy = 'i' in permission + parsed = cls(read=p_read, write=p_write, delete=p_delete, delete_previous_version=p_delete_previous_version, + list=p_list, add=p_add, create=p_create, update=p_update, process=p_process, tag=p_tag, + filter_by_tags=p_filter_by_tags, set_immutability_policy=p_set_immutability_policy, + permanent_delete=p_permanent_delete) + + return parsed + + +class Services(object): + """Specifies the services accessible with the account SAS. + + :param bool blob: + Access for the `~azure.storage.blob.BlobServiceClient` + :param bool queue: + Access for the `~azure.storage.queue.QueueServiceClient` + :param bool fileshare: + Access for the `~azure.storage.fileshare.ShareServiceClient` + """ + + def __init__(self, blob=False, queue=False, fileshare=False): + self.blob = blob + self.queue = queue + self.fileshare = fileshare + self._str = (('b' if self.blob else '') + + ('q' if self.queue else '') + + ('f' if self.fileshare else '')) + + def __str__(self): + return self._str + + @classmethod + def from_string(cls, string): + """Create Services from a string. + + To specify blob, queue, or file you need only to + include the first letter of the word in the string. E.g. for blob and queue + you would provide a string "bq". + + :param str string: Specify blob, queue, or file in + in the string with the first letter of the word. 
+ :return: A Services object + :rtype: ~azure.storage.blob.Services + """ + res_blob = 'b' in string + res_queue = 'q' in string + res_file = 'f' in string + + parsed = cls(res_blob, res_queue, res_file) + parsed._str = string # pylint: disable = protected-access + return parsed + + +class UserDelegationKey(object): + """ + Represents a user delegation key, provided to the user by Azure Storage + based on their Azure Active Directory access token. + + The fields are saved as simple strings since the user does not have to interact with this object; + to generate an identify SAS, the user can simply pass it to the right API. + + :ivar str signed_oid: + Object ID of this token. + :ivar str signed_tid: + Tenant ID of the tenant that issued this token. + :ivar str signed_start: + The datetime this token becomes valid. + :ivar str signed_expiry: + The datetime this token expires. + :ivar str signed_service: + What service this key is valid for. + :ivar str signed_version: + The version identifier of the REST service that created this token. + :ivar str value: + The user delegation key. + """ + def __init__(self): + self.signed_oid = None + self.signed_tid = None + self.signed_start = None + self.signed_expiry = None + self.signed_service = None + self.signed_version = None + self.value = None diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/parser.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/parser.py new file mode 100644 index 00000000000..c6feba8a639 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/parser.py @@ -0,0 +1,20 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import sys + +if sys.version_info < (3,): + def _str(value): + if isinstance(value, unicode): # pylint: disable=undefined-variable + return value.encode('utf-8') + + return str(value) +else: + _str = str + + +def _to_utc_datetime(value): + return value.strftime('%Y-%m-%dT%H:%M:%SZ') diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/policies.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/policies.py new file mode 100644 index 00000000000..21c689d9cda --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/policies.py @@ -0,0 +1,657 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +import base64 +import hashlib +import re +import random +from time import time +from io import SEEK_SET, UnsupportedOperation +import logging +import uuid +from typing import Any, TYPE_CHECKING +from wsgiref.handlers import format_date_time +try: + from urllib.parse import ( + urlparse, + parse_qsl, + urlunparse, + urlencode, + ) +except ImportError: + from urllib import urlencode # type: ignore + from urlparse import ( # type: ignore + urlparse, + parse_qsl, + urlunparse, + ) + +from azure.core.pipeline.policies import ( + BearerTokenCredentialPolicy, + HeadersPolicy, + HTTPPolicy, + NetworkTraceLoggingPolicy, + RequestHistory, + SansIOHTTPPolicy, +) +from azure.core.exceptions import AzureError, ServiceRequestError, ServiceResponseError + +from .authentication import StorageHttpChallenge +from .constants import DEFAULT_OAUTH_SCOPE, STORAGE_OAUTH_SCOPE +from .models import LocationMode + +try: + _unicode_type = unicode # type: ignore +except NameError: + _unicode_type = str + +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential + from azure.core.pipeline import PipelineRequest, PipelineResponse + + +_LOGGER = logging.getLogger(__name__) + + +def encode_base64(data): + if isinstance(data, _unicode_type): + data = data.encode('utf-8') + encoded = base64.b64encode(data) + return encoded.decode('utf-8') + + +def is_exhausted(settings): + """Are we out of retries?""" + retry_counts = (settings['total'], settings['connect'], settings['read'], settings['status']) + retry_counts = list(filter(None, retry_counts)) + if not retry_counts: + return False + return min(retry_counts) < 0 + + +def retry_hook(settings, **kwargs): + if settings['hook']: + settings['hook'](retry_count=settings['count'] - 1, location_mode=settings['mode'], **kwargs) + + +def is_retry(response, mode): # pylint: disable=too-many-return-statements + """Is this method/status code retryable? (Based on allowlists and control + variables such as the number of total retries to allow, whether to + respect the Retry-After header, whether this header is present, and + whether the returned status code is on the list of status codes to + be retried upon on the presence of the aforementioned header) + """ + status = response.http_response.status_code + if 300 <= status < 500: + # An exception occured, but in most cases it was expected. Examples could + # include a 309 Conflict or 412 Precondition Failed. + if status == 404 and mode == LocationMode.SECONDARY: + # Response code 404 should be retried if secondary was used. + return True + if status == 408: + # Response code 408 is a timeout and should be retried. + return True + return False + if status >= 500: + # Response codes above 500 with the exception of 501 Not Implemented and + # 505 Version Not Supported indicate a server issue and should be retried. 
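+        # For example, 500, 502, 503 and 504 are retried, while 501 and 505 are not.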
+ if status in [501, 505]: + return False + return True + # retry if invalid content md5 + if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): + computed_md5 = response.http_request.headers.get('content-md5', None) or \ + encode_base64(StorageContentValidation.get_content_md5(response.http_response.body())) + if response.http_response.headers['content-md5'] != computed_md5: + return True + return False + + +def urljoin(base_url, stub_url): + parsed = urlparse(base_url) + parsed = parsed._replace(path=parsed.path + '/' + stub_url) + return parsed.geturl() + + +class QueueMessagePolicy(SansIOHTTPPolicy): + + def on_request(self, request): + message_id = request.context.options.pop('queue_message_id', None) + if message_id: + request.http_request.url = urljoin( + request.http_request.url, + message_id) + + +class StorageHeadersPolicy(HeadersPolicy): + request_id_header_name = 'x-ms-client-request-id' + + def on_request(self, request): + # type: (PipelineRequest, Any) -> None + super(StorageHeadersPolicy, self).on_request(request) + current_time = format_date_time(time()) + request.http_request.headers['x-ms-date'] = current_time + + custom_id = request.context.options.pop('client_request_id', None) + request.http_request.headers['x-ms-client-request-id'] = custom_id or str(uuid.uuid1()) + + # def on_response(self, request, response): + # # raise exception if the echoed client request id from the service is not identical to the one we sent + # if self.request_id_header_name in response.http_response.headers: + + # client_request_id = request.http_request.headers.get(self.request_id_header_name) + + # if response.http_response.headers[self.request_id_header_name] != client_request_id: + # raise AzureError( + # "Echoed client request ID: {} does not match sent client request ID: {}. " + # "Service request ID: {}".format( + # response.http_response.headers[self.request_id_header_name], client_request_id, + # response.http_response.headers['x-ms-request-id']), + # response=response.http_response + # ) + + +class StorageHosts(SansIOHTTPPolicy): + + def __init__(self, hosts=None, **kwargs): # pylint: disable=unused-argument + self.hosts = hosts + super(StorageHosts, self).__init__() + + def on_request(self, request): + # type: (PipelineRequest, Any) -> None + request.context.options['hosts'] = self.hosts + parsed_url = urlparse(request.http_request.url) + + # Detect what location mode we're currently requesting with + location_mode = LocationMode.PRIMARY + for key, value in self.hosts.items(): + if parsed_url.netloc == value: + location_mode = key + + # See if a specific location mode has been specified, and if so, redirect + use_location = request.context.options.pop('use_location', None) + if use_location: + # Lock retries to the specific location + request.context.options['retry_to_secondary'] = False + if use_location not in self.hosts: + raise ValueError("Attempting to use undefined host location {}".format(use_location)) + if use_location != location_mode: + # Update request URL to use the specified location + updated = parsed_url._replace(netloc=self.hosts[use_location]) + request.http_request.url = updated.geturl() + location_mode = use_location + + request.context.options['location_mode'] = location_mode + + +class StorageLoggingPolicy(NetworkTraceLoggingPolicy): + """A policy that logs HTTP request and response to the DEBUG logger. 
+ + This accepts both global configuration, and per-request level with "enable_http_logger" + """ + def __init__(self, logging_enable=False, **kwargs): + self.logging_body = kwargs.pop("logging_body", False) + super(StorageLoggingPolicy, self).__init__(logging_enable=logging_enable, **kwargs) + + def on_request(self, request): + # type: (PipelineRequest, Any) -> None + http_request = request.http_request + options = request.context.options + self.logging_body = self.logging_body or options.pop("logging_body", False) + if options.pop("logging_enable", self.enable_http_logger): + request.context["logging_enable"] = True + if not _LOGGER.isEnabledFor(logging.DEBUG): + return + + try: + log_url = http_request.url + query_params = http_request.query + if 'sig' in query_params: + log_url = log_url.replace(query_params['sig'], "sig=*****") + _LOGGER.debug("Request URL: %r", log_url) + _LOGGER.debug("Request method: %r", http_request.method) + _LOGGER.debug("Request headers:") + for header, value in http_request.headers.items(): + if header.lower() == 'authorization': + value = '*****' + elif header.lower() == 'x-ms-copy-source' and 'sig' in value: + # take the url apart and scrub away the signed signature + scheme, netloc, path, params, query, fragment = urlparse(value) + parsed_qs = dict(parse_qsl(query)) + parsed_qs['sig'] = '*****' + + # the SAS needs to be put back together + value = urlunparse((scheme, netloc, path, params, urlencode(parsed_qs), fragment)) + + _LOGGER.debug(" %r: %r", header, value) + _LOGGER.debug("Request body:") + + if self.logging_body: + _LOGGER.debug(str(http_request.body)) + else: + # We don't want to log the binary data of a file upload. + _LOGGER.debug("Hidden body, please use logging_body to show body") + except Exception as err: # pylint: disable=broad-except + _LOGGER.debug("Failed to log request: %r", err) + + def on_response(self, request, response): + # type: (PipelineRequest, PipelineResponse, Any) -> None + if response.context.pop("logging_enable", self.enable_http_logger): + if not _LOGGER.isEnabledFor(logging.DEBUG): + return + + try: + _LOGGER.debug("Response status: %r", response.http_response.status_code) + _LOGGER.debug("Response headers:") + for res_header, value in response.http_response.headers.items(): + _LOGGER.debug(" %r: %r", res_header, value) + + # We don't want to log binary data if the response is a file. 
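+                # The Content-Disposition and Content-Type headers checked below decide
+                # whether the body is reported as an attachment, binary data, an image,
+                # or logged as text.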
+ _LOGGER.debug("Response content:") + pattern = re.compile(r'attachment; ?filename=["\w.]+', re.IGNORECASE) + header = response.http_response.headers.get('content-disposition') + resp_content_type = response.http_response.headers.get("content-type", "") + + if header and pattern.match(header): + filename = header.partition('=')[2] + _LOGGER.debug("File attachments: %s", filename) + elif resp_content_type.endswith("octet-stream"): + _LOGGER.debug("Body contains binary data.") + elif resp_content_type.startswith("image"): + _LOGGER.debug("Body contains image data.") + + if self.logging_body and resp_content_type.startswith("text"): + _LOGGER.debug(response.http_response.text()) + elif self.logging_body: + try: + _LOGGER.debug(response.http_response.body()) + except ValueError: + _LOGGER.debug("Body is streamable") + + except Exception as err: # pylint: disable=broad-except + _LOGGER.debug("Failed to log response: %s", repr(err)) + + +class StorageRequestHook(SansIOHTTPPolicy): + + def __init__(self, **kwargs): # pylint: disable=unused-argument + self._request_callback = kwargs.get('raw_request_hook') + super(StorageRequestHook, self).__init__() + + def on_request(self, request): + # type: (PipelineRequest, **Any) -> PipelineResponse + request_callback = request.context.options.pop('raw_request_hook', self._request_callback) + if request_callback: + request_callback(request) + + +class StorageResponseHook(HTTPPolicy): + + def __init__(self, **kwargs): # pylint: disable=unused-argument + self._response_callback = kwargs.get('raw_response_hook') + super(StorageResponseHook, self).__init__() + + def send(self, request): + # type: (PipelineRequest) -> PipelineResponse + # Values could be 0 + data_stream_total = request.context.get('data_stream_total') + if data_stream_total is None: + data_stream_total = request.context.options.pop('data_stream_total', None) + download_stream_current = request.context.get('download_stream_current') + if download_stream_current is None: + download_stream_current = request.context.options.pop('download_stream_current', None) + upload_stream_current = request.context.get('upload_stream_current') + if upload_stream_current is None: + upload_stream_current = request.context.options.pop('upload_stream_current', None) + + response_callback = request.context.get('response_callback') or \ + request.context.options.pop('raw_response_hook', self._response_callback) + + response = self.next.send(request) + + will_retry = is_retry(response, request.context.options.get('mode')) + # Auth error could come from Bearer challenge, in which case this request will be made again + is_auth_error = response.http_response.status_code == 401 + should_update_counts = not (will_retry or is_auth_error) + + if should_update_counts and download_stream_current is not None: + download_stream_current += int(response.http_response.headers.get('Content-Length', 0)) + if data_stream_total is None: + content_range = response.http_response.headers.get('Content-Range') + if content_range: + data_stream_total = int(content_range.split(' ', 1)[1].split('/', 1)[1]) + else: + data_stream_total = download_stream_current + elif should_update_counts and upload_stream_current is not None: + upload_stream_current += int(response.http_request.headers.get('Content-Length', 0)) + for pipeline_obj in [request, response]: + pipeline_obj.context['data_stream_total'] = data_stream_total + pipeline_obj.context['download_stream_current'] = download_stream_current + pipeline_obj.context['upload_stream_current'] = 
upload_stream_current + if response_callback: + response_callback(response) + request.context['response_callback'] = response_callback + return response + + +class StorageContentValidation(SansIOHTTPPolicy): + """A simple policy that sends the given headers + with the request. + + This will overwrite any headers already defined in the request. + """ + header_name = 'Content-MD5' + + def __init__(self, **kwargs): # pylint: disable=unused-argument + super(StorageContentValidation, self).__init__() + + @staticmethod + def get_content_md5(data): + md5 = hashlib.md5() # nosec + if isinstance(data, bytes): + md5.update(data) + elif hasattr(data, 'read'): + pos = 0 + try: + pos = data.tell() + except: # pylint: disable=bare-except + pass + for chunk in iter(lambda: data.read(4096), b""): + md5.update(chunk) + try: + data.seek(pos, SEEK_SET) + except (AttributeError, IOError): + raise ValueError("Data should be bytes or a seekable file-like object.") + else: + raise ValueError("Data should be bytes or a seekable file-like object.") + + return md5.digest() + + def on_request(self, request): + # type: (PipelineRequest, Any) -> None + validate_content = request.context.options.pop('validate_content', False) + if validate_content and request.http_request.method != 'GET': + computed_md5 = encode_base64(StorageContentValidation.get_content_md5(request.http_request.data)) + request.http_request.headers[self.header_name] = computed_md5 + request.context['validate_content_md5'] = computed_md5 + request.context['validate_content'] = validate_content + + def on_response(self, request, response): + if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): + computed_md5 = request.context.get('validate_content_md5') or \ + encode_base64(StorageContentValidation.get_content_md5(response.http_response.body())) + if response.http_response.headers['content-md5'] != computed_md5: + raise AzureError( + 'MD5 mismatch. Expected value is \'{0}\', computed value is \'{1}\'.'.format( + response.http_response.headers['content-md5'], computed_md5), + response=response.http_response + ) + + +class StorageRetryPolicy(HTTPPolicy): + """ + The base class for Exponential and Linear retries containing shared code. + """ + + def __init__(self, **kwargs): + self.total_retries = kwargs.pop('retry_total', 10) + self.connect_retries = kwargs.pop('retry_connect', 3) + self.read_retries = kwargs.pop('retry_read', 3) + self.status_retries = kwargs.pop('retry_status', 3) + self.retry_to_secondary = kwargs.pop('retry_to_secondary', False) + super(StorageRetryPolicy, self).__init__() + + def _set_next_host_location(self, settings, request): # pylint: disable=no-self-use + """ + A function which sets the next host location on the request, if applicable. + + :param ~azure.storage.models.RetryContext context: + The retry context containing the previous host location and the request + to evaluate and possibly modify. 
+ """ + if settings['hosts'] and all(settings['hosts'].values()): + url = urlparse(request.url) + # If there's more than one possible location, retry to the alternative + if settings['mode'] == LocationMode.PRIMARY: + settings['mode'] = LocationMode.SECONDARY + else: + settings['mode'] = LocationMode.PRIMARY + updated = url._replace(netloc=settings['hosts'].get(settings['mode'])) + request.url = updated.geturl() + + def configure_retries(self, request): # pylint: disable=no-self-use + body_position = None + if hasattr(request.http_request.body, 'read'): + try: + body_position = request.http_request.body.tell() + except (AttributeError, UnsupportedOperation): + # if body position cannot be obtained, then retries will not work + pass + options = request.context.options + return { + 'total': options.pop("retry_total", self.total_retries), + 'connect': options.pop("retry_connect", self.connect_retries), + 'read': options.pop("retry_read", self.read_retries), + 'status': options.pop("retry_status", self.status_retries), + 'retry_secondary': options.pop("retry_to_secondary", self.retry_to_secondary), + 'mode': options.pop("location_mode", LocationMode.PRIMARY), + 'hosts': options.pop("hosts", None), + 'hook': options.pop("retry_hook", None), + 'body_position': body_position, + 'count': 0, + 'history': [] + } + + def get_backoff_time(self, settings): # pylint: disable=unused-argument,no-self-use + """ Formula for computing the current backoff. + Should be calculated by child class. + + :rtype: float + """ + return 0 + + def sleep(self, settings, transport): + backoff = self.get_backoff_time(settings) + if not backoff or backoff < 0: + return + transport.sleep(backoff) + + def increment(self, settings, request, response=None, error=None): + """Increment the retry counters. + + :param response: A pipeline response object. + :param error: An error encountered during the request, or + None if the response was received successfully. + + :return: Whether the retry attempts are exhausted. + """ + settings['total'] -= 1 + + if error and isinstance(error, ServiceRequestError): + # Errors when we're fairly sure that the server did not receive the + # request, so it should be safe to retry. + settings['connect'] -= 1 + settings['history'].append(RequestHistory(request, error=error)) + + elif error and isinstance(error, ServiceResponseError): + # Errors that occur after the request has been started, so we should + # assume that the server began processing it. 
+ settings['read'] -= 1 + settings['history'].append(RequestHistory(request, error=error)) + + else: + # Incrementing because of a server error like a 500 in + # status_forcelist and a the given method is in the allowlist + if response: + settings['status'] -= 1 + settings['history'].append(RequestHistory(request, http_response=response)) + + if not is_exhausted(settings): + if request.method not in ['PUT'] and settings['retry_secondary']: + self._set_next_host_location(settings, request) + + # rewind the request body if it is a stream + if request.body and hasattr(request.body, 'read'): + # no position was saved, then retry would not work + if settings['body_position'] is None: + return False + try: + # attempt to rewind the body to the initial position + request.body.seek(settings['body_position'], SEEK_SET) + except (UnsupportedOperation, ValueError): + # if body is not seekable, then retry would not work + return False + settings['count'] += 1 + return True + return False + + def send(self, request): + retries_remaining = True + response = None + retry_settings = self.configure_retries(request) + while retries_remaining: + try: + response = self.next.send(request) + if is_retry(response, retry_settings['mode']): + retries_remaining = self.increment( + retry_settings, + request=request.http_request, + response=response.http_response) + if retries_remaining: + retry_hook( + retry_settings, + request=request.http_request, + response=response.http_response, + error=None) + self.sleep(retry_settings, request.context.transport) + continue + break + except AzureError as err: + retries_remaining = self.increment( + retry_settings, request=request.http_request, error=err) + if retries_remaining: + retry_hook( + retry_settings, + request=request.http_request, + response=None, + error=err) + self.sleep(retry_settings, request.context.transport) + continue + raise err + if retry_settings['history']: + response.context['history'] = retry_settings['history'] + response.http_response.location_mode = retry_settings['mode'] + return response + + +class ExponentialRetry(StorageRetryPolicy): + """Exponential retry.""" + + def __init__(self, initial_backoff=15, increment_base=3, retry_total=3, + retry_to_secondary=False, random_jitter_range=3, **kwargs): + ''' + Constructs an Exponential retry object. The initial_backoff is used for + the first retry. Subsequent retries are retried after initial_backoff + + increment_power^retry_count seconds. + + :param int initial_backoff: + The initial backoff interval, in seconds, for the first retry. + :param int increment_base: + The base, in seconds, to increment the initial_backoff by after the + first retry. + :param int max_attempts: + The maximum number of retry attempts. + :param bool retry_to_secondary: + Whether the request should be retried to secondary, if able. This should + only be enabled of RA-GRS accounts are used and potentially stale data + can be handled. + :param int random_jitter_range: + A number in seconds which indicates a range to jitter/randomize for the back-off interval. + For example, a random_jitter_range of 3 results in the back-off interval x to vary between x+3 and x-3. + ''' + self.initial_backoff = initial_backoff + self.increment_base = increment_base + self.random_jitter_range = random_jitter_range + super(ExponentialRetry, self).__init__( + retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs) + + def get_backoff_time(self, settings): + """ + Calculates how long to sleep before retrying. 
+ + :return: + An integer indicating how long to wait before retrying the request, + or None to indicate no retry should be performed. + :rtype: int or None + """ + random_generator = random.Random() + backoff = self.initial_backoff + (0 if settings['count'] == 0 else pow(self.increment_base, settings['count'])) + random_range_start = backoff - self.random_jitter_range if backoff > self.random_jitter_range else 0 + random_range_end = backoff + self.random_jitter_range + return random_generator.uniform(random_range_start, random_range_end) + + +class LinearRetry(StorageRetryPolicy): + """Linear retry.""" + + def __init__(self, backoff=15, retry_total=3, retry_to_secondary=False, random_jitter_range=3, **kwargs): + """ + Constructs a Linear retry object. + + :param int backoff: + The backoff interval, in seconds, between retries. + :param int max_attempts: + The maximum number of retry attempts. + :param bool retry_to_secondary: + Whether the request should be retried to secondary, if able. This should + only be enabled of RA-GRS accounts are used and potentially stale data + can be handled. + :param int random_jitter_range: + A number in seconds which indicates a range to jitter/randomize for the back-off interval. + For example, a random_jitter_range of 3 results in the back-off interval x to vary between x+3 and x-3. + """ + self.backoff = backoff + self.random_jitter_range = random_jitter_range + super(LinearRetry, self).__init__( + retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs) + + def get_backoff_time(self, settings): + """ + Calculates how long to sleep before retrying. + + :return: + An integer indicating how long to wait before retrying the request, + or None to indicate no retry should be performed. + :rtype: int or None + """ + random_generator = random.Random() + # the backoff interval normally does not change, however there is the possibility + # that it was modified by accessing the property directly after initializing the object + random_range_start = self.backoff - self.random_jitter_range \ + if self.backoff > self.random_jitter_range else 0 + random_range_end = self.backoff + self.random_jitter_range + return random_generator.uniform(random_range_start, random_range_end) + + +class StorageBearerTokenCredentialPolicy(BearerTokenCredentialPolicy): + """ Custom Bearer token credential policy for following Storage Bearer challenges """ + + def __init__(self, credential, **kwargs): + # type: (TokenCredential, **Any) -> None + super(StorageBearerTokenCredentialPolicy, self).__init__(credential, STORAGE_OAUTH_SCOPE, **kwargs) + + def on_challenge(self, request, response): + # type: (PipelineRequest, PipelineResponse) -> bool + try: + auth_header = response.http_response.headers.get("WWW-Authenticate") + challenge = StorageHttpChallenge(auth_header) + except ValueError: + return False + + scope = challenge.resource_id + DEFAULT_OAUTH_SCOPE + self.authorize_request(request, scope, tenant_id=challenge.tenant_id) + + return True diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/policies_async.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/policies_async.py new file mode 100644 index 00000000000..b0eae9f1c42 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/policies_async.py @@ -0,0 +1,253 @@ +# 
-------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+# pylint: disable=invalid-overridden-method
+
+import asyncio
+import random
+import logging
+from typing import Any, TYPE_CHECKING
+
+from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy, AsyncHTTPPolicy
+from azure.core.exceptions import AzureError
+
+from .authentication import StorageHttpChallenge
+from .constants import DEFAULT_OAUTH_SCOPE, STORAGE_OAUTH_SCOPE
+from .policies import is_retry, StorageRetryPolicy
+
+if TYPE_CHECKING:
+    from azure.core.credentials_async import AsyncTokenCredential
+    from azure.core.pipeline import PipelineRequest, PipelineResponse
+
+
+_LOGGER = logging.getLogger(__name__)
+
+
+async def retry_hook(settings, **kwargs):
+    if settings['hook']:
+        if asyncio.iscoroutine(settings['hook']):
+            await settings['hook'](
+                retry_count=settings['count'] - 1,
+                location_mode=settings['mode'],
+                **kwargs)
+        else:
+            settings['hook'](
+                retry_count=settings['count'] - 1,
+                location_mode=settings['mode'],
+                **kwargs)
+
+
+class AsyncStorageResponseHook(AsyncHTTPPolicy):
+
+    def __init__(self, **kwargs):  # pylint: disable=unused-argument
+        self._response_callback = kwargs.get('raw_response_hook')
+        super(AsyncStorageResponseHook, self).__init__()
+
+    async def send(self, request):
+        # type: (PipelineRequest) -> PipelineResponse
+        # Values could be 0
+        data_stream_total = request.context.get('data_stream_total')
+        if data_stream_total is None:
+            data_stream_total = request.context.options.pop('data_stream_total', None)
+        download_stream_current = request.context.get('download_stream_current')
+        if download_stream_current is None:
+            download_stream_current = request.context.options.pop('download_stream_current', None)
+        upload_stream_current = request.context.get('upload_stream_current')
+        if upload_stream_current is None:
+            upload_stream_current = request.context.options.pop('upload_stream_current', None)
+
+        response_callback = request.context.get('response_callback') or \
+            request.context.options.pop('raw_response_hook', self._response_callback)
+
+        response = await self.next.send(request)
+        await response.http_response.load_body()
+
+        will_retry = is_retry(response, request.context.options.get('mode'))
+        # Auth error could come from Bearer challenge, in which case this request will be made again
+        is_auth_error = response.http_response.status_code == 401
+        should_update_counts = not (will_retry or is_auth_error)
+
+        if should_update_counts and download_stream_current is not None:
+            download_stream_current += int(response.http_response.headers.get('Content-Length', 0))
+            if data_stream_total is None:
+                content_range = response.http_response.headers.get('Content-Range')
+                if content_range:
+                    data_stream_total = int(content_range.split(' ', 1)[1].split('/', 1)[1])
+                else:
+                    data_stream_total = download_stream_current
+        elif should_update_counts and upload_stream_current is not None:
+            upload_stream_current += int(response.http_request.headers.get('Content-Length', 0))
+        for pipeline_obj in [request, response]:
+            pipeline_obj.context['data_stream_total'] = data_stream_total
+            pipeline_obj.context['download_stream_current'] = download_stream_current
+            pipeline_obj.context['upload_stream_current'] = upload_stream_current
+        if response_callback:
+            if asyncio.iscoroutine(response_callback):
+                await response_callback(response)
+            else:
+                response_callback(response)
+            request.context['response_callback'] = response_callback
+        return response
+
+class AsyncStorageRetryPolicy(StorageRetryPolicy):
+    """
+    The base class for Exponential and Linear retries containing shared code.
+    """
+
+    async def sleep(self, settings, transport):
+        backoff = self.get_backoff_time(settings)
+        if not backoff or backoff < 0:
+            return
+        await transport.sleep(backoff)
+
+    async def send(self, request):
+        retries_remaining = True
+        response = None
+        retry_settings = self.configure_retries(request)
+        while retries_remaining:
+            try:
+                response = await self.next.send(request)
+                if is_retry(response, retry_settings['mode']):
+                    retries_remaining = self.increment(
+                        retry_settings,
+                        request=request.http_request,
+                        response=response.http_response)
+                    if retries_remaining:
+                        await retry_hook(
+                            retry_settings,
+                            request=request.http_request,
+                            response=response.http_response,
+                            error=None)
+                        await self.sleep(retry_settings, request.context.transport)
+                        continue
+                break
+            except AzureError as err:
+                retries_remaining = self.increment(
+                    retry_settings, request=request.http_request, error=err)
+                if retries_remaining:
+                    await retry_hook(
+                        retry_settings,
+                        request=request.http_request,
+                        response=None,
+                        error=err)
+                    await self.sleep(retry_settings, request.context.transport)
+                    continue
+                raise err
+        if retry_settings['history']:
+            response.context['history'] = retry_settings['history']
+        response.http_response.location_mode = retry_settings['mode']
+        return response
+
+
+class ExponentialRetry(AsyncStorageRetryPolicy):
+    """Exponential retry."""
+
+    def __init__(self, initial_backoff=15, increment_base=3, retry_total=3,
+                 retry_to_secondary=False, random_jitter_range=3, **kwargs):
+        '''
+        Constructs an Exponential retry object. The initial_backoff is used for
+        the first retry. Subsequent retries are retried after initial_backoff +
+        increment_base^retry_count seconds. For example, by default the first retry
+        occurs after 15 seconds, the second after (15+3^1) = 18 seconds, and the
+        third after (15+3^2) = 24 seconds.
+
+        :param int initial_backoff:
+            The initial backoff interval, in seconds, for the first retry.
+        :param int increment_base:
+            The base, in seconds, to increment the initial_backoff by after the
+            first retry.
+        :param int retry_total:
+            The maximum number of retry attempts.
+        :param bool retry_to_secondary:
+            Whether the request should be retried to secondary, if able. This should
+            only be enabled if RA-GRS accounts are used and potentially stale data
+            can be handled.
+        :param int random_jitter_range:
+            A number in seconds which indicates a range to jitter/randomize for the back-off interval.
+            For example, a random_jitter_range of 3 results in the back-off interval x to vary between x+3 and x-3.
+        '''
+        self.initial_backoff = initial_backoff
+        self.increment_base = increment_base
+        self.random_jitter_range = random_jitter_range
+        super(ExponentialRetry, self).__init__(
+            retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
+
+    def get_backoff_time(self, settings):
+        """
+        Calculates how long to sleep before retrying.
+
+        :return:
+            An integer indicating how long to wait before retrying the request,
+            or None to indicate no retry should be performed.
+        :rtype: int or None
+        """
+        random_generator = random.Random()
+        backoff = self.initial_backoff + (0 if settings['count'] == 0 else pow(self.increment_base, settings['count']))
+        random_range_start = backoff - self.random_jitter_range if backoff > self.random_jitter_range else 0
+        random_range_end = backoff + self.random_jitter_range
+        return random_generator.uniform(random_range_start, random_range_end)
+
+
+class LinearRetry(AsyncStorageRetryPolicy):
+    """Linear retry."""
+
+    def __init__(self, backoff=15, retry_total=3, retry_to_secondary=False, random_jitter_range=3, **kwargs):
+        """
+        Constructs a Linear retry object.
+
+        :param int backoff:
+            The backoff interval, in seconds, between retries.
+        :param int retry_total:
+            The maximum number of retry attempts.
+        :param bool retry_to_secondary:
+            Whether the request should be retried to secondary, if able. This should
+            only be enabled if RA-GRS accounts are used and potentially stale data
+            can be handled.
+        :param int random_jitter_range:
+            A number in seconds which indicates a range to jitter/randomize for the back-off interval.
+            For example, a random_jitter_range of 3 results in the back-off interval x to vary between x+3 and x-3.
+        """
+        self.backoff = backoff
+        self.random_jitter_range = random_jitter_range
+        super(LinearRetry, self).__init__(
+            retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
+
+    def get_backoff_time(self, settings):
+        """
+        Calculates how long to sleep before retrying.
+
+        :return:
+            An integer indicating how long to wait before retrying the request,
+            or None to indicate no retry should be performed.
+        :rtype: int or None
+        """
+        random_generator = random.Random()
+        # the backoff interval normally does not change, however there is the possibility
+        # that it was modified by accessing the property directly after initializing the object
+        random_range_start = self.backoff - self.random_jitter_range \
+            if self.backoff > self.random_jitter_range else 0
+        random_range_end = self.backoff + self.random_jitter_range
+        return random_generator.uniform(random_range_start, random_range_end)
+
+
+class AsyncStorageBearerTokenCredentialPolicy(AsyncBearerTokenCredentialPolicy):
+    """ Custom Bearer token credential policy for following Storage Bearer challenges """
+
+    def __init__(self, credential, **kwargs):
+        # type: (AsyncTokenCredential, **Any) -> None
+        super(AsyncStorageBearerTokenCredentialPolicy, self).__init__(credential, STORAGE_OAUTH_SCOPE, **kwargs)
+
+    async def on_challenge(self, request, response):
+        # type: (PipelineRequest, PipelineResponse) -> bool
+        try:
+            auth_header = response.http_response.headers.get("WWW-Authenticate")
+            challenge = StorageHttpChallenge(auth_header)
+        except ValueError:
+            return False
+
+        scope = challenge.resource_id + DEFAULT_OAUTH_SCOPE
+        await self.authorize_request(request, scope, tenant_id=challenge.tenant_id)
+
+        return True
diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/request_handlers.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/request_handlers.py
new file mode 100644
index 00000000000..ba760434cac
--- /dev/null
+++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/request_handlers.py
@@ -0,0 +1,278 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +from typing import ( # pylint: disable=unused-import + Union, Optional, Any, Iterable, Dict, List, Type, Tuple, + TYPE_CHECKING +) + +import logging +from os import fstat +import stat +from io import (SEEK_END, SEEK_SET, UnsupportedOperation) + +import isodate + +from azure.core.exceptions import raise_with_traceback + + +_LOGGER = logging.getLogger(__name__) + +_REQUEST_DELIMITER_PREFIX = "batch_" +_HTTP1_1_IDENTIFIER = "HTTP/1.1" +_HTTP_LINE_ENDING = "\r\n" + + +def serialize_iso(attr): + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises: ValueError if format invalid. + """ + if not attr: + return None + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, + utc.tm_hour, utc.tm_min, utc.tm_sec) + return date + 'Z' + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." + raise_with_traceback(ValueError, msg, err) + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise_with_traceback(TypeError, msg, err) + + +def get_length(data): + length = None + # Check if object implements the __len__ method, covers most input cases such as bytearray. + try: + length = len(data) + except: # pylint: disable=bare-except + pass + + if not length: + # Check if the stream is a file-like stream object. + # If so, calculate the size using the file descriptor. + try: + fileno = data.fileno() + except (AttributeError, UnsupportedOperation): + pass + else: + try: + mode = fstat(fileno).st_mode + if stat.S_ISREG(mode) or stat.S_ISLNK(mode): + #st_size only meaningful if regular file or symlink, other types + # e.g. sockets may return misleading sizes like 0 + return fstat(fileno).st_size + except OSError: + # Not a valid fileno, may be possible requests returned + # a socket number? + pass + + # If the stream is seekable and tell() is implemented, calculate the stream size. 
+ try: + current_position = data.tell() + data.seek(0, SEEK_END) + length = data.tell() - current_position + data.seek(current_position, SEEK_SET) + except (AttributeError, OSError, UnsupportedOperation): + pass + + return length + + +def read_length(data): + try: + if hasattr(data, 'read'): + read_data = b'' + for chunk in iter(lambda: data.read(4096), b""): + read_data += chunk + return len(read_data), read_data + if hasattr(data, '__iter__'): + read_data = b'' + for chunk in data: + read_data += chunk + return len(read_data), read_data + except: # pylint: disable=bare-except + pass + raise ValueError("Unable to calculate content length, please specify.") + + +def validate_and_format_range_headers( + start_range, end_range, start_range_required=True, + end_range_required=True, check_content_md5=False, align_to_page=False): + # If end range is provided, start range must be provided + if (start_range_required or end_range is not None) and start_range is None: + raise ValueError("start_range value cannot be None.") + if end_range_required and end_range is None: + raise ValueError("end_range value cannot be None.") + + # Page ranges must be 512 aligned + if align_to_page: + if start_range is not None and start_range % 512 != 0: + raise ValueError("Invalid page blob start_range: {0}. " + "The size must be aligned to a 512-byte boundary.".format(start_range)) + if end_range is not None and end_range % 512 != 511: + raise ValueError("Invalid page blob end_range: {0}. " + "The size must be aligned to a 512-byte boundary.".format(end_range)) + + # Format based on whether end_range is present + range_header = None + if end_range is not None: + range_header = 'bytes={0}-{1}'.format(start_range, end_range) + elif start_range is not None: + range_header = "bytes={0}-".format(start_range) + + # Content MD5 can only be provided for a complete range less than 4MB in size + range_validation = None + if check_content_md5: + if start_range is None or end_range is None: + raise ValueError("Both start and end range requied for MD5 content validation.") + if end_range - start_range > 4 * 1024 * 1024: + raise ValueError("Getting content MD5 for a range greater than 4MB is not supported.") + range_validation = 'true' + + return range_header, range_validation + + +def add_metadata_headers(metadata=None): + # type: (Optional[Dict[str, str]]) -> Dict[str, str] + headers = {} + if metadata: + for key, value in metadata.items(): + headers['x-ms-meta-{}'.format(key.strip())] = value.strip() if value else value + return headers + + +def serialize_batch_body(requests, batch_id): + """ + -- + + -- + (repeated as needed) + ---- + + Serializes the requests in this batch to a single HTTP mixed/multipart body. + + :param list[~azure.core.pipeline.transport.HttpRequest] requests: + a list of sub-request for the batch request + :param str batch_id: + to be embedded in batch sub-request delimiter + :return: The body bytes for this batch. 
+    """
+
+    if requests is None or len(requests) == 0:
+        raise ValueError('Please provide sub-request(s) for this batch request')
+
+    delimiter_bytes = (_get_batch_request_delimiter(batch_id, True, False) + _HTTP_LINE_ENDING).encode('utf-8')
+    newline_bytes = _HTTP_LINE_ENDING.encode('utf-8')
+    batch_body = list()
+
+    content_index = 0
+    for request in requests:
+        request.headers.update({
+            "Content-ID": str(content_index),
+            "Content-Length": str(0)
+        })
+        batch_body.append(delimiter_bytes)
+        batch_body.append(_make_body_from_sub_request(request))
+        batch_body.append(newline_bytes)
+        content_index += 1
+
+    batch_body.append(_get_batch_request_delimiter(batch_id, True, True).encode('utf-8'))
+    # final line of body MUST have \r\n at the end, or it will not be properly read by the service
+    batch_body.append(newline_bytes)
+
+    return bytes().join(batch_body)
+
+
+def _get_batch_request_delimiter(batch_id, is_prepend_dashes=False, is_append_dashes=False):
+    """
+    Gets the delimiter used for this batch request's mixed/multipart HTTP format.
+
+    :param str batch_id:
+        Randomly generated id
+    :param bool is_prepend_dashes:
+        Whether to include the starting dashes. Used in the body, but not when defining the delimiter.
+    :param bool is_append_dashes:
+        Whether to include the ending dashes. Used in the body on the closing delimiter only.
+    :return: The delimiter, WITHOUT a trailing newline.
+    """
+
+    prepend_dashes = '--' if is_prepend_dashes else ''
+    append_dashes = '--' if is_append_dashes else ''
+
+    return prepend_dashes + _REQUEST_DELIMITER_PREFIX + batch_id + append_dashes
+
+
+def _make_body_from_sub_request(sub_request):
+    """
+     Content-Type: application/http
+     Content-ID:
+     Content-Transfer-Encoding: (if present)
+
+     HTTP/
+
:
(repeated as necessary) + Content-Length: + (newline if content length > 0) + (if content length > 0) + + Serializes an http request. + + :param ~azure.core.pipeline.transport.HttpRequest sub_request: + Request to serialize. + :return: The serialized sub-request in bytes + """ + + # put the sub-request's headers into a list for efficient str concatenation + sub_request_body = list() + + # get headers for ease of manipulation; remove headers as they are used + headers = sub_request.headers + + # append opening headers + sub_request_body.append("Content-Type: application/http") + sub_request_body.append(_HTTP_LINE_ENDING) + + sub_request_body.append("Content-ID: ") + sub_request_body.append(headers.pop("Content-ID", "")) + sub_request_body.append(_HTTP_LINE_ENDING) + + sub_request_body.append("Content-Transfer-Encoding: binary") + sub_request_body.append(_HTTP_LINE_ENDING) + + # append blank line + sub_request_body.append(_HTTP_LINE_ENDING) + + # append HTTP verb and path and query and HTTP version + sub_request_body.append(sub_request.method) + sub_request_body.append(' ') + sub_request_body.append(sub_request.url) + sub_request_body.append(' ') + sub_request_body.append(_HTTP1_1_IDENTIFIER) + sub_request_body.append(_HTTP_LINE_ENDING) + + # append remaining headers (this will set the Content-Length, as it was set on `sub-request`) + for header_name, header_value in headers.items(): + if header_value is not None: + sub_request_body.append(header_name) + sub_request_body.append(": ") + sub_request_body.append(header_value) + sub_request_body.append(_HTTP_LINE_ENDING) + + # append blank line + sub_request_body.append(_HTTP_LINE_ENDING) + + return ''.join(sub_request_body).encode() diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/response_handlers.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/response_handlers.py new file mode 100644 index 00000000000..4d90a17f4db --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/response_handlers.py @@ -0,0 +1,195 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from typing import ( # pylint: disable=unused-import + Union, Optional, Any, Iterable, Dict, List, Type, Tuple, + TYPE_CHECKING +) +import logging +from xml.etree.ElementTree import Element + +from azure.core.pipeline.policies import ContentDecodePolicy +from azure.core.exceptions import ( + HttpResponseError, + ResourceNotFoundError, + ResourceModifiedError, + ResourceExistsError, + ClientAuthenticationError, + DecodeError) + +from .parser import _to_utc_datetime +from .models import StorageErrorCode, UserDelegationKey, get_enum_value + +if TYPE_CHECKING: + from datetime import datetime + from azure.core.exceptions import AzureError + + +_LOGGER = logging.getLogger(__name__) + + +class PartialBatchErrorException(HttpResponseError): + """There is a partial failure in batch operations. + + :param str message: The message of the exception. + :param response: Server response to be deserialized. + :param list parts: A list of the parts in multipart response. 
+ """ + + def __init__(self, message, response, parts): + self.parts = parts + super(PartialBatchErrorException, self).__init__(message=message, response=response) + + +def parse_length_from_content_range(content_range): + ''' + Parses the blob length from the content range header: bytes 1-3/65537 + ''' + if content_range is None: + return None + + # First, split in space and take the second half: '1-3/65537' + # Next, split on slash and take the second half: '65537' + # Finally, convert to an int: 65537 + return int(content_range.split(' ', 1)[1].split('/', 1)[1]) + + +def normalize_headers(headers): + normalized = {} + for key, value in headers.items(): + if key.startswith('x-ms-'): + key = key[5:] + normalized[key.lower().replace('-', '_')] = get_enum_value(value) + return normalized + + +def deserialize_metadata(response, obj, headers): # pylint: disable=unused-argument + raw_metadata = {k: v for k, v in response.http_response.headers.items() if k.startswith("x-ms-meta-")} + return {k[10:]: v for k, v in raw_metadata.items()} + + +def return_response_headers(response, deserialized, response_headers): # pylint: disable=unused-argument + return normalize_headers(response_headers) + + +def return_headers_and_deserialized(response, deserialized, response_headers): # pylint: disable=unused-argument + return normalize_headers(response_headers), deserialized + + +def return_context_and_deserialized(response, deserialized, response_headers): # pylint: disable=unused-argument + return response.http_response.location_mode, deserialized + + +def process_storage_error(storage_error): # pylint:disable=too-many-statements + raise_error = HttpResponseError + serialized = False + if not storage_error.response: + raise storage_error + # If it is one of those three then it has been serialized prior by the generated layer. 
+ if isinstance(storage_error, (PartialBatchErrorException, + ClientAuthenticationError, ResourceNotFoundError, ResourceExistsError)): + serialized = True + error_code = storage_error.response.headers.get('x-ms-error-code') + error_message = storage_error.message + additional_data = {} + error_dict = {} + try: + error_body = ContentDecodePolicy.deserialize_from_http_generics(storage_error.response) + try: + error_body = error_body or storage_error.response.reason + except AttributeError: + error_body = '' + # If it is an XML response + if isinstance(error_body, Element): + error_dict = { + child.tag.lower(): child.text + for child in error_body + } + # If it is a JSON response + elif isinstance(error_body, dict): + error_dict = error_body.get('error', {}) + elif not error_code: + _LOGGER.warning( + 'Unexpected return type %s from ContentDecodePolicy.deserialize_from_http_generics.', type(error_body)) + error_dict = {'message': str(error_body)} + + # If we extracted from a Json or XML response + if error_dict: + error_code = error_dict.get('code') + error_message = error_dict.get('message') + additional_data = {k: v for k, v in error_dict.items() if k not in {'code', 'message'}} + except DecodeError: + pass + + try: + # This check would be unnecessary if we have already serialized the error + if error_code and not serialized: + error_code = StorageErrorCode(error_code) + if error_code in [StorageErrorCode.condition_not_met, + StorageErrorCode.blob_overwritten]: + raise_error = ResourceModifiedError + if error_code in [StorageErrorCode.invalid_authentication_info, + StorageErrorCode.authentication_failed]: + raise_error = ClientAuthenticationError + if error_code in [StorageErrorCode.resource_not_found, + StorageErrorCode.cannot_verify_copy_source, + StorageErrorCode.blob_not_found, + StorageErrorCode.queue_not_found, + StorageErrorCode.container_not_found, + StorageErrorCode.parent_not_found, + StorageErrorCode.share_not_found]: + raise_error = ResourceNotFoundError + if error_code in [StorageErrorCode.account_already_exists, + StorageErrorCode.account_being_created, + StorageErrorCode.resource_already_exists, + StorageErrorCode.resource_type_mismatch, + StorageErrorCode.blob_already_exists, + StorageErrorCode.queue_already_exists, + StorageErrorCode.container_already_exists, + StorageErrorCode.container_being_deleted, + StorageErrorCode.queue_being_deleted, + StorageErrorCode.share_already_exists, + StorageErrorCode.share_being_deleted]: + raise_error = ResourceExistsError + except ValueError: + # Got an unknown error code + pass + + # Error message should include all the error properties + try: + error_message += "\nErrorCode:{}".format(error_code.value) + except AttributeError: + error_message += "\nErrorCode:{}".format(error_code) + for name, info in additional_data.items(): + error_message += "\n{}:{}".format(name, info) + + # No need to create an instance if it has already been serialized by the generated layer + if serialized: + storage_error.message = error_message + error = storage_error + else: + error = raise_error(message=error_message, response=storage_error.response) + # Ensure these properties are stored in the error instance as well (not just the error message) + error.error_code = error_code + error.additional_info = additional_data + # error.args is what's surfaced on the traceback - show error message in all cases + error.args = (error.message,) + try: + # `from None` prevents us from double printing the exception (suppresses generated layer error context) + exec("raise 
error from None") # pylint: disable=exec-used # nosec + except SyntaxError: + raise error + + +def parse_to_internal_user_delegation_key(service_user_delegation_key): + internal_user_delegation_key = UserDelegationKey() + internal_user_delegation_key.signed_oid = service_user_delegation_key.signed_oid + internal_user_delegation_key.signed_tid = service_user_delegation_key.signed_tid + internal_user_delegation_key.signed_start = _to_utc_datetime(service_user_delegation_key.signed_start) + internal_user_delegation_key.signed_expiry = _to_utc_datetime(service_user_delegation_key.signed_expiry) + internal_user_delegation_key.signed_service = service_user_delegation_key.signed_service + internal_user_delegation_key.signed_version = service_user_delegation_key.signed_version + internal_user_delegation_key.value = service_user_delegation_key.value + return internal_user_delegation_key diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/shared_access_signature.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/shared_access_signature.py new file mode 100644 index 00000000000..d2ebfc4b809 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/shared_access_signature.py @@ -0,0 +1,230 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +from datetime import date + +from .parser import _str, _to_utc_datetime +from .constants import X_MS_VERSION +from . 
import sign_string, url_quote + + +class QueryStringConstants(object): + SIGNED_SIGNATURE = 'sig' + SIGNED_PERMISSION = 'sp' + SIGNED_START = 'st' + SIGNED_EXPIRY = 'se' + SIGNED_RESOURCE = 'sr' + SIGNED_IDENTIFIER = 'si' + SIGNED_IP = 'sip' + SIGNED_PROTOCOL = 'spr' + SIGNED_VERSION = 'sv' + SIGNED_CACHE_CONTROL = 'rscc' + SIGNED_CONTENT_DISPOSITION = 'rscd' + SIGNED_CONTENT_ENCODING = 'rsce' + SIGNED_CONTENT_LANGUAGE = 'rscl' + SIGNED_CONTENT_TYPE = 'rsct' + START_PK = 'spk' + START_RK = 'srk' + END_PK = 'epk' + END_RK = 'erk' + SIGNED_RESOURCE_TYPES = 'srt' + SIGNED_SERVICES = 'ss' + SIGNED_OID = 'skoid' + SIGNED_TID = 'sktid' + SIGNED_KEY_START = 'skt' + SIGNED_KEY_EXPIRY = 'ske' + SIGNED_KEY_SERVICE = 'sks' + SIGNED_KEY_VERSION = 'skv' + + # for blob only + SIGNED_ENCRYPTION_SCOPE = 'ses' + + # for ADLS + SIGNED_AUTHORIZED_OID = 'saoid' + SIGNED_UNAUTHORIZED_OID = 'suoid' + SIGNED_CORRELATION_ID = 'scid' + SIGNED_DIRECTORY_DEPTH = 'sdd' + + @staticmethod + def to_list(): + return [ + QueryStringConstants.SIGNED_SIGNATURE, + QueryStringConstants.SIGNED_PERMISSION, + QueryStringConstants.SIGNED_START, + QueryStringConstants.SIGNED_EXPIRY, + QueryStringConstants.SIGNED_RESOURCE, + QueryStringConstants.SIGNED_IDENTIFIER, + QueryStringConstants.SIGNED_IP, + QueryStringConstants.SIGNED_PROTOCOL, + QueryStringConstants.SIGNED_VERSION, + QueryStringConstants.SIGNED_CACHE_CONTROL, + QueryStringConstants.SIGNED_CONTENT_DISPOSITION, + QueryStringConstants.SIGNED_CONTENT_ENCODING, + QueryStringConstants.SIGNED_CONTENT_LANGUAGE, + QueryStringConstants.SIGNED_CONTENT_TYPE, + QueryStringConstants.START_PK, + QueryStringConstants.START_RK, + QueryStringConstants.END_PK, + QueryStringConstants.END_RK, + QueryStringConstants.SIGNED_RESOURCE_TYPES, + QueryStringConstants.SIGNED_SERVICES, + QueryStringConstants.SIGNED_OID, + QueryStringConstants.SIGNED_TID, + QueryStringConstants.SIGNED_KEY_START, + QueryStringConstants.SIGNED_KEY_EXPIRY, + QueryStringConstants.SIGNED_KEY_SERVICE, + QueryStringConstants.SIGNED_KEY_VERSION, + # for blob only + QueryStringConstants.SIGNED_ENCRYPTION_SCOPE, + # for ADLS + QueryStringConstants.SIGNED_AUTHORIZED_OID, + QueryStringConstants.SIGNED_UNAUTHORIZED_OID, + QueryStringConstants.SIGNED_CORRELATION_ID, + QueryStringConstants.SIGNED_DIRECTORY_DEPTH, + ] + + +class SharedAccessSignature(object): + ''' + Provides a factory for creating account access + signature tokens with an account name and account key. Users can either + use the factory or can construct the appropriate service and use the + generate_*_shared_access_signature method directly. + ''' + + def __init__(self, account_name, account_key, x_ms_version=X_MS_VERSION): + ''' + :param str account_name: + The storage account name used to generate the shared access signatures. + :param str account_key: + The access key to generate the shares access signatures. + :param str x_ms_version: + The service version used to generate the shared access signatures. + ''' + self.account_name = account_name + self.account_key = account_key + self.x_ms_version = x_ms_version + + def generate_account(self, services, resource_types, permission, expiry, start=None, + ip=None, protocol=None, **kwargs): + ''' + Generates a shared access signature for the account. + Use the returned signature with the sas_token parameter of the service + or to create a new account object. + + :param ResourceTypes resource_types: + Specifies the resource types that are accessible with the account + SAS. 
You can combine values to provide access to more than one + resource type. + :param AccountSasPermissions permission: + The permissions associated with the shared access signature. The + user is restricted to operations allowed by the permissions. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has been + specified in an associated stored access policy. You can combine + values to provide more than one permission. + :param expiry: + The time at which the shared access signature becomes invalid. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has + been specified in an associated stored access policy. Azure will always + convert values to UTC. If a date is passed in without timezone info, it + is assumed to be UTC. + :type expiry: datetime or str + :param start: + The time at which the shared access signature becomes valid. If + omitted, start time for this call is assumed to be the time when the + storage service receives the request. Azure will always convert values + to UTC. If a date is passed in without timezone info, it is assumed to + be UTC. + :type start: datetime or str + :param str ip: + Specifies an IP address or a range of IP addresses from which to accept requests. + If the IP address from which the request originates does not match the IP address + or address range specified on the SAS token, the request is not authenticated. + For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS + restricts the request to those IP addresses. + :param str protocol: + Specifies the protocol permitted for a request made. The default value + is https,http. See :class:`~azure.storage.common.models.Protocol` for possible values. 
+ ''' + sas = _SharedAccessHelper() + sas.add_base(permission, expiry, start, ip, protocol, self.x_ms_version) + sas.add_account(services, resource_types) + sas.add_encryption_scope(**kwargs) + sas.add_account_signature(self.account_name, self.account_key) + + return sas.get_token() + + +class _SharedAccessHelper(object): + def __init__(self): + self.query_dict = {} + + def _add_query(self, name, val): + if val: + self.query_dict[name] = _str(val) if val is not None else None + + def add_encryption_scope(self, **kwargs): + self._add_query(QueryStringConstants.SIGNED_ENCRYPTION_SCOPE, kwargs.pop('encryption_scope', None)) + + def add_base(self, permission, expiry, start, ip, protocol, x_ms_version): + if isinstance(start, date): + start = _to_utc_datetime(start) + + if isinstance(expiry, date): + expiry = _to_utc_datetime(expiry) + + self._add_query(QueryStringConstants.SIGNED_START, start) + self._add_query(QueryStringConstants.SIGNED_EXPIRY, expiry) + self._add_query(QueryStringConstants.SIGNED_PERMISSION, permission) + self._add_query(QueryStringConstants.SIGNED_IP, ip) + self._add_query(QueryStringConstants.SIGNED_PROTOCOL, protocol) + self._add_query(QueryStringConstants.SIGNED_VERSION, x_ms_version) + + def add_resource(self, resource): + self._add_query(QueryStringConstants.SIGNED_RESOURCE, resource) + + def add_id(self, policy_id): + self._add_query(QueryStringConstants.SIGNED_IDENTIFIER, policy_id) + + def add_account(self, services, resource_types): + self._add_query(QueryStringConstants.SIGNED_SERVICES, services) + self._add_query(QueryStringConstants.SIGNED_RESOURCE_TYPES, resource_types) + + def add_override_response_headers(self, cache_control, + content_disposition, + content_encoding, + content_language, + content_type): + self._add_query(QueryStringConstants.SIGNED_CACHE_CONTROL, cache_control) + self._add_query(QueryStringConstants.SIGNED_CONTENT_DISPOSITION, content_disposition) + self._add_query(QueryStringConstants.SIGNED_CONTENT_ENCODING, content_encoding) + self._add_query(QueryStringConstants.SIGNED_CONTENT_LANGUAGE, content_language) + self._add_query(QueryStringConstants.SIGNED_CONTENT_TYPE, content_type) + + def add_account_signature(self, account_name, account_key): + def get_value_to_append(query): + return_value = self.query_dict.get(query) or '' + return return_value + '\n' + + string_to_sign = \ + (account_name + '\n' + + get_value_to_append(QueryStringConstants.SIGNED_PERMISSION) + + get_value_to_append(QueryStringConstants.SIGNED_SERVICES) + + get_value_to_append(QueryStringConstants.SIGNED_RESOURCE_TYPES) + + get_value_to_append(QueryStringConstants.SIGNED_START) + + get_value_to_append(QueryStringConstants.SIGNED_EXPIRY) + + get_value_to_append(QueryStringConstants.SIGNED_IP) + + get_value_to_append(QueryStringConstants.SIGNED_PROTOCOL) + + get_value_to_append(QueryStringConstants.SIGNED_VERSION) + + get_value_to_append(QueryStringConstants.SIGNED_ENCRYPTION_SCOPE)) + + self._add_query(QueryStringConstants.SIGNED_SIGNATURE, + sign_string(account_key, string_to_sign)) + + def get_token(self): + return '&'.join(['{0}={1}'.format(n, url_quote(v)) for n, v in self.query_dict.items() if v is not None]) diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/uploads.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/uploads.py new file mode 100644 index 00000000000..941a90faf53 --- /dev/null +++ 
b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/uploads.py @@ -0,0 +1,603 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# pylint: disable=no-self-use + +from concurrent import futures +from io import (BytesIO, IOBase, SEEK_CUR, SEEK_END, SEEK_SET, UnsupportedOperation) +from threading import Lock +from itertools import islice +from math import ceil + +import six + +from azure.core.tracing.common import with_current_context + +from . import encode_base64, url_quote +from .request_handlers import get_length +from .response_handlers import return_response_headers +from .encryption import get_blob_encryptor_and_padder + + +_LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE = 4 * 1024 * 1024 +_ERROR_VALUE_SHOULD_BE_SEEKABLE_STREAM = "{0} should be a seekable file-like/io.IOBase type stream object." + + +def _parallel_uploads(executor, uploader, pending, running): + range_ids = [] + while True: + # Wait for some download to finish before adding a new one + done, running = futures.wait(running, return_when=futures.FIRST_COMPLETED) + range_ids.extend([chunk.result() for chunk in done]) + try: + for _ in range(0, len(done)): + next_chunk = next(pending) + running.add(executor.submit(with_current_context(uploader), next_chunk)) + except StopIteration: + break + + # Wait for the remaining uploads to finish + done, _running = futures.wait(running) + range_ids.extend([chunk.result() for chunk in done]) + return range_ids + + +def upload_data_chunks( + service=None, + uploader_class=None, + total_size=None, + chunk_size=None, + max_concurrency=None, + stream=None, + validate_content=None, + encryption_options=None, + **kwargs): + + if encryption_options: + encryptor, padder = get_blob_encryptor_and_padder( + encryption_options.get('cek'), + encryption_options.get('vector'), + uploader_class is not PageBlobChunkUploader) + kwargs['encryptor'] = encryptor + kwargs['padder'] = padder + + parallel = max_concurrency > 1 + if parallel and 'modified_access_conditions' in kwargs: + # Access conditions do not work with parallelism + kwargs['modified_access_conditions'] = None + + uploader = uploader_class( + service=service, + total_size=total_size, + chunk_size=chunk_size, + stream=stream, + parallel=parallel, + validate_content=validate_content, + **kwargs) + if parallel: + with futures.ThreadPoolExecutor(max_concurrency) as executor: + upload_tasks = uploader.get_chunk_streams() + running_futures = [ + executor.submit(with_current_context(uploader.process_chunk), u) + for u in islice(upload_tasks, 0, max_concurrency) + ] + range_ids = _parallel_uploads(executor, uploader.process_chunk, upload_tasks, running_futures) + else: + range_ids = [uploader.process_chunk(result) for result in uploader.get_chunk_streams()] + if any(range_ids): + return [r[1] for r in sorted(range_ids, key=lambda r: r[0])] + return uploader.response_headers + + +def upload_substream_blocks( + service=None, + uploader_class=None, + total_size=None, + chunk_size=None, + max_concurrency=None, + stream=None, + **kwargs): + parallel = max_concurrency > 1 + if parallel and 'modified_access_conditions' in kwargs: + # Access conditions do not work with parallelism + kwargs['modified_access_conditions'] = None + uploader = 
uploader_class( + service=service, + total_size=total_size, + chunk_size=chunk_size, + stream=stream, + parallel=parallel, + **kwargs) + + if parallel: + with futures.ThreadPoolExecutor(max_concurrency) as executor: + upload_tasks = uploader.get_substream_blocks() + running_futures = [ + executor.submit(with_current_context(uploader.process_substream_block), u) + for u in islice(upload_tasks, 0, max_concurrency) + ] + range_ids = _parallel_uploads(executor, uploader.process_substream_block, upload_tasks, running_futures) + else: + range_ids = [uploader.process_substream_block(b) for b in uploader.get_substream_blocks()] + if any(range_ids): + return sorted(range_ids) + return [] + + +class _ChunkUploader(object): # pylint: disable=too-many-instance-attributes + + def __init__(self, service, total_size, chunk_size, stream, parallel, encryptor=None, padder=None, **kwargs): + self.service = service + self.total_size = total_size + self.chunk_size = chunk_size + self.stream = stream + self.parallel = parallel + + # Stream management + self.stream_start = stream.tell() if parallel else None + self.stream_lock = Lock() if parallel else None + + # Progress feedback + self.progress_total = 0 + self.progress_lock = Lock() if parallel else None + + # Encryption + self.encryptor = encryptor + self.padder = padder + self.response_headers = None + self.etag = None + self.last_modified = None + self.request_options = kwargs + + def get_chunk_streams(self): + index = 0 + while True: + data = b"" + read_size = self.chunk_size + + # Buffer until we either reach the end of the stream or get a whole chunk. + while True: + if self.total_size: + read_size = min(self.chunk_size - len(data), self.total_size - (index + len(data))) + temp = self.stream.read(read_size) + if not isinstance(temp, six.binary_type): + raise TypeError("Blob data should be of type bytes.") + data += temp or b"" + + # We have read an empty string and so are at the end + # of the buffer or we have read a full chunk. 
+ if temp == b"" or len(data) == self.chunk_size: + break + + if len(data) == self.chunk_size: + if self.padder: + data = self.padder.update(data) + if self.encryptor: + data = self.encryptor.update(data) + yield index, data + else: + if self.padder: + data = self.padder.update(data) + self.padder.finalize() + if self.encryptor: + data = self.encryptor.update(data) + self.encryptor.finalize() + if data: + yield index, data + break + index += len(data) + + def process_chunk(self, chunk_data): + chunk_bytes = chunk_data[1] + chunk_offset = chunk_data[0] + return self._upload_chunk_with_progress(chunk_offset, chunk_bytes) + + def _update_progress(self, length): + if self.progress_lock is not None: + with self.progress_lock: + self.progress_total += length + else: + self.progress_total += length + + def _upload_chunk(self, chunk_offset, chunk_data): + raise NotImplementedError("Must be implemented by child class.") + + def _upload_chunk_with_progress(self, chunk_offset, chunk_data): + range_id = self._upload_chunk(chunk_offset, chunk_data) + self._update_progress(len(chunk_data)) + return range_id + + def get_substream_blocks(self): + assert self.chunk_size is not None + lock = self.stream_lock + blob_length = self.total_size + + if blob_length is None: + blob_length = get_length(self.stream) + if blob_length is None: + raise ValueError("Unable to determine content length of upload data.") + + blocks = int(ceil(blob_length / (self.chunk_size * 1.0))) + last_block_size = self.chunk_size if blob_length % self.chunk_size == 0 else blob_length % self.chunk_size + + for i in range(blocks): + index = i * self.chunk_size + length = last_block_size if i == blocks - 1 else self.chunk_size + yield index, SubStream(self.stream, index, length, lock) + + def process_substream_block(self, block_data): + return self._upload_substream_block_with_progress(block_data[0], block_data[1]) + + def _upload_substream_block(self, index, block_stream): + raise NotImplementedError("Must be implemented by child class.") + + def _upload_substream_block_with_progress(self, index, block_stream): + range_id = self._upload_substream_block(index, block_stream) + self._update_progress(len(block_stream)) + return range_id + + def set_response_properties(self, resp): + self.etag = resp.etag + self.last_modified = resp.last_modified + + +class BlockBlobChunkUploader(_ChunkUploader): + + def __init__(self, *args, **kwargs): + kwargs.pop("modified_access_conditions", None) + super(BlockBlobChunkUploader, self).__init__(*args, **kwargs) + self.current_length = None + + def _upload_chunk(self, chunk_offset, chunk_data): + # TODO: This is incorrect, but works with recording. 
+ index = '{0:032d}'.format(chunk_offset) + block_id = encode_base64(url_quote(encode_base64(index))) + self.service.stage_block( + block_id, + len(chunk_data), + chunk_data, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + return index, block_id + + def _upload_substream_block(self, index, block_stream): + try: + block_id = 'BlockId{}'.format("%05d" % (index/self.chunk_size)) + self.service.stage_block( + block_id, + len(block_stream), + block_stream, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + finally: + block_stream.close() + return block_id + + +class PageBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method + + def _is_chunk_empty(self, chunk_data): + # read until non-zero byte is encountered + # if reached the end without returning, then chunk_data is all 0's + return not any(bytearray(chunk_data)) + + def _upload_chunk(self, chunk_offset, chunk_data): + # avoid uploading the empty pages + if not self._is_chunk_empty(chunk_data): + chunk_end = chunk_offset + len(chunk_data) - 1 + content_range = "bytes={0}-{1}".format(chunk_offset, chunk_end) + computed_md5 = None + self.response_headers = self.service.upload_pages( + body=chunk_data, + content_length=len(chunk_data), + transactional_content_md5=computed_md5, + range=content_range, + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + + if not self.parallel and self.request_options.get('modified_access_conditions'): + self.request_options['modified_access_conditions'].if_match = self.response_headers['etag'] + + def _upload_substream_block(self, index, block_stream): + pass + + +class AppendBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method + + def __init__(self, *args, **kwargs): + super(AppendBlobChunkUploader, self).__init__(*args, **kwargs) + self.current_length = None + + def _upload_chunk(self, chunk_offset, chunk_data): + if self.current_length is None: + self.response_headers = self.service.append_block( + body=chunk_data, + content_length=len(chunk_data), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + self.current_length = int(self.response_headers["blob_append_offset"]) + else: + self.request_options['append_position_access_conditions'].append_position = \ + self.current_length + chunk_offset + self.response_headers = self.service.append_block( + body=chunk_data, + content_length=len(chunk_data), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + + def _upload_substream_block(self, index, block_stream): + pass + + +class DataLakeFileChunkUploader(_ChunkUploader): # pylint: disable=abstract-method + + def _upload_chunk(self, chunk_offset, chunk_data): + # avoid uploading the empty pages + self.response_headers = self.service.append_data( + body=chunk_data, + position=chunk_offset, + content_length=len(chunk_data), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + + if not self.parallel and self.request_options.get('modified_access_conditions'): + self.request_options['modified_access_conditions'].if_match = self.response_headers['etag'] + + def _upload_substream_block(self, index, block_stream): 
+ try: + self.service.append_data( + body=block_stream, + position=index, + content_length=len(block_stream), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + finally: + block_stream.close() + + +class FileChunkUploader(_ChunkUploader): # pylint: disable=abstract-method + + def _upload_chunk(self, chunk_offset, chunk_data): + length = len(chunk_data) + chunk_end = chunk_offset + length - 1 + response = self.service.upload_range( + chunk_data, + chunk_offset, + length, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + return 'bytes={0}-{1}'.format(chunk_offset, chunk_end), response + + # TODO: Implement this method. + def _upload_substream_block(self, index, block_stream): + pass + + +class SubStream(IOBase): + + def __init__(self, wrapped_stream, stream_begin_index, length, lockObj): + # Python 2.7: file-like objects created with open() typically support seek(), but are not + # derivations of io.IOBase and thus do not implement seekable(). + # Python > 3.0: file-like objects created with open() are derived from io.IOBase. + try: + # only the main thread runs this, so there's no need grabbing the lock + wrapped_stream.seek(0, SEEK_CUR) + except: + raise ValueError("Wrapped stream must support seek().") + + self._lock = lockObj + self._wrapped_stream = wrapped_stream + self._position = 0 + self._stream_begin_index = stream_begin_index + self._length = length + self._buffer = BytesIO() + + # we must avoid buffering more than necessary, and also not use up too much memory + # so the max buffer size is capped at 4MB + self._max_buffer_size = ( + length if length < _LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE else _LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE + ) + self._current_buffer_start = 0 + self._current_buffer_size = 0 + super(SubStream, self).__init__() + + def __len__(self): + return self._length + + def close(self): + if self._buffer: + self._buffer.close() + self._wrapped_stream = None + IOBase.close(self) + + def fileno(self): + return self._wrapped_stream.fileno() + + def flush(self): + pass + + def read(self, size=None): + if self.closed: # pylint: disable=using-constant-test + raise ValueError("Stream is closed.") + + if size is None: + size = self._length - self._position + + # adjust if out of bounds + if size + self._position >= self._length: + size = self._length - self._position + + # return fast + if size == 0 or self._buffer.closed: + return b"" + + # attempt first read from the read buffer and update position + read_buffer = self._buffer.read(size) + bytes_read = len(read_buffer) + bytes_remaining = size - bytes_read + self._position += bytes_read + + # repopulate the read buffer from the underlying stream to fulfill the request + # ensure the seek and read operations are done atomically (only if a lock is provided) + if bytes_remaining > 0: + with self._buffer: + # either read in the max buffer size specified on the class + # or read in just enough data for the current block/sub stream + current_max_buffer_size = min(self._max_buffer_size, self._length - self._position) + + # lock is only defined if max_concurrency > 1 (parallel uploads) + if self._lock: + with self._lock: + # reposition the underlying stream to match the start of the data to read + absolute_position = self._stream_begin_index + self._position + self._wrapped_stream.seek(absolute_position, SEEK_SET) + # If we can't seek to the right location, our read will be 
corrupted so fail fast. + if self._wrapped_stream.tell() != absolute_position: + raise IOError("Stream failed to seek to the desired location.") + buffer_from_stream = self._wrapped_stream.read(current_max_buffer_size) + else: + absolute_position = self._stream_begin_index + self._position + # It's possible that there's connection problem during data transfer, + # so when we retry we don't want to read from current position of wrapped stream, + # instead we should seek to where we want to read from. + if self._wrapped_stream.tell() != absolute_position: + self._wrapped_stream.seek(absolute_position, SEEK_SET) + + buffer_from_stream = self._wrapped_stream.read(current_max_buffer_size) + + if buffer_from_stream: + # update the buffer with new data from the wrapped stream + # we need to note down the start position and size of the buffer, in case seek is performed later + self._buffer = BytesIO(buffer_from_stream) + self._current_buffer_start = self._position + self._current_buffer_size = len(buffer_from_stream) + + # read the remaining bytes from the new buffer and update position + second_read_buffer = self._buffer.read(bytes_remaining) + read_buffer += second_read_buffer + self._position += len(second_read_buffer) + + return read_buffer + + def readable(self): + return True + + def readinto(self, b): + raise UnsupportedOperation + + def seek(self, offset, whence=0): + if whence is SEEK_SET: + start_index = 0 + elif whence is SEEK_CUR: + start_index = self._position + elif whence is SEEK_END: + start_index = self._length + offset = -offset + else: + raise ValueError("Invalid argument for the 'whence' parameter.") + + pos = start_index + offset + + if pos > self._length: + pos = self._length + elif pos < 0: + pos = 0 + + # check if buffer is still valid + # if not, drop buffer + if pos < self._current_buffer_start or pos >= self._current_buffer_start + self._current_buffer_size: + self._buffer.close() + self._buffer = BytesIO() + else: # if yes seek to correct position + delta = pos - self._current_buffer_start + self._buffer.seek(delta, SEEK_SET) + + self._position = pos + return pos + + def seekable(self): + return True + + def tell(self): + return self._position + + def write(self): + raise UnsupportedOperation + + def writelines(self): + raise UnsupportedOperation + + def writeable(self): + return False + + +class IterStreamer(object): + """ + File-like streaming iterator. + """ + + def __init__(self, generator, encoding="UTF-8"): + self.generator = generator + self.iterator = iter(generator) + self.leftover = b"" + self.encoding = encoding + + def __len__(self): + return self.generator.__len__() + + def __iter__(self): + return self.iterator + + def seekable(self): + return False + + def __next__(self): + return next(self.iterator) + + next = __next__ # Python 2 compatibility. + + def tell(self, *args, **kwargs): + raise UnsupportedOperation("Data generator does not support tell.") + + def seek(self, *args, **kwargs): + raise UnsupportedOperation("Data generator is unseekable.") + + def read(self, size): + data = self.leftover + count = len(self.leftover) + try: + while count < size: + chunk = self.__next__() + if isinstance(chunk, six.text_type): + chunk = chunk.encode(self.encoding) + data += chunk + count += len(chunk) + # This means count < size and what's leftover will be returned in this call. 
+ except StopIteration: + self.leftover = b"" + + if count >= size: + self.leftover = data[size:] + + return data[:size] diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/uploads_async.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/uploads_async.py new file mode 100644 index 00000000000..5ed192b3659 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared/uploads_async.py @@ -0,0 +1,395 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# pylint: disable=no-self-use + +import asyncio +from asyncio import Lock +from itertools import islice +import threading + +from math import ceil + +import six + +from . import encode_base64, url_quote +from .request_handlers import get_length +from .response_handlers import return_response_headers +from .encryption import get_blob_encryptor_and_padder +from .uploads import SubStream, IterStreamer # pylint: disable=unused-import + + +_LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE = 4 * 1024 * 1024 +_ERROR_VALUE_SHOULD_BE_SEEKABLE_STREAM = '{0} should be a seekable file-like/io.IOBase type stream object.' + + +async def _parallel_uploads(uploader, pending, running): + range_ids = [] + while True: + # Wait for some download to finish before adding a new one + done, running = await asyncio.wait(running, return_when=asyncio.FIRST_COMPLETED) + range_ids.extend([chunk.result() for chunk in done]) + try: + for _ in range(0, len(done)): + next_chunk = next(pending) + running.add(asyncio.ensure_future(uploader(next_chunk))) + except StopIteration: + break + + # Wait for the remaining uploads to finish + if running: + done, _running = await asyncio.wait(running) + range_ids.extend([chunk.result() for chunk in done]) + return range_ids + + +async def upload_data_chunks( + service=None, + uploader_class=None, + total_size=None, + chunk_size=None, + max_concurrency=None, + stream=None, + encryption_options=None, + **kwargs): + + if encryption_options: + encryptor, padder = get_blob_encryptor_and_padder( + encryption_options.get('cek'), + encryption_options.get('vector'), + uploader_class is not PageBlobChunkUploader) + kwargs['encryptor'] = encryptor + kwargs['padder'] = padder + + parallel = max_concurrency > 1 + if parallel and 'modified_access_conditions' in kwargs: + # Access conditions do not work with parallelism + kwargs['modified_access_conditions'] = None + + uploader = uploader_class( + service=service, + total_size=total_size, + chunk_size=chunk_size, + stream=stream, + parallel=parallel, + **kwargs) + + if parallel: + upload_tasks = uploader.get_chunk_streams() + running_futures = [ + asyncio.ensure_future(uploader.process_chunk(u)) + for u in islice(upload_tasks, 0, max_concurrency) + ] + range_ids = await _parallel_uploads(uploader.process_chunk, upload_tasks, running_futures) + else: + range_ids = [] + for chunk in uploader.get_chunk_streams(): + range_ids.append(await uploader.process_chunk(chunk)) + + if any(range_ids): + return [r[1] for r in sorted(range_ids, key=lambda r: r[0])] + return uploader.response_headers + + +async def upload_substream_blocks( + service=None, + uploader_class=None, + 
total_size=None, + chunk_size=None, + max_concurrency=None, + stream=None, + **kwargs): + parallel = max_concurrency > 1 + if parallel and 'modified_access_conditions' in kwargs: + # Access conditions do not work with parallelism + kwargs['modified_access_conditions'] = None + uploader = uploader_class( + service=service, + total_size=total_size, + chunk_size=chunk_size, + stream=stream, + parallel=parallel, + **kwargs) + + if parallel: + upload_tasks = uploader.get_substream_blocks() + running_futures = [ + asyncio.ensure_future(uploader.process_substream_block(u)) + for u in islice(upload_tasks, 0, max_concurrency) + ] + range_ids = await _parallel_uploads(uploader.process_substream_block, upload_tasks, running_futures) + else: + range_ids = [] + for block in uploader.get_substream_blocks(): + range_ids.append(await uploader.process_substream_block(block)) + if any(range_ids): + return sorted(range_ids) + return + + +class _ChunkUploader(object): # pylint: disable=too-many-instance-attributes + + def __init__(self, service, total_size, chunk_size, stream, parallel, encryptor=None, padder=None, **kwargs): + self.service = service + self.total_size = total_size + self.chunk_size = chunk_size + self.stream = stream + self.parallel = parallel + + # Stream management + self.stream_start = stream.tell() if parallel else None + self.stream_lock = threading.Lock() if parallel else None + + # Progress feedback + self.progress_total = 0 + self.progress_lock = Lock() if parallel else None + + # Encryption + self.encryptor = encryptor + self.padder = padder + self.response_headers = None + self.etag = None + self.last_modified = None + self.request_options = kwargs + + def get_chunk_streams(self): + index = 0 + while True: + data = b'' + read_size = self.chunk_size + + # Buffer until we either reach the end of the stream or get a whole chunk. + while True: + if self.total_size: + read_size = min(self.chunk_size - len(data), self.total_size - (index + len(data))) + temp = self.stream.read(read_size) + if not isinstance(temp, six.binary_type): + raise TypeError('Blob data should be of type bytes.') + data += temp or b"" + + # We have read an empty string and so are at the end + # of the buffer or we have read a full chunk. 
+ if temp == b'' or len(data) == self.chunk_size: + break + + if len(data) == self.chunk_size: + if self.padder: + data = self.padder.update(data) + if self.encryptor: + data = self.encryptor.update(data) + yield index, data + else: + if self.padder: + data = self.padder.update(data) + self.padder.finalize() + if self.encryptor: + data = self.encryptor.update(data) + self.encryptor.finalize() + if data: + yield index, data + break + index += len(data) + + async def process_chunk(self, chunk_data): + chunk_bytes = chunk_data[1] + chunk_offset = chunk_data[0] + return await self._upload_chunk_with_progress(chunk_offset, chunk_bytes) + + async def _update_progress(self, length): + if self.progress_lock is not None: + async with self.progress_lock: + self.progress_total += length + else: + self.progress_total += length + + async def _upload_chunk(self, chunk_offset, chunk_data): + raise NotImplementedError("Must be implemented by child class.") + + async def _upload_chunk_with_progress(self, chunk_offset, chunk_data): + range_id = await self._upload_chunk(chunk_offset, chunk_data) + await self._update_progress(len(chunk_data)) + return range_id + + def get_substream_blocks(self): + assert self.chunk_size is not None + lock = self.stream_lock + blob_length = self.total_size + + if blob_length is None: + blob_length = get_length(self.stream) + if blob_length is None: + raise ValueError("Unable to determine content length of upload data.") + + blocks = int(ceil(blob_length / (self.chunk_size * 1.0))) + last_block_size = self.chunk_size if blob_length % self.chunk_size == 0 else blob_length % self.chunk_size + + for i in range(blocks): + index = i * self.chunk_size + length = last_block_size if i == blocks - 1 else self.chunk_size + yield index, SubStream(self.stream, index, length, lock) + + async def process_substream_block(self, block_data): + return await self._upload_substream_block_with_progress(block_data[0], block_data[1]) + + async def _upload_substream_block(self, index, block_stream): + raise NotImplementedError("Must be implemented by child class.") + + async def _upload_substream_block_with_progress(self, index, block_stream): + range_id = await self._upload_substream_block(index, block_stream) + await self._update_progress(len(block_stream)) + return range_id + + def set_response_properties(self, resp): + self.etag = resp.etag + self.last_modified = resp.last_modified + + +class BlockBlobChunkUploader(_ChunkUploader): + + def __init__(self, *args, **kwargs): + kwargs.pop('modified_access_conditions', None) + super(BlockBlobChunkUploader, self).__init__(*args, **kwargs) + self.current_length = None + + async def _upload_chunk(self, chunk_offset, chunk_data): + # TODO: This is incorrect, but works with recording. 
+ index = '{0:032d}'.format(chunk_offset) + block_id = encode_base64(url_quote(encode_base64(index))) + await self.service.stage_block( + block_id, + len(chunk_data), + body=chunk_data, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options) + return index, block_id + + async def _upload_substream_block(self, index, block_stream): + try: + block_id = 'BlockId{}'.format("%05d" % (index/self.chunk_size)) + await self.service.stage_block( + block_id, + len(block_stream), + block_stream, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options) + finally: + block_stream.close() + return block_id + + +class PageBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method + + def _is_chunk_empty(self, chunk_data): + # read until non-zero byte is encountered + # if reached the end without returning, then chunk_data is all 0's + for each_byte in chunk_data: + if each_byte not in [0, b'\x00']: + return False + return True + + async def _upload_chunk(self, chunk_offset, chunk_data): + # avoid uploading the empty pages + if not self._is_chunk_empty(chunk_data): + chunk_end = chunk_offset + len(chunk_data) - 1 + content_range = 'bytes={0}-{1}'.format(chunk_offset, chunk_end) + computed_md5 = None + self.response_headers = await self.service.upload_pages( + body=chunk_data, + content_length=len(chunk_data), + transactional_content_md5=computed_md5, + range=content_range, + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options) + + if not self.parallel and self.request_options.get('modified_access_conditions'): + self.request_options['modified_access_conditions'].if_match = self.response_headers['etag'] + + async def _upload_substream_block(self, index, block_stream): + pass + + +class AppendBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method + + def __init__(self, *args, **kwargs): + super(AppendBlobChunkUploader, self).__init__(*args, **kwargs) + self.current_length = None + + async def _upload_chunk(self, chunk_offset, chunk_data): + if self.current_length is None: + self.response_headers = await self.service.append_block( + body=chunk_data, + content_length=len(chunk_data), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options) + self.current_length = int(self.response_headers['blob_append_offset']) + else: + self.request_options['append_position_access_conditions'].append_position = \ + self.current_length + chunk_offset + self.response_headers = await self.service.append_block( + body=chunk_data, + content_length=len(chunk_data), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options) + + async def _upload_substream_block(self, index, block_stream): + pass + + +class DataLakeFileChunkUploader(_ChunkUploader): # pylint: disable=abstract-method + + async def _upload_chunk(self, chunk_offset, chunk_data): + self.response_headers = await self.service.append_data( + body=chunk_data, + position=chunk_offset, + content_length=len(chunk_data), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + + if not self.parallel and self.request_options.get('modified_access_conditions'): + self.request_options['modified_access_conditions'].if_match = 
self.response_headers['etag'] + + async def _upload_substream_block(self, index, block_stream): + try: + await self.service.append_data( + body=block_stream, + position=index, + content_length=len(block_stream), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + finally: + block_stream.close() + + +class FileChunkUploader(_ChunkUploader): # pylint: disable=abstract-method + + async def _upload_chunk(self, chunk_offset, chunk_data): + length = len(chunk_data) + chunk_end = chunk_offset + length - 1 + response = await self.service.upload_range( + chunk_data, + chunk_offset, + length, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + range_id = 'bytes={0}-{1}'.format(chunk_offset, chunk_end) + return range_id, response + + # TODO: Implement this method. + async def _upload_substream_block(self, index, block_stream): + pass diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared_access_signature.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared_access_signature.py new file mode 100644 index 00000000000..e3f1b248250 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_shared_access_signature.py @@ -0,0 +1,609 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +from typing import ( # pylint: disable=unused-import + Union, Optional, Any, TYPE_CHECKING +) + +from ._shared import sign_string, url_quote +from ._shared.constants import X_MS_VERSION +from ._shared.models import Services, UserDelegationKey +from ._shared.shared_access_signature import SharedAccessSignature, _SharedAccessHelper, \ + QueryStringConstants + +if TYPE_CHECKING: + from datetime import datetime + from ..blob import ( + ResourceTypes, + AccountSasPermissions, + ContainerSasPermissions, + BlobSasPermissions + ) + + +class BlobQueryStringConstants(object): + SIGNED_TIMESTAMP = 'snapshot' + + +class BlobSharedAccessSignature(SharedAccessSignature): + ''' + Provides a factory for creating blob and container access + signature tokens with a common account name and account key. Users can either + use the factory or can construct the appropriate service and use the + generate_*_shared_access_signature method directly. + ''' + + def __init__(self, account_name, account_key=None, user_delegation_key=None): + ''' + :param str account_name: + The storage account name used to generate the shared access signatures. + :param str account_key: + The access key to generate the shares access signatures. + :param ~azure.storage.blob.models.UserDelegationKey user_delegation_key: + Instead of an account key, the user could pass in a user delegation key. + A user delegation key can be obtained from the service by authenticating with an AAD identity; + this can be accomplished by calling get_user_delegation_key on any Blob service object. 
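+
+        A minimal, illustrative sketch (the account, key, and container names below
+        are placeholders, not values taken from this codebase)::
+
+            from datetime import datetime, timedelta
+            sas = BlobSharedAccessSignature("<account-name>", account_key="<account-key>")
+            token = sas.generate_container(
+                "<container-name>", permission="r",
+                expiry=datetime.utcnow() + timedelta(hours=1))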
+ ''' + super(BlobSharedAccessSignature, self).__init__(account_name, account_key, x_ms_version=X_MS_VERSION) + self.user_delegation_key = user_delegation_key + + def generate_blob(self, container_name, blob_name, snapshot=None, version_id=None, permission=None, + expiry=None, start=None, policy_id=None, ip=None, protocol=None, + cache_control=None, content_disposition=None, + content_encoding=None, content_language=None, + content_type=None, **kwargs): + ''' + Generates a shared access signature for the blob or one of its snapshots. + Use the returned signature with the sas_token parameter of any BlobService. + + :param str container_name: + Name of container. + :param str blob_name: + Name of blob. + :param str snapshot: + The snapshot parameter is an opaque DateTime value that, + when present, specifies the blob snapshot to grant permission. + :param permission: + The permissions associated with the shared access signature. The + user is restricted to operations allowed by the permissions. + Permissions must be ordered racwdxytmei. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has been + specified in an associated stored access policy. + :type permission: str or BlobSasPermissions + :param expiry: + The time at which the shared access signature becomes invalid. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has + been specified in an associated stored access policy. Azure will always + convert values to UTC. If a date is passed in without timezone info, it + is assumed to be UTC. + :type expiry: datetime or str + :param start: + The time at which the shared access signature becomes valid. If + omitted, start time for this call is assumed to be the time when the + storage service receives the request. Azure will always convert values + to UTC. If a date is passed in without timezone info, it is assumed to + be UTC. + :type start: datetime or str + :param str policy_id: + A unique value up to 64 characters in length that correlates to a + stored access policy. To create a stored access policy, use + set_blob_service_properties. + :param str ip: + Specifies an IP address or a range of IP addresses from which to accept requests. + If the IP address from which the request originates does not match the IP address + or address range specified on the SAS token, the request is not authenticated. + For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS + restricts the request to those IP addresses. + :param str protocol: + Specifies the protocol permitted for a request made. The default value + is https,http. See :class:`~azure.storage.common.models.Protocol` for possible values. + :param str cache_control: + Response header value for Cache-Control when resource is accessed + using this shared access signature. + :param str content_disposition: + Response header value for Content-Disposition when resource is accessed + using this shared access signature. + :param str content_encoding: + Response header value for Content-Encoding when resource is accessed + using this shared access signature. + :param str content_language: + Response header value for Content-Language when resource is accessed + using this shared access signature. + :param str content_type: + Response header value for Content-Type when resource is accessed + using this shared access signature. 
+ ''' + resource_path = container_name + '/' + blob_name + + sas = _BlobSharedAccessHelper() + sas.add_base(permission, expiry, start, ip, protocol, self.x_ms_version) + sas.add_id(policy_id) + + resource = 'bs' if snapshot else 'b' + resource = 'bv' if version_id else resource + resource = 'd' if kwargs.pop("is_directory", None) else resource + sas.add_resource(resource) + + sas.add_timestamp(snapshot or version_id) + sas.add_override_response_headers(cache_control, content_disposition, + content_encoding, content_language, + content_type) + sas.add_encryption_scope(**kwargs) + sas.add_info_for_hns_account(**kwargs) + sas.add_resource_signature(self.account_name, self.account_key, resource_path, + user_delegation_key=self.user_delegation_key) + + return sas.get_token() + + def generate_container(self, container_name, permission=None, expiry=None, + start=None, policy_id=None, ip=None, protocol=None, + cache_control=None, content_disposition=None, + content_encoding=None, content_language=None, + content_type=None, **kwargs): + ''' + Generates a shared access signature for the container. + Use the returned signature with the sas_token parameter of any BlobService. + + :param str container_name: + Name of container. + :param permission: + The permissions associated with the shared access signature. The + user is restricted to operations allowed by the permissions. + Permissions must be ordered racwdxyltfmei. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has been + specified in an associated stored access policy. + :type permission: str or ContainerSasPermissions + :param expiry: + The time at which the shared access signature becomes invalid. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has + been specified in an associated stored access policy. Azure will always + convert values to UTC. If a date is passed in without timezone info, it + is assumed to be UTC. + :type expiry: datetime or str + :param start: + The time at which the shared access signature becomes valid. If + omitted, start time for this call is assumed to be the time when the + storage service receives the request. Azure will always convert values + to UTC. If a date is passed in without timezone info, it is assumed to + be UTC. + :type start: datetime or str + :param str policy_id: + A unique value up to 64 characters in length that correlates to a + stored access policy. To create a stored access policy, use + set_blob_service_properties. + :param str ip: + Specifies an IP address or a range of IP addresses from which to accept requests. + If the IP address from which the request originates does not match the IP address + or address range specified on the SAS token, the request is not authenticated. + For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS + restricts the request to those IP addresses. + :param str protocol: + Specifies the protocol permitted for a request made. The default value + is https,http. See :class:`~azure.storage.common.models.Protocol` for possible values. + :param str cache_control: + Response header value for Cache-Control when resource is accessed + using this shared access signature. + :param str content_disposition: + Response header value for Content-Disposition when resource is accessed + using this shared access signature. 
+ :param str content_encoding: + Response header value for Content-Encoding when resource is accessed + using this shared access signature. + :param str content_language: + Response header value for Content-Language when resource is accessed + using this shared access signature. + :param str content_type: + Response header value for Content-Type when resource is accessed + using this shared access signature. + ''' + sas = _BlobSharedAccessHelper() + sas.add_base(permission, expiry, start, ip, protocol, self.x_ms_version) + sas.add_id(policy_id) + sas.add_resource('c') + sas.add_override_response_headers(cache_control, content_disposition, + content_encoding, content_language, + content_type) + sas.add_encryption_scope(**kwargs) + sas.add_info_for_hns_account(**kwargs) + sas.add_resource_signature(self.account_name, self.account_key, container_name, + user_delegation_key=self.user_delegation_key) + return sas.get_token() + + +class _BlobSharedAccessHelper(_SharedAccessHelper): + + def add_timestamp(self, timestamp): + self._add_query(BlobQueryStringConstants.SIGNED_TIMESTAMP, timestamp) + + def add_info_for_hns_account(self, **kwargs): + self._add_query(QueryStringConstants.SIGNED_DIRECTORY_DEPTH, kwargs.pop('sdd', None)) + self._add_query(QueryStringConstants.SIGNED_AUTHORIZED_OID, kwargs.pop('preauthorized_agent_object_id', None)) + self._add_query(QueryStringConstants.SIGNED_UNAUTHORIZED_OID, kwargs.pop('agent_object_id', None)) + self._add_query(QueryStringConstants.SIGNED_CORRELATION_ID, kwargs.pop('correlation_id', None)) + + def get_value_to_append(self, query): + return_value = self.query_dict.get(query) or '' + return return_value + '\n' + + def add_resource_signature(self, account_name, account_key, path, user_delegation_key=None): + # pylint: disable = no-member + if path[0] != '/': + path = '/' + path + + canonicalized_resource = '/blob/' + account_name + path + '\n' + + # Form the string to sign from shared_access_policy and canonicalized + # resource. The order of values is important. 
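+        # Each get_value_to_append call yields the stored query value (or '') plus a
+        # newline, so the concatenation below reproduces the newline-delimited
+        # string-to-sign layout; the trailing newline is stripped before signing.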
+ string_to_sign = \ + (self.get_value_to_append(QueryStringConstants.SIGNED_PERMISSION) + + self.get_value_to_append(QueryStringConstants.SIGNED_START) + + self.get_value_to_append(QueryStringConstants.SIGNED_EXPIRY) + + canonicalized_resource) + + if user_delegation_key is not None: + self._add_query(QueryStringConstants.SIGNED_OID, user_delegation_key.signed_oid) + self._add_query(QueryStringConstants.SIGNED_TID, user_delegation_key.signed_tid) + self._add_query(QueryStringConstants.SIGNED_KEY_START, user_delegation_key.signed_start) + self._add_query(QueryStringConstants.SIGNED_KEY_EXPIRY, user_delegation_key.signed_expiry) + self._add_query(QueryStringConstants.SIGNED_KEY_SERVICE, user_delegation_key.signed_service) + self._add_query(QueryStringConstants.SIGNED_KEY_VERSION, user_delegation_key.signed_version) + + string_to_sign += \ + (self.get_value_to_append(QueryStringConstants.SIGNED_OID) + + self.get_value_to_append(QueryStringConstants.SIGNED_TID) + + self.get_value_to_append(QueryStringConstants.SIGNED_KEY_START) + + self.get_value_to_append(QueryStringConstants.SIGNED_KEY_EXPIRY) + + self.get_value_to_append(QueryStringConstants.SIGNED_KEY_SERVICE) + + self.get_value_to_append(QueryStringConstants.SIGNED_KEY_VERSION) + + self.get_value_to_append(QueryStringConstants.SIGNED_AUTHORIZED_OID) + + self.get_value_to_append(QueryStringConstants.SIGNED_UNAUTHORIZED_OID) + + self.get_value_to_append(QueryStringConstants.SIGNED_CORRELATION_ID)) + else: + string_to_sign += self.get_value_to_append(QueryStringConstants.SIGNED_IDENTIFIER) + + string_to_sign += \ + (self.get_value_to_append(QueryStringConstants.SIGNED_IP) + + self.get_value_to_append(QueryStringConstants.SIGNED_PROTOCOL) + + self.get_value_to_append(QueryStringConstants.SIGNED_VERSION) + + self.get_value_to_append(QueryStringConstants.SIGNED_RESOURCE) + + self.get_value_to_append(BlobQueryStringConstants.SIGNED_TIMESTAMP) + + self.get_value_to_append(QueryStringConstants.SIGNED_ENCRYPTION_SCOPE) + + self.get_value_to_append(QueryStringConstants.SIGNED_CACHE_CONTROL) + + self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_DISPOSITION) + + self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_ENCODING) + + self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_LANGUAGE) + + self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_TYPE)) + + # remove the trailing newline + if string_to_sign[-1] == '\n': + string_to_sign = string_to_sign[:-1] + + self._add_query(QueryStringConstants.SIGNED_SIGNATURE, + sign_string(account_key if user_delegation_key is None else user_delegation_key.value, + string_to_sign)) + + def get_token(self): + # a conscious decision was made to exclude the timestamp in the generated token + # this is to avoid having two snapshot ids in the query parameters when the user appends the snapshot timestamp + exclude = [BlobQueryStringConstants.SIGNED_TIMESTAMP] + return '&'.join(['{0}={1}'.format(n, url_quote(v)) + for n, v in self.query_dict.items() if v is not None and n not in exclude]) + + +def generate_account_sas( + account_name, # type: str + account_key, # type: str + resource_types, # type: Union[ResourceTypes, str] + permission, # type: Union[AccountSasPermissions, str] + expiry, # type: Optional[Union[datetime, str]] + start=None, # type: Optional[Union[datetime, str]] + ip=None, # type: Optional[str] + **kwargs # type: Any + ): # type: (...) -> str + """Generates a shared access signature for the blob service. 
+ + Use the returned signature with the credential parameter of any BlobServiceClient, + ContainerClient or BlobClient. + + :param str account_name: + The storage account name used to generate the shared access signature. + :param str account_key: + The account key, also called shared key or access key, to generate the shared access signature. + :param resource_types: + Specifies the resource types that are accessible with the account SAS. + :type resource_types: str or ~azure.storage.blob.ResourceTypes + :param permission: + The permissions associated with the shared access signature. The + user is restricted to operations allowed by the permissions. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has been + specified in an associated stored access policy. + :type permission: str or ~azure.storage.blob.AccountSasPermissions + :param expiry: + The time at which the shared access signature becomes invalid. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has + been specified in an associated stored access policy. Azure will always + convert values to UTC. If a date is passed in without timezone info, it + is assumed to be UTC. + :type expiry: ~datetime.datetime or str + :param start: + The time at which the shared access signature becomes valid. If + omitted, start time for this call is assumed to be the time when the + storage service receives the request. Azure will always convert values + to UTC. If a date is passed in without timezone info, it is assumed to + be UTC. + :type start: ~datetime.datetime or str + :param str ip: + Specifies an IP address or a range of IP addresses from which to accept requests. + If the IP address from which the request originates does not match the IP address + or address range specified on the SAS token, the request is not authenticated. + For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS + restricts the request to those IP addresses. + :keyword str protocol: + Specifies the protocol permitted for a request made. The default value is https. + :keyword str encryption_scope: + Specifies the encryption scope for a request made so that all write operations will be service encrypted. + :return: A Shared Access Signature (sas) token. + :rtype: str + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_authentication.py + :start-after: [START create_sas_token] + :end-before: [END create_sas_token] + :language: python + :dedent: 8 + :caption: Generating a shared access signature. + """ + sas = SharedAccessSignature(account_name, account_key) + return sas.generate_account( + services=Services(blob=True), + resource_types=resource_types, + permission=permission, + expiry=expiry, + start=start, + ip=ip, + **kwargs + ) # type: ignore + + +def generate_container_sas( + account_name, # type: str + container_name, # type: str + account_key=None, # type: Optional[str] + user_delegation_key=None, # type: Optional[UserDelegationKey] + permission=None, # type: Optional[Union[ContainerSasPermissions, str]] + expiry=None, # type: Optional[Union[datetime, str]] + start=None, # type: Optional[Union[datetime, str]] + policy_id=None, # type: Optional[str] + ip=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Any + """Generates a shared access signature for a container. 
+ + Use the returned signature with the credential parameter of any BlobServiceClient, + ContainerClient or BlobClient. + + :param str account_name: + The storage account name used to generate the shared access signature. + :param str container_name: + The name of the container. + :param str account_key: + The account key, also called shared key or access key, to generate the shared access signature. + Either `account_key` or `user_delegation_key` must be specified. + :param ~azure.storage.blob.UserDelegationKey user_delegation_key: + Instead of an account shared key, the user could pass in a user delegation key. + A user delegation key can be obtained from the service by authenticating with an AAD identity; + this can be accomplished by calling :func:`~azure.storage.blob.BlobServiceClient.get_user_delegation_key`. + When present, the SAS is signed with the user delegation key instead. + :param permission: + The permissions associated with the shared access signature. The + user is restricted to operations allowed by the permissions. + Permissions must be ordered racwdxyltfmei. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has been + specified in an associated stored access policy. + :type permission: str or ~azure.storage.blob.ContainerSasPermissions + :param expiry: + The time at which the shared access signature becomes invalid. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has + been specified in an associated stored access policy. Azure will always + convert values to UTC. If a date is passed in without timezone info, it + is assumed to be UTC. + :type expiry: ~datetime.datetime or str + :param start: + The time at which the shared access signature becomes valid. If + omitted, start time for this call is assumed to be the time when the + storage service receives the request. Azure will always convert values + to UTC. If a date is passed in without timezone info, it is assumed to + be UTC. + :type start: ~datetime.datetime or str + :param str policy_id: + A unique value up to 64 characters in length that correlates to a + stored access policy. To create a stored access policy, use + :func:`~azure.storage.blob.ContainerClient.set_container_access_policy`. + :param str ip: + Specifies an IP address or a range of IP addresses from which to accept requests. + If the IP address from which the request originates does not match the IP address + or address range specified on the SAS token, the request is not authenticated. + For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS + restricts the request to those IP addresses. + :keyword str protocol: + Specifies the protocol permitted for a request made. The default value is https. + :keyword str cache_control: + Response header value for Cache-Control when resource is accessed + using this shared access signature. + :keyword str content_disposition: + Response header value for Content-Disposition when resource is accessed + using this shared access signature. + :keyword str content_encoding: + Response header value for Content-Encoding when resource is accessed + using this shared access signature. + :keyword str content_language: + Response header value for Content-Language when resource is accessed + using this shared access signature. 
+ :keyword str content_type: + Response header value for Content-Type when resource is accessed + using this shared access signature. + :keyword str encryption_scope: + Specifies the encryption scope for a request made so that all write operations will be service encrypted. + :return: A Shared Access Signature (sas) token. + :rtype: str + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START generate_sas_token] + :end-before: [END generate_sas_token] + :language: python + :dedent: 12 + :caption: Generating a sas token. + """ + if not user_delegation_key and not account_key: + raise ValueError("Either user_delegation_key or account_key must be provided.") + if isinstance(account_key, UserDelegationKey): + user_delegation_key = account_key + if user_delegation_key: + sas = BlobSharedAccessSignature(account_name, user_delegation_key=user_delegation_key) + else: + sas = BlobSharedAccessSignature(account_name, account_key=account_key) + return sas.generate_container( + container_name, + permission=permission, + expiry=expiry, + start=start, + policy_id=policy_id, + ip=ip, + **kwargs + ) + + +def generate_blob_sas( + account_name, # type: str + container_name, # type: str + blob_name, # type: str + snapshot=None, # type: Optional[str] + account_key=None, # type: Optional[str] + user_delegation_key=None, # type: Optional[UserDelegationKey] + permission=None, # type: Optional[Union[BlobSasPermissions, str]] + expiry=None, # type: Optional[Union[datetime, str]] + start=None, # type: Optional[Union[datetime, str]] + policy_id=None, # type: Optional[str] + ip=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Any + """Generates a shared access signature for a blob. + + Use the returned signature with the credential parameter of any BlobServiceClient, + ContainerClient or BlobClient. + + :param str account_name: + The storage account name used to generate the shared access signature. + :param str container_name: + The name of the container. + :param str blob_name: + The name of the blob. + :param str snapshot: + An optional blob snapshot ID. + :param str account_key: + The account key, also called shared key or access key, to generate the shared access signature. + Either `account_key` or `user_delegation_key` must be specified. + :param ~azure.storage.blob.UserDelegationKey user_delegation_key: + Instead of an account shared key, the user could pass in a user delegation key. + A user delegation key can be obtained from the service by authenticating with an AAD identity; + this can be accomplished by calling :func:`~azure.storage.blob.BlobServiceClient.get_user_delegation_key`. + When present, the SAS is signed with the user delegation key instead. + :param permission: + The permissions associated with the shared access signature. The + user is restricted to operations allowed by the permissions. + Permissions must be ordered racwdxytmei. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has been + specified in an associated stored access policy. + :type permission: str or ~azure.storage.blob.BlobSasPermissions + :param expiry: + The time at which the shared access signature becomes invalid. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has + been specified in an associated stored access policy. Azure will always + convert values to UTC. 
If a date is passed in without timezone info, it + is assumed to be UTC. + :type expiry: ~datetime.datetime or str + :param start: + The time at which the shared access signature becomes valid. If + omitted, start time for this call is assumed to be the time when the + storage service receives the request. Azure will always convert values + to UTC. If a date is passed in without timezone info, it is assumed to + be UTC. + :type start: ~datetime.datetime or str + :param str policy_id: + A unique value up to 64 characters in length that correlates to a + stored access policy. To create a stored access policy, use + :func:`~azure.storage.blob.ContainerClient.set_container_access_policy()`. + :param str ip: + Specifies an IP address or a range of IP addresses from which to accept requests. + If the IP address from which the request originates does not match the IP address + or address range specified on the SAS token, the request is not authenticated. + For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS + restricts the request to those IP addresses. + :keyword str version_id: + An optional blob version ID. This parameter is only for versioning enabled account + + .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. + :keyword str protocol: + Specifies the protocol permitted for a request made. The default value is https. + :keyword str cache_control: + Response header value for Cache-Control when resource is accessed + using this shared access signature. + :keyword str content_disposition: + Response header value for Content-Disposition when resource is accessed + using this shared access signature. + :keyword str content_encoding: + Response header value for Content-Encoding when resource is accessed + using this shared access signature. + :keyword str content_language: + Response header value for Content-Language when resource is accessed + using this shared access signature. + :keyword str content_type: + Response header value for Content-Type when resource is accessed + using this shared access signature. + :keyword str encryption_scope: + Specifies the encryption scope for a request made so that all write operations will be service encrypted. + :return: A Shared Access Signature (sas) token. 
+ :rtype: str + """ + if not user_delegation_key and not account_key: + raise ValueError("Either user_delegation_key or account_key must be provided.") + if isinstance(account_key, UserDelegationKey): + user_delegation_key = account_key + version_id = kwargs.pop('version_id', None) + if version_id and snapshot: + raise ValueError("snapshot and version_id cannot be set at the same time.") + if user_delegation_key: + sas = BlobSharedAccessSignature(account_name, user_delegation_key=user_delegation_key) + else: + sas = BlobSharedAccessSignature(account_name, account_key=account_key) + return sas.generate_blob( + container_name, + blob_name, + snapshot=snapshot, + version_id=version_id, + permission=permission, + expiry=expiry, + start=start, + policy_id=policy_id, + ip=ip, + **kwargs + ) diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_upload_helpers.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_upload_helpers.py new file mode 100644 index 00000000000..30d5bfae926 --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_upload_helpers.py @@ -0,0 +1,306 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# pylint: disable=no-self-use + +from io import SEEK_SET, UnsupportedOperation +from typing import Optional, Union, Any, TypeVar, TYPE_CHECKING # pylint: disable=unused-import + +import six +from azure.core.exceptions import ResourceExistsError, ResourceModifiedError, HttpResponseError + +from ._shared.response_handlers import ( + process_storage_error, + return_response_headers) +from ._shared.models import StorageErrorCode +from ._shared.uploads import ( + upload_data_chunks, + upload_substream_blocks, + BlockBlobChunkUploader, + PageBlobChunkUploader, + AppendBlobChunkUploader) +from ._shared.encryption import generate_blob_encryption_data, encrypt_blob +from ._generated.models import ( + BlockLookupList, + AppendPositionAccessConditions, + ModifiedAccessConditions, +) + +if TYPE_CHECKING: + from datetime import datetime # pylint: disable=unused-import + BlobLeaseClient = TypeVar("BlobLeaseClient") + +_LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE = 4 * 1024 * 1024 +_ERROR_VALUE_SHOULD_BE_SEEKABLE_STREAM = '{0} should be a seekable file-like/io.IOBase type stream object.' 
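+
+# The helpers below choose an upload path per blob type: a single-shot upload when the
+# (possibly encryption-padded) length fits within max_single_put_size, otherwise chunked
+# or substream-block uploads. _convert_mod_error rewrites the conditional-header failure
+# seen when overwrite=False and the blob already exists into a ResourceExistsError.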
+ + +def _convert_mod_error(error): + message = error.message.replace( + "The condition specified using HTTP conditional header(s) is not met.", + "The specified blob already exists.") + message = message.replace("ConditionNotMet", "BlobAlreadyExists") + overwrite_error = ResourceExistsError( + message=message, + response=error.response, + error=error) + overwrite_error.error_code = StorageErrorCode.blob_already_exists + raise overwrite_error + + +def _any_conditions(modified_access_conditions=None, **kwargs): # pylint: disable=unused-argument + return any([ + modified_access_conditions.if_modified_since, + modified_access_conditions.if_unmodified_since, + modified_access_conditions.if_none_match, + modified_access_conditions.if_match + ]) + + +def upload_block_blob( # pylint: disable=too-many-locals + client=None, + data=None, + stream=None, + length=None, + overwrite=None, + headers=None, + validate_content=None, + max_concurrency=None, + blob_settings=None, + encryption_options=None, + **kwargs): + try: + if not overwrite and not _any_conditions(**kwargs): + kwargs['modified_access_conditions'].if_none_match = '*' + adjusted_count = length + if (encryption_options.get('key') is not None) and (adjusted_count is not None): + adjusted_count += (16 - (length % 16)) + blob_headers = kwargs.pop('blob_headers', None) + tier = kwargs.pop('standard_blob_tier', None) + blob_tags_string = kwargs.pop('blob_tags_string', None) + + immutability_policy = kwargs.pop('immutability_policy', None) + immutability_policy_expiry = None if immutability_policy is None else immutability_policy.expiry_time + immutability_policy_mode = None if immutability_policy is None else immutability_policy.policy_mode + legal_hold = kwargs.pop('legal_hold', None) + + # Do single put if the size is smaller than or equal config.max_single_put_size + if adjusted_count is not None and (adjusted_count <= blob_settings.max_single_put_size): + try: + data = data.read(length) + if not isinstance(data, six.binary_type): + raise TypeError('Blob data should be of type bytes.') + except AttributeError: + pass + if encryption_options.get('key'): + encryption_data, data = encrypt_blob(data, encryption_options['key']) + headers['x-ms-meta-encryptiondata'] = encryption_data + return client.upload( + body=data, + content_length=adjusted_count, + blob_http_headers=blob_headers, + headers=headers, + cls=return_response_headers, + validate_content=validate_content, + data_stream_total=adjusted_count, + upload_stream_current=0, + tier=tier.value if tier else None, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + **kwargs) + + use_original_upload_path = blob_settings.use_byte_buffer or \ + validate_content or encryption_options.get('required') or \ + blob_settings.max_block_size < blob_settings.min_large_block_upload_threshold or \ + hasattr(stream, 'seekable') and not stream.seekable() or \ + not hasattr(stream, 'seek') or not hasattr(stream, 'tell') + + if use_original_upload_path: + if encryption_options.get('key'): + cek, iv, encryption_data = generate_blob_encryption_data(encryption_options['key']) + headers['x-ms-meta-encryptiondata'] = encryption_data + encryption_options['cek'] = cek + encryption_options['vector'] = iv + block_ids = upload_data_chunks( + service=client, + uploader_class=BlockBlobChunkUploader, + total_size=length, + chunk_size=blob_settings.max_block_size, + max_concurrency=max_concurrency, + 
stream=stream, + validate_content=validate_content, + encryption_options=encryption_options, + headers=headers, + **kwargs + ) + else: + block_ids = upload_substream_blocks( + service=client, + uploader_class=BlockBlobChunkUploader, + total_size=length, + chunk_size=blob_settings.max_block_size, + max_concurrency=max_concurrency, + stream=stream, + validate_content=validate_content, + headers=headers, + **kwargs + ) + + block_lookup = BlockLookupList(committed=[], uncommitted=[], latest=[]) + block_lookup.latest = block_ids + return client.commit_block_list( + block_lookup, + blob_http_headers=blob_headers, + cls=return_response_headers, + validate_content=validate_content, + headers=headers, + tier=tier.value if tier else None, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + **kwargs) + except HttpResponseError as error: + try: + process_storage_error(error) + except ResourceModifiedError as mod_error: + if not overwrite: + _convert_mod_error(mod_error) + raise + + +def upload_page_blob( + client=None, + stream=None, + length=None, + overwrite=None, + headers=None, + validate_content=None, + max_concurrency=None, + blob_settings=None, + encryption_options=None, + **kwargs): + try: + if not overwrite and not _any_conditions(**kwargs): + kwargs['modified_access_conditions'].if_none_match = '*' + if length is None or length < 0: + raise ValueError("A content length must be specified for a Page Blob.") + if length % 512 != 0: + raise ValueError("Invalid page blob size: {0}. " + "The size must be aligned to a 512-byte boundary.".format(length)) + if kwargs.get('premium_page_blob_tier'): + premium_page_blob_tier = kwargs.pop('premium_page_blob_tier') + try: + headers['x-ms-access-tier'] = premium_page_blob_tier.value + except AttributeError: + headers['x-ms-access-tier'] = premium_page_blob_tier + if encryption_options and encryption_options.get('data'): + headers['x-ms-meta-encryptiondata'] = encryption_options['data'] + blob_tags_string = kwargs.pop('blob_tags_string', None) + + response = client.create( + content_length=0, + blob_content_length=length, + blob_sequence_number=None, + blob_http_headers=kwargs.pop('blob_headers', None), + blob_tags_string=blob_tags_string, + cls=return_response_headers, + headers=headers, + **kwargs) + if length == 0: + return response + + kwargs['modified_access_conditions'] = ModifiedAccessConditions(if_match=response['etag']) + return upload_data_chunks( + service=client, + uploader_class=PageBlobChunkUploader, + total_size=length, + chunk_size=blob_settings.max_page_size, + stream=stream, + max_concurrency=max_concurrency, + validate_content=validate_content, + encryption_options=encryption_options, + headers=headers, + **kwargs) + + except HttpResponseError as error: + try: + process_storage_error(error) + except ResourceModifiedError as mod_error: + if not overwrite: + _convert_mod_error(mod_error) + raise + + +def upload_append_blob( # pylint: disable=unused-argument + client=None, + stream=None, + length=None, + overwrite=None, + headers=None, + validate_content=None, + max_concurrency=None, + blob_settings=None, + encryption_options=None, + **kwargs): + try: + if length == 0: + return {} + blob_headers = kwargs.pop('blob_headers', None) + append_conditions = AppendPositionAccessConditions( + max_size=kwargs.pop('maxsize_condition', None), + append_position=None) + blob_tags_string = kwargs.pop('blob_tags_string', None) + + try: 
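+            # First attempt: create the blob (only when overwriting) and append the
+            # chunks. A 404 means the blob does not exist yet; the except branch below
+            # rewinds the stream, creates the blob, and retries the chunked append.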
+ if overwrite: + client.create( + content_length=0, + blob_http_headers=blob_headers, + headers=headers, + blob_tags_string=blob_tags_string, + **kwargs) + return upload_data_chunks( + service=client, + uploader_class=AppendBlobChunkUploader, + total_size=length, + chunk_size=blob_settings.max_block_size, + stream=stream, + max_concurrency=max_concurrency, + validate_content=validate_content, + append_position_access_conditions=append_conditions, + headers=headers, + **kwargs) + except HttpResponseError as error: + if error.response.status_code != 404: + raise + # rewind the request body if it is a stream + if hasattr(stream, 'read'): + try: + # attempt to rewind the body to the initial position + stream.seek(0, SEEK_SET) + except UnsupportedOperation: + # if body is not seekable, then retry would not work + raise error + client.create( + content_length=0, + blob_http_headers=blob_headers, + headers=headers, + blob_tags_string=blob_tags_string, + **kwargs) + return upload_data_chunks( + service=client, + uploader_class=AppendBlobChunkUploader, + total_size=length, + chunk_size=blob_settings.max_block_size, + stream=stream, + max_concurrency=max_concurrency, + validate_content=validate_content, + append_position_access_conditions=append_conditions, + headers=headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) diff --git a/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_version.py b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_version.py new file mode 100644 index 00000000000..b08ef47e08d --- /dev/null +++ b/src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2021_04_10/_version.py @@ -0,0 +1,7 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +VERSION = "12.11.0" diff --git a/src/storage-blob-preview/setup.py b/src/storage-blob-preview/setup.py index 13a3f73765b..3caf7696e67 100644 --- a/src/storage-blob-preview/setup.py +++ b/src/storage-blob-preview/setup.py @@ -16,7 +16,7 @@ # TODO: Confirm this is the right version number you want and it matches your # HISTORY.rst entry. 
-VERSION = '0.6.1' +VERSION = '0.6.2' # The full list of classifiers is available at # https://pypi.python.org/pypi?%3Aaction=list_classifiers From 682012952026ca094e374cb2c35af39e7c7f27da Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Wed, 2 Nov 2022 01:39:12 +0000 Subject: [PATCH 57/85] [Release] Update index.json for extension [ storage-blob-preview ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=12609&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/979eede8cd20964ccfcdd7569f520ba1b7c8dbfb --- src/index.json | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/src/index.json b/src/index.json index 48e83d7cdc9..f01c8ae56fb 100644 --- a/src/index.json +++ b/src/index.json @@ -39722,6 +39722,49 @@ "version": "0.6.1" }, "sha256Digest": "1f2362df611cb017b516d5b9a4389f194066dcf308e8cbc52ea7868d0c5f3f80" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/storage_blob_preview-0.6.2-py2.py3-none-any.whl", + "filename": "storage_blob_preview-0.6.2-py2.py3-none-any.whl", + "metadata": { + "azext.isPreview": true, + "azext.minCliCoreVersion": "2.27.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/storage-blob-preview" + } + } + }, + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "storage-blob-preview", + "summary": "Microsoft Azure Command-Line Tools Storage-blob-preview Extension", + "version": "0.6.2" + }, + "sha256Digest": "e9bae6062186e6241fc77ab2162767eab4d4b593266faa8f81a6ff8a8917625e" } ], "storage-preview": [ From a190eaee6cbbbc4a7b419b7f779301ee560455c2 Mon Sep 17 00:00:00 2001 From: Warren Jones Date: Tue, 1 Nov 2022 18:55:24 -0700 Subject: [PATCH 58/85] [Quantum] Update QDK version in Release History (#5509) * Update QDK version in Release History * Add date of history file update --- src/quantum/HISTORY.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/quantum/HISTORY.rst b/src/quantum/HISTORY.rst index 3ef0d9a27aa..d0aa5beaaf5 100644 --- a/src/quantum/HISTORY.rst +++ b/src/quantum/HISTORY.rst @@ -5,7 +5,8 @@ Release History 0.17.0 ++++++ -* [2022-10-14] Version intended to work with QDK version 0.26.233415 +* [2022-11-02] Update default QDK version to latest 0.27.238334 - See https://learn.microsoft.com/azure/quantum/release-notes. +* [2022-10-14] [Edited] The 0.17.0 release was originally intended to work with QDK version 0.26.233415, however additional functionality has been added to QDK version to 0.27.238334 that can be accessed by CLI extension 0.17.0. * The `az quantum` reference documentation now indicates which command parameters are required, and missing-parameter error messages are more informative. 
See https://learn.microsoft.com/cli/azure/quantum * You can submit jobs to the microsoft.simulator.resources-estimator target using the CLI. From b714fbe84c8db92d46c19756f79b5fa62c8d60f8 Mon Sep 17 00:00:00 2001 From: Delora Bradish Date: Wed, 2 Nov 2022 03:18:08 -0400 Subject: [PATCH 59/85] metadata update to replace https://github.com/MicrosoftDocs/azure-docs-cli/pull/3367 (#5499) --- scripts/ci/avail-ext-doc/list-template.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/ci/avail-ext-doc/list-template.md b/scripts/ci/avail-ext-doc/list-template.md index 2ea80d1cd7c..2738e5d4576 100644 --- a/scripts/ci/avail-ext-doc/list-template.md +++ b/scripts/ci/avail-ext-doc/list-template.md @@ -6,9 +6,9 @@ ms.author: jianzen manager: yonzhan,yungezz ms.date: {{ date }} ms.topic: article -ms.prod: azure -ms.technology: azure-cli +ms.service: azure-cli ms.devlang: azure-cli +ms.tool: azure-cli ms.custom: devx-track-azurecli keywords: az extension, azure cli extensions, azure extensions --- From 1279c3b82dc579d37cfbd28d856892119185814c Mon Sep 17 00:00:00 2001 From: Nitesh Vijay Date: Thu, 3 Nov 2022 21:04:47 +0530 Subject: [PATCH 60/85] [CosmosDb] Add mongo collection copy command (#5506) * Remove preview tag * Add mongo collection copy * Rename param * Add extra space * Add new version Co-authored-by: Nitesh Vijay --- src/cosmosdb-preview/HISTORY.rst | 4 + .../azext_cosmosdb_preview/_help.py | 17 ++- .../azext_cosmosdb_preview/_params.py | 3 + .../azext_cosmosdb_preview/actions.py | 40 +++++++ .../azext_cosmosdb_preview/custom.py | 50 ++++++--- .../_cassandra_clusters_operations.py | 41 +++++-- .../_cassandra_data_centers_operations.py | 15 ++- .../_cassandra_resources_operations.py | 41 +++++-- .../aio/operations/_collection_operations.py | 41 +++++-- .../_collection_partition_operations.py | 28 +++-- ..._collection_partition_region_operations.py | 15 ++- .../_collection_region_operations.py | 15 ++- .../_data_transfer_jobs_operations.py | 15 ++- .../_database_account_region_operations.py | 15 ++- .../_database_accounts_operations.py | 67 ++++++++--- .../aio/operations/_database_operations.py | 41 +++++-- .../operations/_graph_resources_operations.py | 15 ++- .../_gremlin_resources_operations.py | 28 +++-- .../aio/operations/_locations_operations.py | 15 ++- .../_mongo_db_resources_operations.py | 54 ++++++--- .../_notebook_workspaces_operations.py | 39 ++++--- .../aio/operations/_operations.py | 15 ++- .../_partition_key_range_id_operations.py | 15 ++- ...artition_key_range_id_region_operations.py | 15 ++- .../aio/operations/_percentile_operations.py | 15 ++- .../_percentile_source_target_operations.py | 15 ++- .../_percentile_target_operations.py | 15 ++- ...private_endpoint_connections_operations.py | 15 ++- .../_private_link_resources_operations.py | 15 ++- ...restorable_database_accounts_operations.py | 28 +++-- ...restorable_gremlin_databases_operations.py | 15 ++- .../_restorable_gremlin_graphs_operations.py | 15 ++- ...restorable_gremlin_resources_operations.py | 15 ++- ...storable_mongodb_collections_operations.py | 15 ++- ...restorable_mongodb_databases_operations.py | 15 ++- ...restorable_mongodb_resources_operations.py | 15 ++- .../_restorable_sql_containers_operations.py | 15 ++- .../_restorable_sql_databases_operations.py | 15 ++- .../_restorable_sql_resources_operations.py | 15 ++- .../_restorable_table_resources_operations.py | 15 ++- .../_restorable_tables_operations.py | 15 ++- .../aio/operations/_service_operations.py | 15 ++- 
.../operations/_sql_resources_operations.py | 106 +++++++++++++----- .../operations/_table_resources_operations.py | 15 ++- .../azure_mgmt_cosmosdb/models/__init__.py | 2 + .../_cosmos_db_management_client_enums.py | 1 + .../azure_mgmt_cosmosdb/models/_models_py3.py | 54 ++++++++- .../_cassandra_clusters_operations.py | 41 +++++-- .../_cassandra_data_centers_operations.py | 15 ++- .../_cassandra_resources_operations.py | 41 +++++-- .../operations/_collection_operations.py | 41 +++++-- .../_collection_partition_operations.py | 28 +++-- ..._collection_partition_region_operations.py | 15 ++- .../_collection_region_operations.py | 15 ++- .../_data_transfer_jobs_operations.py | 15 ++- .../_database_account_region_operations.py | 15 ++- .../_database_accounts_operations.py | 67 ++++++++--- .../operations/_database_operations.py | 41 +++++-- .../operations/_graph_resources_operations.py | 15 ++- .../_gremlin_resources_operations.py | 28 +++-- .../operations/_locations_operations.py | 15 ++- .../_mongo_db_resources_operations.py | 54 ++++++--- .../_notebook_workspaces_operations.py | 51 +++++---- .../operations/_operations.py | 15 ++- .../_partition_key_range_id_operations.py | 15 ++- ...artition_key_range_id_region_operations.py | 15 ++- .../operations/_percentile_operations.py | 15 ++- .../_percentile_source_target_operations.py | 15 ++- .../_percentile_target_operations.py | 15 ++- ...private_endpoint_connections_operations.py | 15 ++- .../_private_link_resources_operations.py | 15 ++- ...restorable_database_accounts_operations.py | 28 +++-- ...restorable_gremlin_databases_operations.py | 15 ++- .../_restorable_gremlin_graphs_operations.py | 15 ++- ...restorable_gremlin_resources_operations.py | 15 ++- ...storable_mongodb_collections_operations.py | 15 ++- ...restorable_mongodb_databases_operations.py | 15 ++- ...restorable_mongodb_resources_operations.py | 15 ++- .../_restorable_sql_containers_operations.py | 15 ++- .../_restorable_sql_databases_operations.py | 15 ++- .../_restorable_sql_resources_operations.py | 15 ++- .../_restorable_table_resources_operations.py | 15 ++- .../_restorable_tables_operations.py | 15 ++- .../operations/_service_operations.py | 15 ++- .../operations/_sql_resources_operations.py | 106 +++++++++++++----- .../operations/_table_resources_operations.py | 15 ++- src/cosmosdb-preview/setup.py | 2 +- 87 files changed, 1538 insertions(+), 515 deletions(-) diff --git a/src/cosmosdb-preview/HISTORY.rst b/src/cosmosdb-preview/HISTORY.rst index 59904767fd8..ae7307afad8 100644 --- a/src/cosmosdb-preview/HISTORY.rst +++ b/src/cosmosdb-preview/HISTORY.rst @@ -2,6 +2,10 @@ Release History =============== +0.21.0 +* Add support for mongo data transfer jobs. + +++++++ 0.20.0 * Add support for Continuous mode restore with user provided identity. diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/_help.py b/src/cosmosdb-preview/azext_cosmosdb_preview/_help.py index 8f145fabfd6..3b60086125e 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/_help.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/_help.py @@ -567,6 +567,18 @@ Usage: --dest-sql-container database=XX container=XX' database: Database name of CosmosDB Sql. container: Container name of CosmosDB Sql. + - name: --source-mongo + short-summary: "Source mongo collection" + long-summary: | + Usage: --source-mongo database=XX collection=XX' + database: Database name of CosmosDB Mongo. + collection: Collection name of CosmosDB Mongo. 
+ - name: --dest-mongo + short-summary: "Destination mongo collection" + long-summary: | + Usage: --dest-mongo database=XX collection=XX' + database: Database name of CosmosDB Mongo. + collection: Collection name of CosmosDB Mongo. examples: - name: Copy sql container @@ -574,7 +586,10 @@ az cosmosdb dts copy -g "rg1" --job-name "j1" --account-name "db1" --source-sql-container database=db1 container=c1 --dest-sql-container database=db2 container=c2 - name: Copy cassandra table text: |- - az cosmosdb dts copy -g "rg1" --job-name "j1" --account-name "db1" --source-cassandra-table keyspace=k1 table=t1 --dest-cassandra-table keyspace=k1 table=t1 + az cosmosdb dts copy -g "rg1" --job-name "j1" --account-name "db1" --source-cassandra-table keyspace=k1 table=t1 --dest-cassandra-table keyspace=k2 table=t2 + - name: Copy mongo collection + text: |- + az cosmosdb dts copy -g "rg1" --job-name "j1" --account-name "db1" --source-mongo database=d1 collection=c1 --dest-mongo database=d2 collection=c2 """ helps['cosmosdb dts'] = """ diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/_params.py b/src/cosmosdb-preview/azext_cosmosdb_preview/_params.py index 199904619a7..f2b3190ab4f 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/_params.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/_params.py @@ -21,6 +21,7 @@ CreateGremlinDatabaseRestoreResource, CreateTableRestoreResource, AddCassandraTableAction, + AddMongoCollectionAction, AddSqlContainerAction, CreateTargetPhysicalPartitionThroughputInfoAction, CreateSourcePhysicalPartitionThroughputInfoAction, @@ -314,8 +315,10 @@ def load_arguments(self, _): with self.argument_context('cosmosdb dts copy') as c: c.argument('job_name', job_name_type) c.argument('source_cassandra_table', nargs='+', action=AddCassandraTableAction, help='Source cassandra table') + c.argument('source_mongo', nargs='+', action=AddMongoCollectionAction, help='Source mongo collection') c.argument('source_sql_container', nargs='+', action=AddSqlContainerAction, help='Source sql container') c.argument('dest_cassandra_table', nargs='+', action=AddCassandraTableAction, help='Destination cassandra table') + c.argument('dest_mongo', nargs='+', action=AddMongoCollectionAction, help='Destination mongo collection') c.argument('dest_sql_container', nargs='+', action=AddSqlContainerAction, help='Destination sql container') c.argument('worker_count', type=int, help='Worker count') diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/actions.py b/src/cosmosdb-preview/azext_cosmosdb_preview/actions.py index e359be0e6a2..5e4a9fe1659 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/actions.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/actions.py @@ -13,6 +13,7 @@ DatabaseRestoreResource, GremlinDatabaseRestoreResource, CosmosCassandraDataTransferDataSourceSink, + CosmosMongoDataTransferDataSourceSink, CosmosSqlDataTransferDataSourceSink, PhysicalPartitionThroughputInfoResource, PhysicalPartitionId @@ -138,6 +139,45 @@ def __call__(self, parser, namespace, values, option_string=None): namespace.cassandra_table = cassandra_table +class AddMongoCollectionAction(argparse._AppendAction): + def __call__(self, parser, namespace, values, option_string=None): + if not values: + # pylint: disable=line-too-long + raise CLIError(f'usage error: {option_string} [KEY=VALUE ...]') + + database_name = None + collection_name = None + + for (k, v) in (x.split('=', 1) for x in values): + kl = k.lower() + if kl == 'database': + database_name = v + + elif kl == 'collection': + 
collection_name = v + + else: + raise CLIError( + f'Unsupported Key {k} is provided for {option_string} component. All' + ' possible keys are: database, collection' + ) + + if database_name is None: + raise CLIError(f'usage error: missing key database in {option_string} component') + + if collection_name is None: + raise CLIError(f'usage error: missing key collection in {option_string} component') + + mongo_collection = CosmosMongoDataTransferDataSourceSink(database_name=database_name, collection_name=collection_name) + + if option_string == "--source-mongo": + namespace.source_mongo = mongo_collection + elif option_string == "--dest-mongo": + namespace.dest_mongo = mongo_collection + else: + namespace.mongo_collection = mongo_collection + + class AddSqlContainerAction(argparse._AppendAction): def __call__(self, parser, namespace, values, option_string=None): if not values: diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/custom.py b/src/cosmosdb-preview/azext_cosmosdb_preview/custom.py index 2f6ddcb2e74..0ad776941cf 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/custom.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/custom.py @@ -1114,33 +1114,51 @@ def cosmosdb_data_transfer_copy_job(client, dest_cassandra_table=None, source_sql_container=None, dest_sql_container=None, + source_mongo=None, + dest_mongo=None, worker_count=0, job_name=None): - if source_cassandra_table is None and source_sql_container is None: - raise CLIError('source component ismissing') - - if source_cassandra_table is not None and source_sql_container is not None: - raise CLIError('Invalid input: multiple source components') - - if dest_cassandra_table is None and dest_sql_container is None: - raise CLIError('destination component is missing') - - if dest_cassandra_table is not None and dest_sql_container is not None: - raise CLIError('Invalid input: multiple destination components') - job_create_properties = {} + source = None if source_cassandra_table is not None: - job_create_properties['source'] = source_cassandra_table + if source is not None: + raise CLIError('Invalid input: multiple source components') + source = source_cassandra_table if source_sql_container is not None: - job_create_properties['source'] = source_sql_container + if source is not None: + raise CLIError('Invalid input: multiple source components') + source = source_sql_container + if source_mongo is not None: + if source is not None: + raise CLIError('Invalid input: multiple source components') + source = source_mongo + + if source is None: + raise CLIError('source component is missing') + job_create_properties['source'] = source + + destination = None if dest_cassandra_table is not None: - job_create_properties['destination'] = dest_cassandra_table + if destination is not None: + raise CLIError('Invalid input: multiple destination components') + destination = dest_cassandra_table if dest_sql_container is not None: - job_create_properties['destination'] = dest_sql_container + if destination is not None: + raise CLIError('Invalid input: multiple destination components') + destination = dest_sql_container + + if dest_mongo is not None: + if destination is not None: + raise CLIError('Invalid input: multiple destination components') + destination = dest_mongo + + if destination is None: + raise CLIError('destination component is missing') + job_create_properties['destination'] = destination if worker_count > 0: job_create_properties['worker_count'] = worker_count
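The custom.py hunk above collapses the pairwise source/destination checks into a single "exactly one component" selection over the cassandra, sql, and mongo arguments. A minimal standalone sketch of that selection pattern (pick_exactly_one is a hypothetical helper name, not part of the extension, and it raises ValueError rather than knack's CLIError):

def pick_exactly_one(label, **candidates):
    # Mirrors the validation above: exactly one candidate may be non-None.
    provided = [value for value in candidates.values() if value is not None]
    if not provided:
        raise ValueError(f'{label} component is missing')
    if len(provided) > 1:
        raise ValueError(f'Invalid input: multiple {label} components')
    return provided[0]

# Example: only the mongo source is supplied, so it is selected.
source = pick_exactly_one('source', cassandra=None, sql=None, mongo={'database': 'd1', 'collection': 'c1'})
print(source)  # {'database': 'd1', 'collection': 'c1'}

diff --git 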
a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_cassandra_clusters_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_cassandra_clusters_operations.py index 30f4c51a387..8404d7b5861 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_cassandra_clusters_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_cassandra_clusters_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -106,10 +106,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -184,10 +191,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -1067,10 +1081,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), 
params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_cassandra_data_centers_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_cassandra_data_centers_operations.py index 232ddb786b3..49e96a43068 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_cassandra_data_centers_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_cassandra_data_centers_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -108,10 +108,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_cassandra_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_cassandra_resources_operations.py index 9e21e6b6d9a..3cb174081eb 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_cassandra_resources_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_cassandra_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -129,10 +129,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -1177,10 +1184,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -2266,10 +2280,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_collection_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_collection_operations.py index 3310dc77f42..1d187c927ce 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_collection_operations.py +++ 
b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_collection_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -121,10 +121,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -219,10 +226,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -306,10 +320,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_collection_partition_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_collection_partition_operations.py index 232379b2a8a..0538d769126 100644 --- 
a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_collection_partition_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_collection_partition_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -117,10 +117,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -215,10 +222,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_collection_partition_region_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_collection_partition_region_operations.py index 28dd8be3340..5c043878500 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_collection_partition_region_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_collection_partition_region_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -121,10 +121,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_collection_region_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_collection_region_operations.py index 3dff52bd133..240133636fb 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_collection_region_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_collection_region_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -121,10 +121,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_data_transfer_jobs_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_data_transfer_jobs_operations.py index 3172df6e043..557772a500d 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_data_transfer_jobs_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_data_transfer_jobs_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -513,10 +513,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_database_account_region_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_database_account_region_operations.py index ba89e26df4c..2a85d6f0c4f 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_database_account_region_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_database_account_region_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -107,10 +107,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_database_accounts_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_database_accounts_operations.py index 4d3f6fa7272..388357e8471 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_database_accounts_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_database_accounts_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -942,10 +942,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -1022,10 +1029,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -2033,10 +2047,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -2119,10 +2140,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) 
_next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -2200,10 +2228,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_database_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_database_operations.py index 77841e3c683..3837389d289 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_database_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_database_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -112,10 +112,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -206,10 +213,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -290,10 +304,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_graph_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_graph_resources_operations.py index ee1bba3f5e6..bb89c18a0a7 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_graph_resources_operations.py +++ 
b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_graph_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -109,10 +109,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_gremlin_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_gremlin_resources_operations.py index 4dee97ac4e3..c62b823c7be 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_gremlin_resources_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_gremlin_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -122,10 +122,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -1170,10 +1177,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_locations_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_locations_operations.py index 838ea6b24a7..558379a22a0 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_locations_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_locations_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -91,10 +91,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_mongo_db_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_mongo_db_resources_operations.py index 2ec7a63a21f..7b74721b6ea 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_mongo_db_resources_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_mongo_db_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -135,10 +135,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -2161,10 +2168,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -3910,10 +3924,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -4409,10 +4430,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) 
_next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_notebook_workspaces_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_notebook_workspaces_operations.py index 4bfe9187f7c..6c31a0ca45e 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_notebook_workspaces_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_notebook_workspaces_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -110,10 +110,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -150,7 +157,7 @@ async def get( self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any ) -> _models.NotebookWorkspace: """Gets the notebook workspace for a Cosmos DB account. 
@@ -219,7 +226,7 @@ async def _create_or_update_initial( self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], notebook_create_update_parameters: Union[_models.NotebookWorkspaceCreateUpdateParameters, IO], **kwargs: Any ) -> _models.NotebookWorkspace: @@ -287,7 +294,7 @@ async def begin_create_or_update( self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], notebook_create_update_parameters: _models.NotebookWorkspaceCreateUpdateParameters, *, content_type: str = "application/json", @@ -329,7 +336,7 @@ async def begin_create_or_update( self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], notebook_create_update_parameters: IO, *, content_type: str = "application/json", @@ -370,7 +377,7 @@ async def begin_create_or_update( self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], notebook_create_update_parameters: Union[_models.NotebookWorkspaceCreateUpdateParameters, IO], **kwargs: Any ) -> AsyncLROPoller[_models.NotebookWorkspace]: @@ -455,7 +462,7 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any ) -> None: error_map = { @@ -506,7 +513,7 @@ async def begin_delete( self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any ) -> AsyncLROPoller[None]: """Deletes the notebook workspace for a Cosmos DB account. @@ -578,7 +585,7 @@ async def list_connection_info( self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any ) -> _models.NotebookWorkspaceConnectionInfoResult: """Retrieves the connection info for the notebook workspace. @@ -647,7 +654,7 @@ async def _regenerate_auth_token_initial( # pylint: disable=inconsistent-return self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any ) -> None: error_map = { @@ -698,7 +705,7 @@ async def begin_regenerate_auth_token( self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any ) -> AsyncLROPoller[None]: """Regenerates the auth token for the notebook workspace. 
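As a side note on the AddMongoCollectionAction introduced earlier in this patch: it follows the extension's existing key=value convention for the --source-*/--dest-* options. A minimal standalone sketch of that parsing convention, with hypothetical class and attribute names rather than the extension's actual implementation:

import argparse

class KeyValueCollectionAction(argparse.Action):
    # Hypothetical sketch of the "database=XX collection=XX" parsing convention.
    def __call__(self, parser, namespace, values, option_string=None):
        parsed = {}
        for item in values:
            key, sep, value = item.partition('=')
            if not sep or key.lower() not in ('database', 'collection'):
                raise argparse.ArgumentError(self, f'unsupported key {key}; expected database=XX collection=XX')
            parsed[key.lower()] = value
        for required in ('database', 'collection'):
            if required not in parsed:
                raise argparse.ArgumentError(self, f'missing key {required}')
        setattr(namespace, self.dest, parsed)

parser = argparse.ArgumentParser()
parser.add_argument('--source-mongo', nargs='+', action=KeyValueCollectionAction)
args = parser.parse_args(['--source-mongo', 'database=d1', 'collection=c1'])
print(args.source_mongo)  # {'database': 'd1', 'collection': 'c1'}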
@@ -769,7 +776,7 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any ) -> None: error_map = { @@ -820,7 +827,7 @@ async def begin_start( self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any ) -> AsyncLROPoller[None]: """Starts the notebook workspace. diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_operations.py index dd6fd621221..6312f6f0669 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -89,10 +89,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_partition_key_range_id_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_partition_key_range_id_operations.py index f21b1e256de..92732644b67 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_partition_key_range_id_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_partition_key_range_id_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -120,10 +120,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_partition_key_range_id_region_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_partition_key_range_id_region_operations.py index 7b53497ac65..eeadd44e2bf 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_partition_key_range_id_region_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_partition_key_range_id_region_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -125,10 +125,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_percentile_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_percentile_operations.py index 0edc259991d..95ea21e8a11 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_percentile_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_percentile_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -105,10 +105,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_percentile_source_target_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_percentile_source_target_operations.py index 7e1e86d76ab..04691f4ab89 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_percentile_source_target_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_percentile_source_target_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -119,10 +119,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_percentile_target_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_percentile_target_operations.py index b1b67ef9b7b..dc2307caebd 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_percentile_target_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_percentile_target_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -109,10 +109,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_private_endpoint_connections_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_private_endpoint_connections_operations.py index 0ddb00080e7..25846f5e4db 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_private_endpoint_connections_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_private_endpoint_connections_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -109,10 +109,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_private_link_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_private_link_resources_operations.py index e4237da28da..ae6cc147529 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_private_link_resources_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_private_link_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -101,10 +101,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_database_accounts_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_database_accounts_operations.py index 7bfbdf8db6c..36972cbce77 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_database_accounts_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_database_accounts_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -105,10 +105,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -180,10 +187,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_gremlin_databases_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_gremlin_databases_operations.py index 3c1ea155a8a..dbbadb2f445 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_gremlin_databases_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_gremlin_databases_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -104,10 +104,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_gremlin_graphs_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_gremlin_graphs_operations.py index cbb9ee94241..bbfe53bcc48 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_gremlin_graphs_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_gremlin_graphs_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -119,10 +119,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_gremlin_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_gremlin_resources_operations.py index d0a357a5c55..0c303814188 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_gremlin_resources_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_gremlin_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -117,10 +117,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_mongodb_collections_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_mongodb_collections_operations.py index d79c09e0dd5..c753fe0f26b 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_mongodb_collections_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_mongodb_collections_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -119,10 +119,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_mongodb_databases_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_mongodb_databases_operations.py index 9519912b4ea..58e6d2bf128 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_mongodb_databases_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_mongodb_databases_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -104,10 +104,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_mongodb_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_mongodb_resources_operations.py index b768abb215a..4f0bff3c019 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_mongodb_resources_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_mongodb_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -117,10 +117,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_sql_containers_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_sql_containers_operations.py index a409e42bdbb..2c3e2f5c212 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_sql_containers_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_sql_containers_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -118,10 +118,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_sql_databases_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_sql_databases_operations.py index 79373f22a0f..e7a0632a571 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_sql_databases_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_sql_databases_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -104,10 +104,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_sql_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_sql_resources_operations.py index c9ebbef2e05..8dcec79c34f 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_sql_resources_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_sql_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -117,10 +117,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_table_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_table_resources_operations.py index 40e843e6cce..542f20ef70a 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_table_resources_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_table_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -116,10 +116,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_tables_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_tables_operations.py index 676382d3639..4d382eb1e7c 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_tables_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_restorable_tables_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -114,10 +114,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_service_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_service_operations.py index 13fad900cdf..401c6a1bd5e 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_service_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_service_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -107,10 +107,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_sql_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_sql_resources_operations.py index 083f77e61cc..79cd8cc2f48 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_sql_resources_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_sql_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -150,10 +150,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -1193,10 +1200,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -1605,10 +1619,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -3917,10 +3938,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) 
_next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -4477,10 +4505,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -5041,10 +5076,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -6005,10 +6047,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -6503,10 +6552,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in 
urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_table_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_table_resources_operations.py index daf8fd375de..c1315edd611 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_table_resources_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/aio/operations/_table_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -112,10 +112,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/models/__init__.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/models/__init__.py index d6f05320863..b4f2e142fde 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/models/__init__.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/models/__init__.py @@ -71,6 +71,7 @@ from ._models_py3 import ContinuousModeProperties from ._models_py3 import CorsPolicy from ._models_py3 import CosmosCassandraDataTransferDataSourceSink +from ._models_py3 import CosmosMongoDataTransferDataSourceSink from ._models_py3 import CosmosSqlDataTransferDataSourceSink from ._models_py3 import CreateJobRequest from ._models_py3 import CreateUpdateOptions @@ -424,6 +425,7 @@ "ContinuousModeProperties", "CorsPolicy", "CosmosCassandraDataTransferDataSourceSink", + "CosmosMongoDataTransferDataSourceSink", "CosmosSqlDataTransferDataSourceSink", "CreateJobRequest", "CreateUpdateOptions", diff --git 
a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/models/_cosmos_db_management_client_enums.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/models/_cosmos_db_management_client_enums.py
index 60fea4b04af..ac1f2e1659f 100644
--- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/models/_cosmos_db_management_client_enums.py
+++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/models/_cosmos_db_management_client_enums.py
@@ -129,6 +129,7 @@ class DataTransferComponent(str, Enum, metaclass=CaseInsensitiveEnumMeta):
     """DataTransferComponent."""

     COSMOS_DB_CASSANDRA = "CosmosDBCassandra"
+    COSMOS_DB_MONGO = "CosmosDBMongo"
     COSMOS_DB_SQL = "CosmosDBSql"
     AZURE_BLOB_STORAGE = "AzureBlobStorage"

diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/models/_models_py3.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/models/_models_py3.py
index 8eb6317387b..45b5eb08b50 100644
--- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/models/_models_py3.py
+++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/models/_models_py3.py
@@ -360,11 +360,12 @@ class DataTransferDataSourceSink(_serialization.Model):

     You probably want to use the sub-classes and not this class directly. Known sub-classes are:
     AzureBlobDataTransferDataSourceSink, CosmosCassandraDataTransferDataSourceSink,
-    CosmosSqlDataTransferDataSourceSink
+    CosmosMongoDataTransferDataSourceSink, CosmosSqlDataTransferDataSourceSink

     All required parameters must be populated in order to send to Azure.

-    :ivar component: Known values are: "CosmosDBCassandra", "CosmosDBSql", and "AzureBlobStorage".
+    :ivar component: Known values are: "CosmosDBCassandra", "CosmosDBMongo", "CosmosDBSql", and
+     "AzureBlobStorage".
     :vartype component: str or ~azure.mgmt.cosmosdb.models.DataTransferComponent
     """

@@ -380,6 +381,7 @@ class DataTransferDataSourceSink(_serialization.Model):
         "component": {
             "AzureBlobStorage": "AzureBlobDataTransferDataSourceSink",
             "CosmosDBCassandra": "CosmosCassandraDataTransferDataSourceSink",
+            "CosmosDBMongo": "CosmosMongoDataTransferDataSourceSink",
             "CosmosDBSql": "CosmosSqlDataTransferDataSourceSink",
         }
     }
@@ -395,7 +397,8 @@ class AzureBlobDataTransferDataSourceSink(DataTransferDataSourceSink):

     All required parameters must be populated in order to send to Azure.

-    :ivar component: Known values are: "CosmosDBCassandra", "CosmosDBSql", and "AzureBlobStorage".
+    :ivar component: Known values are: "CosmosDBCassandra", "CosmosDBMongo", "CosmosDBSql", and
+     "AzureBlobStorage".
     :vartype component: str or ~azure.mgmt.cosmosdb.models.DataTransferComponent
     :ivar container_name: Required.
     :vartype container_name: str
@@ -3145,7 +3148,8 @@ class CosmosCassandraDataTransferDataSourceSink(DataTransferDataSourceSink):

     All required parameters must be populated in order to send to Azure.

-    :ivar component: Known values are: "CosmosDBCassandra", "CosmosDBSql", and "AzureBlobStorage".
+    :ivar component: Known values are: "CosmosDBCassandra", "CosmosDBMongo", "CosmosDBSql", and
+     "AzureBlobStorage".
     :vartype component: str or ~azure.mgmt.cosmosdb.models.DataTransferComponent
     :ivar keyspace_name: Required.
     :vartype keyspace_name: str
@@ -3178,12 +3182,52 @@ def __init__(self, *, keyspace_name: str, table_name: str, **kwargs):
         self.table_name = table_name


+class CosmosMongoDataTransferDataSourceSink(DataTransferDataSourceSink):
+    """A CosmosDB Mongo API data source/sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar component: Known values are: "CosmosDBCassandra", "CosmosDBMongo", "CosmosDBSql", and
+     "AzureBlobStorage".
+    :vartype component: str or ~azure.mgmt.cosmosdb.models.DataTransferComponent
+    :ivar database_name: Required.
+    :vartype database_name: str
+    :ivar collection_name: Required.
+    :vartype collection_name: str
+    """
+
+    _validation = {
+        "component": {"required": True},
+        "database_name": {"required": True},
+        "collection_name": {"required": True},
+    }
+
+    _attribute_map = {
+        "component": {"key": "component", "type": "str"},
+        "database_name": {"key": "databaseName", "type": "str"},
+        "collection_name": {"key": "collectionName", "type": "str"},
+    }
+
+    def __init__(self, *, database_name: str, collection_name: str, **kwargs):
+        """
+        :keyword database_name: Required.
+        :paramtype database_name: str
+        :keyword collection_name: Required.
+        :paramtype collection_name: str
+        """
+        super().__init__(**kwargs)
+        self.component = "CosmosDBMongo"  # type: str
+        self.database_name = database_name
+        self.collection_name = collection_name
+
+
 class CosmosSqlDataTransferDataSourceSink(DataTransferDataSourceSink):
     """A CosmosDB Cassandra API data source/sink.

     All required parameters must be populated in order to send to Azure.

-    :ivar component: Known values are: "CosmosDBCassandra", "CosmosDBSql", and "AzureBlobStorage".
+    :ivar component: Known values are: "CosmosDBCassandra", "CosmosDBMongo", "CosmosDBSql", and
+     "AzureBlobStorage".
     :vartype component: str or ~azure.mgmt.cosmosdb.models.DataTransferComponent
     :ivar database_name: Required.
     :vartype database_name: str
diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_cassandra_clusters_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_cassandra_clusters_operations.py
index 72ae81ada2c..1029ddacac0 100644
--- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_cassandra_clusters_operations.py
+++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_cassandra_clusters_operations.py
@@ -7,7 +7,7 @@
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
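# The _models_py3.py hunks above add CosmosMongoDataTransferDataSourceSink and register
# "CosmosDBMongo" as a DataTransferComponent value and polymorphic subtype. A small sketch of
# constructing the new model, based only on the signature shown in this patch; it assumes the
# extension package is importable, and the database/collection names are illustrative placeholders:
from azext_cosmosdb_preview.vendored_sdks.azure_mgmt_cosmosdb.models import (
    CosmosMongoDataTransferDataSourceSink,
)

source = CosmosMongoDataTransferDataSourceSink(
    database_name="sourceDatabase", collection_name="sourceCollection"
)
destination = CosmosMongoDataTransferDataSourceSink(
    database_name="targetDatabase", collection_name="targetCollection"
)
# __init__ sets the discriminator, and the _subtype_map entry added above maps
# "CosmosDBMongo" back to this class during deserialization.
assert source.component == "CosmosDBMongo"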
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -509,10 +509,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -585,10 +592,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -1468,10 +1482,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_cassandra_data_centers_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_cassandra_data_centers_operations.py index 5d54224976d..583b5b2a0cc 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_cassandra_data_centers_operations.py +++ 
b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_cassandra_data_centers_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -315,10 +315,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_cassandra_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_cassandra_resources_operations.py index 3f3939a2420..3a38ca88adf 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_cassandra_resources_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_cassandra_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
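The same regenerated paging change recurs throughout the operations modules in this patch: each module now does `import urllib.parse` instead of importing parse_qs/urljoin/urlparse by name, and every query value parsed out of the service-provided next_link is percent-encoded again before the follow-up GET is built. A standalone sketch of that transformation, using a made-up continuation URL and a placeholder API version:

    import urllib.parse

    # Hypothetical continuation link returned by the service.
    next_link = "https://management.azure.com/subscriptions/xxx/resources?$skipToken=abc%20def"

    parsed = urllib.parse.urlparse(next_link)
    # parse_qs decodes the values, so each one is re-quoted before being
    # handed back to the next request as query parameters.
    params = {
        key: [urllib.parse.quote(v) for v in value]
        for key, value in urllib.parse.parse_qs(parsed.query).items()
    }
    params["api-version"] = "2022-08-15-preview"  # placeholder API version
    base = urllib.parse.urljoin(next_link, parsed.path)
    print(base, params)  # -> base URL plus {'$skipToken': ['abc%20def'], 'api-version': ...}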
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -1017,10 +1017,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -2056,10 +2063,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -3136,10 +3150,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_collection_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_collection_operations.py index cb47c68b013..a4418ced7e6 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_collection_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_collection_operations.py @@ -7,7 +7,7 @@ # Changes 
may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -253,10 +253,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -351,10 +358,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -438,10 +452,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_collection_partition_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_collection_partition_operations.py index 8f8fd58b70e..aa5f0ffea71 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_collection_partition_operations.py +++ 
b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_collection_partition_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -211,10 +211,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -309,10 +316,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_collection_partition_region_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_collection_partition_region_operations.py index a4ac0b0a41f..aa30e195cae 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_collection_partition_region_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_collection_partition_region_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -171,10 +171,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_collection_region_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_collection_region_operations.py index 5d5a7fde6b2..d8c70bf5fe0 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_collection_region_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_collection_region_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -171,10 +171,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_data_transfer_jobs_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_data_transfer_jobs_operations.py index 2721c8a85bf..f453098f6d8 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_data_transfer_jobs_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_data_transfer_jobs_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -725,10 +725,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_database_account_region_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_database_account_region_operations.py index 86e85f39ad9..bee9e943fc4 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_database_account_region_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_database_account_region_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -147,10 +147,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_database_accounts_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_database_accounts_operations.py index 52d82445cc1..05cbfe7f298 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_database_accounts_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_database_accounts_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -1522,10 +1522,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -1601,10 +1608,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -2612,10 +2626,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -2698,10 +2719,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - 
request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -2779,10 +2807,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_database_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_database_operations.py index c7e45861ce4..ca99d2cf124 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_database_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_database_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -228,10 +228,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -322,10 +329,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -406,10 +420,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_graph_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_graph_resources_operations.py index 20241795f0e..e44bbc146b9 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_graph_resources_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_graph_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause 
incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -245,10 +245,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_gremlin_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_gremlin_resources_operations.py index 5b73d317cd4..2b7aab00eb5 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_gremlin_resources_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_gremlin_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -767,10 +767,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -1806,10 +1813,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_locations_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_locations_operations.py index e13ff57c4ce..9edfe7328a7 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_locations_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_locations_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -144,10 +144,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_mongo_db_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_mongo_db_resources_operations.py index 03e4ca2c324..ea62d971fd2 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_mongo_db_resources_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_mongo_db_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -1272,10 +1272,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -3289,10 +3296,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -5029,10 +5043,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -5527,10 +5548,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - 
request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_notebook_workspaces_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_notebook_workspaces_operations.py index c86c348e189..25412e9c658 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_notebook_workspaces_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_notebook_workspaces_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -76,7 +76,7 @@ def build_list_by_database_account_request( def build_get_request( resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], subscription_id: str, **kwargs: Any ) -> HttpRequest: @@ -116,7 +116,7 @@ def build_get_request( def build_create_or_update_request( resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], subscription_id: str, **kwargs: Any ) -> HttpRequest: @@ -159,7 +159,7 @@ def build_create_or_update_request( def build_delete_request( resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], subscription_id: str, **kwargs: Any ) -> HttpRequest: @@ -199,7 +199,7 @@ def build_delete_request( def build_list_connection_info_request( resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], subscription_id: str, **kwargs: Any ) -> HttpRequest: @@ -239,7 +239,7 @@ def build_list_connection_info_request( def build_regenerate_auth_token_request( resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], subscription_id: str, **kwargs: Any ) -> HttpRequest: @@ -279,7 +279,7 @@ def build_regenerate_auth_token_request( def build_start_request( resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], subscription_id: str, **kwargs: Any ) -> HttpRequest: @@ -382,10 +382,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = 
urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -422,7 +429,7 @@ def get( self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any ) -> _models.NotebookWorkspace: """Gets the notebook workspace for a Cosmos DB account. @@ -491,7 +498,7 @@ def _create_or_update_initial( self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], notebook_create_update_parameters: Union[_models.NotebookWorkspaceCreateUpdateParameters, IO], **kwargs: Any ) -> _models.NotebookWorkspace: @@ -559,7 +566,7 @@ def begin_create_or_update( self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], notebook_create_update_parameters: _models.NotebookWorkspaceCreateUpdateParameters, *, content_type: str = "application/json", @@ -601,7 +608,7 @@ def begin_create_or_update( self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], notebook_create_update_parameters: IO, *, content_type: str = "application/json", @@ -642,7 +649,7 @@ def begin_create_or_update( self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], notebook_create_update_parameters: Union[_models.NotebookWorkspaceCreateUpdateParameters, IO], **kwargs: Any ) -> LROPoller[_models.NotebookWorkspace]: @@ -727,7 +734,7 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any ) -> None: error_map = { @@ -778,7 +785,7 @@ def begin_delete( self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any ) -> LROPoller[None]: """Deletes the notebook workspace for a Cosmos DB account. @@ -850,7 +857,7 @@ def list_connection_info( self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any ) -> _models.NotebookWorkspaceConnectionInfoResult: """Retrieves the connection info for the notebook workspace. 
@@ -919,7 +926,7 @@ def _regenerate_auth_token_initial( # pylint: disable=inconsistent-return-state self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any ) -> None: error_map = { @@ -970,7 +977,7 @@ def begin_regenerate_auth_token( self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any ) -> LROPoller[None]: """Regenerates the auth token for the notebook workspace. @@ -1041,7 +1048,7 @@ def _start_initial( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any ) -> None: error_map = { @@ -1092,7 +1099,7 @@ def begin_start( self, resource_group_name: str, account_name: str, - notebook_workspace_name: Union[str, "_models.NotebookWorkspaceName"], + notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any ) -> LROPoller[None]: """Starts the notebook workspace. diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_operations.py index 4bec30f6d60..657fb53d8bb 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
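In the notebook workspace operations above, the type hints drop the quoted forward reference and point at _models.NotebookWorkspaceName directly; callers can still pass either the enum member or its plain string value. A hedged usage sketch, assuming the vendored client is already constructed and that the enum exposes a DEFAULT member with value "default" (both assumptions, not guaranteed by this patch):

    from azext_cosmosdb_preview.vendored_sdks.azure_mgmt_cosmosdb import models as _models

    # Either form should satisfy Union[str, _models.NotebookWorkspaceName]:
    name_as_enum = _models.NotebookWorkspaceName.DEFAULT  # assumed member name
    name_as_str = "default"                               # assumed string value

    # Hypothetical calls against an already-constructed management client:
    # client.notebook_workspaces.get("my-rg", "my-account", name_as_enum)
    # client.notebook_workspaces.get("my-rg", "my-account", name_as_str)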
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -111,10 +111,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_partition_key_range_id_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_partition_key_range_id_operations.py index ddbdac183b8..66f4ffe1694 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_partition_key_range_id_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_partition_key_range_id_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -170,10 +170,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_partition_key_range_id_region_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_partition_key_range_id_region_operations.py index 406cbbabf4d..2dd533eb540 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_partition_key_range_id_region_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_partition_key_range_id_region_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -177,10 +177,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_percentile_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_percentile_operations.py index 3d1d1b2a8ea..2d138976d15 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_percentile_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_percentile_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -144,10 +144,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_percentile_source_target_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_percentile_source_target_operations.py index 9d02dced09c..619dc62220b 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_percentile_source_target_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_percentile_source_target_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -167,10 +167,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_percentile_target_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_percentile_target_operations.py index 3e953891aa9..448ce28dc1e 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_percentile_target_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_percentile_target_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -149,10 +149,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_private_endpoint_connections_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_private_endpoint_connections_operations.py index bcb27156c85..709f9fca3b4 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_private_endpoint_connections_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_private_endpoint_connections_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -269,10 +269,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_private_link_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_private_link_resources_operations.py index b642a17acb8..c6c4bcdb7d6 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_private_link_resources_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_private_link_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -173,10 +173,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_database_accounts_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_database_accounts_operations.py index f3d4df56dcb..123375d354e 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_database_accounts_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_database_accounts_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -184,10 +184,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -259,10 +266,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_gremlin_databases_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_gremlin_databases_operations.py index 98e95c97cb7..f4e95723009 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_gremlin_databases_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_gremlin_databases_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -136,10 +136,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_gremlin_graphs_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_gremlin_graphs_operations.py index 80e6df036e1..3e499918e1e 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_gremlin_graphs_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_gremlin_graphs_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -168,10 +168,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_gremlin_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_gremlin_resources_operations.py index 22bec7d931f..02d19892c40 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_gremlin_resources_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_gremlin_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -163,10 +163,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_mongodb_collections_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_mongodb_collections_operations.py index 001dd09f270..15d1451be5d 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_mongodb_collections_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_mongodb_collections_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -168,10 +168,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_mongodb_databases_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_mongodb_databases_operations.py index 415e2af2c3a..783b5a86a60 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_mongodb_databases_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_mongodb_databases_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -136,10 +136,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_mongodb_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_mongodb_resources_operations.py index 4b40ed54029..b9ce58788c6 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_mongodb_resources_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_mongodb_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -163,10 +163,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_sql_containers_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_sql_containers_operations.py index 482d8df0c2f..19e2d15c9f8 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_sql_containers_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_sql_containers_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -167,10 +167,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_sql_databases_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_sql_databases_operations.py index e34dbc95095..bbd8a8c739f 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_sql_databases_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_sql_databases_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -136,10 +136,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_sql_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_sql_resources_operations.py index a8318378a70..f75eb4065a0 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_sql_resources_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_sql_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -163,10 +163,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_table_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_table_resources_operations.py index 6d5a523c53f..2a809743a4a 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_table_resources_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_table_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -162,10 +162,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_tables_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_tables_operations.py index a3117ff669e..3c5b981a964 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_tables_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_restorable_tables_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Iterable, Optional, TypeVar -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -157,10 +157,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_service_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_service_operations.py index ce38a5289fc..3c9d60241ee 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_service_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_service_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -246,10 +246,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_sql_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_sql_resources_operations.py index 01b467034c0..8d9286923af 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_sql_resources_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_sql_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -1908,10 +1908,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -2945,10 +2952,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -3356,10 +3370,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -5662,10 +5683,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - 
request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -6219,10 +6247,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -6782,10 +6817,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -7742,10 +7784,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" @@ -8236,10 +8285,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) 
_next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_table_resources_operations.py b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_table_resources_operations.py index c414f014e07..d84f35d2ff1 100644 --- a/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_table_resources_operations.py +++ b/src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_table_resources_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload -from urllib.parse import parse_qs, urljoin, urlparse +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -430,10 +430,17 @@ def prepare_request(next_link=None): else: # make call to next link with the client's api-version - _parsed_next_link = urlparse(next_link) - _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" diff --git a/src/cosmosdb-preview/setup.py b/src/cosmosdb-preview/setup.py index 29e7a8bb1c3..0ecf364b924 100644 --- a/src/cosmosdb-preview/setup.py +++ b/src/cosmosdb-preview/setup.py @@ -16,7 +16,7 @@ # TODO: Confirm this is the right version number you want and it matches your # HISTORY.rst entry. 
-VERSION = '0.20.0' +VERSION = '0.21.0' # The full list of classifiers is available at # https://pypi.python.org/pypi?%3Aaction=list_classifiers From cc73c6cae13a466346578b7c8233a72790e34492 Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Thu, 3 Nov 2022 15:41:00 +0000 Subject: [PATCH 61/85] [Release] Update index.json for extension [ cosmosdb-preview ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=13047&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/1279c3b82dc579d37cfbd28d856892119185814c --- src/index.json | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/src/index.json b/src/index.json index f01c8ae56fb..2e9a2a707d5 100644 --- a/src/index.json +++ b/src/index.json @@ -19389,6 +19389,49 @@ "version": "0.20.0" }, "sha256Digest": "0e759bad4c3bd91e197d357ea804827e2030812b0751c9bac4472711e5baf4e2" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/cosmosdb_preview-0.21.0-py2.py3-none-any.whl", + "filename": "cosmosdb_preview-0.21.0-py2.py3-none-any.whl", + "metadata": { + "azext.isPreview": true, + "azext.minCliCoreVersion": "2.17.1", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "kakhandr@microsoft.com", + "name": "Kalyan khandrika", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/cosmosdb-preview" + } + } + }, + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "cosmosdb-preview", + "summary": "Microsoft Azure Command-Line Tools Cosmosdb-preview Extension", + "version": "0.21.0" + }, + "sha256Digest": "969496859093a6106bd2b10fff91121087f42a2bab4419f0dfcda52339681969" } ], "costmanagement": [ From f5fb41b4265c648073c3ee117468277abf6cbe79 Mon Sep 17 00:00:00 2001 From: ccc Date: Mon, 7 Nov 2022 16:11:42 +0800 Subject: [PATCH 62/85] [AKS] Fix NSG Control flags when creating agentpool (#5520) * Fix asg-ids / allowed-hostports flags when creating agentpool * Fix ut --- src/aks-preview/HISTORY.rst | 5 +++++ src/aks-preview/azext_aks_preview/agentpool_decorator.py | 9 ++++----- .../tests/latest/test_agentpool_decorator.py | 2 ++ .../tests/latest/test_managed_cluster_decorator.py | 2 ++ src/aks-preview/setup.py | 2 +- 5 files changed, 14 insertions(+), 6 deletions(-) diff --git a/src/aks-preview/HISTORY.rst b/src/aks-preview/HISTORY.rst index 99f8dedb247..f3f3a92b2b5 100644 --- a/src/aks-preview/HISTORY.rst +++ b/src/aks-preview/HISTORY.rst @@ -12,6 +12,11 @@ To release a new version, please select a new version number (usually plus 1 to Pending +++++++ +0.5.114 ++++++++ + +* Fix `az aks create` and `az aks nodepool add` commands failing on adding nodepool with managed ApplicationSecurityGroups. 
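The agentpool_decorator.py hunk that follows rewrites set_up_agentpool_network_profile so that an AgentPoolNetworkProfile is always attached to the agent pool, allowed host ports are only set when the flag was supplied, and the application security group IDs are assigned even when no host ports were given. A simplified sketch of that branching, using stand-in types rather than the real SDK models and decorator context:

from dataclasses import dataclass
from typing import List, Optional

@dataclass
class NetworkProfile:
    # stand-in for the SDK's AgentPoolNetworkProfile model
    allowed_host_ports: Optional[list] = None
    application_security_groups: Optional[List[str]] = None

def set_up_network_profile(asg_ids, allowed_host_ports):
    profile = NetworkProfile()
    if allowed_host_ports is not None:
        # only populated when --allowed-host-ports was passed on the command line
        profile.allowed_host_ports = allowed_host_ports
    # assigned unconditionally, so --asg-ids works without --allowed-host-ports
    profile.application_security_groups = asg_ids
    return profile

# Previously the profile was skipped unless both values were truthy; now ASG IDs
# supplied on their own are honoured.
print(set_up_network_profile(["/subscriptions/.../applicationSecurityGroups/asg1"], None))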
+ 0.5.113 +++++++ diff --git a/src/aks-preview/azext_aks_preview/agentpool_decorator.py b/src/aks-preview/azext_aks_preview/agentpool_decorator.py index ff82f67a4c4..2869eea8330 100644 --- a/src/aks-preview/azext_aks_preview/agentpool_decorator.py +++ b/src/aks-preview/azext_aks_preview/agentpool_decorator.py @@ -402,11 +402,10 @@ def set_up_agentpool_network_profile(self, agentpool: AgentPool) -> AgentPool: asg_ids = self.context.get_asg_ids() allowed_host_ports = self.context.get_allowed_host_ports() - if asg_ids and allowed_host_ports: - agentpool.network_profile = self.models.AgentPoolNetworkProfile( - application_security_groups=asg_ids, - allowed_host_ports=allowed_host_ports, - ) + agentpool.network_profile = self.models.AgentPoolNetworkProfile() + if allowed_host_ports is not None: + agentpool.network_profile.allowed_host_ports = allowed_host_ports + agentpool.network_profile.application_security_groups = asg_ids return agentpool def construct_agentpool_profile_preview(self) -> AgentPool: diff --git a/src/aks-preview/azext_aks_preview/tests/latest/test_agentpool_decorator.py b/src/aks-preview/azext_aks_preview/tests/latest/test_agentpool_decorator.py index c48078091b1..1f5be78e186 100644 --- a/src/aks-preview/azext_aks_preview/tests/latest/test_agentpool_decorator.py +++ b/src/aks-preview/azext_aks_preview/tests/latest/test_agentpool_decorator.py @@ -537,6 +537,7 @@ def test_construct_agentpool_profile_preview(self): scale_down_mode=CONST_SCALE_DOWN_MODE_DELETE, workload_runtime=CONST_WORKLOAD_RUNTIME_OCI_CONTAINER, enable_custom_ca_trust=False, + network_profile=self.models.AgentPoolNetworkProfile(), ) self.assertEqual(dec_agentpool_1, ground_truth_agentpool_1) @@ -629,6 +630,7 @@ def test_construct_agentpool_profile_preview(self): mode=CONST_NODEPOOL_MODE_SYSTEM, workload_runtime=CONST_WORKLOAD_RUNTIME_OCI_CONTAINER, enable_custom_ca_trust=False, + network_profile=self.models.AgentPoolNetworkProfile(), ) self.assertEqual(dec_agentpool_1, ground_truth_agentpool_1) diff --git a/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py b/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py index bbdeb256e5d..152a1b59684 100644 --- a/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py +++ b/src/aks-preview/azext_aks_preview/tests/latest/test_managed_cluster_decorator.py @@ -3410,6 +3410,7 @@ def test_set_up_agentpool_profile(self): message_of_the_day="W10=", # base64 encode of "[]" gpu_instance_profile="test_gpu_instance_profile", workload_runtime=CONST_WORKLOAD_RUNTIME_OCI_CONTAINER, + network_profile=self.models.AgentPoolNetworkProfile(), ) ground_truth_mc_1 = self.models.ManagedCluster(location="test_location") ground_truth_mc_1.agent_pool_profiles = [ground_truth_agentpool_profile_1] @@ -4234,6 +4235,7 @@ def test_construct_mc_profile_preview(self): mode=CONST_NODEPOOL_MODE_SYSTEM, workload_runtime=CONST_WORKLOAD_RUNTIME_OCI_CONTAINER, enable_custom_ca_trust=False, + network_profile=self.models.AgentPoolNetworkProfile(), ) ssh_config_1 = self.models.ContainerServiceSshConfiguration( public_keys=[self.models.ContainerServiceSshPublicKey(key_data=public_key)] diff --git a/src/aks-preview/setup.py b/src/aks-preview/setup.py index 9898c42a8fb..a6fe608aed8 100644 --- a/src/aks-preview/setup.py +++ b/src/aks-preview/setup.py @@ -9,7 +9,7 @@ from setuptools import setup, find_packages -VERSION = "0.5.113" +VERSION = "0.5.114" CLASSIFIERS = [ "Development Status :: 4 - Beta", From 
102f3b7d779014749a21bacb8e615821c467f18c Mon Sep 17 00:00:00 2001 From: Azure CLI Team Date: Mon, 7 Nov 2022 08:17:38 +0000 Subject: [PATCH 63/85] [Release] Update index.json for extension [ aks-preview ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=13546&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/f5fb41b4265c648073c3ee117468277abf6cbe79 --- src/index.json | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/src/index.json b/src/index.json index 2e9a2a707d5..8d3c731eed4 100644 --- a/src/index.json +++ b/src/index.json @@ -7040,6 +7040,49 @@ "version": "0.5.113" }, "sha256Digest": "033c6189f4ce704653e4f9ef880ccbedecd478d652922cdb16a604f0fcdd6cc2" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/aks_preview-0.5.114-py2.py3-none-any.whl", + "filename": "aks_preview-0.5.114-py2.py3-none-any.whl", + "metadata": { + "azext.isPreview": true, + "azext.minCliCoreVersion": "2.38.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions/tree/main/src/aks-preview" + } + } + }, + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "aks-preview", + "summary": "Provides a preview for upcoming AKS features", + "version": "0.5.114" + }, + "sha256Digest": "364f6267114f199a6c1f9b5362826f6f14c924c21b9587e4dc5ca90c49ec3d6e" } ], "alertsmanagement": [ From 2ce21e8affa9dd627fcf8c6cdf5478fc5b13a5ff Mon Sep 17 00:00:00 2001 From: Silas Strawn Date: Mon, 7 Nov 2022 18:08:18 -0800 Subject: [PATCH 64/85] [containerapp] `az containerapp logs/exec`: Fix "KeyError" Bug (#5527) --- src/containerapp/HISTORY.rst | 4 + .../azext_containerapp/_ssh_utils.py | 15 +- src/containerapp/azext_containerapp/custom.py | 5 +- .../test_containerapp_env_logs_e2e.yaml | 1603 ++++++---- .../test_containerapp_eventstream.yaml | 823 ++--- .../test_containerapp_logstream.yaml | 2788 +++-------------- .../latest/test_containerapp_env_commands.py | 1 - src/containerapp/setup.py | 2 +- 8 files changed, 1698 insertions(+), 3543 deletions(-) diff --git a/src/containerapp/HISTORY.rst b/src/containerapp/HISTORY.rst index 35d4f4dce78..19d83e22406 100644 --- a/src/containerapp/HISTORY.rst +++ b/src/containerapp/HISTORY.rst @@ -3,6 +3,10 @@ Release History =============== +0.3.14 +++++++ +* 'az containerapp logs show'/'az containerapp exec': Fix "KeyError" bug + 0.3.13 ++++++ * 'az containerapp compose create': Migrated from containerapp-compose extension diff --git a/src/containerapp/azext_containerapp/_ssh_utils.py b/src/containerapp/azext_containerapp/_ssh_utils.py index af8169afe7b..5dfeb247708 100644 --- a/src/containerapp/azext_containerapp/_ssh_utils.py +++ b/src/containerapp/azext_containerapp/_ssh_utils.py @@ -13,7 +13,7 @@ import websocket from knack.log import 
get_logger -from azure.cli.core.azclierror import CLIInternalError +from azure.cli.core.azclierror import CLIInternalError, ValidationError from azure.cli.core.commands.client_factory import get_subscription_id from ._clients import ContainerAppClient @@ -53,7 +53,8 @@ class WebSocketConnection: def __init__(self, cmd, resource_group_name, name, revision, replica, container, startup_command): token_response = ContainerAppClient.get_auth_token(cmd, resource_group_name, name) self._token = token_response["properties"]["token"] - self._logstream_endpoint = token_response["properties"]["logStreamEndpoint"] + self._logstream_endpoint = self._get_logstream_endpoint(cmd, resource_group_name, name, + revision, replica, container) self._url = self._get_url(cmd=cmd, resource_group_name=resource_group_name, name=name, revision=revision, replica=replica, container=container, startup_command=startup_command) self._socket = websocket.WebSocket(enable_multithread=True) @@ -67,6 +68,16 @@ def __init__(self, cmd, resource_group_name, name, revision, replica, container, self._windows_conout_mode = _get_conout_mode() self._windows_conin_mode = _get_conin_mode() + @classmethod + def _get_logstream_endpoint(cls, cmd, resource_group_name, name, revision, replica, container): + containers = ContainerAppClient.get_replica(cmd, + resource_group_name, + name, revision, replica)["properties"]["containers"] + container_info = [c for c in containers if c["name"] == container] + if not container_info: + raise ValidationError(f"No such container: {container}") + return container_info[0]["logStreamEndpoint"] + def _get_url(self, cmd, resource_group_name, name, revision, replica, container, startup_command): sub = get_subscription_id(cmd.cli_ctx) base_url = self._logstream_endpoint diff --git a/src/containerapp/azext_containerapp/custom.py b/src/containerapp/azext_containerapp/custom.py index 38504f6cab6..c9af5b304c0 100644 --- a/src/containerapp/azext_containerapp/custom.py +++ b/src/containerapp/azext_containerapp/custom.py @@ -2475,8 +2475,9 @@ def stream_containerapp_logs(cmd, resource_group_name, name, container=None, rev sub = get_subscription_id(cmd.cli_ctx) token_response = ContainerAppClient.get_auth_token(cmd, resource_group_name, name) token = token_response["properties"]["token"] - logstream_endpoint = token_response["properties"]["logStreamEndpoint"] - base_url = logstream_endpoint[:logstream_endpoint.index("/subscriptions/")] + + base_url = ContainerAppClient.show(cmd, resource_group_name, name)["properties"]["eventStreamEndpoint"] + base_url = base_url[:base_url.index("/subscriptions/")] if kind == LOG_TYPE_CONSOLE: url = (f"{base_url}/subscriptions/{sub}/resourceGroups/{resource_group_name}/containerApps/{name}" diff --git a/src/containerapp/azext_containerapp/tests/latest/recordings/test_containerapp_env_logs_e2e.yaml b/src/containerapp/azext_containerapp/tests/latest/recordings/test_containerapp_env_logs_e2e.yaml index 1e07059b9de..74de557fd79 100644 --- a/src/containerapp/azext_containerapp/tests/latest/recordings/test_containerapp_env_logs_e2e.yaml +++ b/src/containerapp/azext_containerapp/tests/latest/recordings/test_containerapp_env_logs_e2e.yaml @@ -13,12 +13,12 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001?api-version=2021-04-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001","name":"clitest.rg000001","type":"Microsoft.Resources/resourceGroups","location":"australiaeast","tags":{"product":"azurecli","cause":"automation","date":"2022-10-09T04:11:43Z"},"properties":{"provisioningState":"Succeeded"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001","name":"clitest.rg000001","type":"Microsoft.Resources/resourceGroups","location":"australiaeast","tags":{"product":"azurecli","cause":"automation","date":"2022-11-07T23:07:58Z"},"properties":{"provisioningState":"Succeeded"}}' headers: cache-control: - no-cache @@ -27,7 +27,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:12:49 GMT + - Mon, 07 Nov 2022 23:08:42 GMT expires: - '-1' pragma: @@ -60,12 +60,12 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001/providers/Microsoft.OperationalInsights/workspaces/containerapp-env000004?api-version=2021-12-01-preview response: body: - string: '{"properties":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259","provisioningState":"Creating","sku":{"name":"PerGB2018","lastSkuUpdate":"2022-10-09T04:12:55.3461232Z"},"retentionInDays":30,"features":{"legacy":0,"searchVersion":1,"enableLogAccessUsingOnlyResourcePermissions":true},"workspaceCapping":{"dailyQuotaGb":-1.0,"quotaNextResetTime":"2022-10-09T06:00:00Z","dataIngestionStatus":"RespectQuota"},"publicNetworkAccessForIngestion":"Enabled","publicNetworkAccessForQuery":"Enabled","createdDate":"2022-10-09T04:12:55.3461232Z","modifiedDate":"2022-10-09T04:12:55.3461232Z"},"location":"australiaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.OperationalInsights/workspaces/containerapp-env000004","name":"containerapp-env000004","type":"Microsoft.OperationalInsights/workspaces"}' + string: '{"properties":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd","provisioningState":"Creating","sku":{"name":"PerGB2018","lastSkuUpdate":"2022-11-07T23:08:46.266874Z"},"retentionInDays":30,"features":{"legacy":0,"searchVersion":1,"enableLogAccessUsingOnlyResourcePermissions":true},"workspaceCapping":{"dailyQuotaGb":-1.0,"quotaNextResetTime":"2022-11-08T23:00:00Z","dataIngestionStatus":"RespectQuota"},"publicNetworkAccessForIngestion":"Enabled","publicNetworkAccessForQuery":"Enabled","createdDate":"2022-11-07T23:08:46.266874Z","modifiedDate":"2022-11-07T23:08:46.266874Z"},"location":"australiaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.OperationalInsights/workspaces/containerapp-env000004","name":"containerapp-env000004","type":"Microsoft.OperationalInsights/workspaces"}' headers: access-control-allow-origin: - '*' @@ -74,11 +74,11 @@ interactions: cache-control: - no-cache content-length: - - '858' + - '855' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:12:58 GMT + - Mon, 07 Nov 2022 23:08:46 GMT expires: - 
'-1' location: @@ -92,7 +92,7 @@ interactions: x-content-type-options: - nosniff x-ms-ratelimit-remaining-subscription-writes: - - '1199' + - '1198' x-powered-by: - ASP.NET status: @@ -112,12 +112,12 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.OperationalInsights/workspaces/containerapp-env000004?api-version=2021-12-01-preview response: body: - string: '{"properties":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259","provisioningState":"Succeeded","sku":{"name":"PerGB2018","lastSkuUpdate":"2022-10-09T04:12:55.3461232Z"},"retentionInDays":30,"features":{"legacy":0,"searchVersion":1,"enableLogAccessUsingOnlyResourcePermissions":true},"workspaceCapping":{"dailyQuotaGb":-1.0,"quotaNextResetTime":"2022-10-09T06:00:00Z","dataIngestionStatus":"RespectQuota"},"publicNetworkAccessForIngestion":"Enabled","publicNetworkAccessForQuery":"Enabled","createdDate":"2022-10-09T04:12:55.3461232Z","modifiedDate":"2022-10-09T04:12:55.3461232Z"},"location":"australiaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.OperationalInsights/workspaces/containerapp-env000004","name":"containerapp-env000004","type":"Microsoft.OperationalInsights/workspaces"}' + string: '{"properties":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd","provisioningState":"Succeeded","sku":{"name":"PerGB2018","lastSkuUpdate":"2022-11-07T23:08:46.266874Z"},"retentionInDays":30,"features":{"legacy":0,"searchVersion":1,"enableLogAccessUsingOnlyResourcePermissions":true},"workspaceCapping":{"dailyQuotaGb":-1.0,"quotaNextResetTime":"2022-11-08T23:00:00Z","dataIngestionStatus":"RespectQuota"},"publicNetworkAccessForIngestion":"Enabled","publicNetworkAccessForQuery":"Enabled","createdDate":"2022-11-07T23:08:46.266874Z","modifiedDate":"2022-11-07T23:08:46.266874Z"},"location":"australiaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.OperationalInsights/workspaces/containerapp-env000004","name":"containerapp-env000004","type":"Microsoft.OperationalInsights/workspaces"}' headers: access-control-allow-origin: - '*' @@ -126,11 +126,11 @@ interactions: cache-control: - no-cache content-length: - - '859' + - '856' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:13:29 GMT + - Mon, 07 Nov 2022 23:09:17 GMT expires: - '-1' pragma: @@ -166,12 +166,12 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: POST uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001/providers/Microsoft.OperationalInsights/workspaces/containerapp-env000004/sharedKeys?api-version=2020-08-01 response: body: - string: '{"primarySharedKey":"UdlBpc7WT4y4xbQktofBkZEQKqo2rl1yCxRXEAwQgSxp51A7lAW14EF9jyEwDsd3CN6Ac5dSRhlyq3w6Gg0Wpw==","secondarySharedKey":"Zu7zysKwAkahVGiNFFiD3KaKpemVTseQ+Zg9ze70jJBYwxXYqHzOY+iXJXT9h2OlVmenJL+JEPwa12gCH+Dfdw=="}' + string: 
'{"primarySharedKey":"OV6rSv0/sHDML+E+uCbGG9aLOfe5GF00gFHwc4kR9AnOPCiriMo6zpz2pplJEdwBRfnFSV7BAdfMirKxqKAAhw==","secondarySharedKey":"2VpNRZE4ojZ2FwiAYBrBlz9aejzVskAStLuZDrqDb8uAkoK+fsiBBYULh2iTaY+al+KTcGtmcmmyl+hv7kuLHA=="}' headers: access-control-allow-origin: - '*' @@ -184,7 +184,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:13:31 GMT + - Mon, 07 Nov 2022 23:09:19 GMT expires: - '-1' pragma: @@ -220,7 +220,7 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key --logs-destination -l User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -292,7 +292,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:13:33 GMT + - Mon, 07 Nov 2022 23:09:21 GMT expires: - '-1' pragma: @@ -320,39 +320,36 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key --logs-destination -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/managedEnvironments?api-version=2022-06-01-preview response: body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage1","name":"stage1","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:06:38.7041799","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:06:38.7041799"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"calmpond-b28c41d1.northcentralusstage.azurecontainerapps.io","staticIp":"13.67.139.178","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"21980690-af3f-47d2-ba10-1382b5450cc0"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage2","name":"stage2","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:07:15.2866819","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:07:15.2866819"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"wittymeadow-4dbec5e3.northcentralusstage.azurecontainerapps.io","staticIp":"23.100.80.227","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"71828812-a8df-459b-b6ef-c164550d9f5b"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461
D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage3","name":"stage3","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:07:27.14113","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:07:27.14113"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"redisland-f0228bca.northcentralusstage.azurecontainerapps.io","staticIp":"20.9.4.10","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"da72e22b-f2d3-4eab-b0bc-78680ea9ade5"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage4","name":"stage4","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:11:33.3982648","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:11:33.3982648"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"bravedune-ac753656.northcentralusstage.azurecontainerapps.io","staticIp":"20.9.3.250","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"aa3cdac6-e5ed-4ef6-808f-97996f1a8cd3"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage5","name":"stage5","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:11:23.672865","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:11:23.672865"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"blackwater-28f0311e.northcentralusstage.azurecontainerapps.io","staticIp":"40.69.173.180","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"5815914d-d73c-4e61-8f50-e71bcd901812"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}]}' + string: 
'{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage1","name":"stage1","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:06:38.7041799","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:06:38.7041799"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"calmpond-b28c41d1.northcentralusstage.azurecontainerapps.io","staticIp":"13.67.139.178","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"21980690-af3f-47d2-ba10-1382b5450cc0"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage2","name":"stage2","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:07:15.2866819","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:07:15.2866819"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"wittymeadow-4dbec5e3.northcentralusstage.azurecontainerapps.io","staticIp":"23.100.80.227","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"71828812-a8df-459b-b6ef-c164550d9f5b"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage3","name":"stage3","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:07:27.14113","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:07:27.14113"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"redisland-f0228bca.northcentralusstage.azurecontainerapps.io","staticIp":"20.9.4.10","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"da72e22b-f2d3-4eab-b0bc-78680ea9ade5"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage4","name":"stage4","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:11:33.3982648","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:11:33.3982648"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"bravedune-ac753656.northcentralusstage.azurecontaine
rapps.io","staticIp":"20.9.3.250","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"aa3cdac6-e5ed-4ef6-808f-97996f1a8cd3"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage5","name":"stage5","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:11:23.672865","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:11:23.672865"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"blackwater-28f0311e.northcentralusstage.azurecontainerapps.io","staticIp":"40.69.173.180","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"5815914d-d73c-4e61-8f50-e71bcd901812"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/env","name":"env","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-02T17:28:30.2161566","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T17:40:00.0261021"},"properties":{"provisioningState":"Failed","useLegionServerlessCompute":false,"defaultDomain":"wittysea-79949ad9.canadacentral.azurecontainerapps.io","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rgbntmvovn6b2yf4665irjixuyplbmqibatyrcolwtssk7qgwehfcaprvmb3amfhkne/providers/Microsoft.App/managedEnvironments/envszl4xfcgdr4feuyd27fee","name":"envszl4xfcgdr4feuyd27fee","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:46.2766299","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:46.2766299"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"agreeablebush-1943b534.canadacentral.azurecontainerapps.io","staticIp":"20.220.245.143","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"fad4055d-5566-4087-8adc-47b3d7d9d091"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rgu6i52u5xck4kf6tgpfhgqyw7fjw552wtd7qba2rmeh757px4hdm5fxfp6b26x4vrl/providers/Microsoft.App/managedEnvironments/env652qmaypbbhwigcxjddgr","name":"env652qmaypbbhwigcxjddgr","type":"Microsoft.App/managedEnviron
ments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/logstream-env","name":"logstream-env","type":"Microsoft.App/managedEnvironments","location":"eastus","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T19:31:58.3535946","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T19:31:58.3535946"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"salmoncoast-d1f7a6de.eastus.azurecontainerapps.io","staticIp":"20.121.84.21","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"b4591f71-4b84-4cd5-b397-377a1771e36f"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/logstream2-env","name":"logstream2-env","type":"Microsoft.App/managedEnvironments","location":"centralus","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T19:37:52.0110005","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T19:37:52.0110005"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"ashybeach-727d6692.centralus.azurecontainerapps.io","staticIp":"20.84.132.103","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"d2b86418-edcf-4f2b-b600-d473b2c3588e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}]}' headers: - api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview cache-control: - no-cache content-length: - - '4970' + - '9815' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:13:33 GMT + - Mon, 07 Nov 2022 23:09:22 GMT expires: - '-1' pragma: - no-cache - server: - - Microsoft-IIS/10.0 strict-transport-security: - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked vary: - - Accept-Encoding,Accept-Encoding + - Accept-Encoding x-content-type-options: - nosniff - x-powered-by: - - ASP.NET + x-ms-original-request-ids: + - '' + - '' + - '' + - '' status: code: 200 message: OK @@ -370,7 +367,7 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key --logs-destination -l User-Agent: - - AZURECLI/2.40.0 
azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -442,7 +439,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:13:34 GMT + - Mon, 07 Nov 2022 23:09:22 GMT expires: - '-1' pragma: @@ -470,7 +467,7 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key --logs-destination -l User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -542,7 +539,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:13:34 GMT + - Mon, 07 Nov 2022 23:09:23 GMT expires: - '-1' pragma: @@ -570,7 +567,7 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key --logs-destination -l User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -642,7 +639,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:13:34 GMT + - Mon, 07 Nov 2022 23:09:23 GMT expires: - '-1' pragma: @@ -670,7 +667,7 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key --logs-destination -l User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -742,7 +739,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:13:34 GMT + - Mon, 07 Nov 2022 23:09:24 GMT expires: - '-1' pragma: @@ -760,8 +757,8 @@ interactions: body: '{"location": "australiaeast", "tags": null, "sku": {"name": "Consumption"}, "properties": {"daprAIInstrumentationKey": null, "vnetConfiguration": null, "internalLoadBalancerEnabled": null, "appLogsConfiguration": {"destination": - "log-analytics", "logAnalyticsConfiguration": {"customerId": "6e2eb9ef-b74e-4574-89d3-df4ca975d259", - "sharedKey": "UdlBpc7WT4y4xbQktofBkZEQKqo2rl1yCxRXEAwQgSxp51A7lAW14EF9jyEwDsd3CN6Ac5dSRhlyq3w6Gg0Wpw=="}}, + "log-analytics", "logAnalyticsConfiguration": {"customerId": "3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd", + "sharedKey": "OV6rSv0/sHDML+E+uCbGG9aLOfe5GF00gFHwc4kR9AnOPCiriMo6zpz2pplJEdwBRfnFSV7BAdfMirKxqKAAhw=="}}, "customDomainConfiguration": null, "zoneRedundant": false}}' headers: Accept: @@ -779,25 +776,25 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key --logs-destination -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) 
AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631Z","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:13:39.2406631Z"},"properties":{"provisioningState":"Waiting","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887Z","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:09:27.4478887Z"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/locations/australiaeast/managedEnvironmentOperationStatuses/d7de5a88-a074-48f6-b10f-45de799d1c5a?api-version=2022-06-01-preview&azureAsyncOperation=true + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/locations/australiaeast/managedEnvironmentOperationStatuses/8136551d-1371-4966-90b9-122e34500127?api-version=2022-06-01-preview&azureAsyncOperation=true cache-control: - no-cache content-length: - - '982' + - '998' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:13:44 GMT + - Mon, 07 Nov 2022 23:09:31 GMT expires: - '-1' pragma: @@ -831,23 +828,23 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key --logs-destination -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:13:39.2406631"},"properties":{"provisioningState":"Waiting","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:09:27.4478887"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '980' + - '996' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:13:45 GMT + - Mon, 07 Nov 2022 23:09:32 GMT expires: - '-1' pragma: @@ -881,23 +878,23 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key --logs-destination -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:13:39.2406631"},"properties":{"provisioningState":"Waiting","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:09:27.4478887"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '980' + - '996' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:13:51 GMT + - Mon, 07 Nov 2022 23:09:37 GMT expires: - '-1' pragma: @@ -931,7 +928,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -1003,7 +1000,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:13:51 GMT + - Mon, 07 Nov 2022 23:09:38 GMT expires: - '-1' pragma: @@ -1031,23 +1028,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:13:39.2406631"},"properties":{"provisioningState":"Waiting","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:09:27.4478887"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '980' + - '996' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:13:53 GMT + - Mon, 07 Nov 2022 23:09:39 GMT expires: - '-1' pragma: @@ -1081,7 +1078,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -1153,7 +1150,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:13:59 GMT + - Mon, 07 Nov 2022 23:09:46 GMT expires: - '-1' pragma: @@ -1181,23 +1178,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:13:39.2406631"},"properties":{"provisioningState":"Waiting","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:09:27.4478887"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '980' + - '996' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:14:02 GMT + - Mon, 07 Nov 2022 23:09:49 GMT expires: - '-1' pragma: @@ -1231,7 +1228,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -1303,7 +1300,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:14:08 GMT + - Mon, 07 Nov 2022 23:09:55 GMT expires: - '-1' pragma: @@ -1331,23 +1328,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:13:39.2406631"},"properties":{"provisioningState":"Waiting","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:09:27.4478887"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '980' + - '996' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:14:10 GMT + - Mon, 07 Nov 2022 23:09:57 GMT expires: - '-1' pragma: @@ -1381,7 +1378,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -1453,7 +1450,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:14:16 GMT + - Mon, 07 Nov 2022 23:10:02 GMT expires: - '-1' pragma: @@ -1481,23 +1478,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:13:39.2406631"},"properties":{"provisioningState":"Waiting","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:09:27.4478887"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '980' + - '996' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:14:19 GMT + - Mon, 07 Nov 2022 23:10:04 GMT expires: - '-1' pragma: @@ -1531,7 +1528,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -1603,7 +1600,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:14:25 GMT + - Mon, 07 Nov 2022 23:10:10 GMT expires: - '-1' pragma: @@ -1631,23 +1628,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:13:39.2406631"},"properties":{"provisioningState":"Succeeded","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:09:27.4478887"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '982' + - '996' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:14:27 GMT + - Mon, 07 Nov 2022 23:10:11 GMT expires: - '-1' pragma: @@ -1679,9 +1676,9 @@ interactions: Connection: - keep-alive ParameterSetName: - - -n -g + - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -1753,7 +1750,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:14:28 GMT + - Mon, 07 Nov 2022 23:10:17 GMT expires: - '-1' pragma: @@ -1779,25 +1776,25 @@ interactions: Connection: - keep-alive ParameterSetName: - - -n -g + - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:13:39.2406631"},"properties":{"provisioningState":"Succeeded","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:09:27.4478887"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '982' + - '996' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:14:30 GMT + - Mon, 07 Nov 2022 23:10:19 GMT expires: - '-1' pragma: @@ -1817,6 +1814,106 @@ interactions: status: code: 200 message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - containerapp env show + Connection: + - keep-alive + ParameterSetName: + - -g -n + User-Agent: + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App","namespace":"Microsoft.App","authorizations":[{"applicationId":"7e3bc4fd-85a3-4192-b177-5b8bfc87f42c","roleDefinitionId":"39a74f72-b40f-4bdc-b639-562fe2260bf0"},{"applicationId":"3734c1a4-2bed-4998-a37a-ff1a9e7bf019","roleDefinitionId":"5c779a4f-5cb2-4547-8c41-478d9be8ba90"},{"applicationId":"55ebbb62-3b9c-49fd-9b87-9595226dd4ac","roleDefinitionId":"e49ca620-7992-4561-a7df-4ed67dad77b5"}],"resourceTypes":[{"resourceType":"managedEnvironments","locations":["North + Central US (Stage)","Canada Central","West Europe","North 
Europe","East US","East + US 2","East Asia","Australia East","Germany West Central","Japan East","UK + South","West US","Central US","North Central US","South Central US","Korea + Central","Brazil South","West US 3","France Central","South Africa North","Norway + East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"managedEnvironments/certificates","locations":["North + Central US (Stage)","Canada Central","West Europe","North Europe","East US","East + US 2","East Asia","Australia East","Germany West Central","Japan East","UK + South","West US","Central US","North Central US","South Central US","Korea + Central","Brazil South","West US 3","France Central","South Africa North","Norway + East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"containerApps","locations":["North + Central US (Stage)","Canada Central","West Europe","North Europe","East US","East + US 2","East Asia","Australia East","Germany West Central","Japan East","UK + South","West US","Central US","North Central US","South Central US","Korea + Central","Brazil South","West US 3","France Central","South Africa North","Norway + East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity, SupportsTags, + SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationResults","locations":["North + Central US (Stage)","Canada Central","West Europe","North Europe","East US","East + US 2","East Asia","Australia East","Germany West Central","Japan East","UK + South","West US","Central US","North Central US","South Central US","Korea + Central","Brazil South","West US 3","France Central","South Africa North","Norway + East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationStatuses","locations":["North + Central US (Stage)","Canada Central","West Europe","North Europe","East US","East + US 2","East Asia","Australia East","Germany West Central","Japan East","UK + South","West US","Central US","North Central US","South Central US","Korea + Central","Brazil South","West US 3","France Central","South Africa North","Norway + East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationResults","locations":["North + Central US (Stage)","Canada Central","West Europe","North Europe","East US","East + US 2","East Asia","Australia East","Germany West Central","Japan East","UK + South","West US","Central US","North Central US","South Central US","Korea + Central","Brazil South","West US 3","France Central","South Africa North","Norway + East","Central US EUAP","East US 2 
EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationStatuses","locations":["North + Central US (Stage)","Canada Central","West Europe","North Europe","East US","East + US 2","East Asia","Australia East","Germany West Central","Japan East","UK + South","West US","Central US","North Central US","South Central US","Korea + Central","Brazil South","West US 3","France Central","South Africa North","Norway + East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"operations","locations":["North + Central US (Stage)","Central US EUAP","East US 2 EUAP","Canada Central","West + Europe","North Europe","East US","East US 2","East Asia","Australia East","Germany + West Central","Japan East","UK South","West US","Central US","North Central + US","South Central US","Korea Central","Brazil South","West US 3","France + Central","South Africa North","Norway East"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"connectedEnvironments","locations":["North + Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East + US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"connectedEnvironments/certificates","locations":["North + Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East + US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"locations/connectedEnvironmentOperationResults","locations":["North + Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East + US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North + Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East + US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + headers: + cache-control: + - no-cache + content-length: + - '6928' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 07 Nov 2022 23:10:26 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK - request: body: null headers: @@ -1825,29 +1922,29 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - containerapp env update + - 
containerapp env show Connection: - keep-alive ParameterSetName: - - -g -n --logs-destination --storage-account + - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:13:39.2406631"},"properties":{"provisioningState":"Succeeded","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:09:27.4478887"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '982' + - '998' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:14:34 GMT + - Mon, 07 Nov 2022 23:10:28 GMT expires: - '-1' pragma: @@ -1868,43 +1965,136 @@ interactions: code: 200 message: OK - request: - body: '{"location": "australiaeast", "tags": null, "properties": {"appLogsConfiguration": - {"destination": "azure-monitor"}, "customDomainConfiguration": {}}}' + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - containerapp env show + Connection: + - keep-alive + ParameterSetName: + - -n -g + User-Agent: + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) + method: GET + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App","namespace":"Microsoft.App","authorizations":[{"applicationId":"7e3bc4fd-85a3-4192-b177-5b8bfc87f42c","roleDefinitionId":"39a74f72-b40f-4bdc-b639-562fe2260bf0"},{"applicationId":"3734c1a4-2bed-4998-a37a-ff1a9e7bf019","roleDefinitionId":"5c779a4f-5cb2-4547-8c41-478d9be8ba90"},{"applicationId":"55ebbb62-3b9c-49fd-9b87-9595226dd4ac","roleDefinitionId":"e49ca620-7992-4561-a7df-4ed67dad77b5"}],"resourceTypes":[{"resourceType":"managedEnvironments","locations":["North + Central US (Stage)","Canada Central","West Europe","North Europe","East US","East + US 2","East Asia","Australia East","Germany West Central","Japan East","UK + South","West US","Central US","North Central US","South Central US","Korea + Central","Brazil South","West US 3","France Central","South Africa North","Norway + East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"managedEnvironments/certificates","locations":["North + Central US (Stage)","Canada Central","West Europe","North Europe","East US","East + US 2","East Asia","Australia East","Germany West Central","Japan East","UK + South","West US","Central US","North Central US","South Central US","Korea + Central","Brazil South","West US 3","France Central","South Africa North","Norway + East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"containerApps","locations":["North + Central US (Stage)","Canada Central","West Europe","North Europe","East US","East + US 2","East Asia","Australia East","Germany West Central","Japan East","UK + South","West US","Central US","North Central US","South Central US","Korea + Central","Brazil South","West US 3","France Central","South Africa North","Norway + East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SystemAssignedResourceIdentity, SupportsTags, + SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationResults","locations":["North + Central US (Stage)","Canada Central","West Europe","North Europe","East US","East + US 2","East Asia","Australia East","Germany West Central","Japan East","UK + South","West US","Central US","North Central US","South Central US","Korea + Central","Brazil South","West US 3","France Central","South Africa North","Norway + East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationStatuses","locations":["North + Central US (Stage)","Canada Central","West Europe","North Europe","East US","East + US 2","East Asia","Australia East","Germany West Central","Japan East","UK + South","West US","Central US","North Central US","South Central US","Korea + Central","Brazil South","West 
US 3","France Central","South Africa North","Norway + East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationResults","locations":["North + Central US (Stage)","Canada Central","West Europe","North Europe","East US","East + US 2","East Asia","Australia East","Germany West Central","Japan East","UK + South","West US","Central US","North Central US","South Central US","Korea + Central","Brazil South","West US 3","France Central","South Africa North","Norway + East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationStatuses","locations":["North + Central US (Stage)","Canada Central","West Europe","North Europe","East US","East + US 2","East Asia","Australia East","Germany West Central","Japan East","UK + South","West US","Central US","North Central US","South Central US","Korea + Central","Brazil South","West US 3","France Central","South Africa North","Norway + East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"operations","locations":["North + Central US (Stage)","Central US EUAP","East US 2 EUAP","Canada Central","West + Europe","North Europe","East US","East US 2","East Asia","Australia East","Germany + West Central","Japan East","UK South","West US","Central US","North Central + US","South Central US","Korea Central","Brazil South","West US 3","France + Central","South Africa North","Norway East"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"connectedEnvironments","locations":["North + Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East + US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"connectedEnvironments/certificates","locations":["North + Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East + US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, + CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"locations/connectedEnvironmentOperationResults","locations":["North + Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East + US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North + Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East + US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US 
EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + headers: + cache-control: + - no-cache + content-length: + - '6928' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 07 Nov 2022 23:10:29 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: null headers: Accept: - '*/*' Accept-Encoding: - gzip, deflate CommandName: - - containerapp env update + - containerapp env show Connection: - keep-alive - Content-Length: - - '150' - Content-Type: - - application/json ParameterSetName: - - -g -n --logs-destination --storage-account + - -n -g User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: PUT + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 + method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:14:35.4697042Z"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:09:27.4478887"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview - azure-asyncoperation: - - 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/locations/australiaeast/managedEnvironmentOperationStatuses/5a4a985c-8f06-47e4-aa0c-88d5d49f1ea4?api-version=2022-06-01-preview&azureAsyncOperation=true + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '933' + - '998' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:14:38 GMT + - Mon, 07 Nov 2022 23:10:31 GMT expires: - '-1' pragma: @@ -1913,17 +2103,17 @@ interactions: - Microsoft-IIS/10.0 strict-transport-security: - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding,Accept-Encoding x-content-type-options: - nosniff - x-ms-async-operation-timeout: - - PT15M - x-ms-ratelimit-remaining-subscription-resource-requests: - - '99' x-powered-by: - ASP.NET status: - code: 201 - message: Created + code: 200 + message: OK - request: body: null headers: @@ -1938,23 +2128,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:14:35.4697042"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:09:27.4478887"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 
2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '932' + - '998' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:14:39 GMT + - Mon, 07 Nov 2022 23:10:34 GMT expires: - '-1' pragma: @@ -1974,6 +2164,63 @@ interactions: status: code: 200 message: OK +- request: + body: '{"location": "australiaeast", "tags": null, "properties": {"appLogsConfiguration": + {"destination": "azure-monitor"}, "customDomainConfiguration": {}}}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - containerapp env update + Connection: + - keep-alive + Content-Length: + - '150' + Content-Type: + - application/json + ParameterSetName: + - -g -n --logs-destination --storage-account + User-Agent: + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:10:36.9897848Z"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + headers: + api-supported-versions: + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/locations/australiaeast/managedEnvironmentOperationStatuses/7b62d00f-8cd8-42f9-9ed6-a96c40af976b?api-version=2022-06-01-preview&azureAsyncOperation=true + cache-control: + - no-cache + content-length: + - '949' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 07 Nov 2022 23:10:40 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-async-operation-timeout: + - PT15M + x-ms-ratelimit-remaining-subscription-resource-requests: + - '99' + x-powered-by: + - ASP.NET + status: + code: 201 + message: Created - request: body: null headers: @@ -1988,23 +2235,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:14:35.4697042"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:10:36.9897848"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '932' + - '948' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:14:44 GMT + - Mon, 07 Nov 2022 23:10:41 GMT expires: - '-1' pragma: @@ -2038,23 +2285,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:14:35.4697042"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:10:36.9897848"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '932' + - '948' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:14:49 GMT + - Mon, 07 Nov 2022 23:10:46 GMT expires: - '-1' pragma: @@ -2088,23 +2335,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:14:35.4697042"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:10:36.9897848"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '932' + - '948' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:14:53 GMT + - Mon, 07 Nov 2022 23:10:49 GMT expires: - '-1' pragma: @@ -2138,23 +2385,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:14:35.4697042"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:10:36.9897848"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '932' + - '948' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:14:56 GMT + - Mon, 07 Nov 2022 23:10:55 GMT expires: - '-1' pragma: @@ -2188,23 +2435,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:14:35.4697042"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:10:36.9897848"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '932' + - '948' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:15:02 GMT + - Mon, 07 Nov 2022 23:10:58 GMT expires: - '-1' pragma: @@ -2238,23 +2485,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:14:35.4697042"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:10:36.9897848"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '932' + - '948' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:15:07 GMT + - Mon, 07 Nov 2022 23:11:04 GMT expires: - '-1' pragma: @@ -2288,23 +2535,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:14:35.4697042"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:10:36.9897848"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '932' + - '948' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:15:11 GMT + - Mon, 07 Nov 2022 23:11:08 GMT expires: - '-1' pragma: @@ -2338,23 +2585,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:14:35.4697042"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:10:36.9897848"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '932' + - '948' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:15:16 GMT + - Mon, 07 Nov 2022 23:11:12 GMT expires: - '-1' pragma: @@ -2388,23 +2635,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:14:35.4697042"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:10:36.9897848"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '932' + - '948' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:15:21 GMT + - Mon, 07 Nov 2022 23:11:17 GMT expires: - '-1' pragma: @@ -2438,23 +2685,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:14:35.4697042"},"properties":{"provisioningState":"Succeeded","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:10:36.9897848"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '931' + - '947' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:15:25 GMT + - Mon, 07 Nov 2022 23:11:20 GMT expires: - '-1' pragma: @@ -2495,7 +2742,7 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-monitor/5.0.0 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-monitor/5.0.0 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003/providers/Microsoft.Insights/diagnosticSettings/diagnosticsettings?api-version=2021-05-01-preview response: @@ -2509,7 +2756,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:15:33 GMT + - Mon, 07 Nov 2022 23:11:27 GMT expires: - '-1' pragma: @@ -2543,7 +2790,7 @@ interactions: ParameterSetName: - -n -g User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -2615,7 +2862,7 @@ interactions: 
content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:15:34 GMT + - Mon, 07 Nov 2022 23:11:29 GMT expires: - '-1' pragma: @@ -2643,23 +2890,23 @@ interactions: ParameterSetName: - -n -g User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:14:35.4697042"},"properties":{"provisioningState":"Succeeded","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:10:36.9897848"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '931' + - '947' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:15:35 GMT + - Mon, 07 Nov 2022 23:11:31 GMT expires: - '-1' pragma: @@ -2693,7 +2940,7 @@ interactions: ParameterSetName: - --name --resource User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-monitor/5.0.0 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-monitor/5.0.0 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003/providers/Microsoft.Insights/diagnosticSettings/diagnosticsettings?api-version=2021-05-01-preview response: @@ -2707,7 +2954,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - 
- Sun, 09 Oct 2022 04:15:39 GMT + - Mon, 07 Nov 2022 23:11:33 GMT expires: - '-1' pragma: @@ -2739,23 +2986,23 @@ interactions: ParameterSetName: - -g -n --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:14:35.4697042"},"properties":{"provisioningState":"Succeeded","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:10:36.9897848"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '931' + - '947' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:15:42 GMT + - Mon, 07 Nov 2022 23:11:35 GMT expires: - '-1' pragma: @@ -2776,8 +3023,64 @@ interactions: code: 200 message: OK - request: - body: '{"location": "australiaeast", "tags": null, "properties": {"appLogsConfiguration": - {"destination": null}, "customDomainConfiguration": {}}}' + body: '{"location": "australiaeast", "tags": null, "properties": {"appLogsConfiguration": + {"destination": null}, "customDomainConfiguration": {}}}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - containerapp env update + Connection: + - keep-alive + Content-Length: + - '139' + Content-Type: + - application/json + ParameterSetName: + - -g -n --logs-destination + User-Agent: + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 + method: PUT + 
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:11:37.0823018Z"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + headers: + api-supported-versions: + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/locations/australiaeast/managedEnvironmentOperationStatuses/70789baf-5683-4a94-9cd8-4b7dbcb253d1?api-version=2022-06-01-preview&azureAsyncOperation=true + cache-control: + - no-cache + content-length: + - '919' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 07 Nov 2022 23:11:40 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-async-operation-timeout: + - PT15M + x-ms-ratelimit-remaining-subscription-resource-requests: + - '99' + x-powered-by: + - ASP.NET + status: + code: 201 + message: Created +- request: + body: null headers: Accept: - '*/*' @@ -2787,32 +3090,26 @@ interactions: - containerapp env update Connection: - keep-alive - Content-Length: - - '139' - Content-Type: - - application/json ParameterSetName: - -g -n --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: PUT + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 + method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:15:44.9715636Z"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:11:37.0823018"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview - azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/locations/australiaeast/managedEnvironmentOperationStatuses/a0323c06-75a3-44f8-8028-4410c33afdb6?api-version=2022-06-01-preview&azureAsyncOperation=true + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '903' + - '918' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:15:47 GMT + - Mon, 07 Nov 2022 23:11:42 GMT expires: - '-1' pragma: @@ -2821,17 +3118,17 @@ interactions: - Microsoft-IIS/10.0 strict-transport-security: - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding,Accept-Encoding x-content-type-options: - nosniff - x-ms-async-operation-timeout: - - PT15M - x-ms-ratelimit-remaining-subscription-resource-requests: - - '99' x-powered-by: - ASP.NET status: - code: 201 - message: Created + code: 200 + message: OK - request: body: null headers: @@ -2846,23 +3143,23 @@ interactions: ParameterSetName: - -g -n --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:15:44.9715636"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:11:37.0823018"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '902' + - '918' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:15:50 GMT + - Mon, 07 Nov 2022 23:11:46 GMT expires: - '-1' pragma: @@ -2896,23 +3193,23 @@ interactions: ParameterSetName: - -g -n --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:15:44.9715636"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:11:37.0823018"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '902' + - '918' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:15:54 GMT + - Mon, 07 Nov 2022 23:11:50 GMT expires: - '-1' pragma: @@ -2946,23 +3243,23 @@ interactions: ParameterSetName: - -g -n --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:15:44.9715636"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:11:37.0823018"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '902' + - '918' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:15:58 GMT + - Mon, 07 Nov 2022 23:11:53 GMT expires: - '-1' pragma: @@ -2996,23 +3293,23 @@ interactions: ParameterSetName: - -g -n --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:15:44.9715636"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:11:37.0823018"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '902' + - '918' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:16:02 GMT + - Mon, 07 Nov 2022 23:11:57 GMT expires: - '-1' pragma: @@ -3046,23 +3343,23 @@ interactions: ParameterSetName: - -g -n --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:15:44.9715636"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:11:37.0823018"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '902' + - '918' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:16:07 GMT + - Mon, 07 Nov 2022 23:11:59 GMT expires: - '-1' pragma: @@ -3096,23 +3393,23 @@ interactions: ParameterSetName: - -g -n --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:15:44.9715636"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:11:37.0823018"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '902' + - '918' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:16:12 GMT + - Mon, 07 Nov 2022 23:12:04 GMT expires: - '-1' pragma: @@ -3146,23 +3443,23 @@ interactions: ParameterSetName: - -g -n --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:15:44.9715636"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:11:37.0823018"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '902' + - '918' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:16:17 GMT + - Mon, 07 Nov 2022 23:12:08 GMT expires: - '-1' pragma: @@ -3196,23 +3493,23 @@ interactions: ParameterSetName: - -g -n --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:15:44.9715636"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:11:37.0823018"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '902' + - '918' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:16:21 GMT + - Mon, 07 Nov 2022 23:12:12 GMT expires: - '-1' pragma: @@ -3246,23 +3543,23 @@ interactions: ParameterSetName: - -g -n --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:15:44.9715636"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:11:37.0823018"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '902' + - '918' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:16:26 GMT + - Mon, 07 Nov 2022 23:12:16 GMT expires: - '-1' pragma: @@ -3296,23 +3593,23 @@ interactions: ParameterSetName: - -g -n --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:15:44.9715636"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:11:37.0823018"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '902' + - '918' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:16:29 GMT + - Mon, 07 Nov 2022 23:12:21 GMT expires: - '-1' pragma: @@ -3346,23 +3643,23 @@ interactions: ParameterSetName: - -g -n --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:15:44.9715636"},"properties":{"provisioningState":"Succeeded","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:11:37.0823018"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '901' + - '917' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:16:34 GMT + - Mon, 07 Nov 2022 23:12:26 GMT expires: - '-1' pragma: @@ -3396,7 +3693,7 @@ interactions: ParameterSetName: - -n -g User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -3468,7 +3765,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:16:35 GMT + - Mon, 07 Nov 2022 23:12:27 GMT expires: - '-1' pragma: @@ -3496,23 +3793,23 @@ interactions: ParameterSetName: - -n -g User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:15:44.9715636"},"properties":{"provisioningState":"Succeeded","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:11:37.0823018"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '901' + - '917' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:16:38 GMT + - Mon, 07 Nov 2022 23:12:30 GMT expires: - '-1' pragma: @@ -3546,23 +3843,23 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:15:44.9715636"},"properties":{"provisioningState":"Succeeded","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:11:37.0823018"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '901' + - '917' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:16:40 GMT + - Mon, 07 Nov 2022 23:12:32 GMT expires: - '-1' pragma: @@ -3585,7 +3882,7 @@ interactions: - request: body: '{"location": "australiaeast", "tags": null, "properties": {"appLogsConfiguration": {"destination": "log-analytics", "logAnalyticsConfiguration": {"customerId": - "6e2eb9ef-b74e-4574-89d3-df4ca975d259", "sharedKey": "UdlBpc7WT4y4xbQktofBkZEQKqo2rl1yCxRXEAwQgSxp51A7lAW14EF9jyEwDsd3CN6Ac5dSRhlyq3w6Gg0Wpw=="}}, + "3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd", "sharedKey": "OV6rSv0/sHDML+E+uCbGG9aLOfe5GF00gFHwc4kR9AnOPCiriMo6zpz2pplJEdwBRfnFSV7BAdfMirKxqKAAhw=="}}, "customDomainConfiguration": {}}}' headers: Accept: @@ -3603,25 +3900,25 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:16:42.7423862Z"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:12:34.3422153Z"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/locations/australiaeast/managedEnvironmentOperationStatuses/5ca004ed-28bb-44a3-962f-4efcb6cf49a9?api-version=2022-06-01-preview&azureAsyncOperation=true + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/locations/australiaeast/managedEnvironmentOperationStatuses/5211f62e-fa3f-49e3-b543-fe9fbf96f1ae?api-version=2022-06-01-preview&azureAsyncOperation=true cache-control: - no-cache content-length: - - '984' + - '1000' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:16:45 GMT + - Mon, 07 Nov 2022 23:12:36 GMT expires: - '-1' pragma: @@ -3655,23 +3952,173 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:12:34.3422153"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + headers: + api-supported-versions: + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 + cache-control: + - no-cache + content-length: + - '999' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 07 Nov 2022 23:12:38 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding,Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - containerapp env update + Connection: + - keep-alive + ParameterSetName: + - -g -n --logs-workspace-id --logs-workspace-key --logs-destination + User-Agent: + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:12:34.3422153"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + headers: + api-supported-versions: + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 + cache-control: + - no-cache + content-length: + - '999' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 07 Nov 2022 23:12:43 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - 
max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding,Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - containerapp env update + Connection: + - keep-alive + ParameterSetName: + - -g -n --logs-workspace-id --logs-workspace-key --logs-destination + User-Agent: + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:12:34.3422153"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + headers: + api-supported-versions: + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 + cache-control: + - no-cache + content-length: + - '999' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 07 Nov 2022 23:12:46 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding,Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - containerapp env update + Connection: + - keep-alive + ParameterSetName: + - -g -n --logs-workspace-id --logs-workspace-key --logs-destination + User-Agent: + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:16:42.7423862"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:12:34.3422153"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '983' + - '999' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:16:48 GMT + - Mon, 07 Nov 2022 23:12:51 GMT expires: - '-1' pragma: @@ -3705,23 +4152,23 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:16:42.7423862"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:12:34.3422153"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '983' + - '999' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:16:52 GMT + - Mon, 07 Nov 2022 23:12:54 GMT expires: - '-1' pragma: @@ -3755,23 +4202,23 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:16:42.7423862"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:12:34.3422153"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '983' + - '999' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:16:56 GMT + - Mon, 07 Nov 2022 23:12:58 GMT expires: - '-1' pragma: @@ -3805,23 +4252,23 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:16:42.7423862"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:12:34.3422153"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '983' + - '999' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:16:59 GMT + - Mon, 07 Nov 2022 23:13:02 GMT expires: - '-1' pragma: @@ -3855,23 +4302,23 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:16:42.7423862"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:12:34.3422153"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '983' + - '999' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:04 GMT + - Mon, 07 Nov 2022 23:13:06 GMT expires: - '-1' pragma: @@ -3905,23 +4352,23 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:16:42.7423862"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:12:34.3422153"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '983' + - '999' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:08 GMT + - Mon, 07 Nov 2022 23:13:10 GMT expires: - '-1' pragma: @@ -3955,23 +4402,23 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:16:42.7423862"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:12:34.3422153"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '983' + - '999' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:11 GMT + - Mon, 07 Nov 2022 23:13:13 GMT expires: - '-1' pragma: @@ -4005,23 +4452,23 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:16:42.7423862"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:12:34.3422153"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '983' + - '999' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:15 GMT + - Mon, 07 Nov 2022 23:13:17 GMT expires: - '-1' pragma: @@ -4055,23 +4502,23 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:16:42.7423862"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:12:34.3422153"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '983' + - '999' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:20 GMT + - Mon, 07 Nov 2022 23:13:21 GMT expires: - '-1' pragma: @@ -4105,23 +4552,23 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key --logs-destination User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:16:42.7423862"},"properties":{"provisioningState":"Succeeded","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:12:34.3422153"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '982' + - '998' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:24 GMT + - Mon, 07 Nov 2022 23:13:25 GMT expires: - '-1' pragma: @@ -4155,7 +4602,7 @@ interactions: ParameterSetName: - -n -g User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -4227,7 +4674,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:25 GMT + - Mon, 07 Nov 2022 23:13:26 GMT expires: - '-1' pragma: @@ -4255,23 +4702,23 @@ interactions: ParameterSetName: - -n -g User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:16:42.7423862"},"properties":{"provisioningState":"Succeeded","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:12:34.3422153"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '982' + - '998' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:27 GMT + - Mon, 07 Nov 2022 23:13:28 GMT expires: - '-1' pragma: @@ -4305,7 +4752,7 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account -l User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -4377,7 +4824,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:27 GMT + - Mon, 07 Nov 2022 23:13:29 GMT expires: - '-1' pragma: @@ -4405,21 +4852,21 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/managedEnvironments?api-version=2022-06-01-preview response: body: - string: 
'{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage1","name":"stage1","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:06:38.7041799","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:06:38.7041799"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"calmpond-b28c41d1.northcentralusstage.azurecontainerapps.io","staticIp":"13.67.139.178","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"21980690-af3f-47d2-ba10-1382b5450cc0"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage2","name":"stage2","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:07:15.2866819","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:07:15.2866819"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"wittymeadow-4dbec5e3.northcentralusstage.azurecontainerapps.io","staticIp":"23.100.80.227","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"71828812-a8df-459b-b6ef-c164550d9f5b"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage3","name":"stage3","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:07:27.14113","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:07:27.14113"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"redisland-f0228bca.northcentralusstage.azurecontainerapps.io","staticIp":"20.9.4.10","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"da72e22b-f2d3-4eab-b0bc-78680ea9ade5"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage4","name":"stage4","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:11:33.3982648","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:11:33.3982648"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain"
:"bravedune-ac753656.northcentralusstage.azurecontainerapps.io","staticIp":"20.9.3.250","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"aa3cdac6-e5ed-4ef6-808f-97996f1a8cd3"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage5","name":"stage5","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:11:23.672865","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:11:23.672865"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"blackwater-28f0311e.northcentralusstage.azurecontainerapps.io","staticIp":"40.69.173.180","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"5815914d-d73c-4e61-8f50-e71bcd901812"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:16:42.7423862"},"properties":{"provisioningState":"Succeeded","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6e2eb9ef-b74e-4574-89d3-df4ca975d259"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}]}' + string: 
'{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage1","name":"stage1","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:06:38.7041799","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:06:38.7041799"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"calmpond-b28c41d1.northcentralusstage.azurecontainerapps.io","staticIp":"13.67.139.178","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"21980690-af3f-47d2-ba10-1382b5450cc0"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage2","name":"stage2","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:07:15.2866819","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:07:15.2866819"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"wittymeadow-4dbec5e3.northcentralusstage.azurecontainerapps.io","staticIp":"23.100.80.227","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"71828812-a8df-459b-b6ef-c164550d9f5b"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage3","name":"stage3","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:07:27.14113","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:07:27.14113"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"redisland-f0228bca.northcentralusstage.azurecontainerapps.io","staticIp":"20.9.4.10","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"da72e22b-f2d3-4eab-b0bc-78680ea9ade5"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage4","name":"stage4","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:11:33.3982648","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:11:33.3982648"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"bravedune-ac753656.northcentralusstage.azurecontaine
rapps.io","staticIp":"20.9.3.250","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"aa3cdac6-e5ed-4ef6-808f-97996f1a8cd3"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage5","name":"stage5","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:11:23.672865","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:11:23.672865"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"blackwater-28f0311e.northcentralusstage.azurecontainerapps.io","staticIp":"40.69.173.180","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"5815914d-d73c-4e61-8f50-e71bcd901812"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/env","name":"env","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-02T17:28:30.2161566","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T17:40:00.0261021"},"properties":{"provisioningState":"Failed","useLegionServerlessCompute":false,"defaultDomain":"wittysea-79949ad9.canadacentral.azurecontainerapps.io","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rgbntmvovn6b2yf4665irjixuyplbmqibatyrcolwtssk7qgwehfcaprvmb3amfhkne/providers/Microsoft.App/managedEnvironments/envszl4xfcgdr4feuyd27fee","name":"envszl4xfcgdr4feuyd27fee","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:46.2766299","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:46.2766299"},"properties":{"provisioningState":"ScheduledForDelete","useLegionServerlessCompute":false,"defaultDomain":"agreeablebush-1943b534.canadacentral.azurecontainerapps.io","staticIp":"20.220.245.143","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"fad4055d-5566-4087-8adc-47b3d7d9d091"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rgu6i52u5xck4kf6tgpfhgqyw7fjw552wtd7qba2rmeh757px4hdm5fxfp6b26x4vrl/providers/Microsoft.App/managedEnvironments/env652qmaypbbhwigcxjddgr","name":"env652qmaypbbhwigcxjddgr","type":"Microsoft.App/man
agedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"ScheduledForDelete","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/logstream-env","name":"logstream-env","type":"Microsoft.App/managedEnvironments","location":"eastus","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T19:31:58.3535946","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T19:31:58.3535946"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"salmoncoast-d1f7a6de.eastus.azurecontainerapps.io","staticIp":"20.121.84.21","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"b4591f71-4b84-4cd5-b397-377a1771e36f"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:12:34.3422153"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"3ca7e828-f47d-4905-b8d0-8a3d1d9e19bd"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/logstream2-env","name":"logstream2-env","type":"Microsoft.App/managedEnvironments","location":"centralus","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T19:37:52.0110005","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T19:37:52.0110005"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"ashybeach-727d6692.centralus.azurecontainerapps.io","staticIp":"20.84.132.103","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"d2b86418-edcf-4f2b-b600-d473b2c3588e"}},"zoneRed
undant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}]}' headers: cache-control: - no-cache content-length: - - '5953' + - '10836' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:28 GMT + - Mon, 07 Nov 2022 23:13:31 GMT expires: - '-1' pragma: @@ -4433,6 +4880,9 @@ interactions: x-ms-original-request-ids: - '' - '' + - '' + - '' + - '' status: code: 200 message: OK @@ -4450,7 +4900,7 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account -l User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -4522,7 +4972,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:28 GMT + - Mon, 07 Nov 2022 23:13:30 GMT expires: - '-1' pragma: @@ -4550,7 +5000,7 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account -l User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -4622,7 +5072,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:28 GMT + - Mon, 07 Nov 2022 23:13:31 GMT expires: - '-1' pragma: @@ -4650,7 +5100,7 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account -l User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -4722,7 +5172,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:28 GMT + - Mon, 07 Nov 2022 23:13:31 GMT expires: - '-1' pragma: @@ -4750,7 +5200,7 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account -l User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -4822,7 +5272,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:29 GMT + - Mon, 07 Nov 2022 23:13:32 GMT expires: - '-1' pragma: @@ -4858,25 +5308,25 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: PUT uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:17:31.7757747Z"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:13:33.5628492Z"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/locations/australiaeast/managedEnvironmentOperationStatuses/410d3783-a5a0-4db7-8158-332729bb20f2?api-version=2022-06-01-preview&azureAsyncOperation=true + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/locations/australiaeast/managedEnvironmentOperationStatuses/ff4e340b-dcd2-44ee-92b8-7aaf31c437ed?api-version=2022-06-01-preview&azureAsyncOperation=true cache-control: - no-cache content-length: - - '933' + - '949' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:32 GMT + - Mon, 07 Nov 2022 23:13:34 GMT expires: - '-1' pragma: @@ -4910,23 +5360,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:17:31.7757747"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:13:33.5628492"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '932' + - '948' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:34 GMT + - Mon, 07 Nov 2022 23:13:37 GMT expires: - '-1' pragma: @@ -4960,23 +5410,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:17:31.7757747"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:13:33.5628492"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '932' + - '948' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:37 GMT + - Mon, 07 Nov 2022 23:13:39 GMT expires: - '-1' pragma: @@ -5010,23 +5460,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:17:31.7757747"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:13:33.5628492"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '932' + - '948' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:42 GMT + - Mon, 07 Nov 2022 23:13:44 GMT expires: - '-1' pragma: @@ -5060,23 +5510,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:17:31.7757747"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:13:33.5628492"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '932' + - '948' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:45 GMT + - Mon, 07 Nov 2022 23:13:49 GMT expires: - '-1' pragma: @@ -5110,23 +5560,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:17:31.7757747"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:13:33.5628492"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '932' + - '948' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:49 GMT + - Mon, 07 Nov 2022 23:13:53 GMT expires: - '-1' pragma: @@ -5160,23 +5610,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:17:31.7757747"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:13:33.5628492"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '932' + - '948' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:53 GMT + - Mon, 07 Nov 2022 23:13:57 GMT expires: - '-1' pragma: @@ -5210,23 +5660,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:17:31.7757747"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:13:33.5628492"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '932' + - '948' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:17:57 GMT + - Mon, 07 Nov 2022 23:14:02 GMT expires: - '-1' pragma: @@ -5260,23 +5710,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:17:31.7757747"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:13:33.5628492"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '932' + - '948' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:18:00 GMT + - Mon, 07 Nov 2022 23:14:07 GMT expires: - '-1' pragma: @@ -5310,23 +5760,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:17:31.7757747"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:13:33.5628492"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '932' + - '948' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:18:05 GMT + - Mon, 07 Nov 2022 23:14:11 GMT expires: - '-1' pragma: @@ -5360,23 +5810,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:17:31.7757747"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:13:33.5628492"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '932' + - '948' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:18:08 GMT + - Mon, 07 Nov 2022 23:14:15 GMT expires: - '-1' pragma: @@ -5410,23 +5860,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:17:31.7757747"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:13:33.5628492"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '932' + - '948' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:18:13 GMT + - Mon, 07 Nov 2022 23:14:20 GMT expires: - '-1' pragma: @@ -5460,23 +5910,23 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:17:31.7757747"},"properties":{"provisioningState":"Succeeded","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:13:33.5628492"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '931' + - '947' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:18:17 GMT + - Mon, 07 Nov 2022 23:14:24 GMT expires: - '-1' pragma: @@ -5517,7 +5967,7 @@ interactions: ParameterSetName: - -g -n --logs-destination --storage-account -l User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-monitor/5.0.0 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-monitor/5.0.0 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003/providers/Microsoft.Insights/diagnosticSettings/diagnosticsettings?api-version=2021-05-01-preview response: @@ -5531,7 +5981,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:18:24 GMT + - Mon, 07 Nov 2022 23:14:32 GMT expires: - '-1' pragma: @@ -5565,7 +6015,7 @@ interactions: ParameterSetName: - -n -g User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -5637,7 +6087,7 @@ interactions: 
content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:18:25 GMT + - Mon, 07 Nov 2022 23:14:33 GMT expires: - '-1' pragma: @@ -5665,23 +6115,23 @@ interactions: ParameterSetName: - -n -g User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:17:31.7757747"},"properties":{"provisioningState":"Succeeded","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:13:33.5628492"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '931' + - '947' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:18:27 GMT + - Mon, 07 Nov 2022 23:14:34 GMT expires: - '-1' pragma: @@ -5715,7 +6165,7 @@ interactions: ParameterSetName: - --name --resource User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-monitor/5.0.0 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-monitor/5.0.0 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003/providers/Microsoft.Insights/diagnosticSettings/diagnosticsettings?api-version=2021-05-01-preview response: @@ -5729,7 +6179,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - 
- Sun, 09 Oct 2022 04:18:28 GMT + - Mon, 07 Nov 2022 23:14:37 GMT expires: - '-1' pragma: @@ -5761,7 +6211,7 @@ interactions: ParameterSetName: - -g -n --logs-destination -l User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -5833,7 +6283,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:18:30 GMT + - Mon, 07 Nov 2022 23:14:38 GMT expires: - '-1' pragma: @@ -5861,21 +6311,21 @@ interactions: ParameterSetName: - -g -n --logs-destination -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/managedEnvironments?api-version=2022-06-01-preview response: body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage1","name":"stage1","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:06:38.7041799","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:06:38.7041799"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"calmpond-b28c41d1.northcentralusstage.azurecontainerapps.io","staticIp":"13.67.139.178","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"21980690-af3f-47d2-ba10-1382b5450cc0"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage2","name":"stage2","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:07:15.2866819","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:07:15.2866819"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"wittymeadow-4dbec5e3.northcentralusstage.azurecontainerapps.io","staticIp":"23.100.80.227","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"71828812-a8df-459b-b6ef-c164550d9f5b"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage3","name":"stage3","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:07:27.14113","lastModifiedBy":"silasstrawn@microsoft
.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:07:27.14113"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"redisland-f0228bca.northcentralusstage.azurecontainerapps.io","staticIp":"20.9.4.10","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"da72e22b-f2d3-4eab-b0bc-78680ea9ade5"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage4","name":"stage4","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:11:33.3982648","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:11:33.3982648"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"bravedune-ac753656.northcentralusstage.azurecontainerapps.io","staticIp":"20.9.3.250","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"aa3cdac6-e5ed-4ef6-808f-97996f1a8cd3"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage5","name":"stage5","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:11:23.672865","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:11:23.672865"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"blackwater-28f0311e.northcentralusstage.azurecontainerapps.io","staticIp":"40.69.173.180","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"5815914d-d73c-4e61-8f50-e71bcd901812"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:17:31.7757747"},"properties":{"provisioningState":"Succeeded","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}]}' + string: 
'{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage1","name":"stage1","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:06:38.7041799","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:06:38.7041799"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"calmpond-b28c41d1.northcentralusstage.azurecontainerapps.io","staticIp":"13.67.139.178","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"21980690-af3f-47d2-ba10-1382b5450cc0"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage2","name":"stage2","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:07:15.2866819","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:07:15.2866819"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"wittymeadow-4dbec5e3.northcentralusstage.azurecontainerapps.io","staticIp":"23.100.80.227","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"71828812-a8df-459b-b6ef-c164550d9f5b"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage3","name":"stage3","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:07:27.14113","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:07:27.14113"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"redisland-f0228bca.northcentralusstage.azurecontainerapps.io","staticIp":"20.9.4.10","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"da72e22b-f2d3-4eab-b0bc-78680ea9ade5"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage4","name":"stage4","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:11:33.3982648","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:11:33.3982648"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"bravedune-ac753656.northcentralusstage.azurecontaine
rapps.io","staticIp":"20.9.3.250","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"aa3cdac6-e5ed-4ef6-808f-97996f1a8cd3"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage5","name":"stage5","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:11:23.672865","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:11:23.672865"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"blackwater-28f0311e.northcentralusstage.azurecontainerapps.io","staticIp":"40.69.173.180","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"5815914d-d73c-4e61-8f50-e71bcd901812"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/env","name":"env","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-02T17:28:30.2161566","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T17:40:00.0261021"},"properties":{"provisioningState":"Failed","useLegionServerlessCompute":false,"defaultDomain":"wittysea-79949ad9.canadacentral.azurecontainerapps.io","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rgbntmvovn6b2yf4665irjixuyplbmqibatyrcolwtssk7qgwehfcaprvmb3amfhkne/providers/Microsoft.App/managedEnvironments/envszl4xfcgdr4feuyd27fee","name":"envszl4xfcgdr4feuyd27fee","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:46.2766299","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:46.2766299"},"properties":{"provisioningState":"ScheduledForDelete","useLegionServerlessCompute":false,"defaultDomain":"agreeablebush-1943b534.canadacentral.azurecontainerapps.io","staticIp":"20.220.245.143","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"fad4055d-5566-4087-8adc-47b3d7d9d091"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rgu6i52u5xck4kf6tgpfhgqyw7fjw552wtd7qba2rmeh757px4hdm5fxfp6b26x4vrl/providers/Microsoft.App/managedEnvironments/env652qmaypbbhwigcxjddgr","name":"env652qmaypbbhwigcxjddgr","type":"Microsoft.App/man
agedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"ScheduledForDelete","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/logstream-env","name":"logstream-env","type":"Microsoft.App/managedEnvironments","location":"eastus","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T19:31:58.3535946","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T19:31:58.3535946"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"salmoncoast-d1f7a6de.eastus.azurecontainerapps.io","staticIp":"20.121.84.21","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"b4591f71-4b84-4cd5-b397-377a1771e36f"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:13:33.5628492"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"destination":"azure-monitor","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/logstream2-env","name":"logstream2-env","type":"Microsoft.App/managedEnvironments","location":"centralus","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T19:37:52.0110005","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T19:37:52.0110005"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"ashybeach-727d6692.centralus.azurecontainerapps.io","staticIp":"20.84.132.103","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"d2b86418-edcf-4f2b-b600-d473b2c3588e"}},"zoneRedundant":false,"customDomainConfiguration":{"customD
omainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}]}' headers: cache-control: - no-cache content-length: - - '5902' + - '10785' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:18:31 GMT + - Mon, 07 Nov 2022 23:14:40 GMT expires: - '-1' pragma: @@ -5889,6 +6339,9 @@ interactions: x-ms-original-request-ids: - '' - '' + - '' + - '' + - '' status: code: 200 message: OK @@ -5906,7 +6359,7 @@ interactions: ParameterSetName: - -g -n --logs-destination -l User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -5978,7 +6431,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:18:31 GMT + - Mon, 07 Nov 2022 23:14:41 GMT expires: - '-1' pragma: @@ -6006,7 +6459,7 @@ interactions: ParameterSetName: - -g -n --logs-destination -l User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -6078,7 +6531,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:18:31 GMT + - Mon, 07 Nov 2022 23:14:41 GMT expires: - '-1' pragma: @@ -6106,7 +6559,7 @@ interactions: ParameterSetName: - -g -n --logs-destination -l User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -6173,12 +6626,14 @@ interactions: headers: cache-control: - no-cache + connection: + - close content-length: - '6928' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:18:31 GMT + - Mon, 07 Nov 2022 23:14:42 GMT expires: - '-1' pragma: @@ -6206,7 +6661,7 @@ interactions: ParameterSetName: - -g -n --logs-destination -l User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -6278,7 +6733,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:18:32 GMT + - Mon, 07 Nov 2022 23:14:41 GMT expires: - '-1' pragma: @@ -6314,25 +6769,25 @@ interactions: ParameterSetName: - -g -n --logs-destination -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: PUT uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:18:33.0170577Z"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:14:43.3960158Z"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/locations/australiaeast/managedEnvironmentOperationStatuses/48487260-bff9-4f40-98c9-e0f51e49a605?api-version=2022-06-01-preview&azureAsyncOperation=true + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/locations/australiaeast/managedEnvironmentOperationStatuses/6fc135b3-cdb3-424a-a62f-8180de82a07e?api-version=2022-06-01-preview&azureAsyncOperation=true cache-control: - no-cache content-length: - - '903' + - '919' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:18:34 GMT + - Mon, 07 Nov 2022 23:14:44 GMT expires: - '-1' pragma: @@ -6366,123 +6821,23 @@ interactions: ParameterSetName: - -g -n --logs-destination -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:18:33.0170577"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' - headers: - api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview - cache-control: - - no-cache - content-length: - - '902' - content-type: - - application/json; charset=utf-8 - date: - - Sun, 09 Oct 2022 04:18:35 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding,Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp env create - Connection: - - keep-alive - ParameterSetName: - - -g -n --logs-destination -l - User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:18:33.0170577"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' - headers: - api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview - cache-control: - - no-cache - content-length: - - '902' - content-type: - - application/json; charset=utf-8 - date: - - Sun, 09 Oct 2022 04:18:40 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding,Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - 
'*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp env create - Connection: - - keep-alive - ParameterSetName: - - -g -n --logs-destination -l - User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:18:33.0170577"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:14:43.3960158"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '902' + - '918' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:18:43 GMT + - Mon, 07 Nov 2022 23:14:45 GMT expires: - '-1' pragma: @@ -6516,23 +6871,23 @@ interactions: ParameterSetName: - -g -n --logs-destination -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:18:33.0170577"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:14:43.3960158"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '902' + - '918' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:18:47 GMT + - Mon, 07 Nov 2022 23:14:49 GMT expires: - '-1' pragma: @@ -6566,23 +6921,23 @@ interactions: ParameterSetName: - -g -n --logs-destination -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:18:33.0170577"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:14:43.3960158"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '902' + - '918' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:18:50 GMT + - Mon, 07 Nov 2022 23:14:53 GMT expires: - '-1' pragma: @@ -6616,23 +6971,23 @@ interactions: ParameterSetName: - -g -n --logs-destination -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:18:33.0170577"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:14:43.3960158"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '902' + - '918' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:18:54 GMT + - Mon, 07 Nov 2022 23:14:56 GMT expires: - '-1' pragma: @@ -6666,23 +7021,23 @@ interactions: ParameterSetName: - -g -n --logs-destination -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:18:33.0170577"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:14:43.3960158"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '902' + - '918' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:18:58 GMT + - Mon, 07 Nov 2022 23:15:01 GMT expires: - '-1' pragma: @@ -6716,23 +7071,23 @@ interactions: ParameterSetName: - -g -n --logs-destination -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:18:33.0170577"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:14:43.3960158"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '902' + - '918' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:19:01 GMT + - Mon, 07 Nov 2022 23:15:05 GMT expires: - '-1' pragma: @@ -6766,23 +7121,23 @@ interactions: ParameterSetName: - -g -n --logs-destination -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:18:33.0170577"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:14:43.3960158"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '902' + - '918' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:19:05 GMT + - Mon, 07 Nov 2022 23:15:09 GMT expires: - '-1' pragma: @@ -6816,23 +7171,23 @@ interactions: ParameterSetName: - -g -n --logs-destination -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:18:33.0170577"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:14:43.3960158"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '902' + - '918' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:19:08 GMT + - Mon, 07 Nov 2022 23:15:14 GMT expires: - '-1' pragma: @@ -6866,23 +7221,23 @@ interactions: ParameterSetName: - -g -n --logs-destination -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:18:33.0170577"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:14:43.3960158"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '902' + - '918' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:19:12 GMT + - Mon, 07 Nov 2022 23:15:16 GMT expires: - '-1' pragma: @@ -6916,23 +7271,23 @@ interactions: ParameterSetName: - -g -n --logs-destination -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:18:33.0170577"},"properties":{"provisioningState":"InProgress","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:14:43.3960158"},"properties":{"provisioningState":"InProgress","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '902' + - '918' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:19:15 GMT + - Mon, 07 Nov 2022 23:15:21 GMT expires: - '-1' pragma: @@ -6966,23 +7321,23 @@ interactions: ParameterSetName: - -g -n --logs-destination -l User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:18:33.0170577"},"properties":{"provisioningState":"Succeeded","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:14:43.3960158"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '901' + - '917' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:19:20 GMT + - Mon, 07 Nov 2022 23:15:26 GMT expires: - '-1' pragma: @@ -7016,7 +7371,7 @@ interactions: ParameterSetName: - -n -g User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -7088,7 +7443,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:19:21 GMT + - Mon, 07 Nov 2022 23:15:28 GMT expires: - '-1' pragma: @@ -7116,23 +7471,23 @@ interactions: ParameterSetName: - -n -g User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-09T04:13:39.2406631","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-09T04:18:33.0170577"},"properties":{"provisioningState":"Succeeded","defaultDomain":"salmonpebble-252aa49e.australiaeast.azurecontainerapps.io","staticIp":"20.227.48.25","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/containerapp-env000003","name":"containerapp-env000003","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:27.4478887","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:14:43.3960158"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"politegrass-cacba4c2.australiaeast.azurecontainerapps.io","staticIp":"20.227.24.30","appLogsConfiguration":{"logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '901' + - '917' content-type: - application/json; charset=utf-8 date: - - Sun, 09 Oct 2022 04:19:22 GMT + - Mon, 07 Nov 2022 23:15:29 GMT expires: - '-1' pragma: diff --git a/src/containerapp/azext_containerapp/tests/latest/recordings/test_containerapp_eventstream.yaml b/src/containerapp/azext_containerapp/tests/latest/recordings/test_containerapp_eventstream.yaml index 4ce6d844aa5..74bbce566f5 100644 --- a/src/containerapp/azext_containerapp/tests/latest/recordings/test_containerapp_eventstream.yaml +++ b/src/containerapp/azext_containerapp/tests/latest/recordings/test_containerapp_eventstream.yaml @@ -13,12 +13,12 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001?api-version=2021-04-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001","name":"clitest.rg000001","type":"Microsoft.Resources/resourceGroups","location":"northeurope","tags":{"product":"azurecli","cause":"automation","date":"2022-10-07T20:16:24Z"},"properties":{"provisioningState":"Succeeded"}}' + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001","name":"clitest.rg000001","type":"Microsoft.Resources/resourceGroups","location":"northeurope","tags":{"product":"azurecli","cause":"automation","date":"2022-11-07T23:07:58Z"},"properties":{"provisioningState":"Succeeded"}}' headers: cache-control: - no-cache @@ -27,7 +27,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:16:27 GMT + - Mon, 07 Nov 2022 23:08:03 GMT expires: - '-1' pragma: @@ -60,12 +60,12 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001/providers/Microsoft.OperationalInsights/workspaces/containerapp-env000004?api-version=2021-12-01-preview response: body: - string: '{"properties":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf","provisioningState":"Creating","sku":{"name":"PerGB2018","lastSkuUpdate":"2022-10-07T20:16:31.3475485Z"},"retentionInDays":30,"features":{"legacy":0,"searchVersion":1,"enableLogAccessUsingOnlyResourcePermissions":true},"workspaceCapping":{"dailyQuotaGb":-1.0,"quotaNextResetTime":"2022-10-08T20:00:00Z","dataIngestionStatus":"RespectQuota"},"publicNetworkAccessForIngestion":"Enabled","publicNetworkAccessForQuery":"Enabled","createdDate":"2022-10-07T20:16:31.3475485Z","modifiedDate":"2022-10-07T20:16:31.3475485Z"},"location":"northeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.OperationalInsights/workspaces/containerapp-env000004","name":"containerapp-env000004","type":"Microsoft.OperationalInsights/workspaces"}' + string: '{"properties":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e","provisioningState":"Creating","sku":{"name":"PerGB2018","lastSkuUpdate":"2022-11-07T23:08:06.9283432Z"},"retentionInDays":30,"features":{"legacy":0,"searchVersion":1,"enableLogAccessUsingOnlyResourcePermissions":true},"workspaceCapping":{"dailyQuotaGb":-1.0,"quotaNextResetTime":"2022-11-08T12:00:00Z","dataIngestionStatus":"RespectQuota"},"publicNetworkAccessForIngestion":"Enabled","publicNetworkAccessForQuery":"Enabled","createdDate":"2022-11-07T23:08:06.9283432Z","modifiedDate":"2022-11-07T23:08:06.9283432Z"},"location":"northeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.OperationalInsights/workspaces/containerapp-env000004","name":"containerapp-env000004","type":"Microsoft.OperationalInsights/workspaces"}' headers: access-control-allow-origin: - '*' @@ -78,7 +78,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:16:32 GMT + - Mon, 07 Nov 2022 23:08:07 GMT expires: - '-1' location: @@ -87,8 +87,6 @@ interactions: - no-cache request-context: - appId=cid-v1:e6336c63-aab2-45f0-996a-e5dbab2a1508 - server: - - Microsoft-IIS/10.0 strict-transport-security: - max-age=31536000; includeSubDomains x-content-type-options: @@ -114,12 +112,12 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.OperationalInsights/workspaces/containerapp-env000004?api-version=2021-12-01-preview response: body: - string: '{"properties":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf","provisioningState":"Succeeded","sku":{"name":"PerGB2018","lastSkuUpdate":"2022-10-07T20:16:31.3475485Z"},"retentionInDays":30,"features":{"legacy":0,"searchVersion":1,"enableLogAccessUsingOnlyResourcePermissions":true},"workspaceCapping":{"dailyQuotaGb":-1.0,"quotaNextResetTime":"2022-10-08T20:00:00Z","dataIngestionStatus":"RespectQuota"},"publicNetworkAccessForIngestion":"Enabled","publicNetworkAccessForQuery":"Enabled","createdDate":"2022-10-07T20:16:31.3475485Z","modifiedDate":"2022-10-07T20:16:31.3475485Z"},"location":"northeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.OperationalInsights/workspaces/containerapp-env000004","name":"containerapp-env000004","type":"Microsoft.OperationalInsights/workspaces"}' + string: '{"properties":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e","provisioningState":"Succeeded","sku":{"name":"PerGB2018","lastSkuUpdate":"2022-11-07T23:08:06.9283432Z"},"retentionInDays":30,"features":{"legacy":0,"searchVersion":1,"enableLogAccessUsingOnlyResourcePermissions":true},"workspaceCapping":{"dailyQuotaGb":-1.0,"quotaNextResetTime":"2022-11-08T12:00:00Z","dataIngestionStatus":"RespectQuota"},"publicNetworkAccessForIngestion":"Enabled","publicNetworkAccessForQuery":"Enabled","createdDate":"2022-11-07T23:08:06.9283432Z","modifiedDate":"2022-11-07T23:08:06.9283432Z"},"location":"northeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.OperationalInsights/workspaces/containerapp-env000004","name":"containerapp-env000004","type":"Microsoft.OperationalInsights/workspaces"}' headers: access-control-allow-origin: - '*' @@ -132,21 +130,19 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:02 GMT + - Mon, 07 Nov 2022 23:08:38 GMT expires: - '-1' pragma: - no-cache request-context: - appId=cid-v1:e6336c63-aab2-45f0-996a-e5dbab2a1508 - server: - - Microsoft-IIS/10.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: - chunked vary: - - Accept-Encoding,Accept-Encoding + - Accept-Encoding,Accept-Encoding,Accept-Encoding x-content-type-options: - nosniff x-powered-by: @@ -170,12 +166,12 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: POST uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001/providers/Microsoft.OperationalInsights/workspaces/containerapp-env000004/sharedKeys?api-version=2020-08-01 response: body: - string: '{"primarySharedKey":"2Rpu66vKEiwwrKHvZzrGNSRQ+EMVNjP2ixqJS4dWkltXEIowwgp5lyCaQ1KYZn3w3TLQBoqLLEh2Y4xg6TarKQ==","secondarySharedKey":"w6dPsXnfBhFoQl6MvZRIuRevNrHdPd387LGNCxPgyimyNKfnHYe4OIZkwESYpaRdPrhlUUTed/ZxDAvgR38ypw=="}' + string: '{"primarySharedKey":"e0GL2dO81nAWkTUFkLzlFZxCRTReQRpPOSeUdxnbynzajXP7+145VTpOgCctO01D3AVj4M/Q+2sJkYQuTXri2A==","secondarySharedKey":"FwllCoostPhr1mDt9bheOyPMTtLfFW8gIm5Uv++X7+MpnLoliroUBdNNRag/sRr+a+fOgJtaWe4R30zkZWdJvg=="}' headers: 
access-control-allow-origin: - '*' @@ -188,15 +184,13 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:04 GMT + - Mon, 07 Nov 2022 23:08:40 GMT expires: - '-1' pragma: - no-cache request-context: - appId=cid-v1:e6336c63-aab2-45f0-996a-e5dbab2a1508 - server: - - Microsoft-IIS/10.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -226,7 +220,7 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -298,7 +292,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:04 GMT + - Mon, 07 Nov 2022 23:08:41 GMT expires: - '-1' pragma: @@ -326,21 +320,21 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/managedEnvironments?api-version=2022-06-01-preview response: body: - string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage1","name":"stage1","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:06:38.7041799","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:06:38.7041799"},"properties":{"provisioningState":"Succeeded","defaultDomain":"calmpond-b28c41d1.northcentralusstage.azurecontainerapps.io","staticIp":"13.67.139.178","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"21980690-af3f-47d2-ba10-1382b5450cc0"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage2","name":"stage2","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:07:15.2866819","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:07:15.2866819"},"properties":{"provisioningState":"Succeeded","defaultDomain":"wittymeadow-4dbec5e3.northcentralusstage.azurecontainerapps.io","staticIp":"23.100.80.227","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"71828812-a8df-459b-b6ef-c164550d9f5b"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Micr
osoft.App/managedEnvironments/stage3","name":"stage3","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:07:27.14113","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:07:27.14113"},"properties":{"provisioningState":"Waiting","defaultDomain":"redisland-f0228bca.northcentralusstage.azurecontainerapps.io","staticIp":"20.9.4.10","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"da72e22b-f2d3-4eab-b0bc-78680ea9ade5"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage4","name":"stage4","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:11:33.3982648","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:11:33.3982648"},"properties":{"provisioningState":"Waiting","defaultDomain":"bravedune-ac753656.northcentralusstage.azurecontainerapps.io","staticIp":"20.9.3.250","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"aa3cdac6-e5ed-4ef6-808f-97996f1a8cd3"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage5","name":"stage5","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:11:23.672865","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:11:23.672865"},"properties":{"provisioningState":"Waiting","defaultDomain":"blackwater-28f0311e.northcentralusstage.azurecontainerapps.io","staticIp":"40.69.173.180","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"5815914d-d73c-4e61-8f50-e71bcd901812"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rgunptltcotvqggsjia7x7ii2lc7pwcsf7wc2z2rwcwkb7kwyrs3moe3zt5enbzi7fx/providers/Microsoft.App/managedEnvironments/containerapp-e2e-envjagi","name":"containerapp-e2e-envjagi","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:12:54.3281907","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:12:54.3281907"},"properties":{"provisioningState":"ScheduledForDelete","defaultDomain":"victorioustree-d296cf27.canadacentral.azurecontainerapps.io","staticIp":"20.220.54.99","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfigura
tion":{"customerId":"daf29c0c-b60f-4f5a-b7d0-73b293d50fcc"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}]}' + string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage1","name":"stage1","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:06:38.7041799","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:06:38.7041799"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"calmpond-b28c41d1.northcentralusstage.azurecontainerapps.io","staticIp":"13.67.139.178","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"21980690-af3f-47d2-ba10-1382b5450cc0"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage2","name":"stage2","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:07:15.2866819","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:07:15.2866819"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"wittymeadow-4dbec5e3.northcentralusstage.azurecontainerapps.io","staticIp":"23.100.80.227","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"71828812-a8df-459b-b6ef-c164550d9f5b"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage3","name":"stage3","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:07:27.14113","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:07:27.14113"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"redisland-f0228bca.northcentralusstage.azurecontainerapps.io","staticIp":"20.9.4.10","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"da72e22b-f2d3-4eab-b0bc-78680ea9ade5"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage4","name":"stage4","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:11:33.3982648"
,"lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:11:33.3982648"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"bravedune-ac753656.northcentralusstage.azurecontainerapps.io","staticIp":"20.9.3.250","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"aa3cdac6-e5ed-4ef6-808f-97996f1a8cd3"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage5","name":"stage5","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:11:23.672865","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:11:23.672865"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"blackwater-28f0311e.northcentralusstage.azurecontainerapps.io","staticIp":"40.69.173.180","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"5815914d-d73c-4e61-8f50-e71bcd901812"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/env","name":"env","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-02T17:28:30.2161566","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T17:40:00.0261021"},"properties":{"provisioningState":"Failed","useLegionServerlessCompute":false,"defaultDomain":"wittysea-79949ad9.canadacentral.azurecontainerapps.io","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/logstream-env","name":"logstream-env","type":"Microsoft.App/managedEnvironments","location":"eastus","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T19:31:58.3535946","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T19:31:58.3535946"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"salmoncoast-d1f7a6de.eastus.azurecontainerapps.io","staticIp":"20.121.84.21","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"b4591f71-4b84-4cd5-b397-377a1771e36f"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/mana
gedEnvironments/logstream2-env","name":"logstream2-env","type":"Microsoft.App/managedEnvironments","location":"centralus","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T19:37:52.0110005","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T19:37:52.0110005"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"ashybeach-727d6692.centralus.azurecontainerapps.io","staticIp":"20.84.132.103","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"d2b86418-edcf-4f2b-b600-d473b2c3588e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}]}' headers: cache-control: - no-cache content-length: - - '5847' + - '7693' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:05 GMT + - Mon, 07 Nov 2022 23:08:43 GMT expires: - '-1' pragma: @@ -354,6 +348,8 @@ interactions: x-ms-original-request-ids: - '' - '' + - '' + - '' status: code: 200 message: OK @@ -371,7 +367,7 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -443,7 +439,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:05 GMT + - Mon, 07 Nov 2022 23:08:42 GMT expires: - '-1' pragma: @@ -471,7 +467,7 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -543,7 +539,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:05 GMT + - Mon, 07 Nov 2022 23:08:43 GMT expires: - '-1' pragma: @@ -571,7 +567,7 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -643,7 +639,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:06 GMT + - Mon, 07 Nov 2022 23:08:43 GMT expires: - '-1' pragma: @@ -661,8 +657,8 @@ interactions: body: '{"location": "canadacentral", "tags": null, "sku": {"name": "Consumption"}, "properties": {"daprAIInstrumentationKey": null, "vnetConfiguration": null, "internalLoadBalancerEnabled": null, "appLogsConfiguration": {"destination": - "log-analytics", "logAnalyticsConfiguration": {"customerId": "437c9025-efac-4acb-bfe3-486d2844f8cf", 
- "sharedKey": "2Rpu66vKEiwwrKHvZzrGNSRQ+EMVNjP2ixqJS4dWkltXEIowwgp5lyCaQ1KYZn3w3TLQBoqLLEh2Y4xg6TarKQ=="}}, + "log-analytics", "logAnalyticsConfiguration": {"customerId": "e664c7fc-5089-4a03-8b39-334414ffe10e", + "sharedKey": "e0GL2dO81nAWkTUFkLzlFZxCRTReQRpPOSeUdxnbynzajXP7+145VTpOgCctO01D3AVj4M/Q+2sJkYQuTXri2A=="}}, "customDomainConfiguration": null, "zoneRedundant": false}}' headers: Accept: @@ -680,25 +676,25 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677Z","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677Z"},"properties":{"provisioningState":"Waiting","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497Z","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497Z"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/locations/canadacentral/managedEnvironmentOperationStatuses/caecc45d-2793-43fa-a72b-7f31b5c8688f?api-version=2022-06-01-preview&azureAsyncOperation=true + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/locations/canadacentral/managedEnvironmentOperationStatuses/c651c3ee-d7a3-4ac4-87cd-ac2176b12ba8?api-version=2022-06-01-preview&azureAsyncOperation=true cache-control: - no-cache 
content-length: - - '954' + - '970' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:11 GMT + - Mon, 07 Nov 2022 23:08:49 GMT expires: - '-1' pragma: @@ -732,23 +728,23 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677"},"properties":{"provisioningState":"Waiting","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '952' + - '968' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:11 GMT + - Mon, 07 Nov 2022 23:08:51 GMT expires: - '-1' pragma: @@ -782,23 +778,23 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677"},"properties":{"provisioningState":"Waiting","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '952' + - '968' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:14 GMT + - Mon, 07 Nov 2022 23:08:55 GMT expires: - '-1' pragma: @@ -832,7 +828,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -904,7 +900,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:14 GMT + - Mon, 07 Nov 2022 23:08:55 GMT expires: - '-1' pragma: @@ -932,23 +928,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677"},"properties":{"provisioningState":"Waiting","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '952' + - '968' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:16 GMT + - Mon, 07 Nov 2022 23:08:57 GMT expires: - '-1' pragma: @@ -982,7 +978,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -1054,7 +1050,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:21 GMT + - Mon, 07 Nov 2022 23:09:02 GMT expires: - '-1' pragma: @@ -1082,23 +1078,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677"},"properties":{"provisioningState":"Waiting","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '952' + - '968' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:22 GMT + - Mon, 07 Nov 2022 23:09:04 GMT expires: - '-1' pragma: @@ -1132,7 +1128,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -1204,7 +1200,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:28 GMT + - Mon, 07 Nov 2022 23:09:11 GMT expires: - '-1' pragma: @@ -1232,23 +1228,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677"},"properties":{"provisioningState":"Waiting","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '952' + - '968' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:29 GMT + - Mon, 07 Nov 2022 23:09:13 GMT expires: - '-1' pragma: @@ -1282,7 +1278,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -1354,7 +1350,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:34 GMT + - Mon, 07 Nov 2022 23:09:18 GMT expires: - '-1' pragma: @@ -1382,23 +1378,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677"},"properties":{"provisioningState":"Waiting","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '952' + - '968' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:35 GMT + - Mon, 07 Nov 2022 23:09:20 GMT expires: - '-1' pragma: @@ -1432,7 +1428,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -1504,7 +1500,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:41 GMT + - Mon, 07 Nov 2022 23:09:26 GMT expires: - '-1' pragma: @@ -1532,23 +1528,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677"},"properties":{"provisioningState":"Waiting","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '952' + - '968' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:41 GMT + - Mon, 07 Nov 2022 23:09:27 GMT expires: - '-1' pragma: @@ -1582,7 +1578,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -1654,7 +1650,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:47 GMT + - Mon, 07 Nov 2022 23:09:33 GMT expires: - '-1' pragma: @@ -1682,23 +1678,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677"},"properties":{"provisioningState":"Waiting","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '952' + - '968' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:48 GMT + - Mon, 07 Nov 2022 23:09:34 GMT expires: - '-1' pragma: @@ -1732,7 +1728,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -1804,7 +1800,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:53 GMT + - Mon, 07 Nov 2022 23:09:39 GMT expires: - '-1' pragma: @@ -1832,23 +1828,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677"},"properties":{"provisioningState":"Waiting","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '952' + - '968' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:17:54 GMT + - Mon, 07 Nov 2022 23:09:40 GMT expires: - '-1' pragma: @@ -1882,7 +1878,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -1954,7 +1950,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:18:00 GMT + - Mon, 07 Nov 2022 23:09:47 GMT expires: - '-1' pragma: @@ -1982,23 +1978,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677"},"properties":{"provisioningState":"Waiting","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '952' + - '968' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:18:01 GMT + - Mon, 07 Nov 2022 23:09:48 GMT expires: - '-1' pragma: @@ -2032,7 +2028,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -2104,7 +2100,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:18:06 GMT + - Mon, 07 Nov 2022 23:09:54 GMT expires: - '-1' pragma: @@ -2132,23 +2128,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677"},"properties":{"provisioningState":"Waiting","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '952' + - '968' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:18:07 GMT + - Mon, 07 Nov 2022 23:09:54 GMT expires: - '-1' pragma: @@ -2182,7 +2178,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -2254,7 +2250,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:18:12 GMT + - Mon, 07 Nov 2022 23:10:01 GMT expires: - '-1' pragma: @@ -2282,23 +2278,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677"},"properties":{"provisioningState":"Waiting","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '952' + - '968' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:18:13 GMT + - Mon, 07 Nov 2022 23:10:02 GMT expires: - '-1' pragma: @@ -2332,7 +2328,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -2404,7 +2400,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:18:19 GMT + - Mon, 07 Nov 2022 23:10:08 GMT expires: - '-1' pragma: @@ -2432,23 +2428,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677"},"properties":{"provisioningState":"Waiting","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '952' + - '968' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:18:20 GMT + - Mon, 07 Nov 2022 23:10:10 GMT expires: - '-1' pragma: @@ -2482,7 +2478,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -2554,7 +2550,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:18:25 GMT + - Mon, 07 Nov 2022 23:10:16 GMT expires: - '-1' pragma: @@ -2582,23 +2578,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677"},"properties":{"provisioningState":"Waiting","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '952' + - '968' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:18:27 GMT + - Mon, 07 Nov 2022 23:10:17 GMT expires: - '-1' pragma: @@ -2632,7 +2628,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -2704,7 +2700,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:18:33 GMT + - Mon, 07 Nov 2022 23:10:22 GMT expires: - '-1' pragma: @@ -2732,23 +2728,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677"},"properties":{"provisioningState":"Waiting","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '952' + - '968' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:18:34 GMT + - Mon, 07 Nov 2022 23:10:23 GMT expires: - '-1' pragma: @@ -2782,7 +2778,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -2854,7 +2850,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:18:39 GMT + - Mon, 07 Nov 2022 23:10:29 GMT expires: - '-1' pragma: @@ -2882,23 +2878,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677"},"properties":{"provisioningState":"Waiting","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '952' + - '968' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:18:40 GMT + - Mon, 07 Nov 2022 23:10:30 GMT expires: - '-1' pragma: @@ -2932,7 +2928,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -3004,7 +3000,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:18:45 GMT + - Mon, 07 Nov 2022 23:10:36 GMT expires: - '-1' pragma: @@ -3032,23 +3028,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677"},"properties":{"provisioningState":"Waiting","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '952' + - '970' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:18:47 GMT + - Mon, 07 Nov 2022 23:10:38 GMT expires: - '-1' pragma: @@ -3076,13 +3072,13 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - containerapp env show + - containerapp create Connection: - keep-alive ParameterSetName: - - -g -n + - -g -n --environment --min-replicas --ingress --target-port User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -3154,7 +3150,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:18:53 GMT + - Mon, 07 Nov 2022 23:10:39 GMT expires: - '-1' pragma: @@ -3176,29 +3172,29 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - containerapp env show + - containerapp create Connection: - keep-alive ParameterSetName: - - -g -n + - -g -n --environment --min-replicas --ingress --target-port User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677"},"properties":{"provisioningState":"Waiting","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '952' + - '970' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:18:54 GMT + - Mon, 07 Nov 2022 23:10:40 GMT expires: - '-1' pragma: @@ -3226,13 +3222,13 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - containerapp env show + - containerapp create Connection: - keep-alive ParameterSetName: - - -g -n + - -g -n --environment --min-replicas --ingress --target-port User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -3304,7 +3300,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:19:00 GMT + - Mon, 07 Nov 2022 23:10:40 GMT expires: - '-1' pragma: @@ -3319,36 +3315,51 @@ interactions: code: 200 message: OK - request: - body: null + body: '{"location": "canadacentral", "identity": {"type": "None", "userAssignedIdentities": + null}, "properties": 
{"managedEnvironmentId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003", + "configuration": {"secrets": null, "activeRevisionsMode": "single", "ingress": + {"fqdn": null, "external": true, "targetPort": 80, "transport": "auto", "exposedPort": + null, "traffic": null, "customDomains": null}, "dapr": null, "registries": null}, + "template": {"revisionSuffix": null, "containers": [{"image": "mcr.microsoft.com/azuredocs/containerapps-helloworld:latest", + "name": "capp000002", "command": null, "args": null, "env": null, "resources": + null, "volumeMounts": null}], "scale": {"minReplicas": 1, "maxReplicas": null, + "rules": []}, "volumes": null}}, "tags": null}' headers: Accept: - '*/*' Accept-Encoding: - gzip, deflate CommandName: - - containerapp env show + - containerapp create Connection: - keep-alive + Content-Length: + - '849' + Content-Type: + - application/json ParameterSetName: - - -g -n + - -g -n --environment --min-replicas --ingress --target-port User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerApps/capp000002?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677"},"properties":{"provisioningState":"Succeeded","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerapps/capp000002","name":"capp000002","type":"Microsoft.App/containerApps","location":"Canada + 
Central","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:10:43.8293633Z","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:10:43.8293633Z"},"properties":{"provisioningState":"InProgress","managedEnvironmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","environmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","outboundIpAddresses":["20.175.194.180"],"latestRevisionName":"","latestRevisionFqdn":"","customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7","configuration":{"activeRevisionsMode":"Single","ingress":{"fqdn":"capp000002.redwave-7a288e50.canadacentral.azurecontainerapps.io","external":true,"targetPort":80,"exposedPort":0,"transport":"Auto","traffic":[{"weight":100,"latestRevision":true}],"allowInsecure":false}},"template":{"revisionSuffix":"","containers":[{"image":"mcr.microsoft.com/azuredocs/containerapps-helloworld:latest","name":"capp000002","resources":{"cpu":0.5,"memory":"1Gi","ephemeralStorage":""}}],"scale":{"minReplicas":1,"maxReplicas":10}},"eventStreamEndpoint":"https://canadacentral.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/eventstream"},"identity":{"type":"None"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/locations/canadacentral/containerappOperationStatuses/a0d64bef-b050-4242-ad93-dfe5cd4dff39?api-version=2022-06-01-preview&azureAsyncOperation=true cache-control: - no-cache content-length: - - '954' + - '1752' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:19:00 GMT + - Mon, 07 Nov 2022 23:10:45 GMT expires: - '-1' pragma: @@ -3357,117 +3368,17 @@ interactions: - Microsoft-IIS/10.0 strict-transport-security: - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding,Accept-Encoding x-content-type-options: - nosniff + x-ms-async-operation-timeout: + - PT15M + x-ms-ratelimit-remaining-subscription-resource-requests: + - '499' x-powered-by: - ASP.NET status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp create - Connection: - - keep-alive - ParameterSetName: - - -g -n --environment --min-replicas --ingress --target-port - User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App","namespace":"Microsoft.App","authorizations":[{"applicationId":"7e3bc4fd-85a3-4192-b177-5b8bfc87f42c","roleDefinitionId":"39a74f72-b40f-4bdc-b639-562fe2260bf0"},{"applicationId":"3734c1a4-2bed-4998-a37a-ff1a9e7bf019","roleDefinitionId":"5c779a4f-5cb2-4547-8c41-478d9be8ba90"},{"applicationId":"55ebbb62-3b9c-49fd-9b87-9595226dd4ac","roleDefinitionId":"e49ca620-7992-4561-a7df-4ed67dad77b5"}],"resourceTypes":[{"resourceType":"managedEnvironments","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"managedEnvironments/certificates","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"containerApps","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SystemAssignedResourceIdentity, SupportsTags, - SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 
EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"operations","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","Canada Central","West - Europe","North Europe","East US","East US 2","East Asia","Australia East","Germany - West Central","Japan East","UK South","West US","Central US","North Central - US","South Central US","Korea Central","Brazil South","West US 3","France - Central","South Africa North","Norway East"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"connectedEnvironments","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"connectedEnvironments/certificates","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"locations/connectedEnvironmentOperationResults","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US","East - US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US","East - US","North Europe","South Central US","UK South","West US 3","Central US 
EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' - headers: - cache-control: - - no-cache - content-length: - - '6928' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 20:19:00 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK + code: 201 + message: Created - request: body: null headers: @@ -3482,23 +3393,24 @@ interactions: ParameterSetName: - -g -n --environment --min-replicas --ingress --target-port User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerApps/capp000002?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677"},"properties":{"provisioningState":"Succeeded","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerapps/capp000002","name":"capp000002","type":"Microsoft.App/containerApps","location":"Canada + 
Central","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:10:43.8293633","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:10:43.8293633"},"properties":{"provisioningState":"InProgress","managedEnvironmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","environmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","outboundIpAddresses":["20.175.194.180"],"latestRevisionName":"capp000002--n071fgf","latestRevisionFqdn":"capp000002--n071fgf.redwave-7a288e50.canadacentral.azurecontainerapps.io","customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7","configuration":{"activeRevisionsMode":"Single","ingress":{"fqdn":"capp000002.redwave-7a288e50.canadacentral.azurecontainerapps.io","external":true,"targetPort":80,"exposedPort":0,"transport":"Auto","traffic":[{"weight":100,"latestRevision":true}],"allowInsecure":false}},"template":{"revisionSuffix":"","containers":[{"image":"mcr.microsoft.com/azuredocs/containerapps-helloworld:latest","name":"capp000002","resources":{"cpu":0.5,"memory":"1Gi","ephemeralStorage":""}}],"scale":{"minReplicas":1,"maxReplicas":10}},"eventStreamEndpoint":"https://canadacentral.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/eventstream"},"identity":{"type":"None"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '954' + - '1841' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:19:02 GMT + - Mon, 07 Nov 2022 23:10:47 GMT expires: - '-1' pragma: @@ -3520,114 +3432,6 @@ interactions: message: OK - request: body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp create - Connection: - - keep-alive - ParameterSetName: - - -g -n --environment --min-replicas --ingress --target-port - User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App","namespace":"Microsoft.App","authorizations":[{"applicationId":"7e3bc4fd-85a3-4192-b177-5b8bfc87f42c","roleDefinitionId":"39a74f72-b40f-4bdc-b639-562fe2260bf0"},{"applicationId":"3734c1a4-2bed-4998-a37a-ff1a9e7bf019","roleDefinitionId":"5c779a4f-5cb2-4547-8c41-478d9be8ba90"},{"applicationId":"55ebbb62-3b9c-49fd-9b87-9595226dd4ac","roleDefinitionId":"e49ca620-7992-4561-a7df-4ed67dad77b5"}],"resourceTypes":[{"resourceType":"managedEnvironments","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 
EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"managedEnvironments/certificates","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"containerApps","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SystemAssignedResourceIdentity, SupportsTags, - SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil 
South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"operations","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","Canada Central","West - Europe","North Europe","East US","East US 2","East Asia","Australia East","Germany - West Central","Japan East","UK South","West US","Central US","North Central - US","South Central US","Korea Central","Brazil South","West US 3","France - Central","South Africa North","Norway East"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"connectedEnvironments","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"connectedEnvironments/certificates","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"locations/connectedEnvironmentOperationResults","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US","East - US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US","East - US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' - headers: - cache-control: - - no-cache - content-length: - - '6928' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 20:19:02 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: '{"location": "canadacentral", "identity": {"type": "None", "userAssignedIdentities": - null}, "properties": {"managedEnvironmentId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003", - "configuration": {"secrets": null, "activeRevisionsMode": "single", "ingress": - {"fqdn": null, "external": true, "targetPort": 80, "transport": "auto", "exposedPort": - null, "traffic": null, "customDomains": null}, "dapr": null, "registries": null}, - "template": {"revisionSuffix": null, 
"containers": [{"image": "mcr.microsoft.com/azuredocs/containerapps-helloworld:latest", - "name": "capp000002", "command": null, "args": null, "env": null, "resources": - null, "volumeMounts": null}], "scale": {"minReplicas": 1, "maxReplicas": null, - "rules": []}, "volumes": null}}, "tags": null}' headers: Accept: - '*/*' @@ -3637,84 +3441,27 @@ interactions: - containerapp create Connection: - keep-alive - Content-Length: - - '849' - Content-Type: - - application/json ParameterSetName: - -g -n --environment --min-replicas --ingress --target-port User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: PUT - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerApps/capp000002?api-version=2022-06-01-preview - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerapps/capp000002","name":"capp000002","type":"Microsoft.App/containerApps","location":"Canada - Central","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:19:05.7483524Z","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:19:05.7483524Z"},"properties":{"provisioningState":"InProgress","managedEnvironmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","environmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","outboundIpAddresses":["20.175.226.18"],"latestRevisionName":"","latestRevisionFqdn":"","customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7","configuration":{"activeRevisionsMode":"Single","ingress":{"fqdn":"capp000002.ashymoss-b173f40e.canadacentral.azurecontainerapps.io","external":true,"targetPort":80,"exposedPort":0,"transport":"Auto","traffic":[{"weight":100,"latestRevision":true}],"allowInsecure":false}},"template":{"revisionSuffix":"","containers":[{"image":"mcr.microsoft.com/azuredocs/containerapps-helloworld:latest","name":"capp000002","resources":{"cpu":0.5,"memory":"1Gi","ephemeralStorage":""}}],"scale":{"minReplicas":1,"maxReplicas":10}},"eventStreamEndpoint":"https://canadacentral.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/eventstream"},"identity":{"type":"None"}}' - headers: - api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview - azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/locations/canadacentral/containerappOperationStatuses/6ea4db22-16c9-4cbf-a8f3-3f00fef760ba?api-version=2022-06-01-preview&azureAsyncOperation=true - cache-control: - - no-cache - content-length: - - '1752' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 20:19:07 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-async-operation-timeout: - - PT15M - x-ms-ratelimit-remaining-subscription-resource-requests: - - '499' - x-powered-by: - - ASP.NET - status: - code: 201 - message: Created -- request: - body: null - headers: 
- Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp create - Connection: - - keep-alive - ParameterSetName: - - -g -n --environment --min-replicas --ingress --target-port - User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerApps/capp000002?api-version=2022-06-01-preview response: body: string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerapps/capp000002","name":"capp000002","type":"Microsoft.App/containerApps","location":"Canada - Central","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:19:05.7483524","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:19:05.7483524"},"properties":{"provisioningState":"InProgress","managedEnvironmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","environmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","outboundIpAddresses":["20.175.226.18"],"latestRevisionName":"capp000002--ag5ou61","latestRevisionFqdn":"capp000002--ag5ou61.ashymoss-b173f40e.canadacentral.azurecontainerapps.io","customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7","configuration":{"activeRevisionsMode":"Single","ingress":{"fqdn":"capp000002.ashymoss-b173f40e.canadacentral.azurecontainerapps.io","external":true,"targetPort":80,"exposedPort":0,"transport":"Auto","traffic":[{"weight":100,"latestRevision":true}],"allowInsecure":false}},"template":{"revisionSuffix":"","containers":[{"image":"mcr.microsoft.com/azuredocs/containerapps-helloworld:latest","name":"capp000002","resources":{"cpu":0.5,"memory":"1Gi","ephemeralStorage":""}}],"scale":{"minReplicas":1,"maxReplicas":10}},"eventStreamEndpoint":"https://canadacentral.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/eventstream"},"identity":{"type":"None"}}' + 
Central","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:10:43.8293633","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:10:43.8293633"},"properties":{"provisioningState":"InProgress","managedEnvironmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","environmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","outboundIpAddresses":["20.175.194.180"],"latestRevisionName":"capp000002--n071fgf","latestRevisionFqdn":"capp000002--n071fgf.redwave-7a288e50.canadacentral.azurecontainerapps.io","customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7","configuration":{"activeRevisionsMode":"Single","ingress":{"fqdn":"capp000002.redwave-7a288e50.canadacentral.azurecontainerapps.io","external":true,"targetPort":80,"exposedPort":0,"transport":"Auto","traffic":[{"weight":100,"latestRevision":true}],"allowInsecure":false}},"template":{"revisionSuffix":"","containers":[{"image":"mcr.microsoft.com/azuredocs/containerapps-helloworld:latest","name":"capp000002","resources":{"cpu":0.5,"memory":"1Gi","ephemeralStorage":""}}],"scale":{"minReplicas":1,"maxReplicas":10}},"eventStreamEndpoint":"https://canadacentral.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/eventstream"},"identity":{"type":"None"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '1842' + - '1841' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:19:09 GMT + - Mon, 07 Nov 2022 23:10:51 GMT expires: - '-1' pragma: @@ -3748,24 +3495,24 @@ interactions: ParameterSetName: - -g -n --environment --min-replicas --ingress --target-port User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerApps/capp000002?api-version=2022-06-01-preview response: body: string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerapps/capp000002","name":"capp000002","type":"Microsoft.App/containerApps","location":"Canada - 
Central","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:19:05.7483524","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:19:05.7483524"},"properties":{"provisioningState":"InProgress","managedEnvironmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","environmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","outboundIpAddresses":["20.175.226.18"],"latestRevisionName":"capp000002--ag5ou61","latestRevisionFqdn":"capp000002--ag5ou61.ashymoss-b173f40e.canadacentral.azurecontainerapps.io","customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7","configuration":{"activeRevisionsMode":"Single","ingress":{"fqdn":"capp000002.ashymoss-b173f40e.canadacentral.azurecontainerapps.io","external":true,"targetPort":80,"exposedPort":0,"transport":"Auto","traffic":[{"weight":100,"latestRevision":true}],"allowInsecure":false}},"template":{"revisionSuffix":"","containers":[{"image":"mcr.microsoft.com/azuredocs/containerapps-helloworld:latest","name":"capp000002","resources":{"cpu":0.5,"memory":"1Gi","ephemeralStorage":""}}],"scale":{"minReplicas":1,"maxReplicas":10}},"eventStreamEndpoint":"https://canadacentral.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/eventstream"},"identity":{"type":"None"}}' + Central","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:10:43.8293633","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:10:43.8293633"},"properties":{"provisioningState":"Succeeded","managedEnvironmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","environmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","outboundIpAddresses":["20.175.194.180"],"latestRevisionName":"capp000002--n071fgf","latestRevisionFqdn":"capp000002--n071fgf.redwave-7a288e50.canadacentral.azurecontainerapps.io","customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7","configuration":{"activeRevisionsMode":"Single","ingress":{"fqdn":"capp000002.redwave-7a288e50.canadacentral.azurecontainerapps.io","external":true,"targetPort":80,"exposedPort":0,"transport":"Auto","traffic":[{"weight":100,"latestRevision":true}],"allowInsecure":false}},"template":{"revisionSuffix":"","containers":[{"image":"mcr.microsoft.com/azuredocs/containerapps-helloworld:latest","name":"capp000002","resources":{"cpu":0.5,"memory":"1Gi","ephemeralStorage":""}}],"scale":{"minReplicas":1,"maxReplicas":10}},"eventStreamEndpoint":"https://canadacentral.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/eventstream"},"identity":{"type":"None"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '1842' + - '1840' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 
20:19:12 GMT + - Mon, 07 Nov 2022 23:10:54 GMT expires: - '-1' pragma: @@ -3793,30 +3540,31 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - containerapp create + - containerapp logs show Connection: - keep-alive + Content-Length: + - '0' ParameterSetName: - - -g -n --environment --min-replicas --ingress --target-port + - -n -g --type User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerApps/capp000002?api-version=2022-06-01-preview + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerApps/capp000002/getAuthToken?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerapps/capp000002","name":"capp000002","type":"Microsoft.App/containerApps","location":"Canada - Central","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:19:05.7483524","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:19:05.7483524"},"properties":{"provisioningState":"Succeeded","managedEnvironmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","environmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","outboundIpAddresses":["20.175.226.18"],"latestRevisionName":"capp000002--ag5ou61","latestRevisionFqdn":"capp000002--ag5ou61.ashymoss-b173f40e.canadacentral.azurecontainerapps.io","customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7","configuration":{"activeRevisionsMode":"Single","ingress":{"fqdn":"capp000002.ashymoss-b173f40e.canadacentral.azurecontainerapps.io","external":true,"targetPort":80,"exposedPort":0,"transport":"Auto","traffic":[{"weight":100,"latestRevision":true}],"allowInsecure":false}},"template":{"revisionSuffix":"","containers":[{"image":"mcr.microsoft.com/azuredocs/containerapps-helloworld:latest","name":"capp000002","resources":{"cpu":0.5,"memory":"1Gi","ephemeralStorage":""}}],"scale":{"minReplicas":1,"maxReplicas":10}},"eventStreamEndpoint":"https://canadacentral.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/eventstream"},"identity":{"type":"None"}}' + string: '{"location":"Canada 
Central","properties":{"token":"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJjYXBwX3N1YiI6IjJlZGMyOWY0LWI4MWYtNDk0Yi1hNjI0LWNjNjE5OTAzYjgzNyIsImNhcHBfcmciOiJjbGl0ZXN0LnJndTZpNTJ1NXhjazRrZjZ0Z3BmaGdxeXc3Zmp3NTUyd3RkN3FiYTJybWVoNzU3cHg0aGRtNWZ4ZnA2YjI2eDR2cmwiLCJjYXBwX25hbWUiOiJjYXBwY3dnbHp4YWR6NWJ5ZDdrazdqbDYiLCJjYXBwX2ppdF9leHBpcnkiOiIyMDIyLTExLTA4VDAwOjEwOjU3LjIyMjYyNzJaIiwibmJmIjoxNjY3ODYyNjU3LCJleHAiOjE2Njc4NjYyNTcsImlhdCI6MTY2Nzg2MjY1NywiaXNzIjoiQXp1cmVDb250YWluZXJBcHBzIiwiYXVkIjoiaHR0cHM6Ly9jYW5hZGFjZW50cmFsLmF6dXJlY29udGFpbmVyYXBwcy5kZXYifQ.g2vqXRu2DYmRcrsAtkY_JRZqDKBs_ki5GiZA6XzFbDM","expires":"2022-11-08T00:10:57.2226272Z"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerapps/capp000002","name":"capp000002","type":"Microsoft.App/containerApps/accesstoken"}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '1841' + - '864' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:19:17 GMT + - Mon, 07 Nov 2022 23:10:56 GMT expires: - '-1' pragma: @@ -3831,6 +3579,8 @@ interactions: - Accept-Encoding,Accept-Encoding x-content-type-options: - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' x-powered-by: - ASP.NET status: @@ -3847,28 +3597,27 @@ interactions: - containerapp logs show Connection: - keep-alive - Content-Length: - - '0' ParameterSetName: - -n -g --type User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: POST - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerApps/capp000002/getAuthToken?api-version=2022-06-01-preview + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerApps/capp000002?api-version=2022-06-01-preview response: body: - string: '{"location":"Canada 
Central","properties":{"token":"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJjYXBwX3N1YiI6IjJlZGMyOWY0LWI4MWYtNDk0Yi1hNjI0LWNjNjE5OTAzYjgzNyIsImNhcHBfcmciOiJjbGl0ZXN0LnJnc2V2NWxpYzQydm1qZDI1ZmxiaHZucXJ4cmFubWRmbDM3dzY1c3dnY2R1ZDVzd2x2dzd0Z280aml3dXl4ZTVpYXQiLCJjYXBwX25hbWUiOiJjYXBwYnpnNTQ1djVxNDZ5NGo1ZjUyM2YiLCJjYXBwX2ppdF9leHBpcnkiOiIyMDIyLTEwLTA3VDIxOjE5OjE4Ljc3NTcyOTdaIiwibmJmIjoxNjY1MTczOTU4LCJleHAiOjE2NjUxNzc1NTgsImlhdCI6MTY2NTE3Mzk1OCwiaXNzIjoiQXp1cmVDb250YWluZXJBcHBzIiwiYXVkIjoiaHR0cHM6Ly9jYW5hZGFjZW50cmFsLmF6dXJlY29udGFpbmVyYXBwcy5kZXYifQ.MxBVgicF4R7wNki37STQNj7wPu2iJyHTifF9VIa5ehA","expires":"2022-10-07T21:19:18.7757297Z","logStreamEndpoint":"https://canadacentral.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/revisions/logstream?token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJjYXBwX3N1YiI6IjJlZGMyOWY0LWI4MWYtNDk0Yi1hNjI0LWNjNjE5OTAzYjgzNyIsImNhcHBfcmciOiJjbGl0ZXN0LnJnc2V2NWxpYzQydm1qZDI1ZmxiaHZucXJ4cmFubWRmbDM3dzY1c3dnY2R1ZDVzd2x2dzd0Z280aml3dXl4ZTVpYXQiLCJjYXBwX25hbWUiOiJjYXBwYnpnNTQ1djVxNDZ5NGo1ZjUyM2YiLCJjYXBwX2ppdF9leHBpcnkiOiIyMDIyLTEwLTA3VDIxOjE5OjE4Ljc3NTcyOTdaIiwibmJmIjoxNjY1MTczOTU4LCJleHAiOjE2NjUxNzc1NTgsImlhdCI6MTY2NTE3Mzk1OCwiaXNzIjoiQXp1cmVDb250YWluZXJBcHBzIiwiYXVkIjoiaHR0cHM6Ly9jYW5hZGFjZW50cmFsLmF6dXJlY29udGFpbmVyYXBwcy5kZXYifQ.MxBVgicF4R7wNki37STQNj7wPu2iJyHTifF9VIa5ehA","execEndpoint":"wss://canadacentral.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/revisions/exec?token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJjYXBwX3N1YiI6IjJlZGMyOWY0LWI4MWYtNDk0Yi1hNjI0LWNjNjE5OTAzYjgzNyIsImNhcHBfcmciOiJjbGl0ZXN0LnJnc2V2NWxpYzQydm1qZDI1ZmxiaHZucXJ4cmFubWRmbDM3dzY1c3dnY2R1ZDVzd2x2dzd0Z280aml3dXl4ZTVpYXQiLCJjYXBwX25hbWUiOiJjYXBwYnpnNTQ1djVxNDZ5NGo1ZjUyM2YiLCJjYXBwX2ppdF9leHBpcnkiOiIyMDIyLTEwLTA3VDIxOjE5OjE4Ljc3NTcyOTdaIiwibmJmIjoxNjY1MTczOTU4LCJleHAiOjE2NjUxNzc1NTgsImlhdCI6MTY2NTE3Mzk1OCwiaXNzIjoiQXp1cmVDb250YWluZXJBcHBzIiwiYXVkIjoiaHR0cHM6Ly9jYW5hZGFjZW50cmFsLmF6dXJlY29udGFpbmVyYXBwcy5kZXYifQ.MxBVgicF4R7wNki37STQNj7wPu2iJyHTifF9VIa5ehA"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerapps/capp000002","name":"capp000002","type":"Microsoft.App/containerApps/accesstoken"}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerapps/capp000002","name":"capp000002","type":"Microsoft.App/containerApps","location":"Canada + 
Central","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:10:43.8293633","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:10:43.8293633"},"properties":{"provisioningState":"Succeeded","managedEnvironmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","environmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","outboundIpAddresses":["20.175.194.180"],"latestRevisionName":"capp000002--n071fgf","latestRevisionFqdn":"capp000002--n071fgf.redwave-7a288e50.canadacentral.azurecontainerapps.io","customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7","configuration":{"activeRevisionsMode":"Single","ingress":{"fqdn":"capp000002.redwave-7a288e50.canadacentral.azurecontainerapps.io","external":true,"targetPort":80,"exposedPort":0,"transport":"Auto","traffic":[{"weight":100,"latestRevision":true}],"allowInsecure":false}},"template":{"revisionSuffix":"","containers":[{"image":"mcr.microsoft.com/azuredocs/containerapps-helloworld:latest","name":"capp000002","resources":{"cpu":0.5,"memory":"1Gi","ephemeralStorage":""}}],"scale":{"minReplicas":1,"maxReplicas":10}},"eventStreamEndpoint":"https://canadacentral.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/eventstream"},"identity":{"type":"None"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '2374' + - '1840' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:19:17 GMT + - Mon, 07 Nov 2022 23:10:57 GMT expires: - '-1' pragma: @@ -3883,8 +3632,6 @@ interactions: - Accept-Encoding,Accept-Encoding x-content-type-options: - nosniff - x-ms-ratelimit-remaining-subscription-writes: - - '1198' x-powered-by: - ASP.NET status: @@ -3905,48 +3652,48 @@ interactions: uri: https://canadacentral.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/eventstream?follow=false&output=json&tailLines=20 response: body: - string: '{"TimeStamp":"2022-10-07T20:19:19Z","Type":"Normal","ContainerAppName":null,"RevisionName":null,"ReplicaName":null,"Msg":"Connecting + string: '{"TimeStamp":"2022-11-07T23:11:00Z","Type":"Normal","ContainerAppName":null,"RevisionName":null,"ReplicaName":null,"Msg":"Connecting to the events collector...","Reason":"StartingGettingEvents","EventSource":"ContainerAppController","Count":1} - {"TimeStamp":"2022-10-07T20:19:20Z","Type":"Normal","ContainerAppName":null,"RevisionName":null,"ReplicaName":null,"Msg":"Successfully + {"TimeStamp":"2022-11-07T23:11:00Z","Type":"Normal","ContainerAppName":null,"RevisionName":null,"ReplicaName":null,"Msg":"Successfully connected to events server","Reason":"ConnectedToEventsServer","EventSource":"ContainerAppController","Count":1} - {"TimeStamp":"2022-10-07 20:19:08 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"","ReplicaName":"","Msg":"Deactivating - old revisions for ContainerApp : capp000002","Reason":"RevisionDeactivating","EventSource":"ContainerAppController","Count":1} + {"TimeStamp":"2022-11-07 23:10:48 
\u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"","ReplicaName":"","Msg":"Setting + traffic weight of \u0027100%\u0027 for revision \u0027capp000002--n071fgf\u0027","Reason":"RevisionUpdate","EventSource":"ContainerAppController","Count":3} - {"TimeStamp":"2022-10-07 20:19:08 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--ag5ou61","ReplicaName":"","Msg":"Successfully - provisioned revision \u0027capp000002--ag5ou61\u0027","Reason":"RevisionReady","EventSource":"ContainerAppController","Count":1} + {"TimeStamp":"2022-11-07 23:10:48 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"","ReplicaName":"","Msg":"Deactivating + old revisions for ContainerApp : capp000002","Reason":"RevisionDeactivating","EventSource":"ContainerAppController","Count":3} - {"TimeStamp":"2022-10-07 20:19:08 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"","ReplicaName":"","Msg":"Successfully - updated containerApp: capp000002","Reason":"ContainerAppReady","EventSource":"ContainerAppController","Count":1} + {"TimeStamp":"2022-11-07 23:10:48 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--n071fgf","ReplicaName":"","Msg":"Successfully + provisioned revision \u0027capp000002--n071fgf\u0027","Reason":"RevisionReady","EventSource":"ContainerAppController","Count":3} - {"TimeStamp":"2022-10-07 20:19:08 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--ag5ou61","ReplicaName":"capp000002--ag5ou61-5c5469b87f-kpt8d","Msg":"Pulling - image \u0022mcr.microsoft.com/azuredocs/containerapps-helloworld:latest\u0022","Reason":"PullingImage","EventSource":"ContainerAppController","Count":1} + {"TimeStamp":"2022-11-07 23:10:48 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"","ReplicaName":"","Msg":"Successfully + updated containerApp: capp000002","Reason":"ContainerAppReady","EventSource":"ContainerAppController","Count":3} - {"TimeStamp":"2022-10-07 20:19:08 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--ag5ou61","ReplicaName":"capp000002--ag5ou61-5c5469b87f-kpt8d","Msg":"Pulling - image \u0022mcr.microsoft.com/azuredocs/containerapps-helloworld:latest\u0022","Reason":"PullingImage","EventSource":"ContainerAppController","Count":1} + {"TimeStamp":"2022-11-07 23:10:48 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"","ReplicaName":"","Msg":"Deactivating + old revisions for ContainerApp : capp000002","Reason":"RevisionDeactivating","EventSource":"ContainerAppController","Count":3} - {"TimeStamp":"2022-10-07 20:19:08 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--ag5ou61","ReplicaName":"capp000002--ag5ou61-5c5469b87f-kpt8d","Msg":"Successfully + {"TimeStamp":"2022-11-07 23:10:48 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--n071fgf","ReplicaName":"capp000002--n071fgf-f47665d96-5wfkn","Msg":"Successfully pulled image \u0022mcr.microsoft.com/azuredocs/containerapps-helloworld:latest\u0022 - in 65.763485ms","Reason":"ImagePulled","EventSource":"ContainerAppController","Count":1} + in 1.836289865s","Reason":"ImagePulled","EventSource":"ContainerAppController","Count":1} - {"TimeStamp":"2022-10-07 20:19:08 \u002B0000 
UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--ag5ou61","ReplicaName":"capp000002--ag5ou61-5c5469b87f-kpt8d","Msg":"Created + {"TimeStamp":"2022-11-07 23:10:48 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--n071fgf","ReplicaName":"capp000002--n071fgf-f47665d96-5wfkn","Msg":"Created container capp000002","Reason":"ContainerCreated","EventSource":"ContainerAppController","Count":1} - {"TimeStamp":"2022-10-07 20:19:08 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--ag5ou61","ReplicaName":"capp000002--ag5ou61-5c5469b87f-kpt8d","Msg":"Successfully + {"TimeStamp":"2022-11-07 23:10:48 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--n071fgf","ReplicaName":"capp000002--n071fgf-f47665d96-5wfkn","Msg":"Successfully pulled image \u0022mcr.microsoft.com/azuredocs/containerapps-helloworld:latest\u0022 - in 65.763485ms","Reason":"ImagePulled","EventSource":"ContainerAppController","Count":1} + in 1.836289865s","Reason":"ImagePulled","EventSource":"ContainerAppController","Count":1} - {"TimeStamp":"2022-10-07 20:19:08 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--ag5ou61","ReplicaName":"capp000002--ag5ou61-5c5469b87f-kpt8d","Msg":"Started + {"TimeStamp":"2022-11-07 23:10:48 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--n071fgf","ReplicaName":"capp000002--n071fgf-f47665d96-5wfkn","Msg":"Started container capp000002","Reason":"ContainerStarted","EventSource":"ContainerAppController","Count":1} - {"TimeStamp":"2022-10-07 20:19:08 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--ag5ou61","ReplicaName":"capp000002--ag5ou61-5c5469b87f-kpt8d","Msg":"Started + {"TimeStamp":"2022-11-07 23:10:48 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--n071fgf","ReplicaName":"capp000002--n071fgf-f47665d96-5wfkn","Msg":"Started container capp000002","Reason":"ContainerStarted","EventSource":"ContainerAppController","Count":1} ' headers: date: - - Fri, 07 Oct 2022 20:19:18 GMT + - Mon, 07 Nov 2022 23:11:00 GMT server: - Microsoft-IIS/10.0 transfer-encoding: @@ -3970,7 +3717,7 @@ interactions: ParameterSetName: - -n -g User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -4042,7 +3789,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:19:20 GMT + - Mon, 07 Nov 2022 23:11:02 GMT expires: - '-1' pragma: @@ -4070,23 +3817,23 @@ interactions: ParameterSetName: - -n -g User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:17:08.8995677","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:17:08.8995677"},"properties":{"provisioningState":"Succeeded","defaultDomain":"ashymoss-b173f40e.canadacentral.azurecontainerapps.io","staticIp":"20.175.226.41","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"437c9025-efac-4acb-bfe3-486d2844f8cf"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:47.0866497","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:47.0866497"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"redwave-7a288e50.canadacentral.azurecontainerapps.io","staticIp":"20.175.196.177","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e664c7fc-5089-4a03-8b39-334414ffe10e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '954' + - '970' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:19:20 GMT + - Mon, 07 Nov 2022 23:11:02 GMT expires: - '-1' pragma: @@ -4122,15 +3869,15 @@ interactions: ParameterSetName: - -n -g User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: POST uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003/getAuthToken?api-version=2022-06-01-preview response: body: - string: '{"location":"Canada 
Central","properties":{"token":"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJjYXBwX3N1YiI6IjJlZGMyOWY0LWI4MWYtNDk0Yi1hNjI0LWNjNjE5OTAzYjgzNyIsImNhcHBfcmciOiJjbGl0ZXN0LnJnc2V2NWxpYzQydm1qZDI1ZmxiaHZucXJ4cmFubWRmbDM3dzY1c3dnY2R1ZDVzd2x2dzd0Z280aml3dXl4ZTVpYXQiLCJrdWJlZW52X25hbWUiOiJlbnZ1dmVqcHpsZTRtZ2d6Y2N1aDV5dzUiLCJjYXBwX2ppdF9leHBpcnkiOiIyMDIyLTEwLTA3VDIxOjE5OjIyLjU0NTczMTJaIiwibmJmIjoxNjY1MTczOTYyLCJleHAiOjE2NjUxNzc1NjIsImlhdCI6MTY2NTE3Mzk2MiwiaXNzIjoiQXp1cmVDb250YWluZXJBcHBzIiwiYXVkIjoiaHR0cHM6Ly9jYW5hZGFjZW50cmFsLmF6dXJlY29udGFpbmVyYXBwcy5kZXYifQ.HEGOJxsZS2TU0XnwQFrrBK5tZomvS27KUqWZSatI-uc","expires":"2022-10-07T21:19:22.5457312Z"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/environments/accesstoken"}' + string: '{"location":"Canada Central","properties":{"token":"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJjYXBwX3N1YiI6IjJlZGMyOWY0LWI4MWYtNDk0Yi1hNjI0LWNjNjE5OTAzYjgzNyIsImNhcHBfcmciOiJjbGl0ZXN0LnJndTZpNTJ1NXhjazRrZjZ0Z3BmaGdxeXc3Zmp3NTUyd3RkN3FiYTJybWVoNzU3cHg0aGRtNWZ4ZnA2YjI2eDR2cmwiLCJrdWJlZW52X25hbWUiOiJlbnY2NTJxbWF5cGJiaHdpZ2N4amRkZ3IiLCJjYXBwX2ppdF9leHBpcnkiOiIyMDIyLTExLTA4VDAwOjExOjA0LjQ2MzQ4NjZaIiwibmJmIjoxNjY3ODYyNjY0LCJleHAiOjE2Njc4NjYyNjQsImlhdCI6MTY2Nzg2MjY2NCwiaXNzIjoiQXp1cmVDb250YWluZXJBcHBzIiwiYXVkIjoiaHR0cHM6Ly9jYW5hZGFjZW50cmFsLmF6dXJlY29udGFpbmVyYXBwcy5kZXYifQ.2K8YF8SpYWRCEkFqqvEkJPQVm9PqYKDHA87dF7SLwf4","expires":"2022-11-08T00:11:04.4634866Z"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/environments/accesstoken"}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: @@ -4138,7 +3885,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 20:19:22 GMT + - Mon, 07 Nov 2022 23:11:04 GMT expires: - '-1' pragma: @@ -4175,48 +3922,48 @@ interactions: uri: https://canadacentral.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/managedEnvironments/env000003/eventstream?follow=false&tailLines=20 response: body: - string: '{"TimeStamp":"2022-10-07T20:19:23Z","Type":"Normal","ContainerAppName":null,"RevisionName":null,"ReplicaName":null,"Msg":"Connecting + string: '{"TimeStamp":"2022-11-07T23:11:05Z","Type":"Normal","ContainerAppName":null,"RevisionName":null,"ReplicaName":null,"Msg":"Connecting to the events collector...","Reason":"StartingGettingEvents","EventSource":"ContainerAppController","Count":1} - {"TimeStamp":"2022-10-07T20:19:23Z","Type":"Normal","ContainerAppName":null,"RevisionName":null,"ReplicaName":null,"Msg":"Successfully + {"TimeStamp":"2022-11-07T23:11:05Z","Type":"Normal","ContainerAppName":null,"RevisionName":null,"ReplicaName":null,"Msg":"Successfully connected to events server","Reason":"ConnectedToEventsServer","EventSource":"ContainerAppController","Count":1} - {"TimeStamp":"2022-10-07 20:19:08 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"","ReplicaName":"","Msg":"Deactivating - old revisions for ContainerApp : capp000002","Reason":"RevisionDeactivating","EventSource":"ContainerAppController","Count":1} + {"TimeStamp":"2022-11-07 23:10:48 \u002B0000 
UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"","ReplicaName":"","Msg":"Setting + traffic weight of \u0027100%\u0027 for revision \u0027capp000002--n071fgf\u0027","Reason":"RevisionUpdate","EventSource":"ContainerAppController","Count":3} - {"TimeStamp":"2022-10-07 20:19:08 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--ag5ou61","ReplicaName":"","Msg":"Successfully - provisioned revision \u0027capp000002--ag5ou61\u0027","Reason":"RevisionReady","EventSource":"ContainerAppController","Count":1} + {"TimeStamp":"2022-11-07 23:10:48 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"","ReplicaName":"","Msg":"Deactivating + old revisions for ContainerApp : capp000002","Reason":"RevisionDeactivating","EventSource":"ContainerAppController","Count":3} - {"TimeStamp":"2022-10-07 20:19:08 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"","ReplicaName":"","Msg":"Successfully - updated containerApp: capp000002","Reason":"ContainerAppReady","EventSource":"ContainerAppController","Count":1} + {"TimeStamp":"2022-11-07 23:10:48 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--n071fgf","ReplicaName":"","Msg":"Successfully + provisioned revision \u0027capp000002--n071fgf\u0027","Reason":"RevisionReady","EventSource":"ContainerAppController","Count":3} - {"TimeStamp":"2022-10-07 20:19:08 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--ag5ou61","ReplicaName":"capp000002--ag5ou61-5c5469b87f-kpt8d","Msg":"Pulling - image \u0022mcr.microsoft.com/azuredocs/containerapps-helloworld:latest\u0022","Reason":"PullingImage","EventSource":"ContainerAppController","Count":1} + {"TimeStamp":"2022-11-07 23:10:48 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"","ReplicaName":"","Msg":"Successfully + updated containerApp: capp000002","Reason":"ContainerAppReady","EventSource":"ContainerAppController","Count":3} - {"TimeStamp":"2022-10-07 20:19:08 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--ag5ou61","ReplicaName":"capp000002--ag5ou61-5c5469b87f-kpt8d","Msg":"Pulling - image \u0022mcr.microsoft.com/azuredocs/containerapps-helloworld:latest\u0022","Reason":"PullingImage","EventSource":"ContainerAppController","Count":1} + {"TimeStamp":"2022-11-07 23:10:48 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"","ReplicaName":"","Msg":"Deactivating + old revisions for ContainerApp : capp000002","Reason":"RevisionDeactivating","EventSource":"ContainerAppController","Count":3} - {"TimeStamp":"2022-10-07 20:19:08 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--ag5ou61","ReplicaName":"capp000002--ag5ou61-5c5469b87f-kpt8d","Msg":"Successfully + {"TimeStamp":"2022-11-07 23:10:48 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--n071fgf","ReplicaName":"capp000002--n071fgf-f47665d96-5wfkn","Msg":"Successfully pulled image \u0022mcr.microsoft.com/azuredocs/containerapps-helloworld:latest\u0022 - in 65.763485ms","Reason":"ImagePulled","EventSource":"ContainerAppController","Count":1} + in 1.836289865s","Reason":"ImagePulled","EventSource":"ContainerAppController","Count":1} - {"TimeStamp":"2022-10-07 20:19:08 \u002B0000 
UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--ag5ou61","ReplicaName":"capp000002--ag5ou61-5c5469b87f-kpt8d","Msg":"Created + {"TimeStamp":"2022-11-07 23:10:48 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--n071fgf","ReplicaName":"capp000002--n071fgf-f47665d96-5wfkn","Msg":"Created container capp000002","Reason":"ContainerCreated","EventSource":"ContainerAppController","Count":1} - {"TimeStamp":"2022-10-07 20:19:08 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--ag5ou61","ReplicaName":"capp000002--ag5ou61-5c5469b87f-kpt8d","Msg":"Successfully + {"TimeStamp":"2022-11-07 23:10:48 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--n071fgf","ReplicaName":"capp000002--n071fgf-f47665d96-5wfkn","Msg":"Successfully pulled image \u0022mcr.microsoft.com/azuredocs/containerapps-helloworld:latest\u0022 - in 65.763485ms","Reason":"ImagePulled","EventSource":"ContainerAppController","Count":1} + in 1.836289865s","Reason":"ImagePulled","EventSource":"ContainerAppController","Count":1} - {"TimeStamp":"2022-10-07 20:19:08 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--ag5ou61","ReplicaName":"capp000002--ag5ou61-5c5469b87f-kpt8d","Msg":"Started + {"TimeStamp":"2022-11-07 23:10:48 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--n071fgf","ReplicaName":"capp000002--n071fgf-f47665d96-5wfkn","Msg":"Started container capp000002","Reason":"ContainerStarted","EventSource":"ContainerAppController","Count":1} - {"TimeStamp":"2022-10-07 20:19:08 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--ag5ou61","ReplicaName":"capp000002--ag5ou61-5c5469b87f-kpt8d","Msg":"Started + {"TimeStamp":"2022-11-07 23:10:48 \u002B0000 UTC","Type":"Normal","ContainerAppName":"capp000002","RevisionName":"capp000002--n071fgf","ReplicaName":"capp000002--n071fgf-f47665d96-5wfkn","Msg":"Started container capp000002","Reason":"ContainerStarted","EventSource":"ContainerAppController","Count":1} ' headers: date: - - Fri, 07 Oct 2022 20:19:23 GMT + - Mon, 07 Nov 2022 23:11:05 GMT server: - Microsoft-IIS/10.0 transfer-encoding: diff --git a/src/containerapp/azext_containerapp/tests/latest/recordings/test_containerapp_logstream.yaml b/src/containerapp/azext_containerapp/tests/latest/recordings/test_containerapp_logstream.yaml index 0cbe46ab152..a3e97e29cb2 100644 --- a/src/containerapp/azext_containerapp/tests/latest/recordings/test_containerapp_logstream.yaml +++ b/src/containerapp/azext_containerapp/tests/latest/recordings/test_containerapp_logstream.yaml @@ -13,12 +13,12 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001?api-version=2021-04-01 response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001","name":"clitest.rg000001","type":"Microsoft.Resources/resourceGroups","location":"northeurope","tags":{"product":"azurecli","cause":"automation","date":"2022-10-07T19:01:18Z"},"properties":{"provisioningState":"Succeeded"}}' + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001","name":"clitest.rg000001","type":"Microsoft.Resources/resourceGroups","location":"northeurope","tags":{"product":"azurecli","cause":"automation","date":"2022-11-07T23:07:58Z"},"properties":{"provisioningState":"Succeeded"}}' headers: cache-control: - no-cache @@ -27,7 +27,7 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:01:20 GMT + - Mon, 07 Nov 2022 23:08:02 GMT expires: - '-1' pragma: @@ -60,12 +60,12 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001/providers/Microsoft.OperationalInsights/workspaces/containerapp-env000004?api-version=2021-12-01-preview response: body: - string: '{"properties":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac","provisioningState":"Creating","sku":{"name":"PerGB2018","lastSkuUpdate":"2022-10-07T19:01:26.820466Z"},"retentionInDays":30,"features":{"legacy":0,"searchVersion":1,"enableLogAccessUsingOnlyResourcePermissions":true},"workspaceCapping":{"dailyQuotaGb":-1.0,"quotaNextResetTime":"2022-10-08T04:00:00Z","dataIngestionStatus":"RespectQuota"},"publicNetworkAccessForIngestion":"Enabled","publicNetworkAccessForQuery":"Enabled","createdDate":"2022-10-07T19:01:26.820466Z","modifiedDate":"2022-10-07T19:01:26.820466Z"},"location":"northeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.OperationalInsights/workspaces/containerapp-env000004","name":"containerapp-env000004","type":"Microsoft.OperationalInsights/workspaces"}' + string: '{"properties":{"customerId":"fad4055d-5566-4087-8adc-47b3d7d9d091","provisioningState":"Creating","sku":{"name":"PerGB2018","lastSkuUpdate":"2022-11-07T23:08:06.7575074Z"},"retentionInDays":30,"features":{"legacy":0,"searchVersion":1,"enableLogAccessUsingOnlyResourcePermissions":true},"workspaceCapping":{"dailyQuotaGb":-1.0,"quotaNextResetTime":"2022-11-08T12:00:00Z","dataIngestionStatus":"RespectQuota"},"publicNetworkAccessForIngestion":"Enabled","publicNetworkAccessForQuery":"Enabled","createdDate":"2022-11-07T23:08:06.7575074Z","modifiedDate":"2022-11-07T23:08:06.7575074Z"},"location":"northeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.OperationalInsights/workspaces/containerapp-env000004","name":"containerapp-env000004","type":"Microsoft.OperationalInsights/workspaces"}' headers: access-control-allow-origin: - '*' @@ -74,11 +74,11 @@ interactions: cache-control: - no-cache content-length: - - '853' + - '856' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:01:27 GMT + - Mon, 07 Nov 2022 23:08:07 GMT expires: - '-1' location: @@ -87,8 +87,6 @@ interactions: - no-cache request-context: - appId=cid-v1:e6336c63-aab2-45f0-996a-e5dbab2a1508 - server: - - Microsoft-IIS/10.0 strict-transport-security: - max-age=31536000; includeSubDomains x-content-type-options: @@ -114,12 +112,12 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.13 
(macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.OperationalInsights/workspaces/containerapp-env000004?api-version=2021-12-01-preview response: body: - string: '{"properties":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac","provisioningState":"Succeeded","sku":{"name":"PerGB2018","lastSkuUpdate":"2022-10-07T19:01:26.820466Z"},"retentionInDays":30,"features":{"legacy":0,"searchVersion":1,"enableLogAccessUsingOnlyResourcePermissions":true},"workspaceCapping":{"dailyQuotaGb":-1.0,"quotaNextResetTime":"2022-10-08T04:00:00Z","dataIngestionStatus":"RespectQuota"},"publicNetworkAccessForIngestion":"Enabled","publicNetworkAccessForQuery":"Enabled","createdDate":"2022-10-07T19:01:26.820466Z","modifiedDate":"2022-10-07T19:01:26.820466Z"},"location":"northeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.OperationalInsights/workspaces/containerapp-env000004","name":"containerapp-env000004","type":"Microsoft.OperationalInsights/workspaces"}' + string: '{"properties":{"customerId":"fad4055d-5566-4087-8adc-47b3d7d9d091","provisioningState":"Succeeded","sku":{"name":"PerGB2018","lastSkuUpdate":"2022-11-07T23:08:06.7575074Z"},"retentionInDays":30,"features":{"legacy":0,"searchVersion":1,"enableLogAccessUsingOnlyResourcePermissions":true},"workspaceCapping":{"dailyQuotaGb":-1.0,"quotaNextResetTime":"2022-11-08T12:00:00Z","dataIngestionStatus":"RespectQuota"},"publicNetworkAccessForIngestion":"Enabled","publicNetworkAccessForQuery":"Enabled","createdDate":"2022-11-07T23:08:06.7575074Z","modifiedDate":"2022-11-07T23:08:06.7575074Z"},"location":"northeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.OperationalInsights/workspaces/containerapp-env000004","name":"containerapp-env000004","type":"Microsoft.OperationalInsights/workspaces"}' headers: access-control-allow-origin: - '*' @@ -128,25 +126,23 @@ interactions: cache-control: - no-cache content-length: - - '854' + - '857' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:01:57 GMT + - Mon, 07 Nov 2022 23:08:38 GMT expires: - '-1' pragma: - no-cache request-context: - appId=cid-v1:e6336c63-aab2-45f0-996a-e5dbab2a1508 - server: - - Microsoft-IIS/10.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: - chunked vary: - - Accept-Encoding,Accept-Encoding + - Accept-Encoding,Accept-Encoding,Accept-Encoding x-content-type-options: - nosniff x-powered-by: @@ -170,12 +166,12 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 (AAZ) azsdk-python-core/1.24.0 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: POST uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001/providers/Microsoft.OperationalInsights/workspaces/containerapp-env000004/sharedKeys?api-version=2020-08-01 response: body: - string: '{"primarySharedKey":"YG9uhJrvCHQqSu4+8Q19Ycgn3ESE5IZGHwRwlR0ijw0Uy026NJdYp6PCoc14MaNdeBuC5dZ/TqkWimqenT1KzA==","secondarySharedKey":"qRDjSWzbrAiLgeAjO0GUs9oRNyn+tK6La+0m4x75VDG8KEq6VmHbp94Odf+0GL5Bp/x7KELfE+Y2OxwY2KrYZA=="}' + string: 
'{"primarySharedKey":"HSSb7mfNxF+hK6zmr9RmHC8WAvsUaRZKWbz+Vkn8zq4pINNTvuSDbdaTia2ZSuL0dqSjkKhu4Qr7vqUZvyB0mQ==","secondarySharedKey":"yzWnWQSITjvJ8Ryi5esRE9rtFNL3DtmWHDG8G9RBKttKWY4Xw3qh31BdATJFSD2Y5j+XX5U7y4P1CvLzyBIpAw=="}' headers: access-control-allow-origin: - '*' @@ -188,15 +184,13 @@ interactions: content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:01:59 GMT + - Mon, 07 Nov 2022 23:08:39 GMT expires: - '-1' pragma: - no-cache request-context: - appId=cid-v1:e6336c63-aab2-45f0-996a-e5dbab2a1508 - server: - - Microsoft-IIS/10.0 strict-transport-security: - max-age=31536000; includeSubDomains transfer-encoding: @@ -226,7 +220,7 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -286,19 +280,19 @@ interactions: US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache content-length: - - '6768' + - '6928' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:00 GMT + - Mon, 07 Nov 2022 23:08:40 GMT expires: - '-1' pragma: @@ -326,26 +320,21 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/managedEnvironments?api-version=2022-06-01-preview response: body: - string: 
'{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage1","name":"stage1","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-09-28T17:38:06.6749058","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-09-28T17:38:39.9430587"},"properties":{"provisioningState":"Succeeded","defaultDomain":"orangesea-b0ddedf9.northcentralusstage.azurecontainerapps.io","staticIp":"40.78.136.230","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"42416ea7-1d40-4e2b-bcc0-46cf5d10586b"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage2","name":"stage2","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-09-28T17:38:23.3920633","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-09-28T17:38:23.3920633"},"properties":{"provisioningState":"Succeeded","defaultDomain":"bluebay-d2205489.northcentralusstage.azurecontainerapps.io","staticIp":"20.9.30.16","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"b374381c-f952-4858-a789-fd2ce3d357e1"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage3","name":"stage3","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-09-28T17:38:30.6476935","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-09-28T17:38:30.6476935"},"properties":{"provisioningState":"Succeeded","defaultDomain":"gentleplant-7d8e062e.northcentralusstage.azurecontainerapps.io","staticIp":"40.69.148.186","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"020e8711-a427-4557-9f7f-1b570c7158e3"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage4","name":"stage4","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-09-28T17:38:34.9804085","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-09-28T17:38:34.9804085"},"properties":{"provisioningState":"Succeeded","defaultDomain":"purplebeach-59f87b0f.northcentralusstage.azurecontainerapps.io","staticIp":"40.122.192.208","appLogsConfiguration":{"destination":"log
-analytics","logAnalyticsConfiguration":{"customerId":"6feeece8-6e32-4bf2-b6e1-984df1e0e80c"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage5","name":"stage5","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-09-28T17:41:38.6378033","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-09-28T17:41:38.6378033"},"properties":{"provisioningState":"Succeeded","defaultDomain":"politestone-92f56073.northcentralusstage.azurecontainerapps.io","staticIp":"104.43.243.97","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"c824ca53-d283-4b65-945c-47b1ec4f6100"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/envtest5","name":"envtest5","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-06T19:28:27.4694266","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-06T19:30:07.3077731"},"properties":{"provisioningState":"Succeeded","defaultDomain":"salmonsmoke-59b14ebe.canadacentral.azurecontainerapps.io","staticIp":"20.220.49.207","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"d2cbd538-a1e1-4bfc-9721-10867b77a39a"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/envtest6","name":"envtest6","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-06T19:31:33.0189896","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-06T19:31:33.0189896"},"properties":{"provisioningState":"Succeeded","defaultDomain":"gentleplant-95f16518.canadacentral.azurecontainerapps.io","staticIp":"20.175.151.231","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"793692eb-e52a-47a2-8374-988efa9647a5"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/envtest7","name":"envtest7","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-06T19:38:57.0547447","lastModifiedBy":"silasstrawn@microsoft.com
","lastModifiedByType":"User","lastModifiedAt":"2022-10-06T19:38:57.0547447"},"properties":{"provisioningState":"Succeeded","defaultDomain":"gentlepebble-5ad3a367.canadacentral.azurecontainerapps.io","staticIp":"20.175.176.27","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"d15d0d46-9a50-4449-ae86-9c587d33d287"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/envtest8","name":"envtest8","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-06T19:42:56.1895676","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-06T19:42:56.1895676"},"properties":{"provisioningState":"Succeeded","defaultDomain":"niceisland-a2b9dd80.canadacentral.azurecontainerapps.io","staticIp":"20.175.233.137","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e731f5e8-2721-4383-a3a9-43be88e91674"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rgmgfaitgwgv4z2ettz46gbokwbik6longoqcjf2he4i5cupp7n4n2jeccjt5szty4i/providers/Microsoft.App/managedEnvironments/env7uhswgd2coskk7xoxkzae","name":"env7uhswgd2coskk7xoxkzae","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:00:06.3895643","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:00:06.3895643"},"properties":{"provisioningState":"Waiting","defaultDomain":"salmonpebble-39752d28.canadacentral.azurecontainerapps.io","staticIp":"20.175.167.179","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"9ec4e822-c754-4568-a3ad-90d4f6225edb"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/fet-11966","name":"fet-11966","type":"Microsoft.App/managedEnvironments","location":"westeurope","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-03T21:11:34.0571304","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-03T21:34:27.1018875"},"properties":{"provisioningState":"Succeeded","vnetConfiguration":{"internal":false,"infrastructureSubnetId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.Network/virtualNetworks/vnet-31402/subnets/sub-15282","dockerBridgeCidr":"10.1.0.1/16","platformReservedCidr":"10.0.0.0/16","platformReservedDnsIP":"10.0.0.2","outboundSettings":{"outBoundType":"LoadBalancer"}},"defaultDomain":"wittycliff-ea92adde.westeurope.azurecontainerapps.io","staticIp":"20.76.150.85","a
ppLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"93e2a5c9-1cd5-4331-8d93-8c10e553e1d5"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"},"workloadProfiles":[{"workloadProfileType":"GP1","minimumCount":3,"maximumCount":5},{"workloadProfileType":"CO1","minimumCount":3,"maximumCount":5},{"workloadProfileType":"CO2","minimumCount":3,"maximumCount":5}]},"sku":{"name":"Premium"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/fet-13744","name":"fet-13744","type":"Microsoft.App/managedEnvironments","location":"westeurope","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-09-28T18:17:22.9801163","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-09-28T19:42:32.2163617"},"properties":{"provisioningState":"Succeeded","vnetConfiguration":{"internal":false,"infrastructureSubnetId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.Network/virtualNetworks/vnet-21024/subnets/sub-26353","dockerBridgeCidr":"10.1.0.1/16","platformReservedCidr":"10.0.0.0/16","platformReservedDnsIP":"10.0.0.2","outboundSettings":{"outBoundType":"LoadBalancer"}},"defaultDomain":"proudforest-3925a589.westeurope.azurecontainerapps.io","staticIp":"20.4.105.35","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"77bcc74c-8f89-4cd6-b060-a9e18a222c00"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"},"workloadProfiles":[{"workloadProfileType":"GP1","minimumCount":3,"maximumCount":5},{"workloadProfileType":"CO1","minimumCount":3,"maximumCount":5},{"workloadProfileType":"CO2","minimumCount":3,"maximumCount":5}]},"sku":{"name":"Premium"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/fet-31955","name":"fet-31955","type":"Microsoft.App/managedEnvironments","location":"westeurope","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-03T19:48:36.2404101","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-03T20:14:48.6500521"},"properties":{"provisioningState":"Failed","vnetConfiguration":{"internal":false,"infrastructureSubnetId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.Network/virtualNetworks/vnet-11211/subnets/sub-11595","dockerBridgeCidr":"10.1.0.1/16","platformReservedCidr":"10.0.0.0/16","platformReservedDnsIP":"10.0.0.2","outboundSettings":{"outBoundType":"LoadBalancer"}},"deploymentErrors":"ErrorCode: - ManagedEnvironmentUpgradeError, Message: Update the managed environment 
failed.","defaultDomain":"greenwave-7fb8075d.westeurope.azurecontainerapps.io","staticIp":"20.31.180.242","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"94d82cf5-d0c6-4331-8f28-39dcea0b38ce"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"},"workloadProfiles":[{"workloadProfileType":"GP1","minimumCount":3,"maximumCount":5},{"workloadProfileType":"CO1","minimumCount":3,"maximumCount":5}]},"sku":{"name":"Premium"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg6argf6plv56q5apdl277lnr6t2hj6kdcql267lfvr7pr4zucfwmt5xalv3t6fhlbs/providers/Microsoft.App/managedEnvironments/containerapp-env3pbwre6f","name":"containerapp-env3pbwre6f","type":"Microsoft.App/managedEnvironments","location":"westeurope","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T18:54:35.679587","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T18:54:35.679587"},"properties":{"provisioningState":"ScheduledForDelete","defaultDomain":"yellowbay-698920f8.westeurope.azurecontainerapps.io","staticIp":"20.8.250.186","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"61826289-c6f1-484a-b564-d7d73b4b8a9e"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rgpqef5segwmp2vh53wdqnjgg5s7yxu6vkh53arycqhxxzdwa24ymm3ytgqplnzizcd/providers/Microsoft.App/managedEnvironments/containerapp-enve2eadjjw","name":"containerapp-enve2eadjjw","type":"Microsoft.App/managedEnvironments","location":"westeurope","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T18:54:35.7651272","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T18:54:35.7651272"},"properties":{"provisioningState":"ScheduledForDelete","defaultDomain":"kindforest-d1717c37.westeurope.azurecontainerapps.io","staticIp":"20.8.253.62","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"dface860-3d8e-46ce-b1b7-ff35df435867"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rgflparpz5ngtt3huxtiglzw7th6h56mnth6ch33yuc2fdxqtmpsrgyuplopuov26dr/providers/Microsoft.App/managedEnvironments/containerapp-envtxrzhgsi","name":"containerapp-envtxrzhgsi","type":"Microsoft.App/managedEnvironments","location":"northeurope","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T18:54:43.0487194","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T18:54:43.0487194"},"properties":{"provisioningState":"ScheduledForDelete","defaultDomain":"bravemeadow-5338b5cc.northeurope.azurecontainerapps.io","staticIp":"20.223.87.185","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"6384a662-ce6a-44c3-9577-98e7773eec73"}},"zoneRedundant":false,"us
eKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg6y57cc63bhoknpuii6nswpud3452cto3haae6pfd2rifeahastoouzru3ppflnpxp/providers/Microsoft.App/managedEnvironments/containerapp-env3cnnwuel","name":"containerapp-env3cnnwuel","type":"Microsoft.App/managedEnvironments","location":"northeurope","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T18:55:25.5284858","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T18:55:25.5284858"},"properties":{"provisioningState":"ScheduledForDelete","defaultDomain":"purplecliff-3ccc195b.northeurope.azurecontainerapps.io","staticIp":"20.93.62.73","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"15b0e353-5ce4-4072-81ec-29d83732dbbc"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rgm4c6v7i5avp7zkjvavmhve3rq2qpur3waszucsvxztc2iibxzrwfonp2cyu2ipz2i/providers/Microsoft.App/managedEnvironments/containerapp-envunzyjvpo","name":"containerapp-envunzyjvpo","type":"Microsoft.App/managedEnvironments","location":"northeurope","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T18:57:08.7918171","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T18:57:08.7918171"},"properties":{"provisioningState":"ScheduledForDelete","defaultDomain":"whitehill-41b0c34a.northeurope.azurecontainerapps.io","staticIp":"20.93.62.178","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"24bc76d1-f0e9-43d9-a61f-0a58d15e3071"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rgx4r6rbcrfwg57j2yxvernh7jgihprqjfwszjvpm6ef2qryb5eawizgssgf27kq5iw/providers/Microsoft.App/managedEnvironments/envhcdyckvbqh5xfjfhgttnf","name":"envhcdyckvbqh5xfjfhgttnf","type":"Microsoft.App/managedEnvironments","location":"northeurope","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T18:58:55.9800893","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T18:58:55.9800893"},"properties":{"provisioningState":"ScheduledForDelete","defaultDomain":"proudocean-4723a22a.northeurope.azurecontainerapps.io","staticIp":"20.93.62.225","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"49a7539f-267c-405d-98d8-f38744fb5fb9"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rggjo2flgeyxzxaqvvlaggzaog3vtblhgdhguvhqn7tflokuyd7ogdvjifzno6mgyrh/providers/Microsoft.App/managedEnvironments/envmd3g6h2godkzyjsu34x3c","name":"envmd
3g6h2godkzyjsu34x3c","type":"Microsoft.App/managedEnvironments","location":"northeurope","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T18:59:06.6122446","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T18:59:06.6122446"},"properties":{"provisioningState":"Succeeded","defaultDomain":"whitefield-655e34dc.northeurope.azurecontainerapps.io","staticIp":"20.82.206.227","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e3952fe6-63a6-4bff-9211-9ac7279c6efe"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/envtest","name":"envtest","type":"Microsoft.App/managedEnvironments","location":"eastus","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-09-30T23:30:43.0272218","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-09-30T23:30:43.0272218"},"properties":{"provisioningState":"Succeeded","defaultDomain":"kindforest-56e5b513.eastus.azurecontainerapps.io","staticIp":"20.231.118.136","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"bff599fa-4287-4fae-a6ec-194f6a655cb0"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/test","name":"test","type":"Microsoft.App/managedEnvironments","location":"eastus","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-09-28T21:17:20.5417547","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-09-28T21:17:20.5417547"},"properties":{"provisioningState":"Succeeded","defaultDomain":"orangefield-db78e5f5.eastus.azurecontainerapps.io","staticIp":"20.237.57.92","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"1897f9ef-703f-4635-afbe-bdc993491002"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/test2","name":"test2","type":"Microsoft.App/managedEnvironments","location":"eastus","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-09-30T16:34:47.4679092","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-09-30T16:34:47.4679092"},"properties":{"provisioningState":"Succeeded","defaultDomain":"victoriousisland-e8fc27b2.eastus.azurecontainerapps.io","staticIp":"52.188.178.175","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"8f753204-2b1e-4ccd-ac2f-03ed521853c3"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978
B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/test4","name":"test4","type":"Microsoft.App/managedEnvironments","location":"eastus","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-09-30T17:06:07.8707789","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-09-30T17:06:07.8707789"},"properties":{"provisioningState":"Succeeded","defaultDomain":"thankfulmoss-e1ee5560.eastus.azurecontainerapps.io","staticIp":"20.127.176.126","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"db55bae6-e01b-4ac3-ab3e-d7587661468f"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rgm2c2uuher5ujtas57exo4lvn3vfo7srcz7nmbmc6vootey65zyk7y3nbs4lw7btvp/providers/Microsoft.App/managedEnvironments/env2giqi2rww55bp3w6hy5j5","name":"env2giqi2rww55bp3w6hy5j5","type":"Microsoft.App/managedEnvironments","location":"eastus","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T18:59:39.3925304","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T18:59:39.3925304"},"properties":{"provisioningState":"ScheduledForDelete","defaultDomain":"ashyrock-1e30db58.eastus.azurecontainerapps.io","staticIp":"20.124.51.237","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"62327528-8e7a-44db-914e-b39dcafea9c5"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/fet-11601","name":"fet-11601","type":"Microsoft.App/managedEnvironments","location":"eastus2","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-03T19:52:03.7552135","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-03T20:14:50.6180243"},"properties":{"provisioningState":"Failed","vnetConfiguration":{"internal":false,"infrastructureSubnetId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.Network/virtualNetworks/vnet-21666/subnets/sub-31793","dockerBridgeCidr":"10.1.0.1/16","platformReservedCidr":"10.0.0.0/16","platformReservedDnsIP":"10.0.0.2","outboundSettings":{"outBoundType":"LoadBalancer"}},"deploymentErrors":"ErrorCode: - ManagedEnvironmentUpgradeError, Message: Update the managed environment 
failed.","defaultDomain":"orangepond-934dc488.eastus2.azurecontainerapps.io","staticIp":"20.62.55.98","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"d882ba9d-0d52-4943-aa6d-3b691a6ea406"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"},"workloadProfiles":[{"workloadProfileType":"GP1","minimumCount":3,"maximumCount":5},{"workloadProfileType":"CO1","minimumCount":3,"maximumCount":5}]},"sku":{"name":"Premium"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/fet-12440","name":"fet-12440","type":"Microsoft.App/managedEnvironments","location":"eastus2","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-03T21:02:52.951416","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-03T21:15:06.9000996"},"properties":{"provisioningState":"Failed","vnetConfiguration":{"internal":false,"infrastructureSubnetId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.Network/virtualNetworks/vnet-23035/subnets/sub-10114","dockerBridgeCidr":"10.1.0.1/16","platformReservedCidr":"10.0.0.0/16","platformReservedDnsIP":"10.0.0.2","outboundSettings":{"outBoundType":"LoadBalancer"}},"deploymentErrors":"ErrorCode: - ManagedEnvironmentUpgradeError, Message: Update the managed environment failed.","defaultDomain":"agreeablemoss-133c2969.eastus2.azurecontainerapps.io","staticIp":"20.96.144.78","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"2800c0d3-0f51-40f7-b6c7-dee47504702f"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"},"workloadProfiles":[{"workloadProfileType":"GP1","minimumCount":3,"maximumCount":4}]},"sku":{"name":"Premium"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/fet-14362","name":"fet-14362","type":"Microsoft.App/managedEnvironments","location":"eastus2","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-05T18:22:17.8694366","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-05T20:07:53.0700083"},"properties":{"provisioningState":"Succeeded","vnetConfiguration":{"internal":false,"infrastructureSubnetId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.Network/virtualNetworks/vnet-26234/subnets/sub-2013","dockerBridgeCidr":"10.1.0.1/16","platformReservedCidr":"10.0.0.0/16","platformReservedDnsIP":"10.0.0.2","outboundSettings":{"outBoundType":"LoadBalancer"}},"defaultDomain":"gentleflower-b539dcc2.eastus2.azurecontainerapps.io","staticIp":"20.72.69.28","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"5343a697-1f76-4f44-bed4-0cade80a7565"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"},"workloadProfiles":[{"workloadProfileType":"GP1","minimumCount":3,"maximumCount":4}]},"sku":{"name":"Premium"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/r
esourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/fet-20625","name":"fet-20625","type":"Microsoft.App/managedEnvironments","location":"eastus2","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-03T20:20:59.6912614","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-03T20:55:04.9026568"},"properties":{"provisioningState":"Failed","vnetConfiguration":{"internal":false,"infrastructureSubnetId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.Network/virtualNetworks/vnet-25659/subnets/sub-768","dockerBridgeCidr":"10.1.0.1/16","platformReservedCidr":"10.0.0.0/16","platformReservedDnsIP":"10.0.0.2","outboundSettings":{"outBoundType":"LoadBalancer"}},"deploymentErrors":"ErrorCode: - ManagedEnvironmentUpgradeError, Message: Update the managed environment failed.","defaultDomain":"kindbeach-64959eed.eastus2.azurecontainerapps.io","staticIp":"20.69.194.32","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"07feec7e-49c2-4215-a778-7245eebb2c78"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"},"workloadProfiles":[{"workloadProfileType":"GP1","minimumCount":3,"maximumCount":5},{"workloadProfileType":"CO1","minimumCount":3,"maximumCount":5}]},"sku":{"name":"Premium"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rgkpvzd2ttrwdksl7zijp73qzuyryhr6qitrv2oayx6kej6rwct4q6elpc5iuwdoy3k/providers/Microsoft.App/managedEnvironments/containerapp-env5mnhxwhc","name":"containerapp-env5mnhxwhc","type":"Microsoft.App/managedEnvironments","location":"eastus2","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:01:38.7914436","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:01:38.7914436"},"properties":{"provisioningState":"Waiting","defaultDomain":"blackground-84d1b1a0.eastus2.azurecontainerapps.io","staticIp":"52.247.18.119","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"d05cd300-cb3f-4131-8141-14a6c602b06f"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/fet-18969","name":"fet-18969","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-03T19:51:51.3946242","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-03T20:19:19.7062024"},"properties":{"provisioningState":"Failed","vnetConfiguration":{"internal":false,"infrastructureSubnetId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.Network/virtualNetworks/vnet-24315/subnets/sub-17580","dockerBridgeCidr":"10.1.0.1/16","platformReservedCidr":"10.0.0.0/16","platformReservedDnsIP":"10.0.0.2","outboundSettings":{"outBoundType":"LoadBalancer"}},"deploymentErrors":"ErrorCode: - ManagedEnvironmentUpgradeError, Message: Update the managed environment 
failed.","defaultDomain":"lemonsea-39dd8599.australiaeast.azurecontainerapps.io","staticIp":"20.28.161.18","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"e86802d8-6a16-44da-bde1-9745baefc4c2"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"},"workloadProfiles":[{"workloadProfileType":"GP1","minimumCount":3,"maximumCount":5}]},"sku":{"name":"Premium"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/fet-31512","name":"fet-31512","type":"Microsoft.App/managedEnvironments","location":"australiaeast","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-09-28T18:17:12.7488339","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-09-28T19:42:34.4090527"},"properties":{"provisioningState":"Succeeded","vnetConfiguration":{"internal":false,"infrastructureSubnetId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.Network/virtualNetworks/vnet-1804/subnets/sub-14949","dockerBridgeCidr":"10.1.0.1/16","platformReservedCidr":"10.0.0.0/16","platformReservedDnsIP":"10.0.0.2","outboundSettings":{"outBoundType":"LoadBalancer"}},"defaultDomain":"whitepebble-60d07cbe.australiaeast.azurecontainerapps.io","staticIp":"20.227.50.147","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"36fbae6f-8fa2-414a-a418-b9f358d4ffbf"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"},"workloadProfiles":[{"workloadProfileType":"GP1","minimumCount":3,"maximumCount":5},{"workloadProfileType":"CO1","minimumCount":3,"maximumCount":5},{"workloadProfileType":"CO2","minimumCount":3,"maximumCount":5}]},"sku":{"name":"Premium"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/fet-10611","name":"fet-10611","type":"Microsoft.App/managedEnvironments","location":"centraluseuap","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-09-28T18:17:00.4966659","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-09-28T19:42:35.6180268"},"properties":{"provisioningState":"Succeeded","vnetConfiguration":{"internal":false,"infrastructureSubnetId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.Network/virtualNetworks/vnet-28296/subnets/sub-23308","dockerBridgeCidr":"10.1.0.1/16","platformReservedCidr":"10.0.0.0/16","platformReservedDnsIP":"10.0.0.2","outboundSettings":{"outBoundType":"LoadBalancer"}},"defaultDomain":"wonderfultree-08e8746f.centraluseuap.azurecontainerapps.io","staticIp":"20.228.56.200","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"cfec842c-76d8-4b8a-93e2-95231391f373"}},"zoneRedundant":false,"useKubenet":true,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"},"workloadProfiles":[{"workloadProfileType":"GP1","minimumCount":3,"maximumCount":5},{"workloadProfileType":"CO1","minimumCount":3,"maximumCount":5},{"workloadProfileType":"CO2","minimumCoun
t":3,"maximumCount":5}]},"sku":{"name":"Premium"}}]}' + string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage1","name":"stage1","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:06:38.7041799","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:06:38.7041799"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"calmpond-b28c41d1.northcentralusstage.azurecontainerapps.io","staticIp":"13.67.139.178","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"21980690-af3f-47d2-ba10-1382b5450cc0"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage2","name":"stage2","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:07:15.2866819","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:07:15.2866819"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"wittymeadow-4dbec5e3.northcentralusstage.azurecontainerapps.io","staticIp":"23.100.80.227","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"71828812-a8df-459b-b6ef-c164550d9f5b"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage3","name":"stage3","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:07:27.14113","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:07:27.14113"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"redisland-f0228bca.northcentralusstage.azurecontainerapps.io","staticIp":"20.9.4.10","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"da72e22b-f2d3-4eab-b0bc-78680ea9ade5"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage4","name":"stage4","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:11:33.3982648","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:11:33.3982648"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defa
ultDomain":"bravedune-ac753656.northcentralusstage.azurecontainerapps.io","staticIp":"20.9.3.250","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"aa3cdac6-e5ed-4ef6-808f-97996f1a8cd3"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/stage5","name":"stage5","type":"Microsoft.App/managedEnvironments","location":"northcentralusstage","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T20:11:23.672865","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T20:11:23.672865"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"blackwater-28f0311e.northcentralusstage.azurecontainerapps.io","staticIp":"40.69.173.180","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"5815914d-d73c-4e61-8f50-e71bcd901812"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/env","name":"env","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-02T17:28:30.2161566","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T17:40:00.0261021"},"properties":{"provisioningState":"Failed","useLegionServerlessCompute":false,"defaultDomain":"wittysea-79949ad9.canadacentral.azurecontainerapps.io","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/logstream-env","name":"logstream-env","type":"Microsoft.App/managedEnvironments","location":"eastus","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T19:31:58.3535946","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T19:31:58.3535946"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"salmoncoast-d1f7a6de.eastus.azurecontainerapps.io","staticIp":"20.121.84.21","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"b4591f71-4b84-4cd5-b397-377a1771e36f"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/container_apps/providers/Microsoft.App/managedEnvironments/logstream2-env","name":"logstream2-env","type":"Microsoft.App/managedEnvironments","location":"centralus","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdA
t":"2022-11-07T19:37:52.0110005","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T19:37:52.0110005"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"ashybeach-727d6692.centralus.azurecontainerapps.io","staticIp":"20.84.132.103","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"d2b86418-edcf-4f2b-b600-d473b2c3588e"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}]}' headers: cache-control: - no-cache content-length: - - '38123' + - '7693' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:01 GMT + - Mon, 07 Nov 2022 23:08:41 GMT expires: - '-1' pragma: @@ -361,10 +350,6 @@ interactions: - '' - '' - '' - - '' - - '' - - '' - - '' status: code: 200 message: OK @@ -382,7 +367,7 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -442,19 +427,19 @@ interactions: US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache content-length: - - '6768' + - '6928' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:01 GMT + - Mon, 07 Nov 2022 23:08:42 GMT expires: - '-1' pragma: @@ -482,7 +467,7 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) 
method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -542,19 +527,19 @@ interactions: US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache content-length: - - '6768' + - '6928' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:02 GMT + - Mon, 07 Nov 2022 23:08:42 GMT expires: - '-1' pragma: @@ -582,7 +567,7 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -642,19 +627,19 @@ interactions: US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US 
EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache content-length: - - '6768' + - '6928' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:01 GMT + - Mon, 07 Nov 2022 23:08:42 GMT expires: - '-1' pragma: @@ -669,11 +654,11 @@ interactions: code: 200 message: OK - request: - body: '{"location": "eastasia", "tags": null, "sku": {"name": "Consumption"}, + body: '{"location": "canadacentral", "tags": null, "sku": {"name": "Consumption"}, "properties": {"daprAIInstrumentationKey": null, "vnetConfiguration": null, "internalLoadBalancerEnabled": null, "appLogsConfiguration": {"destination": - "log-analytics", "logAnalyticsConfiguration": {"customerId": "41ec4b16-1663-40b2-90c8-19bd464f9dac", - "sharedKey": "YG9uhJrvCHQqSu4+8Q19Ycgn3ESE5IZGHwRwlR0ijw0Uy026NJdYp6PCoc14MaNdeBuC5dZ/TqkWimqenT1KzA=="}}, + "log-analytics", "logAnalyticsConfiguration": {"customerId": "fad4055d-5566-4087-8adc-47b3d7d9d091", + "sharedKey": "HSSb7mfNxF+hK6zmr9RmHC8WAvsUaRZKWbz+Vkn8zq4pINNTvuSDbdaTia2ZSuL0dqSjkKhu4Qr7vqUZvyB0mQ=="}}, "customDomainConfiguration": null, "zoneRedundant": false}}' headers: Accept: @@ -685,31 +670,31 @@ interactions: Connection: - keep-alive Content-Length: - - '491' + - '496' Content-Type: - application/json ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: PUT uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308Z","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308Z"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:46.2766299Z","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:46.2766299Z"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"agreeablebush-1943b534.canadacentral.azurecontainerapps.io","staticIp":"20.220.245.143","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"fad4055d-5566-4087-8adc-47b3d7d9d091"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/locations/eastasia/managedEnvironmentOperationStatuses/6ce1cd4a-4461-4133-bedf-5fec51dca846?api-version=2022-06-01-preview&azureAsyncOperation=true + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/locations/canadacentral/managedEnvironmentOperationStatuses/f6d6a330-42bc-4b5a-8310-90f76b8127d9?api-version=2022-06-01-preview&azureAsyncOperation=true cache-control: - no-cache content-length: - - '947' + - '976' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:06 GMT + - Mon, 07 Nov 2022 23:08:48 GMT expires: - '-1' pragma: @@ -743,23 +728,23 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:46.2766299","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:46.2766299"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"agreeablebush-1943b534.canadacentral.azurecontainerapps.io","staticIp":"20.220.245.143","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"fad4055d-5566-4087-8adc-47b3d7d9d091"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '945' + - '974' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:08 GMT + - Mon, 07 Nov 2022 23:08:49 GMT expires: - '-1' pragma: @@ -793,23 +778,23 @@ interactions: ParameterSetName: - -g -n --logs-workspace-id --logs-workspace-key User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:46.2766299","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:46.2766299"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"agreeablebush-1943b534.canadacentral.azurecontainerapps.io","staticIp":"20.220.245.143","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"fad4055d-5566-4087-8adc-47b3d7d9d091"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '945' + - '974' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:11 GMT + - Mon, 07 Nov 2022 23:08:52 GMT expires: - '-1' pragma: @@ -843,7 +828,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -903,19 +888,19 @@ interactions: US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache content-length: - - '6768' + - '6928' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:11 GMT + - Mon, 07 Nov 2022 23:08:55 GMT expires: - '-1' pragma: @@ 
-943,23 +928,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:46.2766299","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:46.2766299"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"agreeablebush-1943b534.canadacentral.azurecontainerapps.io","staticIp":"20.220.245.143","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"fad4055d-5566-4087-8adc-47b3d7d9d091"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '945' + - '974' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:12 GMT + - Mon, 07 Nov 2022 23:08:56 GMT expires: - '-1' pragma: @@ -993,7 +978,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -1053,19 +1038,19 @@ interactions: US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East US","East Asia","West 
Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache content-length: - - '6768' + - '6928' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:18 GMT + - Mon, 07 Nov 2022 23:09:02 GMT expires: - '-1' pragma: @@ -1093,23 +1078,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:46.2766299","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:46.2766299"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"agreeablebush-1943b534.canadacentral.azurecontainerapps.io","staticIp":"20.220.245.143","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"fad4055d-5566-4087-8adc-47b3d7d9d091"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '945' + - '974' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:20 GMT + - Mon, 07 Nov 2022 23:09:03 GMT expires: - '-1' pragma: @@ -1143,7 +1128,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -1203,19 +1188,19 @@ interactions: US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache content-length: - - '6768' + - '6928' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:25 GMT + - Mon, 07 Nov 2022 23:09:10 GMT expires: - '-1' pragma: @@ 
-1243,23 +1228,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:46.2766299","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:46.2766299"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"agreeablebush-1943b534.canadacentral.azurecontainerapps.io","staticIp":"20.220.245.143","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"fad4055d-5566-4087-8adc-47b3d7d9d091"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '945' + - '974' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:27 GMT + - Mon, 07 Nov 2022 23:09:10 GMT expires: - '-1' pragma: @@ -1293,7 +1278,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -1353,19 +1338,19 @@ interactions: US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East US","East Asia","West 
Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache content-length: - - '6768' + - '6928' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:32 GMT + - Mon, 07 Nov 2022 23:09:16 GMT expires: - '-1' pragma: @@ -1393,23 +1378,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:46.2766299","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:46.2766299"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"agreeablebush-1943b534.canadacentral.azurecontainerapps.io","staticIp":"20.220.245.143","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"fad4055d-5566-4087-8adc-47b3d7d9d091"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '945' + - '974' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:34 GMT + - Mon, 07 Nov 2022 23:09:18 GMT expires: - '-1' pragma: @@ -1443,7 +1428,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -1503,19 +1488,19 @@ interactions: US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache content-length: - - '6768' + - '6928' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:39 GMT + - Mon, 07 Nov 2022 23:09:24 GMT expires: - '-1' pragma: @@ 
-1543,23 +1528,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:46.2766299","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:46.2766299"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"agreeablebush-1943b534.canadacentral.azurecontainerapps.io","staticIp":"20.220.245.143","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"fad4055d-5566-4087-8adc-47b3d7d9d091"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '945' + - '974' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:41 GMT + - Mon, 07 Nov 2022 23:09:25 GMT expires: - '-1' pragma: @@ -1593,7 +1578,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -1653,19 +1638,19 @@ interactions: US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East US","East Asia","West 
Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache content-length: - - '6768' + - '6928' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:46 GMT + - Mon, 07 Nov 2022 23:09:31 GMT expires: - '-1' pragma: @@ -1693,23 +1678,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:46.2766299","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:46.2766299"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"agreeablebush-1943b534.canadacentral.azurecontainerapps.io","staticIp":"20.220.245.143","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"fad4055d-5566-4087-8adc-47b3d7d9d091"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '945' + - '974' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:48 GMT + - Mon, 07 Nov 2022 23:09:32 GMT expires: - '-1' pragma: @@ -1743,7 +1728,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -1803,19 +1788,19 @@ interactions: US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache content-length: - - '6768' + - '6928' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:53 GMT + - Mon, 07 Nov 2022 23:09:37 GMT expires: - '-1' pragma: @@ 
-1843,23 +1828,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:46.2766299","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:46.2766299"},"properties":{"provisioningState":"Waiting","useLegionServerlessCompute":false,"defaultDomain":"agreeablebush-1943b534.canadacentral.azurecontainerapps.io","staticIp":"20.220.245.143","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"fad4055d-5566-4087-8adc-47b3d7d9d091"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '945' + - '974' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:02:56 GMT + - Mon, 07 Nov 2022 23:09:39 GMT expires: - '-1' pragma: @@ -1893,7 +1878,7 @@ interactions: ParameterSetName: - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -1953,19 +1938,19 @@ interactions: US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East US","East Asia","West 
Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache content-length: - - '6768' + - '6928' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:03:02 GMT + - Mon, 07 Nov 2022 23:09:46 GMT expires: - '-1' pragma: @@ -1993,23 +1978,23 @@ interactions: ParameterSetName: - -g -n User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:46.2766299","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:46.2766299"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"agreeablebush-1943b534.canadacentral.azurecontainerapps.io","staticIp":"20.220.245.143","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"fad4055d-5566-4087-8adc-47b3d7d9d091"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '945' + - '976' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:03:03 GMT + - Mon, 07 Nov 2022 23:09:47 GMT expires: - '-1' pragma: @@ -2037,13 +2022,13 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - containerapp env show + - containerapp create Connection: - keep-alive ParameterSetName: - - -g -n + - -g -n --environment --min-replicas --ingress --target-port User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -2103,19 +2088,19 @@ interactions: US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache 
content-length: - - '6768' + - '6928' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:03:08 GMT + - Mon, 07 Nov 2022 23:09:48 GMT expires: - '-1' pragma: @@ -2137,29 +2122,29 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - containerapp env show + - containerapp create Connection: - keep-alive ParameterSetName: - - -g -n + - -g -n --environment --min-replicas --ingress --target-port User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"canadacentral","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:08:46.2766299","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:08:46.2766299"},"properties":{"provisioningState":"Succeeded","useLegionServerlessCompute":false,"defaultDomain":"agreeablebush-1943b534.canadacentral.azurecontainerapps.io","staticIp":"20.220.245.143","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"fad4055d-5566-4087-8adc-47b3d7d9d091"}},"zoneRedundant":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '945' + - '976' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:03:10 GMT + - Mon, 07 Nov 2022 23:09:49 GMT expires: - '-1' pragma: @@ -2187,13 +2172,13 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - containerapp env show + - containerapp create Connection: - keep-alive ParameterSetName: - - -g -n + - -g -n --environment --min-replicas --ingress --target-port User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - AZURECLI/2.42.0 azsdk-python-azure-mgmt-resource/21.1.0b1 
Python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) method: GET uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 response: @@ -2253,19 +2238,19 @@ interactions: US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North + Central US (Stage)","Australia East","East US 2","West Europe","Central US","East + US","North Europe","South Central US","UK South","West US 3","Central US EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' headers: cache-control: - no-cache content-length: - - '6768' + - '6928' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:03:15 GMT + - Mon, 07 Nov 2022 23:09:50 GMT expires: - '-1' pragma: @@ -2279,6 +2264,71 @@ interactions: status: code: 200 message: OK +- request: + body: '{"location": "canadacentral", "identity": {"type": "None", "userAssignedIdentities": + null}, "properties": {"managedEnvironmentId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003", + "configuration": {"secrets": null, "activeRevisionsMode": "single", "ingress": + {"fqdn": null, "external": true, "targetPort": 80, "transport": "auto", "exposedPort": + null, "traffic": null, "customDomains": null}, "dapr": null, "registries": null}, + "template": {"revisionSuffix": null, "containers": [{"image": "mcr.microsoft.com/azuredocs/containerapps-helloworld:latest", + "name": "capp000002", "command": null, "args": null, "env": null, "resources": + null, "volumeMounts": null}], "scale": {"minReplicas": 1, "maxReplicas": null, + "rules": []}, "volumes": null}}, "tags": null}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - containerapp create + Connection: + - keep-alive + Content-Length: + - '849' + Content-Type: + - application/json + ParameterSetName: + - -g -n --environment --min-replicas --ingress --target-port + User-Agent: + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerApps/capp000002?api-version=2022-06-01-preview + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerapps/capp000002","name":"capp000002","type":"Microsoft.App/containerApps","location":"Canada + Central","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:53.9498016Z","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:09:53.9498016Z"},"properties":{"provisioningState":"InProgress","managedEnvironmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","environmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","outboundIpAddresses":["20.175.160.221"],"latestRevisionName":"","latestRevisionFqdn":"","customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7","configuration":{"activeRevisionsMode":"Single","ingress":{"fqdn":"capp000002.agreeablebush-1943b534.canadacentral.azurecontainerapps.io","external":true,"targetPort":80,"exposedPort":0,"transport":"Auto","traffic":[{"weight":100,"latestRevision":true}],"allowInsecure":false}},"template":{"revisionSuffix":"","containers":[{"image":"mcr.microsoft.com/azuredocs/containerapps-helloworld:latest","name":"capp000002","resources":{"cpu":0.5,"memory":"1Gi","ephemeralStorage":""}}],"scale":{"minReplicas":1,"maxReplicas":10}},"eventStreamEndpoint":"https://canadacentral.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/eventstream"},"identity":{"type":"None"}}' + headers: + api-supported-versions: + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/locations/canadacentral/containerappOperationStatuses/64a36e47-cd6b-4d5e-aa95-33b0ac941e85?api-version=2022-06-01-preview&azureAsyncOperation=true + cache-control: + - no-cache + content-length: + - '1758' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 07 Nov 2022 23:09:56 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-async-operation-timeout: + - PT15M + x-ms-ratelimit-remaining-subscription-resource-requests: + - '499' + x-powered-by: + - ASP.NET + status: + code: 201 + message: Created - request: body: null headers: @@ -2287,29 +2337,30 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - containerapp env show + - containerapp create Connection: - keep-alive ParameterSetName: - - -g -n + - -g -n --environment --min-replicas --ingress --target-port User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerApps/capp000002?api-version=2022-06-01-preview response: body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerapps/capp000002","name":"capp000002","type":"Microsoft.App/containerApps","location":"Canada + Central","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:53.9498016","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:09:53.9498016"},"properties":{"provisioningState":"InProgress","managedEnvironmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","environmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","outboundIpAddresses":["20.175.160.221"],"latestRevisionName":"capp000002--76m03l8","latestRevisionFqdn":"capp000002--76m03l8.agreeablebush-1943b534.canadacentral.azurecontainerapps.io","customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7","configuration":{"activeRevisionsMode":"Single","ingress":{"fqdn":"capp000002.agreeablebush-1943b534.canadacentral.azurecontainerapps.io","external":true,"targetPort":80,"exposedPort":0,"transport":"Auto","traffic":[{"weight":100,"latestRevision":true}],"allowInsecure":false}},"template":{"revisionSuffix":"","containers":[{"image":"mcr.microsoft.com/azuredocs/containerapps-helloworld:latest","name":"capp000002","resources":{"cpu":0.5,"memory":"1Gi","ephemeralStorage":""}}],"scale":{"minReplicas":1,"maxReplicas":10}},"eventStreamEndpoint":"https://canadacentral.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/eventstream"},"identity":{"type":"None"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '945' + - '1853' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:03:17 GMT + - Mon, 07 Nov 2022 23:09:56 GMT expires: - '-1' pragma: @@ -2333,99 +2384,50 @@ interactions: body: null headers: Accept: - - application/json + - '*/*' Accept-Encoding: - gzip, deflate CommandName: - - containerapp env show + - containerapp create Connection: - keep-alive ParameterSetName: - - -g -n + - -g -n --environment --min-replicas --ingress --target-port User-Agent: - - AZURECLI/2.40.0 
azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerApps/capp000002?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App","namespace":"Microsoft.App","authorizations":[{"applicationId":"7e3bc4fd-85a3-4192-b177-5b8bfc87f42c","roleDefinitionId":"39a74f72-b40f-4bdc-b639-562fe2260bf0"},{"applicationId":"3734c1a4-2bed-4998-a37a-ff1a9e7bf019","roleDefinitionId":"5c779a4f-5cb2-4547-8c41-478d9be8ba90"},{"applicationId":"55ebbb62-3b9c-49fd-9b87-9595226dd4ac","roleDefinitionId":"e49ca620-7992-4561-a7df-4ed67dad77b5"}],"resourceTypes":[{"resourceType":"managedEnvironments","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"managedEnvironments/certificates","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"containerApps","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SystemAssignedResourceIdentity, SupportsTags, - SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 
EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"operations","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","Canada Central","West - Europe","North Europe","East US","East US 2","East Asia","Australia East","Germany - West Central","Japan East","UK South","West US","Central US","North Central - US","South Central US","Korea Central","Brazil South","West US 3","France - Central","South Africa North","Norway East"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"connectedEnvironments","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"connectedEnvironments/certificates","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"locations/connectedEnvironmentOperationResults","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US 
- EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerapps/capp000002","name":"capp000002","type":"Microsoft.App/containerApps","location":"Canada + Central","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:53.9498016","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:09:53.9498016"},"properties":{"provisioningState":"InProgress","managedEnvironmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","environmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","outboundIpAddresses":["20.175.160.221"],"latestRevisionName":"capp000002--76m03l8","latestRevisionFqdn":"capp000002--76m03l8.agreeablebush-1943b534.canadacentral.azurecontainerapps.io","customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7","configuration":{"activeRevisionsMode":"Single","ingress":{"fqdn":"capp000002.agreeablebush-1943b534.canadacentral.azurecontainerapps.io","external":true,"targetPort":80,"exposedPort":0,"transport":"Auto","traffic":[{"weight":100,"latestRevision":true}],"allowInsecure":false}},"template":{"revisionSuffix":"","containers":[{"image":"mcr.microsoft.com/azuredocs/containerapps-helloworld:latest","name":"capp000002","resources":{"cpu":0.5,"memory":"1Gi","ephemeralStorage":""}}],"scale":{"minReplicas":1,"maxReplicas":10}},"eventStreamEndpoint":"https://canadacentral.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/eventstream"},"identity":{"type":"None"}}' headers: + api-supported-versions: + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '6768' + - '1853' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:03:22 GMT + - Mon, 07 Nov 2022 23:10:00 GMT expires: - '-1' pragma: - no-cache + server: + - Microsoft-IIS/10.0 strict-transport-security: - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked vary: - - Accept-Encoding + - Accept-Encoding,Accept-Encoding x-content-type-options: - nosniff + x-powered-by: + - ASP.NET status: code: 200 message: OK @@ -2437,29 +2439,30 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - containerapp env show + - containerapp create Connection: - keep-alive ParameterSetName: - - -g -n + - -g -n --environment --min-replicas --ingress --target-port User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerApps/capp000002?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerapps/capp000002","name":"capp000002","type":"Microsoft.App/containerApps","location":"Canada + Central","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:53.9498016","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:09:53.9498016"},"properties":{"provisioningState":"InProgress","managedEnvironmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","environmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","outboundIpAddresses":["20.175.160.221"],"latestRevisionName":"capp000002--76m03l8","latestRevisionFqdn":"capp000002--76m03l8.agreeablebush-1943b534.canadacentral.azurecontainerapps.io","customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7","configuration":{"activeRevisionsMode":"Single","ingress":{"fqdn":"capp000002.agreeablebush-1943b534.canadacentral.azurecontainerapps.io","external":true,"targetPort":80,"exposedPort":0,"transport":"Auto","traffic":[{"weight":100,"latestRevision":true}],"allowInsecure":false}},"template":{"revisionSuffix":"","containers":[{"image":"mcr.microsoft.com/azuredocs/containerapps-helloworld:latest","name":"capp000002","resources":{"cpu":0.5,"memory":"1Gi","ephemeralStorage":""}}],"scale":{"minReplicas":1,"maxReplicas":10}},"eventStreamEndpoint":"https://canadacentral.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/eventstream"},"identity":{"type":"None"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '945' + - '1853' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:03:24 GMT + - Mon, 07 Nov 2022 23:10:04 GMT expires: - '-1' pragma: @@ -2483,99 +2486,50 @@ interactions: body: null headers: Accept: - - application/json + - '*/*' Accept-Encoding: - gzip, deflate CommandName: - - 
containerapp env show + - containerapp create Connection: - keep-alive ParameterSetName: - - -g -n + - -g -n --environment --min-replicas --ingress --target-port User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerApps/capp000002?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App","namespace":"Microsoft.App","authorizations":[{"applicationId":"7e3bc4fd-85a3-4192-b177-5b8bfc87f42c","roleDefinitionId":"39a74f72-b40f-4bdc-b639-562fe2260bf0"},{"applicationId":"3734c1a4-2bed-4998-a37a-ff1a9e7bf019","roleDefinitionId":"5c779a4f-5cb2-4547-8c41-478d9be8ba90"},{"applicationId":"55ebbb62-3b9c-49fd-9b87-9595226dd4ac","roleDefinitionId":"e49ca620-7992-4561-a7df-4ed67dad77b5"}],"resourceTypes":[{"resourceType":"managedEnvironments","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"managedEnvironments/certificates","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"containerApps","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SystemAssignedResourceIdentity, SupportsTags, - SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - 
Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"operations","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","Canada Central","West - Europe","North Europe","East US","East US 2","East Asia","Australia East","Germany - West Central","Japan East","UK South","West US","Central US","North Central - US","South Central US","Korea Central","Brazil South","West US 3","France - Central","South Africa North","Norway East"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"connectedEnvironments","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"connectedEnvironments/certificates","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"locations/connectedEnvironmentOperationResults","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West 
Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerapps/capp000002","name":"capp000002","type":"Microsoft.App/containerApps","location":"Canada + Central","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:53.9498016","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:09:53.9498016"},"properties":{"provisioningState":"Succeeded","managedEnvironmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","environmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","outboundIpAddresses":["20.175.160.221"],"latestRevisionName":"capp000002--76m03l8","latestRevisionFqdn":"capp000002--76m03l8.agreeablebush-1943b534.canadacentral.azurecontainerapps.io","customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7","configuration":{"activeRevisionsMode":"Single","ingress":{"fqdn":"capp000002.agreeablebush-1943b534.canadacentral.azurecontainerapps.io","external":true,"targetPort":80,"exposedPort":0,"transport":"Auto","traffic":[{"weight":100,"latestRevision":true}],"allowInsecure":false}},"template":{"revisionSuffix":"","containers":[{"image":"mcr.microsoft.com/azuredocs/containerapps-helloworld:latest","name":"capp000002","resources":{"cpu":0.5,"memory":"1Gi","ephemeralStorage":""}}],"scale":{"minReplicas":1,"maxReplicas":10}},"eventStreamEndpoint":"https://canadacentral.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/eventstream"},"identity":{"type":"None"}}' headers: + api-supported-versions: + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '6768' + - '1852' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:03:29 GMT + - Mon, 07 Nov 2022 23:10:07 GMT expires: - '-1' pragma: - no-cache + server: + - Microsoft-IIS/10.0 strict-transport-security: - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked vary: - - Accept-Encoding + - Accept-Encoding,Accept-Encoding x-content-type-options: - nosniff + x-powered-by: + - ASP.NET status: code: 200 message: OK @@ -2587,29 +2541,30 @@ interactions: Accept-Encoding: - gzip, deflate CommandName: - - containerapp env show + - containerapp logs show Connection: - keep-alive ParameterSetName: - - -g -n + - -n -g User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 + - python/3.8.13 (macOS-12.6.1-x86_64-i386-64bit) AZURECLI/2.42.0 method: GET - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerApps/capp000002?api-version=2022-06-01-preview response: body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerapps/capp000002","name":"capp000002","type":"Microsoft.App/containerApps","location":"Canada + Central","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-11-07T23:09:53.9498016","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-11-07T23:09:53.9498016"},"properties":{"provisioningState":"Succeeded","managedEnvironmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","environmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","outboundIpAddresses":["20.175.160.221"],"latestRevisionName":"capp000002--76m03l8","latestRevisionFqdn":"capp000002--76m03l8.agreeablebush-1943b534.canadacentral.azurecontainerapps.io","customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7","configuration":{"activeRevisionsMode":"Single","ingress":{"fqdn":"capp000002.agreeablebush-1943b534.canadacentral.azurecontainerapps.io","external":true,"targetPort":80,"exposedPort":0,"transport":"Auto","traffic":[{"weight":100,"latestRevision":true}],"allowInsecure":false}},"template":{"revisionSuffix":"","containers":[{"image":"mcr.microsoft.com/azuredocs/containerapps-helloworld:latest","name":"capp000002","resources":{"cpu":0.5,"memory":"1Gi","ephemeralStorage":""}}],"scale":{"minReplicas":1,"maxReplicas":10}},"eventStreamEndpoint":"https://canadacentral.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/eventstream"},"identity":{"type":"None"}}' headers: api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview + - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview, 2022-10-01 cache-control: - no-cache content-length: - - '945' + - '1852' content-type: - application/json; charset=utf-8 date: - - Fri, 07 Oct 2022 19:03:30 GMT + - Mon, 07 
Nov 2022 23:10:09 GMT expires: - '-1' pragma: @@ -2633,1983 +2588,15 @@ interactions: body: null headers: Accept: - - application/json + - '*/*' Accept-Encoding: - gzip, deflate - CommandName: - - containerapp env show Connection: - keep-alive - ParameterSetName: - - -g -n User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) + - python-requests/2.26.0 method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App","namespace":"Microsoft.App","authorizations":[{"applicationId":"7e3bc4fd-85a3-4192-b177-5b8bfc87f42c","roleDefinitionId":"39a74f72-b40f-4bdc-b639-562fe2260bf0"},{"applicationId":"3734c1a4-2bed-4998-a37a-ff1a9e7bf019","roleDefinitionId":"5c779a4f-5cb2-4547-8c41-478d9be8ba90"},{"applicationId":"55ebbb62-3b9c-49fd-9b87-9595226dd4ac","roleDefinitionId":"e49ca620-7992-4561-a7df-4ed67dad77b5"}],"resourceTypes":[{"resourceType":"managedEnvironments","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"managedEnvironments/certificates","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"containerApps","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SystemAssignedResourceIdentity, SupportsTags, - SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 
EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"operations","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","Canada Central","West - Europe","North Europe","East US","East US 2","East Asia","Australia East","Germany - West Central","Japan East","UK South","West US","Central US","North Central - US","South Central US","Korea Central","Brazil South","West US 3","France - Central","South Africa North","Norway East"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"connectedEnvironments","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"connectedEnvironments/certificates","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"locations/connectedEnvironmentOperationResults","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US 
- EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' - headers: - cache-control: - - no-cache - content-length: - - '6768' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:03:36 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp env show - Connection: - - keep-alive - ParameterSetName: - - -g -n - User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' - headers: - api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview - cache-control: - - no-cache - content-length: - - '945' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:03:37 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding,Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp env show - Connection: - - keep-alive - ParameterSetName: - - -g -n - User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App","namespace":"Microsoft.App","authorizations":[{"applicationId":"7e3bc4fd-85a3-4192-b177-5b8bfc87f42c","roleDefinitionId":"39a74f72-b40f-4bdc-b639-562fe2260bf0"},{"applicationId":"3734c1a4-2bed-4998-a37a-ff1a9e7bf019","roleDefinitionId":"5c779a4f-5cb2-4547-8c41-478d9be8ba90"},{"applicationId":"55ebbb62-3b9c-49fd-9b87-9595226dd4ac","roleDefinitionId":"e49ca620-7992-4561-a7df-4ed67dad77b5"}],"resourceTypes":[{"resourceType":"managedEnvironments","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"managedEnvironments/certificates","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"containerApps","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SystemAssignedResourceIdentity, SupportsTags, - SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 
EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"operations","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","Canada Central","West - Europe","North Europe","East US","East US 2","East Asia","Australia East","Germany - West Central","Japan East","UK South","West US","Central US","North Central - US","South Central US","Korea Central","Brazil South","West US 3","France - Central","South Africa North","Norway East"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"connectedEnvironments","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"connectedEnvironments/certificates","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"locations/connectedEnvironmentOperationResults","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' - headers: - cache-control: - - no-cache - content-length: - - '6768' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 
19:03:43 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp env show - Connection: - - keep-alive - ParameterSetName: - - -g -n - User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' - headers: - api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview - cache-control: - - no-cache - content-length: - - '945' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:03:45 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding,Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp env show - Connection: - - keep-alive - ParameterSetName: - - -g -n - User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App","namespace":"Microsoft.App","authorizations":[{"applicationId":"7e3bc4fd-85a3-4192-b177-5b8bfc87f42c","roleDefinitionId":"39a74f72-b40f-4bdc-b639-562fe2260bf0"},{"applicationId":"3734c1a4-2bed-4998-a37a-ff1a9e7bf019","roleDefinitionId":"5c779a4f-5cb2-4547-8c41-478d9be8ba90"},{"applicationId":"55ebbb62-3b9c-49fd-9b87-9595226dd4ac","roleDefinitionId":"e49ca620-7992-4561-a7df-4ed67dad77b5"}],"resourceTypes":[{"resourceType":"managedEnvironments","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central 
US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"managedEnvironments/certificates","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"containerApps","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SystemAssignedResourceIdentity, SupportsTags, - SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany 
West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"operations","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","Canada Central","West - Europe","North Europe","East US","East US 2","East Asia","Australia East","Germany - West Central","Japan East","UK South","West US","Central US","North Central - US","South Central US","Korea Central","Brazil South","West US 3","France - Central","South Africa North","Norway East"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"connectedEnvironments","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"connectedEnvironments/certificates","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"locations/connectedEnvironmentOperationResults","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' - headers: - cache-control: - - no-cache - content-length: - - '6768' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:03:50 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp env show - Connection: - - keep-alive - ParameterSetName: - - -g -n - User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' - headers: - api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview - cache-control: - - no-cache - content-length: - - '945' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:03:51 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding,Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp env show - Connection: - - keep-alive - ParameterSetName: - - -g -n - User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App","namespace":"Microsoft.App","authorizations":[{"applicationId":"7e3bc4fd-85a3-4192-b177-5b8bfc87f42c","roleDefinitionId":"39a74f72-b40f-4bdc-b639-562fe2260bf0"},{"applicationId":"3734c1a4-2bed-4998-a37a-ff1a9e7bf019","roleDefinitionId":"5c779a4f-5cb2-4547-8c41-478d9be8ba90"},{"applicationId":"55ebbb62-3b9c-49fd-9b87-9595226dd4ac","roleDefinitionId":"e49ca620-7992-4561-a7df-4ed67dad77b5"}],"resourceTypes":[{"resourceType":"managedEnvironments","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"managedEnvironments/certificates","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 
EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"containerApps","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SystemAssignedResourceIdentity, SupportsTags, - SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"operations","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","Canada Central","West - Europe","North Europe","East US","East US 2","East Asia","Australia East","Germany - West Central","Japan East","UK South","West US","Central US","North Central - US","South Central US","Korea Central","Brazil South","West US 3","France - Central","South Africa North","Norway 
East"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"connectedEnvironments","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"connectedEnvironments/certificates","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"locations/connectedEnvironmentOperationResults","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' - headers: - cache-control: - - no-cache - content-length: - - '6768' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:03:57 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp env show - Connection: - - keep-alive - ParameterSetName: - - -g -n - User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' - headers: - api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview - cache-control: - - no-cache - content-length: - - '945' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:03:58 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding,Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp env show - Connection: - - keep-alive - ParameterSetName: - - -g -n - User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App","namespace":"Microsoft.App","authorizations":[{"applicationId":"7e3bc4fd-85a3-4192-b177-5b8bfc87f42c","roleDefinitionId":"39a74f72-b40f-4bdc-b639-562fe2260bf0"},{"applicationId":"3734c1a4-2bed-4998-a37a-ff1a9e7bf019","roleDefinitionId":"5c779a4f-5cb2-4547-8c41-478d9be8ba90"},{"applicationId":"55ebbb62-3b9c-49fd-9b87-9595226dd4ac","roleDefinitionId":"e49ca620-7992-4561-a7df-4ed67dad77b5"}],"resourceTypes":[{"resourceType":"managedEnvironments","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"managedEnvironments/certificates","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 
EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"containerApps","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SystemAssignedResourceIdentity, SupportsTags, - SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"operations","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","Canada Central","West - Europe","North Europe","East US","East US 2","East Asia","Australia East","Germany - West Central","Japan East","UK South","West US","Central US","North Central - US","South Central US","Korea Central","Brazil South","West US 3","France - Central","South Africa North","Norway 
East"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"connectedEnvironments","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"connectedEnvironments/certificates","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"locations/connectedEnvironmentOperationResults","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' - headers: - cache-control: - - no-cache - content-length: - - '6768' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:04:04 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp env show - Connection: - - keep-alive - ParameterSetName: - - -g -n - User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' - headers: - api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview - cache-control: - - no-cache - content-length: - - '945' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:04:06 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding,Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp env show - Connection: - - keep-alive - ParameterSetName: - - -g -n - User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App","namespace":"Microsoft.App","authorizations":[{"applicationId":"7e3bc4fd-85a3-4192-b177-5b8bfc87f42c","roleDefinitionId":"39a74f72-b40f-4bdc-b639-562fe2260bf0"},{"applicationId":"3734c1a4-2bed-4998-a37a-ff1a9e7bf019","roleDefinitionId":"5c779a4f-5cb2-4547-8c41-478d9be8ba90"},{"applicationId":"55ebbb62-3b9c-49fd-9b87-9595226dd4ac","roleDefinitionId":"e49ca620-7992-4561-a7df-4ed67dad77b5"}],"resourceTypes":[{"resourceType":"managedEnvironments","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"managedEnvironments/certificates","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 
EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"containerApps","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SystemAssignedResourceIdentity, SupportsTags, - SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"operations","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","Canada Central","West - Europe","North Europe","East US","East US 2","East Asia","Australia East","Germany - West Central","Japan East","UK South","West US","Central US","North Central - US","South Central US","Korea Central","Brazil South","West US 3","France - Central","South Africa North","Norway 
East"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"connectedEnvironments","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"connectedEnvironments/certificates","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"locations/connectedEnvironmentOperationResults","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' - headers: - cache-control: - - no-cache - content-length: - - '6768' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:04:12 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp env show - Connection: - - keep-alive - ParameterSetName: - - -g -n - User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' - headers: - api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview - cache-control: - - no-cache - content-length: - - '945' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:04:13 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding,Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp env show - Connection: - - keep-alive - ParameterSetName: - - -g -n - User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App","namespace":"Microsoft.App","authorizations":[{"applicationId":"7e3bc4fd-85a3-4192-b177-5b8bfc87f42c","roleDefinitionId":"39a74f72-b40f-4bdc-b639-562fe2260bf0"},{"applicationId":"3734c1a4-2bed-4998-a37a-ff1a9e7bf019","roleDefinitionId":"5c779a4f-5cb2-4547-8c41-478d9be8ba90"},{"applicationId":"55ebbb62-3b9c-49fd-9b87-9595226dd4ac","roleDefinitionId":"e49ca620-7992-4561-a7df-4ed67dad77b5"}],"resourceTypes":[{"resourceType":"managedEnvironments","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"managedEnvironments/certificates","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 
EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"containerApps","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SystemAssignedResourceIdentity, SupportsTags, - SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"operations","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","Canada Central","West - Europe","North Europe","East US","East US 2","East Asia","Australia East","Germany - West Central","Japan East","UK South","West US","Central US","North Central - US","South Central US","Korea Central","Brazil South","West US 3","France - Central","South Africa North","Norway 
East"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"connectedEnvironments","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"connectedEnvironments/certificates","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"locations/connectedEnvironmentOperationResults","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' - headers: - cache-control: - - no-cache - content-length: - - '6768' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:04:19 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp env show - Connection: - - keep-alive - ParameterSetName: - - -g -n - User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' - headers: - api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview - cache-control: - - no-cache - content-length: - - '945' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:04:22 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding,Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp env show - Connection: - - keep-alive - ParameterSetName: - - -g -n - User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App","namespace":"Microsoft.App","authorizations":[{"applicationId":"7e3bc4fd-85a3-4192-b177-5b8bfc87f42c","roleDefinitionId":"39a74f72-b40f-4bdc-b639-562fe2260bf0"},{"applicationId":"3734c1a4-2bed-4998-a37a-ff1a9e7bf019","roleDefinitionId":"5c779a4f-5cb2-4547-8c41-478d9be8ba90"},{"applicationId":"55ebbb62-3b9c-49fd-9b87-9595226dd4ac","roleDefinitionId":"e49ca620-7992-4561-a7df-4ed67dad77b5"}],"resourceTypes":[{"resourceType":"managedEnvironments","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"managedEnvironments/certificates","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 
EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"containerApps","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SystemAssignedResourceIdentity, SupportsTags, - SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"operations","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","Canada Central","West - Europe","North Europe","East US","East US 2","East Asia","Australia East","Germany - West Central","Japan East","UK South","West US","Central US","North Central - US","South Central US","Korea Central","Brazil South","West US 3","France - Central","South Africa North","Norway 
East"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"connectedEnvironments","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"connectedEnvironments/certificates","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"locations/connectedEnvironmentOperationResults","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' - headers: - cache-control: - - no-cache - content-length: - - '6768' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:04:27 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp env show - Connection: - - keep-alive - ParameterSetName: - - -g -n - User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' - headers: - api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview - cache-control: - - no-cache - content-length: - - '945' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:04:28 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding,Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp env show - Connection: - - keep-alive - ParameterSetName: - - -g -n - User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App","namespace":"Microsoft.App","authorizations":[{"applicationId":"7e3bc4fd-85a3-4192-b177-5b8bfc87f42c","roleDefinitionId":"39a74f72-b40f-4bdc-b639-562fe2260bf0"},{"applicationId":"3734c1a4-2bed-4998-a37a-ff1a9e7bf019","roleDefinitionId":"5c779a4f-5cb2-4547-8c41-478d9be8ba90"},{"applicationId":"55ebbb62-3b9c-49fd-9b87-9595226dd4ac","roleDefinitionId":"e49ca620-7992-4561-a7df-4ed67dad77b5"}],"resourceTypes":[{"resourceType":"managedEnvironments","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"managedEnvironments/certificates","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 
EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"containerApps","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SystemAssignedResourceIdentity, SupportsTags, - SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"operations","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","Canada Central","West - Europe","North Europe","East US","East US 2","East Asia","Australia East","Germany - West Central","Japan East","UK South","West US","Central US","North Central - US","South Central US","Korea Central","Brazil South","West US 3","France - Central","South Africa North","Norway 
East"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"connectedEnvironments","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"connectedEnvironments/certificates","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"locations/connectedEnvironmentOperationResults","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' - headers: - cache-control: - - no-cache - content-length: - - '6768' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:04:34 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp env show - Connection: - - keep-alive - ParameterSetName: - - -g -n - User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Waiting","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' - headers: - api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview - cache-control: - - no-cache - content-length: - - '945' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:04:35 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding,Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp env show - Connection: - - keep-alive - ParameterSetName: - - -g -n - User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App","namespace":"Microsoft.App","authorizations":[{"applicationId":"7e3bc4fd-85a3-4192-b177-5b8bfc87f42c","roleDefinitionId":"39a74f72-b40f-4bdc-b639-562fe2260bf0"},{"applicationId":"3734c1a4-2bed-4998-a37a-ff1a9e7bf019","roleDefinitionId":"5c779a4f-5cb2-4547-8c41-478d9be8ba90"},{"applicationId":"55ebbb62-3b9c-49fd-9b87-9595226dd4ac","roleDefinitionId":"e49ca620-7992-4561-a7df-4ed67dad77b5"}],"resourceTypes":[{"resourceType":"managedEnvironments","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"managedEnvironments/certificates","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 
EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"containerApps","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SystemAssignedResourceIdentity, SupportsTags, - SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"operations","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","Canada Central","West - Europe","North Europe","East US","East US 2","East Asia","Australia East","Germany - West Central","Japan East","UK South","West US","Central US","North Central - US","South Central US","Korea Central","Brazil South","West US 3","France - Central","South Africa North","Norway 
East"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"connectedEnvironments","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"connectedEnvironments/certificates","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"locations/connectedEnvironmentOperationResults","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' - headers: - cache-control: - - no-cache - content-length: - - '6768' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:04:40 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp env show - Connection: - - keep-alive - ParameterSetName: - - -g -n - User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Succeeded","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' - headers: - api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview - cache-control: - - no-cache - content-length: - - '947' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:04:43 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding,Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp create - Connection: - - keep-alive - ParameterSetName: - - -g -n --environment --min-replicas --ingress --target-port - User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App","namespace":"Microsoft.App","authorizations":[{"applicationId":"7e3bc4fd-85a3-4192-b177-5b8bfc87f42c","roleDefinitionId":"39a74f72-b40f-4bdc-b639-562fe2260bf0"},{"applicationId":"3734c1a4-2bed-4998-a37a-ff1a9e7bf019","roleDefinitionId":"5c779a4f-5cb2-4547-8c41-478d9be8ba90"},{"applicationId":"55ebbb62-3b9c-49fd-9b87-9595226dd4ac","roleDefinitionId":"e49ca620-7992-4561-a7df-4ed67dad77b5"}],"resourceTypes":[{"resourceType":"managedEnvironments","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"managedEnvironments/certificates","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa 
North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"containerApps","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SystemAssignedResourceIdentity, SupportsTags, - SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"operations","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","Canada Central","West - Europe","North Europe","East US","East US 2","East Asia","Australia East","Germany - West Central","Japan East","UK South","West US","Central US","North Central - US","South Central US","Korea Central","Brazil South","West US 3","France - 
Central","South Africa North","Norway East"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"connectedEnvironments","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"connectedEnvironments/certificates","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"locations/connectedEnvironmentOperationResults","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' - headers: - cache-control: - - no-cache - content-length: - - '6768' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:04:44 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp create - Connection: - - keep-alive - ParameterSetName: - - -g -n --environment --min-replicas --ingress --target-port - User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003?api-version=2022-06-01-preview - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","name":"env000003","type":"Microsoft.App/managedEnvironments","location":"eastasia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:02:04.6604308","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:02:04.6604308"},"properties":{"provisioningState":"Succeeded","defaultDomain":"kindforest-0f0a7d07.eastasia.azurecontainerapps.io","staticIp":"20.239.117.241","appLogsConfiguration":{"destination":"log-analytics","logAnalyticsConfiguration":{"customerId":"41ec4b16-1663-40b2-90c8-19bd464f9dac"}},"zoneRedundant":false,"useKubenet":false,"customDomainConfiguration":{"customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7"}},"sku":{"name":"Consumption"}}' - headers: - api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview - cache-control: - - no-cache - content-length: - - '947' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:04:45 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding,Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp create - Connection: - - keep-alive - ParameterSetName: - - -g -n --environment --min-replicas --ingress --target-port - User-Agent: - - AZURECLI/2.40.0 azsdk-python-azure-mgmt-resource/21.1.0b1 Python/3.8.13 (macOS-12.6-x86_64-i386-64bit) - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App?api-version=2021-04-01 - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App","namespace":"Microsoft.App","authorizations":[{"applicationId":"7e3bc4fd-85a3-4192-b177-5b8bfc87f42c","roleDefinitionId":"39a74f72-b40f-4bdc-b639-562fe2260bf0"},{"applicationId":"3734c1a4-2bed-4998-a37a-ff1a9e7bf019","roleDefinitionId":"5c779a4f-5cb2-4547-8c41-478d9be8ba90"},{"applicationId":"55ebbb62-3b9c-49fd-9b87-9595226dd4ac","roleDefinitionId":"e49ca620-7992-4561-a7df-4ed67dad77b5"}],"resourceTypes":[{"resourceType":"managedEnvironments","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"managedEnvironments/certificates","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa 
North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"containerApps","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SystemAssignedResourceIdentity, SupportsTags, - SupportsLocation"},{"resourceType":"locations","locations":[],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/managedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationResults","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"locations/containerappOperationStatuses","locations":["North - Central US (Stage)","Canada Central","West Europe","North Europe","East US","East - US 2","East Asia","Australia East","Germany West Central","Japan East","UK - South","West US","Central US","North Central US","South Central US","Korea - Central","Brazil South","West US 3","France Central","South Africa North","Norway - East","Central US EUAP","East US 2 EUAP"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"operations","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","Canada Central","West - Europe","North Europe","East US","East US 2","East Asia","Australia East","Germany - West Central","Japan East","UK South","West US","Central US","North Central - US","South Central US","Korea Central","Brazil South","West US 3","France - 
Central","South Africa North","Norway East"],"apiVersions":["2022-06-01-preview","2022-03-01","2022-01-01-preview"],"capabilities":"None"},{"resourceType":"connectedEnvironments","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"connectedEnvironments/certificates","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"CrossResourceGroupResourceMove, - CrossSubscriptionResourceMove, SupportsTags, SupportsLocation"},{"resourceType":"locations/connectedEnvironmentOperationResults","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/connectedEnvironmentOperationStatuses","locations":["North - Central US (Stage)","Central US EUAP","East US 2 EUAP","North Central US","East - US","East Asia","West Europe"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/billingMeters","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"},{"resourceType":"locations/availableManagedEnvironmentsWorkloadProfileTypes","locations":["North - Central US (Stage)","Australia East","East US 2","West Europe","Central US - EUAP"],"apiVersions":["2022-06-01-preview"],"capabilities":"None"}],"registrationState":"Registered","registrationPolicy":"RegistrationRequired"}' - headers: - cache-control: - - no-cache - content-length: - - '6768' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:04:45 GMT - expires: - - '-1' - pragma: - - no-cache - strict-transport-security: - - max-age=31536000; includeSubDomains - vary: - - Accept-Encoding - x-content-type-options: - - nosniff - status: - code: 200 - message: OK -- request: - body: '{"location": "eastasia", "identity": {"type": "None", "userAssignedIdentities": - null}, "properties": {"managedEnvironmentId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003", - "configuration": {"secrets": null, "activeRevisionsMode": "single", "ingress": - {"fqdn": null, "external": true, "targetPort": 80, "transport": "auto", "exposedPort": - null, "traffic": null, "customDomains": null}, "dapr": null, "registries": null}, - "template": {"revisionSuffix": null, "containers": [{"image": "mcr.microsoft.com/azuredocs/containerapps-helloworld:latest", - "name": "capp000002", "command": null, "args": null, "env": null, "resources": - null, "volumeMounts": null}], "scale": {"minReplicas": 1, "maxReplicas": null, - "rules": []}, "volumes": null}}, "tags": null}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp create - Connection: - - keep-alive - Content-Length: - - '844' - Content-Type: - - application/json - ParameterSetName: - - -g -n --environment --min-replicas --ingress --target-port - User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: PUT - uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerApps/capp000002?api-version=2022-06-01-preview - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerapps/capp000002","name":"capp000002","type":"Microsoft.App/containerApps","location":"East - Asia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:04:48.5979372Z","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:04:48.5979372Z"},"properties":{"provisioningState":"InProgress","managedEnvironmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","environmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","outboundIpAddresses":["20.239.114.85"],"latestRevisionName":"","latestRevisionFqdn":"","customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7","configuration":{"activeRevisionsMode":"Single","ingress":{"fqdn":"capp000002.kindforest-0f0a7d07.eastasia.azurecontainerapps.io","external":true,"targetPort":80,"exposedPort":0,"transport":"Auto","traffic":[{"weight":100,"latestRevision":true}],"allowInsecure":false}},"template":{"revisionSuffix":"","containers":[{"image":"mcr.microsoft.com/azuredocs/containerapps-helloworld:latest","name":"capp000002","resources":{"cpu":0.5,"memory":"1Gi","ephemeralStorage":""}}],"scale":{"minReplicas":1,"maxReplicas":10}},"eventStreamEndpoint":"https://eastasia.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/eventstream"},"identity":{"type":"None"}}' - headers: - api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview - azure-asyncoperation: - - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.App/locations/eastasia/containerappOperationStatuses/8a0c60cb-26b6-475e-a076-77c1c3b51376?api-version=2022-06-01-preview&azureAsyncOperation=true - cache-control: - - no-cache - content-length: - - '1739' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:04:51 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - x-content-type-options: - - nosniff - x-ms-async-operation-timeout: - - PT15M - x-ms-ratelimit-remaining-subscription-resource-requests: - - '499' - x-powered-by: - - ASP.NET - status: - code: 201 - message: Created -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp create - Connection: - - keep-alive - ParameterSetName: - - -g -n --environment --min-replicas --ingress --target-port - User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerApps/capp000002?api-version=2022-06-01-preview - response: - body: - string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerapps/capp000002","name":"capp000002","type":"Microsoft.App/containerApps","location":"East - Asia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:04:48.5979372","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:04:48.5979372"},"properties":{"provisioningState":"InProgress","managedEnvironmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","environmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","outboundIpAddresses":["20.239.114.85"],"latestRevisionName":"capp000002--abouno8","latestRevisionFqdn":"capp000002--abouno8.kindforest-0f0a7d07.eastasia.azurecontainerapps.io","customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7","configuration":{"activeRevisionsMode":"Single","ingress":{"fqdn":"capp000002.kindforest-0f0a7d07.eastasia.azurecontainerapps.io","external":true,"targetPort":80,"exposedPort":0,"transport":"Auto","traffic":[{"weight":100,"latestRevision":true}],"allowInsecure":false}},"template":{"revisionSuffix":"","containers":[{"image":"mcr.microsoft.com/azuredocs/containerapps-helloworld:latest","name":"capp000002","resources":{"cpu":0.5,"memory":"1Gi","ephemeralStorage":""}}],"scale":{"minReplicas":1,"maxReplicas":10}},"eventStreamEndpoint":"https://eastasia.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/eventstream"},"identity":{"type":"None"}}' - headers: - api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview - cache-control: - - no-cache - content-length: - - '1826' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:04:53 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding,Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp create - Connection: - - keep-alive - ParameterSetName: - - -g -n --environment --min-replicas --ingress --target-port - User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerApps/capp000002?api-version=2022-06-01-preview - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerapps/capp000002","name":"capp000002","type":"Microsoft.App/containerApps","location":"East - 
Asia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:04:48.5979372","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:04:48.5979372"},"properties":{"provisioningState":"Succeeded","managedEnvironmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","environmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","outboundIpAddresses":["20.239.114.85"],"latestRevisionName":"capp000002--abouno8","latestRevisionFqdn":"capp000002--abouno8.kindforest-0f0a7d07.eastasia.azurecontainerapps.io","customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7","configuration":{"activeRevisionsMode":"Single","ingress":{"fqdn":"capp000002.kindforest-0f0a7d07.eastasia.azurecontainerapps.io","external":true,"targetPort":80,"exposedPort":0,"transport":"Auto","traffic":[{"weight":100,"latestRevision":true}],"allowInsecure":false}},"template":{"revisionSuffix":"","containers":[{"image":"mcr.microsoft.com/azuredocs/containerapps-helloworld:latest","name":"capp000002","resources":{"cpu":0.5,"memory":"1Gi","ephemeralStorage":""}}],"scale":{"minReplicas":1,"maxReplicas":10}},"eventStreamEndpoint":"https://eastasia.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/eventstream"},"identity":{"type":"None"}}' - headers: - api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview - cache-control: - - no-cache - content-length: - - '1825' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:04:58 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding,Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - CommandName: - - containerapp logs show - Connection: - - keep-alive - ParameterSetName: - - -n -g - User-Agent: - - python/3.8.13 (macOS-12.6-x86_64-i386-64bit) AZURECLI/2.40.0 - method: GET - uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerApps/capp000002?api-version=2022-06-01-preview - response: - body: - string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/containerapps/capp000002","name":"capp000002","type":"Microsoft.App/containerApps","location":"East - 
Asia","systemData":{"createdBy":"silasstrawn@microsoft.com","createdByType":"User","createdAt":"2022-10-07T19:04:48.5979372","lastModifiedBy":"silasstrawn@microsoft.com","lastModifiedByType":"User","lastModifiedAt":"2022-10-07T19:04:48.5979372"},"properties":{"provisioningState":"Succeeded","managedEnvironmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","environmentId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.App/managedEnvironments/env000003","outboundIpAddresses":["20.239.114.85"],"latestRevisionName":"capp000002--abouno8","latestRevisionFqdn":"capp000002--abouno8.kindforest-0f0a7d07.eastasia.azurecontainerapps.io","customDomainVerificationId":"333646C25EDA7C903C86F0F0D0193C412978B2E48FA0B4F1461D339FBBAE3EB7","configuration":{"activeRevisionsMode":"Single","ingress":{"fqdn":"capp000002.kindforest-0f0a7d07.eastasia.azurecontainerapps.io","external":true,"targetPort":80,"exposedPort":0,"transport":"Auto","traffic":[{"weight":100,"latestRevision":true}],"allowInsecure":false}},"template":{"revisionSuffix":"","containers":[{"image":"mcr.microsoft.com/azuredocs/containerapps-helloworld:latest","name":"capp000002","resources":{"cpu":0.5,"memory":"1Gi","ephemeralStorage":""}}],"scale":{"minReplicas":1,"maxReplicas":10}},"eventStreamEndpoint":"https://eastasia.azurecontainerapps.dev/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/containerApps/capp000002/eventstream"},"identity":{"type":"None"}}' - headers: - api-supported-versions: - - 2022-01-01-preview, 2022-03-01, 2022-06-01-preview - cache-control: - - no-cache - content-length: - - '1825' - content-type: - - application/json; charset=utf-8 - date: - - Fri, 07 Oct 2022 19:05:00 GMT - expires: - - '-1' - pragma: - - no-cache - server: - - Microsoft-IIS/10.0 - strict-transport-security: - - max-age=31536000; includeSubDomains - transfer-encoding: - - chunked - vary: - - Accept-Encoding,Accept-Encoding - x-content-type-options: - - nosniff - x-powered-by: - - ASP.NET - status: - code: 200 - message: OK -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - User-Agent: - - python-requests/2.26.0 - method: GET - uri: https://capp000002.kindforest-0f0a7d07.eastasia.azurecontainerapps.io/ + uri: https://capp000002.agreeablebush-1943b534.canadacentral.azurecontainerapps.io/ response: body: string: "\n\n Welcome to Azure Container Apps!\n\n\n\n\n\n
\n\n\n\n" - headers: - accept-ranges: - - bytes - cache-control: - - public, max-age=0 - content-length: - - '3987' - content-type: - - text/html; charset=UTF-8 - date: - - Thu, 08 Sep 2022 19:23:41 GMT - etag: - - W/"f93-17c81533370" - last-modified: - - Fri, 15 Oct 2021 00:21:26 GMT - x-powered-by: - - Express - status: - code: 200 - message: OK -version: 1 diff --git a/src/containerapp/azext_containerapp/tests/latest/test_containerapp_commands.py b/src/containerapp/azext_containerapp/tests/latest/test_containerapp_commands.py index a206dac0613..a3eb5041b5d 100644 --- a/src/containerapp/azext_containerapp/tests/latest/test_containerapp_commands.py +++ b/src/containerapp/azext_containerapp/tests/latest/test_containerapp_commands.py @@ -395,6 +395,144 @@ def test_containerapp_tcp_ingress(self, resource_group): JMESPathCheck('exposedPort', 3020), ]) + @AllowLargeResponse(8192) + @ResourceGroupPreparer(location="northeurope") + def test_containerapp_ip_restrictions(self, resource_group): + env_name = self.create_random_name(prefix='containerapp-env', length=24) + ca_name = self.create_random_name(prefix='containerapp', length=24) + + create_containerapp_env(self, env_name, resource_group) + + # self.cmd('containerapp create -g {} -n {} --environment {}'.format(resource_group, ca_name, env_name)) + self.cmd('containerapp create -g {} -n {} --environment {} --ingress external --target-port 80'.format(resource_group, ca_name, env_name)) + + self.cmd('containerapp ingress access-restriction set -g {} -n {} --rule-name name --ip-address 192.168.1.1/32 --description "Description here." --action Allow'.format(resource_group, ca_name), checks=[ + JMESPathCheck('[0].name', "name"), + JMESPathCheck('[0].ipAddressRange', "192.168.1.1/32"), + JMESPathCheck('[0].description', "Description here."), + JMESPathCheck('[0].action', "Allow"), + ]) + + self.cmd('containerapp ingress access-restriction list -g {} -n {}'.format(resource_group, ca_name), checks=[ + JMESPathCheck('[0].name', "name"), + JMESPathCheck('[0].ipAddressRange', "192.168.1.1/32"), + JMESPathCheck('[0].description', "Description here."), + JMESPathCheck('[0].action', "Allow"), + ]) + + self.cmd('containerapp ingress access-restriction set -g {} -n {} --rule-name name2 --ip-address 192.168.1.1/8 --description "Description here 2." 
--action Allow'.format(resource_group, ca_name), checks=[ + JMESPathCheck('[0].name', "name"), + JMESPathCheck('[0].ipAddressRange', "192.168.1.1/32"), + JMESPathCheck('[0].description', "Description here."), + JMESPathCheck('[0].action', "Allow"), + JMESPathCheck('[1].name', "name2"), + JMESPathCheck('[1].ipAddressRange', "192.168.1.1/8"), + JMESPathCheck('[1].description', "Description here 2."), + JMESPathCheck('[1].action', "Allow"), + ]) + + self.cmd('containerapp ingress access-restriction list -g {} -n {}'.format(resource_group, ca_name), checks=[ + JMESPathCheck('[0].name', "name"), + JMESPathCheck('[0].ipAddressRange', "192.168.1.1/32"), + JMESPathCheck('[0].description', "Description here."), + JMESPathCheck('[0].action', "Allow"), + JMESPathCheck('[1].name', "name2"), + JMESPathCheck('[1].ipAddressRange', "192.168.1.1/8"), + JMESPathCheck('[1].description', "Description here 2."), + JMESPathCheck('[1].action', "Allow"), + ]) + + self.cmd('containerapp ingress access-restriction remove -g {} -n {} --rule-name name'.format(resource_group, ca_name), checks=[ + JMESPathCheck('[0].name', "name2"), + JMESPathCheck('[0].ipAddressRange', "192.168.1.1/8"), + JMESPathCheck('[0].description', "Description here 2."), + JMESPathCheck('[0].action', "Allow"), + ]) + + self.cmd('containerapp ingress access-restriction list -g {} -n {}'.format(resource_group, ca_name), checks=[ + JMESPathCheck('[0].name', "name2"), + JMESPathCheck('[0].ipAddressRange', "192.168.1.1/8"), + JMESPathCheck('[0].description', "Description here 2."), + JMESPathCheck('[0].action', "Allow"), + ]) + + self.cmd('containerapp ingress access-restriction remove -g {} -n {} --rule-name name2'.format(resource_group, ca_name), checks=[ + JMESPathCheck('length(@)', 0), + ]) + + self.cmd('containerapp ingress access-restriction list -g {} -n {}'.format(resource_group, ca_name), checks=[ + JMESPathCheck('length(@)', 0), + ]) + + @AllowLargeResponse(8192) + @ResourceGroupPreparer(location="northeurope") + def test_containerapp_ip_restrictions_deny(self, resource_group): + env_name = self.create_random_name(prefix='containerapp-env', length=24) + ca_name = self.create_random_name(prefix='containerapp', length=24) + + create_containerapp_env(self, env_name, resource_group) + + # self.cmd('containerapp create -g {} -n {} --environment {}'.format(resource_group, ca_name, env_name)) + self.cmd('containerapp create -g {} -n {} --environment {} --ingress external --target-port 80'.format(resource_group, ca_name, env_name)) + + self.cmd('containerapp ingress access-restriction set -g {} -n {} --rule-name name --ip-address 192.168.1.1/32 --description "Description here." --action Deny'.format(resource_group, ca_name), checks=[ + JMESPathCheck('[0].name', "name"), + JMESPathCheck('[0].ipAddressRange', "192.168.1.1/32"), + JMESPathCheck('[0].description', "Description here."), + JMESPathCheck('[0].action', "Deny"), + ]) + + self.cmd('containerapp ingress access-restriction list -g {} -n {}'.format(resource_group, ca_name), checks=[ + JMESPathCheck('[0].name', "name"), + JMESPathCheck('[0].ipAddressRange', "192.168.1.1/32"), + JMESPathCheck('[0].description', "Description here."), + JMESPathCheck('[0].action', "Deny"), + ]) + + self.cmd('containerapp ingress access-restriction set -g {} -n {} --rule-name name2 --ip-address 192.168.1.1/8 --description "Description here 2." 
--action Deny'.format(resource_group, ca_name), checks=[ + JMESPathCheck('[0].name', "name"), + JMESPathCheck('[0].ipAddressRange', "192.168.1.1/32"), + JMESPathCheck('[0].description', "Description here."), + JMESPathCheck('[0].action', "Deny"), + JMESPathCheck('[1].name', "name2"), + JMESPathCheck('[1].ipAddressRange', "192.168.1.1/8"), + JMESPathCheck('[1].description', "Description here 2."), + JMESPathCheck('[1].action', "Deny"), + ]) + + self.cmd('containerapp ingress access-restriction list -g {} -n {}'.format(resource_group, ca_name), checks=[ + JMESPathCheck('[0].name', "name"), + JMESPathCheck('[0].ipAddressRange', "192.168.1.1/32"), + JMESPathCheck('[0].description', "Description here."), + JMESPathCheck('[0].action', "Deny"), + JMESPathCheck('[1].name', "name2"), + JMESPathCheck('[1].ipAddressRange', "192.168.1.1/8"), + JMESPathCheck('[1].description', "Description here 2."), + JMESPathCheck('[1].action', "Deny"), + ]) + + self.cmd('containerapp ingress access-restriction remove -g {} -n {} --rule-name name'.format(resource_group, ca_name), checks=[ + JMESPathCheck('[0].name', "name2"), + JMESPathCheck('[0].ipAddressRange', "192.168.1.1/8"), + JMESPathCheck('[0].description', "Description here 2."), + JMESPathCheck('[0].action', "Deny"), + ]) + + self.cmd('containerapp ingress access-restriction list -g {} -n {}'.format(resource_group, ca_name), checks=[ + JMESPathCheck('[0].name', "name2"), + JMESPathCheck('[0].ipAddressRange', "192.168.1.1/8"), + JMESPathCheck('[0].description', "Description here 2."), + JMESPathCheck('[0].action', "Deny"), + ]) + + self.cmd('containerapp ingress access-restriction remove -g {} -n {} --rule-name name2'.format(resource_group, ca_name), checks=[ + JMESPathCheck('length(@)', 0), + ]) + + self.cmd('containerapp ingress access-restriction list -g {} -n {}'.format(resource_group, ca_name), checks=[ + JMESPathCheck('length(@)', 0), + ]) + class ContainerappDaprTests(ScenarioTest): @AllowLargeResponse(8192) diff --git a/src/containerapp/setup.py b/src/containerapp/setup.py index 3f15c3cbeb5..5d2676788a6 100644 --- a/src/containerapp/setup.py +++ b/src/containerapp/setup.py @@ -17,7 +17,7 @@ # TODO: Confirm this is the right version number you want and it matches your # HISTORY.rst entry. 
-VERSION = '0.3.14' +VERSION = '0.3.15' # The full list of classifiers is available at # https://pypi.python.org/pypi?%3Aaction=list_classifiers From 1018073ca1e71c7643727a1a5d44cb6e398bf4cf Mon Sep 17 00:00:00 2001 From: Xiaoyun Ding Date: Fri, 18 Nov 2022 11:25:04 +0800 Subject: [PATCH 84/85] Scg app level routing (#5504) --- src/spring/azext_spring/commands.py | 2 +- src/spring/azext_spring/gateway.py | 32 +- .../tests/latest/files/gateway_routes_v2.json | 29 + .../tests/latest/recordings/test_gateway.yaml | 3035 +++++++++++++---- .../tests/latest/test_asa_gateway.py | 15 +- 5 files changed, 2392 insertions(+), 721 deletions(-) create mode 100644 src/spring/azext_spring/tests/latest/files/gateway_routes_v2.json diff --git a/src/spring/azext_spring/commands.py b/src/spring/azext_spring/commands.py index 9603c9ac660..a3db8c5adba 100644 --- a/src/spring/azext_spring/commands.py +++ b/src/spring/azext_spring/commands.py @@ -91,7 +91,7 @@ def load_command_table(self, _): gateway_route_config_cmd_group = CliCommandType( operations_tmpl='azext_spring.gateway#{}', - client_factory=cf_spring_20220901preview + client_factory=cf_spring_20221101preview ) api_portal_cmd_group = CliCommandType( diff --git a/src/spring/azext_spring/gateway.py b/src/spring/azext_spring/gateway.py index b818355dc22..cdc319d6d67 100644 --- a/src/spring/azext_spring/gateway.py +++ b/src/spring/azext_spring/gateway.py @@ -4,13 +4,14 @@ # -------------------------------------------------------------------------------------------- import json +import re from azure.cli.core.azclierror import InvalidArgumentValueError from azure.cli.core.util import sdk_no_wait from knack.log import get_logger from .custom import LOG_RUNNING_PROMPT -from .vendored_sdks.appplatform.v2022_09_01_preview import models +from .vendored_sdks.appplatform.v2022_11_01_preview import models logger = get_logger(__name__) DEFAULT_NAME = "default" @@ -139,7 +140,7 @@ def gateway_route_config_create(cmd, client, resource_group, service, name, app_name=None, routes_json=None, routes_file=None): - _validate_route_config_exist(client, resource_group, service, name) + _validate_route_config_not_exist(client, resource_group, service, name) route_properties = models.GatewayRouteConfigProperties() return _create_or_update_gateway_route_configs(client, resource_group, service, name, route_properties, app_name, routes_file, routes_json) @@ -149,6 +150,7 @@ def gateway_route_config_update(cmd, client, resource_group, service, name, app_name=None, routes_json=None, routes_file=None): + _validate_route_config_exist(client, resource_group, service, name) route_properties = client.gateway_route_configs.get( resource_group, service, DEFAULT_NAME, name).properties return _create_or_update_gateway_route_configs(client, resource_group, service, name, route_properties, @@ -195,13 +197,20 @@ def _update_cors(existing, allowed_origins, allowed_methods, allowed_headers, ma return cors -def _validate_route_config_exist(client, resource_group, service, name): +def _validate_route_config_not_exist(client, resource_group, service, name): route_configs = client.gateway_route_configs.list( resource_group, service, DEFAULT_NAME) if name in (route_config.name for route_config in list(route_configs)): raise InvalidArgumentValueError("Route config " + name + " already exists") +def _validate_route_config_exist(client, resource_group, service, name): + route_configs = client.gateway_route_configs.list( + resource_group, service, DEFAULT_NAME) + if name not in (route_config.name for 
route_config in list(route_configs)): + raise InvalidArgumentValueError("Route config " + name + " doesn't exist") + + def _create_or_update_gateway_route_configs(client, resource_group, service, name, route_properties, app_name, routes_file, routes_json): app_resource_id = _get_app_resource_id_by_name(client, resource_group, service, app_name) @@ -225,6 +234,7 @@ def _create_or_update_routes_properties(routes_file, routes_json, route_properti if routes_file is None and routes_json is None: return route_properties + route_properties = models.GatewayRouteConfigProperties() if routes_file is not None: with open(routes_file, 'r') as json_file: raw_json = json.load(json_file) @@ -235,5 +245,21 @@ def _create_or_update_routes_properties(routes_file, routes_json, route_properti if isinstance(raw_json, list): route_properties.routes = raw_json else: + raw_json = _route_config_property_convert(raw_json) route_properties = models.GatewayRouteConfigProperties(**raw_json) return route_properties + + +# Convert camelCase to snake_case to align with backend +def _route_config_property_convert(raw_json): + if raw_json is None: + return raw_json + + convert_raw_json = {} + for key in raw_json: + if key == "routes": + convert_raw_json[key] = list(map(lambda v: _route_config_property_convert(v), raw_json[key])) + else: + replaced_key = re.sub('(? Date: Fri, 18 Nov 2022 03:32:08 +0000 Subject: [PATCH 85/85] [Release] Update index.json for extension [ containerapp ] Triggered by Azure CLI Extensions Release Pipeline - ADO_BUILD_URL: https://dev.azure.com/azclitools/internal/_build/results?buildId=15753&view=results Last commit: https://github.com/Azure/azure-cli-extensions/commit/2bfa89a39f51b12f07082f3996306695b37a8ff6 --- src/index.json | 54 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) diff --git a/src/index.json b/src/index.json index 1bf58f799b9..c85f27d66f0 100644 --- a/src/index.json +++ b/src/index.json @@ -18768,6 +18768,60 @@ "version": "0.3.14" }, "sha256Digest": "61ed509c783ecd07e6f94557c090150a818962b148def5e15cc30603f9bca196" + }, + { + "downloadUrl": "https://azcliprod.blob.core.windows.net/cli-extensions/containerapp-0.3.15-py2.py3-none-any.whl", + "filename": "containerapp-0.3.15-py2.py3-none-any.whl", + "metadata": { + "azext.isPreview": true, + "azext.minCliCoreVersion": "2.37.0", + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "License :: OSI Approved :: MIT License" + ], + "extensions": { + "python.details": { + "contacts": [ + { + "email": "azpycli@microsoft.com", + "name": "Microsoft Corporation", + "role": "author" + } + ], + "document_names": { + "description": "DESCRIPTION.rst" + }, + "project_urls": { + "Home": "https://github.com/Azure/azure-cli-extensions" + } + } + }, + "extras": [], + "generator": "bdist_wheel (0.30.0)", + "license": "MIT", + "metadata_version": "2.0", + "name": "containerapp", + "run_requires": [ + { + "requires": [ + "azure-cli-core", + "pycomposefile (>=0.0.29)" + ] + } + ], + "summary": "Microsoft Azure Command-Line Tools Containerapp Extension", + "version": "0.3.15" + }, + "sha256Digest": 
"fab4b6bbed951ad7e94b50af4e169ece562379b91a7ca3fae1987ebed01470e4" } ], "cosmosdb-preview": [